problem_id (stringlengths 18-22) | source (stringclasses 1 value) | task_type (stringclasses 1 value) | in_source_id (stringlengths 13-58) | prompt (stringlengths 1.71k-9.01k) | golden_diff (stringlengths 151-4.94k) | verification_info (stringlengths 465-11.3k) | num_tokens_prompt (int64 557-2.05k) | num_tokens_diff (int64 48-1.02k)
---|---|---|---|---|---|---|---|---
gh_patches_debug_15553 | rasdani/github-patches | git_diff | mindee__doctr-477 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Import error synthetize_page func in streamlit demo script
## 🐛 Bug
Import bug while running the streamlit demo script
## To Reproduce
Steps to reproduce the behavior:
1. Install the current package version
2. Run streamlit demo/app.py
Error message : ImportError: cannot import name 'synthetize_page' from 'doctr.utils.visualization' (/home/ubuntu/repos/mindee/doctr/doctr/utils/visualization.py)
## Correction
Try to import "synthetize_page" [from](https://github.com/mindee/doctr/blob/cfc329f8b21cd7d8c08d5c9190c53bd77a3149c4/doctr/utils/visualization.py#L19) whereas it should be "synthesize_page" [here](https://github.com/mindee/doctr/blob/cfc329f8b21cd7d8c08d5c9190c53bd77a3149c4/demo/app.py#L21) . It's probably a typo. It works after renaming.
</issue>
<code>
[start of demo/app.py]
1 # Copyright (C) 2021, Mindee.
2
3 # This program is licensed under the Apache License version 2.
4 # See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.
5
6 import os
7 import streamlit as st
8 import matplotlib.pyplot as plt
9
10 os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
11
12 import tensorflow as tf
13 import cv2
14
15 gpu_devices = tf.config.experimental.list_physical_devices('GPU')
16 if any(gpu_devices):
17 tf.config.experimental.set_memory_growth(gpu_devices[0], True)
18
19 from doctr.io import DocumentFile
20 from doctr.models import ocr_predictor
21 from doctr.utils.visualization import synthetize_page, visualize_page
22
23 DET_ARCHS = ["db_resnet50"]
24 RECO_ARCHS = ["crnn_vgg16_bn", "master", "sar_resnet31"]
25
26
27 def main():
28
29 # Wide mode
30 st.set_page_config(layout="wide")
31
32 # Designing the interface
33 st.title("DocTR: Document Text Recognition")
34 # For newline
35 st.write('\n')
36 # Instructions
37 st.markdown("*Hint: click on the top-right corner of an image to enlarge it!*")
38 # Set the columns
39 cols = st.beta_columns((1, 1, 1, 1))
40 cols[0].subheader("Input page")
41 cols[1].subheader("Segmentation heatmap")
42 cols[2].subheader("OCR output")
43 cols[3].subheader("Page reconstitution")
44
45 # Sidebar
46 # File selection
47 st.sidebar.title("Document selection")
48 # Disabling warning
49 st.set_option('deprecation.showfileUploaderEncoding', False)
50 # Choose your own image
51 uploaded_file = st.sidebar.file_uploader("Upload files", type=['pdf', 'png', 'jpeg', 'jpg'])
52 if uploaded_file is not None:
53 if uploaded_file.name.endswith('.pdf'):
54 doc = DocumentFile.from_pdf(uploaded_file.read()).as_images()
55 else:
56 doc = DocumentFile.from_images(uploaded_file.read())
57 page_idx = st.sidebar.selectbox("Page selection", [idx + 1 for idx in range(len(doc))]) - 1
58 cols[0].image(doc[page_idx])
59
60 # Model selection
61 st.sidebar.title("Model selection")
62 det_arch = st.sidebar.selectbox("Text detection model", DET_ARCHS)
63 reco_arch = st.sidebar.selectbox("Text recognition model", RECO_ARCHS)
64
65 # For newline
66 st.sidebar.write('\n')
67
68 if st.sidebar.button("Analyze page"):
69
70 if uploaded_file is None:
71 st.sidebar.write("Please upload a document")
72
73 else:
74 with st.spinner('Loading model...'):
75 predictor = ocr_predictor(det_arch, reco_arch, pretrained=True)
76
77 with st.spinner('Analyzing...'):
78
79 # Forward the image to the model
80 processed_batches = predictor.det_predictor.pre_processor([doc[page_idx]])
81 out = predictor.det_predictor.model(processed_batches[0], return_model_output=True, training=False)
82 seg_map = out["out_map"]
83 seg_map = tf.squeeze(seg_map[0, ...], axis=[2])
84 seg_map = cv2.resize(seg_map.numpy(), (doc[page_idx].shape[1], doc[page_idx].shape[0]),
85 interpolation=cv2.INTER_LINEAR)
86 # Plot the raw heatmap
87 fig, ax = plt.subplots()
88 ax.imshow(seg_map)
89 ax.axis('off')
90 cols[1].pyplot(fig)
91
92 # Plot OCR output
93 out = predictor([doc[page_idx]], training=False)
94 fig = visualize_page(out.pages[0].export(), doc[page_idx], interactive=False)
95 cols[2].pyplot(fig)
96
97 # Page reconsitution under input page
98 page_export = out.pages[0].export()
99 img = synthetize_page(page_export)
100 cols[3].image(img, clamp=True)
101
102 # Display JSON
103 st.markdown("\nHere are your analysis results in JSON format:")
104 st.json(page_export)
105
106
107 if __name__ == '__main__':
108 main()
109
[end of demo/app.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/demo/app.py b/demo/app.py
--- a/demo/app.py
+++ b/demo/app.py
@@ -18,7 +18,7 @@
from doctr.io import DocumentFile
from doctr.models import ocr_predictor
-from doctr.utils.visualization import synthetize_page, visualize_page
+from doctr.utils.visualization import visualize_page
DET_ARCHS = ["db_resnet50"]
RECO_ARCHS = ["crnn_vgg16_bn", "master", "sar_resnet31"]
@@ -96,7 +96,7 @@
# Page reconsitution under input page
page_export = out.pages[0].export()
- img = synthetize_page(page_export)
+ img = out.pages[0].synthesize()
cols[3].image(img, clamp=True)
# Display JSON
| {"golden_diff": "diff --git a/demo/app.py b/demo/app.py\n--- a/demo/app.py\n+++ b/demo/app.py\n@@ -18,7 +18,7 @@\n \n from doctr.io import DocumentFile\n from doctr.models import ocr_predictor\n-from doctr.utils.visualization import synthetize_page, visualize_page\n+from doctr.utils.visualization import visualize_page\n \n DET_ARCHS = [\"db_resnet50\"]\n RECO_ARCHS = [\"crnn_vgg16_bn\", \"master\", \"sar_resnet31\"]\n@@ -96,7 +96,7 @@\n \n # Page reconsitution under input page\n page_export = out.pages[0].export()\n- img = synthetize_page(page_export)\n+ img = out.pages[0].synthesize()\n cols[3].image(img, clamp=True)\n \n # Display JSON\n", "issue": "Import error synthetize_page func in streamlit demo script\n## \ud83d\udc1b Bug\r\n\r\nImport bug while running the streamlit demo script\r\n\r\n## To Reproduce\r\n\r\nSteps to reproduce the behavior:\r\n\r\n1. Install the current package version \r\n2. Run streamlit demo/app.py\r\n\r\nError message : ImportError: cannot import name 'synthetize_page' from 'doctr.utils.visualization' (/home/ubuntu/repos/mindee/doctr/doctr/utils/visualization.py)\r\n\r\n## Correction \r\n\r\nTry to import \"synthetize_page\" [from](https://github.com/mindee/doctr/blob/cfc329f8b21cd7d8c08d5c9190c53bd77a3149c4/doctr/utils/visualization.py#L19) whereas it should be \"synthesize_page\" [here](https://github.com/mindee/doctr/blob/cfc329f8b21cd7d8c08d5c9190c53bd77a3149c4/demo/app.py#L21) . It's probably a typo. It works after renaming.\n", "before_files": [{"content": "# Copyright (C) 2021, Mindee.\n\n# This program is licensed under the Apache License version 2.\n# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.\n\nimport os\nimport streamlit as st\nimport matplotlib.pyplot as plt\n\nos.environ[\"TF_CPP_MIN_LOG_LEVEL\"] = \"2\"\n\nimport tensorflow as tf\nimport cv2\n\ngpu_devices = tf.config.experimental.list_physical_devices('GPU')\nif any(gpu_devices):\n tf.config.experimental.set_memory_growth(gpu_devices[0], True)\n\nfrom doctr.io import DocumentFile\nfrom doctr.models import ocr_predictor\nfrom doctr.utils.visualization import synthetize_page, visualize_page\n\nDET_ARCHS = [\"db_resnet50\"]\nRECO_ARCHS = [\"crnn_vgg16_bn\", \"master\", \"sar_resnet31\"]\n\n\ndef main():\n\n # Wide mode\n st.set_page_config(layout=\"wide\")\n\n # Designing the interface\n st.title(\"DocTR: Document Text Recognition\")\n # For newline\n st.write('\\n')\n # Instructions\n st.markdown(\"*Hint: click on the top-right corner of an image to enlarge it!*\")\n # Set the columns\n cols = st.beta_columns((1, 1, 1, 1))\n cols[0].subheader(\"Input page\")\n cols[1].subheader(\"Segmentation heatmap\")\n cols[2].subheader(\"OCR output\")\n cols[3].subheader(\"Page reconstitution\")\n\n # Sidebar\n # File selection\n st.sidebar.title(\"Document selection\")\n # Disabling warning\n st.set_option('deprecation.showfileUploaderEncoding', False)\n # Choose your own image\n uploaded_file = st.sidebar.file_uploader(\"Upload files\", type=['pdf', 'png', 'jpeg', 'jpg'])\n if uploaded_file is not None:\n if uploaded_file.name.endswith('.pdf'):\n doc = DocumentFile.from_pdf(uploaded_file.read()).as_images()\n else:\n doc = DocumentFile.from_images(uploaded_file.read())\n page_idx = st.sidebar.selectbox(\"Page selection\", [idx + 1 for idx in range(len(doc))]) - 1\n cols[0].image(doc[page_idx])\n\n # Model selection\n st.sidebar.title(\"Model selection\")\n det_arch = st.sidebar.selectbox(\"Text detection model\", DET_ARCHS)\n reco_arch = 
st.sidebar.selectbox(\"Text recognition model\", RECO_ARCHS)\n\n # For newline\n st.sidebar.write('\\n')\n\n if st.sidebar.button(\"Analyze page\"):\n\n if uploaded_file is None:\n st.sidebar.write(\"Please upload a document\")\n\n else:\n with st.spinner('Loading model...'):\n predictor = ocr_predictor(det_arch, reco_arch, pretrained=True)\n\n with st.spinner('Analyzing...'):\n\n # Forward the image to the model\n processed_batches = predictor.det_predictor.pre_processor([doc[page_idx]])\n out = predictor.det_predictor.model(processed_batches[0], return_model_output=True, training=False)\n seg_map = out[\"out_map\"]\n seg_map = tf.squeeze(seg_map[0, ...], axis=[2])\n seg_map = cv2.resize(seg_map.numpy(), (doc[page_idx].shape[1], doc[page_idx].shape[0]),\n interpolation=cv2.INTER_LINEAR)\n # Plot the raw heatmap\n fig, ax = plt.subplots()\n ax.imshow(seg_map)\n ax.axis('off')\n cols[1].pyplot(fig)\n\n # Plot OCR output\n out = predictor([doc[page_idx]], training=False)\n fig = visualize_page(out.pages[0].export(), doc[page_idx], interactive=False)\n cols[2].pyplot(fig)\n\n # Page reconsitution under input page\n page_export = out.pages[0].export()\n img = synthetize_page(page_export)\n cols[3].image(img, clamp=True)\n\n # Display JSON\n st.markdown(\"\\nHere are your analysis results in JSON format:\")\n st.json(page_export)\n\n\nif __name__ == '__main__':\n main()\n", "path": "demo/app.py"}]} | 1,900 | 184 |
gh_patches_debug_23443 | rasdani/github-patches | git_diff | keras-team__keras-nlp-521 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Fix saved model testing
We have a silly bug for a few of our saved model tests where we are calling
```
model.save(save_path, save_format)
```
Instead of
```
model.save(save_path, save_format=save_format)
```
Which means we aren't properly the h5 format. If you fix this issue, you actually hit another issue with h5 saving.
`FAILED keras_nlp/models/bert/bert_tasks_test.py::BertClassifierTest::test_saving_model_save_format_h5 - ValueError: Unknown layer: BertClassifier. Please ensure this object is passed to the `custom_objects` argument.`
</issue>
<code>
[start of keras_nlp/conftest.py]
1 # Copyright 2022 The KerasNLP Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 import sys
15
16 import pytest
17
18
19 def pytest_addoption(parser):
20 parser.addoption(
21 "--run_large",
22 action="store_true",
23 default=False,
24 help="run large tests",
25 )
26 parser.addoption(
27 "--run_extra_large",
28 action="store_true",
29 default=False,
30 help="run extra_large tests",
31 )
32
33
34 def pytest_configure(config):
35 config.addinivalue_line(
36 "markers", "large: mark test as being slow or requiring a network"
37 )
38 config.addinivalue_line(
39 "markers",
40 "extra_large: mark test as being too large to run continuously",
41 )
42
43
44 def pytest_collection_modifyitems(config, items):
45 run_extra_large_tests = config.getoption("--run_extra_large")
46 # Run large tests for --run_extra_large or --run_large.
47 run_large_tests = config.getoption("--run_large") or run_extra_large_tests
48
49 # Messages to annotate skipped tests with.
50 skip_xla = pytest.mark.skipif(
51 sys.platform == "darwin", reason="XLA unsupported on MacOS."
52 )
53 skip_large = pytest.mark.skipif(
54 not run_large_tests, reason="need --run_large option to run"
55 )
56 skip_extra_large = pytest.mark.skipif(
57 not run_extra_large_tests, reason="need --run_extra_large option to run"
58 )
59 for item in items:
60 if "jit_compile_true" in item.name:
61 item.add_marker(skip_xla)
62 if "large" in item.keywords:
63 item.add_marker(skip_large)
64 if "extra_large" in item.keywords:
65 item.add_marker(skip_extra_large)
66
[end of keras_nlp/conftest.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/keras_nlp/conftest.py b/keras_nlp/conftest.py
--- a/keras_nlp/conftest.py
+++ b/keras_nlp/conftest.py
@@ -14,6 +14,8 @@
import sys
import pytest
+import tensorflow as tf
+from packaging import version
def pytest_addoption(parser):
@@ -50,6 +52,10 @@
skip_xla = pytest.mark.skipif(
sys.platform == "darwin", reason="XLA unsupported on MacOS."
)
+ skip_keras_saving_test = pytest.mark.skipif(
+ version.parse(tf.__version__) < version.parse("2.12"),
+ reason="keras_v3 format requires tf > 2.12.",
+ )
skip_large = pytest.mark.skipif(
not run_large_tests, reason="need --run_large option to run"
)
@@ -59,6 +65,8 @@
for item in items:
if "jit_compile_true" in item.name:
item.add_marker(skip_xla)
+ if "keras_format" in item.name:
+ item.add_marker(skip_keras_saving_test)
if "large" in item.keywords:
item.add_marker(skip_large)
if "extra_large" in item.keywords:
| {"golden_diff": "diff --git a/keras_nlp/conftest.py b/keras_nlp/conftest.py\n--- a/keras_nlp/conftest.py\n+++ b/keras_nlp/conftest.py\n@@ -14,6 +14,8 @@\n import sys\n \n import pytest\n+import tensorflow as tf\n+from packaging import version\n \n \n def pytest_addoption(parser):\n@@ -50,6 +52,10 @@\n skip_xla = pytest.mark.skipif(\n sys.platform == \"darwin\", reason=\"XLA unsupported on MacOS.\"\n )\n+ skip_keras_saving_test = pytest.mark.skipif(\n+ version.parse(tf.__version__) < version.parse(\"2.12\"),\n+ reason=\"keras_v3 format requires tf > 2.12.\",\n+ )\n skip_large = pytest.mark.skipif(\n not run_large_tests, reason=\"need --run_large option to run\"\n )\n@@ -59,6 +65,8 @@\n for item in items:\n if \"jit_compile_true\" in item.name:\n item.add_marker(skip_xla)\n+ if \"keras_format\" in item.name:\n+ item.add_marker(skip_keras_saving_test)\n if \"large\" in item.keywords:\n item.add_marker(skip_large)\n if \"extra_large\" in item.keywords:\n", "issue": "Fix saved model testing\nWe have a silly bug for a few of our saved model tests where we are calling\r\n\r\n```\r\nmodel.save(save_path, save_format)\r\n```\r\n\r\nInstead of\r\n\r\n```\r\nmodel.save(save_path, save_format=save_format)\r\n```\r\n\r\nWhich means we aren't properly the h5 format. If you fix this issue, you actually hit another issue with h5 saving.\r\n\r\n`FAILED keras_nlp/models/bert/bert_tasks_test.py::BertClassifierTest::test_saving_model_save_format_h5 - ValueError: Unknown layer: BertClassifier. Please ensure this object is passed to the `custom_objects` argument.`\n", "before_files": [{"content": "# Copyright 2022 The KerasNLP Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport sys\n\nimport pytest\n\n\ndef pytest_addoption(parser):\n parser.addoption(\n \"--run_large\",\n action=\"store_true\",\n default=False,\n help=\"run large tests\",\n )\n parser.addoption(\n \"--run_extra_large\",\n action=\"store_true\",\n default=False,\n help=\"run extra_large tests\",\n )\n\n\ndef pytest_configure(config):\n config.addinivalue_line(\n \"markers\", \"large: mark test as being slow or requiring a network\"\n )\n config.addinivalue_line(\n \"markers\",\n \"extra_large: mark test as being too large to run continuously\",\n )\n\n\ndef pytest_collection_modifyitems(config, items):\n run_extra_large_tests = config.getoption(\"--run_extra_large\")\n # Run large tests for --run_extra_large or --run_large.\n run_large_tests = config.getoption(\"--run_large\") or run_extra_large_tests\n\n # Messages to annotate skipped tests with.\n skip_xla = pytest.mark.skipif(\n sys.platform == \"darwin\", reason=\"XLA unsupported on MacOS.\"\n )\n skip_large = pytest.mark.skipif(\n not run_large_tests, reason=\"need --run_large option to run\"\n )\n skip_extra_large = pytest.mark.skipif(\n not run_extra_large_tests, reason=\"need --run_extra_large option to run\"\n )\n for item in items:\n if \"jit_compile_true\" in item.name:\n item.add_marker(skip_xla)\n if \"large\" in item.keywords:\n item.add_marker(skip_large)\n if 
\"extra_large\" in item.keywords:\n item.add_marker(skip_extra_large)\n", "path": "keras_nlp/conftest.py"}]} | 1,277 | 292 |
gh_patches_debug_29570 | rasdani/github-patches | git_diff | pantsbuild__pants-14580 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Interrupting an `(Interactive)Process` should attempt to kill the process gracefully
Currently, both `Process` and `InteractiveProcess` will send `KILL` when they are dropped (via `tokio`s facilities for `kill_on_drop`). But in both cases, kill doesn't give the underlying process time to do its own teardown, and this can lead to resource leaks. `pants` itself has [concerns](https://github.com/pantsbuild/pants/issues/12996) with being killed without getting a TERM first, and end user processes like `uvicorn` would like to be able to run their user shutdown hooks before dying.
To do this, we will likely need to implement a workaround for the lack of `AsyncDrop`: by essentially adding an optional `TERM` and synchronous wait in a `Drop` guard wrapped around relevant processes, or optionally gracefully interrupting graph nodes.
</issue>
<code>
[start of testprojects/src/python/coordinated_runs/waiter.py]
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4
5 import os
6 import sys
7 import time
8 from multiprocessing import Process
9
10 waiting_for_file = sys.argv[1]
11 pid_file = sys.argv[2]
12 child_pid_file = sys.argv[3]
13 attempts = 60
14
15
16 def run_child():
17 while True:
18 print("Child running...")
19 time.sleep(1)
20
21
22 child = Process(target=run_child, daemon=True)
23 child.start()
24
25 with open(child_pid_file, "w") as pf:
26 pf.write(str(child.pid))
27
28 with open(pid_file, "w") as pf:
29 pf.write(str(os.getpid()))
30
31 try:
32 while not os.path.isfile(waiting_for_file):
33 if attempts <= 0:
34 raise Exception("File was never written.")
35 attempts -= 1
36 sys.stderr.write("Waiting for file {}\n".format(waiting_for_file))
37 time.sleep(1)
38 finally:
39 child.terminate()
40
[end of testprojects/src/python/coordinated_runs/waiter.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/testprojects/src/python/coordinated_runs/waiter.py b/testprojects/src/python/coordinated_runs/waiter.py
--- a/testprojects/src/python/coordinated_runs/waiter.py
+++ b/testprojects/src/python/coordinated_runs/waiter.py
@@ -7,11 +7,6 @@
import time
from multiprocessing import Process
-waiting_for_file = sys.argv[1]
-pid_file = sys.argv[2]
-child_pid_file = sys.argv[3]
-attempts = 60
-
def run_child():
while True:
@@ -19,21 +14,46 @@
time.sleep(1)
-child = Process(target=run_child, daemon=True)
-child.start()
+def main():
+ waiting_for_file = sys.argv[1]
+ pid_file = sys.argv[2]
+ child_pid_file = sys.argv[3]
+ cleanup_wait_time = int(sys.argv[4])
+ attempts = 60
-with open(child_pid_file, "w") as pf:
- pf.write(str(child.pid))
+ child = Process(target=run_child, daemon=True)
+ child.start()
-with open(pid_file, "w") as pf:
- pf.write(str(os.getpid()))
+ with open(child_pid_file, "w") as pf:
+ pf.write(str(child.pid))
-try:
- while not os.path.isfile(waiting_for_file):
- if attempts <= 0:
- raise Exception("File was never written.")
- attempts -= 1
- sys.stderr.write("Waiting for file {}\n".format(waiting_for_file))
- time.sleep(1)
-finally:
- child.terminate()
+ with open(pid_file, "w") as pf:
+ pf.write(str(os.getpid()))
+
+ try:
+ while not os.path.isfile(waiting_for_file):
+ if attempts <= 0:
+ raise Exception("File was never written.")
+ attempts -= 1
+ sys.stderr.write("Waiting for file {}\n".format(waiting_for_file))
+ sys.stderr.flush()
+ time.sleep(1)
+
+ except KeyboardInterrupt:
+ sys.stderr.write("keyboard int received\n")
+ sys.stderr.flush()
+
+ finally:
+ sys.stderr.write("waiter cleaning up\n")
+ sys.stderr.flush()
+
+ child.terminate()
+ if cleanup_wait_time > 0:
+ time.sleep(cleanup_wait_time)
+
+ sys.stderr.write("waiter cleanup complete\n")
+ sys.stderr.flush()
+
+
+if __name__ == "__main__":
+ main()
| {"golden_diff": "diff --git a/testprojects/src/python/coordinated_runs/waiter.py b/testprojects/src/python/coordinated_runs/waiter.py\n--- a/testprojects/src/python/coordinated_runs/waiter.py\n+++ b/testprojects/src/python/coordinated_runs/waiter.py\n@@ -7,11 +7,6 @@\n import time\n from multiprocessing import Process\n \n-waiting_for_file = sys.argv[1]\n-pid_file = sys.argv[2]\n-child_pid_file = sys.argv[3]\n-attempts = 60\n-\n \n def run_child():\n while True:\n@@ -19,21 +14,46 @@\n time.sleep(1)\n \n \n-child = Process(target=run_child, daemon=True)\n-child.start()\n+def main():\n+ waiting_for_file = sys.argv[1]\n+ pid_file = sys.argv[2]\n+ child_pid_file = sys.argv[3]\n+ cleanup_wait_time = int(sys.argv[4])\n+ attempts = 60\n \n-with open(child_pid_file, \"w\") as pf:\n- pf.write(str(child.pid))\n+ child = Process(target=run_child, daemon=True)\n+ child.start()\n \n-with open(pid_file, \"w\") as pf:\n- pf.write(str(os.getpid()))\n+ with open(child_pid_file, \"w\") as pf:\n+ pf.write(str(child.pid))\n \n-try:\n- while not os.path.isfile(waiting_for_file):\n- if attempts <= 0:\n- raise Exception(\"File was never written.\")\n- attempts -= 1\n- sys.stderr.write(\"Waiting for file {}\\n\".format(waiting_for_file))\n- time.sleep(1)\n-finally:\n- child.terminate()\n+ with open(pid_file, \"w\") as pf:\n+ pf.write(str(os.getpid()))\n+\n+ try:\n+ while not os.path.isfile(waiting_for_file):\n+ if attempts <= 0:\n+ raise Exception(\"File was never written.\")\n+ attempts -= 1\n+ sys.stderr.write(\"Waiting for file {}\\n\".format(waiting_for_file))\n+ sys.stderr.flush()\n+ time.sleep(1)\n+\n+ except KeyboardInterrupt:\n+ sys.stderr.write(\"keyboard int received\\n\")\n+ sys.stderr.flush()\n+\n+ finally:\n+ sys.stderr.write(\"waiter cleaning up\\n\")\n+ sys.stderr.flush()\n+\n+ child.terminate()\n+ if cleanup_wait_time > 0:\n+ time.sleep(cleanup_wait_time)\n+\n+ sys.stderr.write(\"waiter cleanup complete\\n\")\n+ sys.stderr.flush()\n+\n+\n+if __name__ == \"__main__\":\n+ main()\n", "issue": "Interrupting an `(Interactive)Process` should attempt to kill the process gracefully\nCurrently, both `Process` and `InteractiveProcess` will send `KILL` when they are dropped (via `tokio`s facilities for `kill_on_drop`). But in both cases, kill doesn't give the underlying process time to do its own teardown, and this can lead to resource leaks. 
`pants` itself has [concerns](https://github.com/pantsbuild/pants/issues/12996) with being killed without getting a TERM first, and end user processes like `uvicorn` would like to be able to run their user shutdown hooks before dying.\r\n\r\nTo do this, we will likely need to implement a workaround for the lack of `AsyncDrop`: by essentially adding an optional `TERM` and synchronous wait in a `Drop` guard wrapped around relevant processes, or optionally gracefully interrupting graph nodes.\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n\nimport os\nimport sys\nimport time\nfrom multiprocessing import Process\n\nwaiting_for_file = sys.argv[1]\npid_file = sys.argv[2]\nchild_pid_file = sys.argv[3]\nattempts = 60\n\n\ndef run_child():\n while True:\n print(\"Child running...\")\n time.sleep(1)\n\n\nchild = Process(target=run_child, daemon=True)\nchild.start()\n\nwith open(child_pid_file, \"w\") as pf:\n pf.write(str(child.pid))\n\nwith open(pid_file, \"w\") as pf:\n pf.write(str(os.getpid()))\n\ntry:\n while not os.path.isfile(waiting_for_file):\n if attempts <= 0:\n raise Exception(\"File was never written.\")\n attempts -= 1\n sys.stderr.write(\"Waiting for file {}\\n\".format(waiting_for_file))\n time.sleep(1)\nfinally:\n child.terminate()\n", "path": "testprojects/src/python/coordinated_runs/waiter.py"}]} | 1,026 | 564 |
gh_patches_debug_1173 | rasdani/github-patches | git_diff | dask__dask-533 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ProgressBar is not visible in the notebook
The `ProgressBar` doesn't update itself during execution while in the notebook. Afterwards the full bar will pop up but it doesn't give you any cues during execution.
</issue>
<code>
[start of dask/diagnostics/progress.py]
1 from __future__ import division
2 import sys
3 import threading
4 import time
5 from timeit import default_timer
6
7 from ..core import istask
8 from .core import Diagnostic
9
10
11 def format_time(t):
12 """Format seconds into a human readable form.
13
14 >>> format_time(10.4)
15 '10.4s'
16 >>> format_time(1000.4)
17 '16min 40.4s'
18 """
19 m, s = divmod(t, 60)
20 h, m = divmod(m, 60)
21 if h:
22 return '{0:2.0f}hr {1:2.0f}min {2:4.1f}s'.format(h, m, s)
23 elif m:
24 return '{0:2.0f}min {1:4.1f}s'.format(m, s)
25 else:
26 return '{0:4.1f}s'.format(s)
27
28
29 class ProgressBar(Diagnostic):
30 """A progress bar for dask.
31
32 Can be used as a context manager around dask computations.
33
34 Examples
35 --------
36 >>> with ProgressBar(): # doctest: +SKIP
37 ... out = res.compute()
38 [########################################] | 100% Completed | 10.4 s
39 """
40
41 def __init__(self, width=40, dt=0.1):
42 self._width = width
43 self._dt = dt
44
45 def _start(self, dsk, state):
46 self._ntasks = len([k for (k, v) in dsk.items() if istask(v)])
47 self._ndone = 0
48 self._update_rate = max(1, self._ntasks // self._width)
49 self._start_time = default_timer()
50 # Start background thread
51 self._running = True
52 self._timer = threading.Thread(target=self._timer_func)
53 self._timer.start()
54
55 def _posttask(self, key, value, dsk, state, id):
56 self._ndone += 1
57
58 def _finish(self, dsk, state, errored):
59 self._running = False
60 self._timer.join()
61 self._finalize_bar()
62
63 def _timer_func(self):
64 """Background thread for updating the progress bar"""
65 while self._running:
66 self._update_bar()
67 time.sleep(self._dt)
68
69 def _update_bar(self):
70 tics = int(self._ndone * self._width / self._ntasks)
71 bar = '#' * tics
72 percent = (100 * self._ndone) // self._ntasks
73 elapsed = format_time(default_timer() - self._start_time)
74 msg = '\r[{0:<{1}}] | {2}% Completed | {3}'.format(bar, self._width,
75 percent, elapsed)
76 sys.stdout.write(msg)
77 sys.stdout.flush()
78
79 def _finalize_bar(self):
80 self._update_bar()
81 sys.stdout.write('\n')
82 sys.stdout.flush()
83
[end of dask/diagnostics/progress.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/dask/diagnostics/progress.py b/dask/diagnostics/progress.py
--- a/dask/diagnostics/progress.py
+++ b/dask/diagnostics/progress.py
@@ -54,6 +54,7 @@
def _posttask(self, key, value, dsk, state, id):
self._ndone += 1
+ sys.stdout.flush()
def _finish(self, dsk, state, errored):
self._running = False
| {"golden_diff": "diff --git a/dask/diagnostics/progress.py b/dask/diagnostics/progress.py\n--- a/dask/diagnostics/progress.py\n+++ b/dask/diagnostics/progress.py\n@@ -54,6 +54,7 @@\n \n def _posttask(self, key, value, dsk, state, id):\n self._ndone += 1\n+ sys.stdout.flush()\n \n def _finish(self, dsk, state, errored):\n self._running = False\n", "issue": "ProgressBar is not visible in the notebook\nThe `ProgressBar` doesn't update itself during execution while in the notebook. Afterwards the full bar will pop up but it doesn't give you any cues during execution.\n\n", "before_files": [{"content": "from __future__ import division\nimport sys\nimport threading\nimport time\nfrom timeit import default_timer\n\nfrom ..core import istask\nfrom .core import Diagnostic\n\n\ndef format_time(t):\n \"\"\"Format seconds into a human readable form.\n\n >>> format_time(10.4)\n '10.4s'\n >>> format_time(1000.4)\n '16min 40.4s'\n \"\"\"\n m, s = divmod(t, 60)\n h, m = divmod(m, 60)\n if h:\n return '{0:2.0f}hr {1:2.0f}min {2:4.1f}s'.format(h, m, s)\n elif m:\n return '{0:2.0f}min {1:4.1f}s'.format(m, s)\n else:\n return '{0:4.1f}s'.format(s)\n\n\nclass ProgressBar(Diagnostic):\n \"\"\"A progress bar for dask.\n\n Can be used as a context manager around dask computations.\n\n Examples\n --------\n >>> with ProgressBar(): # doctest: +SKIP\n ... out = res.compute()\n [########################################] | 100% Completed | 10.4 s\n \"\"\"\n\n def __init__(self, width=40, dt=0.1):\n self._width = width\n self._dt = dt\n\n def _start(self, dsk, state):\n self._ntasks = len([k for (k, v) in dsk.items() if istask(v)])\n self._ndone = 0\n self._update_rate = max(1, self._ntasks // self._width)\n self._start_time = default_timer()\n # Start background thread\n self._running = True\n self._timer = threading.Thread(target=self._timer_func)\n self._timer.start()\n\n def _posttask(self, key, value, dsk, state, id):\n self._ndone += 1\n\n def _finish(self, dsk, state, errored):\n self._running = False\n self._timer.join()\n self._finalize_bar()\n\n def _timer_func(self):\n \"\"\"Background thread for updating the progress bar\"\"\"\n while self._running:\n self._update_bar()\n time.sleep(self._dt)\n\n def _update_bar(self):\n tics = int(self._ndone * self._width / self._ntasks)\n bar = '#' * tics\n percent = (100 * self._ndone) // self._ntasks\n elapsed = format_time(default_timer() - self._start_time)\n msg = '\\r[{0:<{1}}] | {2}% Completed | {3}'.format(bar, self._width,\n percent, elapsed)\n sys.stdout.write(msg)\n sys.stdout.flush()\n\n def _finalize_bar(self):\n self._update_bar()\n sys.stdout.write('\\n')\n sys.stdout.flush()\n", "path": "dask/diagnostics/progress.py"}]} | 1,405 | 109 |
gh_patches_debug_7522 | rasdani/github-patches | git_diff | boto__botocore-3141 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add support for urllib3 2.2.1
### Describe the bug
Underlying `botocore` requires outdated `urllib3` version:
- 1.x in case of Python pre-3.10
- < 2.1 in case of Python 3.10(+)
background from: https://github.com/boto/botocore/issues/2926
I do totally understand the motivation for these outdated `urllib3` version dependencies. IMHO I think it should be up to whoever uses boto3 to specify whether or not to use an outdated version of `urllib3`.
Incidentally I am using this for an application that may run on Amazon Linux, which may need the older `urllib3` dependency, would have preferred to make this choice myself.
I am taking liberty to file this issue for `boto3`, no objections if maintainers want to transfer this to `botocore`.
I am also taking liberty to leave out some info that I think should not matter - please post a loud holler if any further info is needed. Thanks in advance!
### Expected Behavior
I think it should be possible to install boto3 and another package such as requests with recently updated version of urllib3 such as `urllib3 == 2.2.1`.
### Current Behavior
Combinaiton of `boto3 == 1.34.46` & `urllib3 == 2.2.1` leads to pip installation error.
### Reproduction Steps
see above - please post a loud holler if any further info is needed
### Possible Solution
_No response_
### Additional Information/Context
_No response_
### SDK version used
Python 3.9 / 3.11; boto3 v1.34.46
### Environment details (OS name and version, etc.)
Ubuntu 22.04.3 LTS 64-bit (with some customized updates) on Intel core i7
</issue>
<code>
[start of setup.py]
1 #!/usr/bin/env python
2 import codecs
3 import os.path
4 import re
5
6 from setuptools import find_packages, setup
7
8 here = os.path.abspath(os.path.dirname(__file__))
9
10
11 def read(*parts):
12 return codecs.open(os.path.join(here, *parts), 'r').read()
13
14
15 def find_version(*file_paths):
16 version_file = read(*file_paths)
17 version_match = re.search(
18 r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M
19 )
20 if version_match:
21 return version_match.group(1)
22 raise RuntimeError("Unable to find version string.")
23
24
25 requires = [
26 'jmespath>=0.7.1,<2.0.0',
27 'python-dateutil>=2.1,<3.0.0',
28 'urllib3>=1.25.4,<1.27 ; python_version < "3.10"',
29 'urllib3>=1.25.4,<2.1 ; python_version >= "3.10"',
30 ]
31
32 extras_require = {
33 'crt': ['awscrt==0.19.19'],
34 }
35
36 setup(
37 name='botocore',
38 version=find_version("botocore", "__init__.py"),
39 description='Low-level, data-driven core of boto 3.',
40 long_description=open('README.rst').read(),
41 author='Amazon Web Services',
42 url='https://github.com/boto/botocore',
43 scripts=[],
44 packages=find_packages(exclude=['tests*']),
45 package_data={
46 'botocore': ['cacert.pem', 'data/*.json', 'data/*/*.json'],
47 'botocore.vendored.requests': ['*.pem'],
48 },
49 include_package_data=True,
50 install_requires=requires,
51 extras_require=extras_require,
52 license="Apache License 2.0",
53 python_requires=">= 3.8",
54 classifiers=[
55 'Development Status :: 5 - Production/Stable',
56 'Intended Audience :: Developers',
57 'Intended Audience :: System Administrators',
58 'Natural Language :: English',
59 'License :: OSI Approved :: Apache Software License',
60 'Programming Language :: Python',
61 'Programming Language :: Python :: 3 :: Only',
62 'Programming Language :: Python :: 3',
63 'Programming Language :: Python :: 3.8',
64 'Programming Language :: Python :: 3.9',
65 'Programming Language :: Python :: 3.10',
66 'Programming Language :: Python :: 3.11',
67 'Programming Language :: Python :: 3.12',
68 ],
69 )
70
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -25,8 +25,11 @@
requires = [
'jmespath>=0.7.1,<2.0.0',
'python-dateutil>=2.1,<3.0.0',
+ # Prior to Python 3.10, Python doesn't require openssl 1.1.1
+ # but urllib3 2.0+ does. This means all botocore users will be
+ # broken by default on Amazon Linux 2 and AWS Lambda without this pin.
'urllib3>=1.25.4,<1.27 ; python_version < "3.10"',
- 'urllib3>=1.25.4,<2.1 ; python_version >= "3.10"',
+ 'urllib3>=1.25.4,!=2.2.0,<3 ; python_version >= "3.10"',
]
extras_require = {
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -25,8 +25,11 @@\n requires = [\n 'jmespath>=0.7.1,<2.0.0',\n 'python-dateutil>=2.1,<3.0.0',\n+ # Prior to Python 3.10, Python doesn't require openssl 1.1.1\n+ # but urllib3 2.0+ does. This means all botocore users will be\n+ # broken by default on Amazon Linux 2 and AWS Lambda without this pin.\n 'urllib3>=1.25.4,<1.27 ; python_version < \"3.10\"',\n- 'urllib3>=1.25.4,<2.1 ; python_version >= \"3.10\"',\n+ 'urllib3>=1.25.4,!=2.2.0,<3 ; python_version >= \"3.10\"',\n ]\n \n extras_require = {\n", "issue": "Add support for urllib3 2.2.1\n### Describe the bug\n\nUnderlying `botocore` requires outdated `urllib3` version:\r\n\r\n- 1.x in case of Python pre-3.10\r\n- < 2.1 in case of Python 3.10(+)\r\n\r\nbackground from: https://github.com/boto/botocore/issues/2926\r\n\r\nI do totally understand the motivation for these outdated `urllib3` version dependencies. IMHO I think it should be up to whoever uses boto3 to specify whether or not to use an outdated version of `urllib3`.\r\n\r\nIncidentally I am using this for an application that may run on Amazon Linux, which may need the older `urllib3` dependency, would have preferred to make this choice myself.\r\n\r\nI am taking liberty to file this issue for `boto3`, no objections if maintainers want to transfer this to `botocore`.\r\n\r\nI am also taking liberty to leave out some info that I think should not matter - please post a loud holler if any further info is needed. Thanks in advance!\n\n### Expected Behavior\n\nI think it should be possible to install boto3 and another package such as requests with recently updated version of urllib3 such as `urllib3 == 2.2.1`.\n\n### Current Behavior\n\nCombinaiton of `boto3 == 1.34.46` & `urllib3 == 2.2.1` leads to pip installation error.\n\n### Reproduction Steps\n\nsee above - please post a loud holler if any further info is needed\n\n### Possible Solution\n\n_No response_\n\n### Additional Information/Context\n\n_No response_\n\n### SDK version used\n\nPython 3.9 / 3.11; boto3 v1.34.46\n\n### Environment details (OS name and version, etc.)\n\nUbuntu 22.04.3 LTS 64-bit (with some customized updates) on Intel core i7\n", "before_files": [{"content": "#!/usr/bin/env python\nimport codecs\nimport os.path\nimport re\n\nfrom setuptools import find_packages, setup\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read(*parts):\n return codecs.open(os.path.join(here, *parts), 'r').read()\n\n\ndef find_version(*file_paths):\n version_file = read(*file_paths)\n version_match = re.search(\n r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M\n )\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")\n\n\nrequires = [\n 'jmespath>=0.7.1,<2.0.0',\n 'python-dateutil>=2.1,<3.0.0',\n 'urllib3>=1.25.4,<1.27 ; python_version < \"3.10\"',\n 'urllib3>=1.25.4,<2.1 ; python_version >= \"3.10\"',\n]\n\nextras_require = {\n 'crt': ['awscrt==0.19.19'],\n}\n\nsetup(\n name='botocore',\n version=find_version(\"botocore\", \"__init__.py\"),\n description='Low-level, data-driven core of boto 3.',\n long_description=open('README.rst').read(),\n author='Amazon Web Services',\n url='https://github.com/boto/botocore',\n scripts=[],\n packages=find_packages(exclude=['tests*']),\n package_data={\n 'botocore': ['cacert.pem', 'data/*.json', 'data/*/*.json'],\n 'botocore.vendored.requests': ['*.pem'],\n },\n include_package_data=True,\n install_requires=requires,\n 
extras_require=extras_require,\n license=\"Apache License 2.0\",\n python_requires=\">= 3.8\",\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3 :: Only',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: 3.10',\n 'Programming Language :: Python :: 3.11',\n 'Programming Language :: Python :: 3.12',\n ],\n)\n", "path": "setup.py"}]} | 1,643 | 228 |
gh_patches_debug_15490 | rasdani/github-patches | git_diff | kubeflow__pipelines-6193 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[pH] v2 sample test - enable kaniko caching
https://cloud.google.com/build/docs/kaniko-cache
this will help improve local testing speed
https://github.com/kubeflow/pipelines/blob/master/v2/test/components/kaniko.yaml
</issue>
<code>
[start of samples/v2/hello_world.py]
1 # Copyright 2021 The Kubeflow Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from kfp.v2 import dsl
16 from kfp.v2 import compiler
17 from kfp.v2 import components
18
19
20 @components.create_component_from_func
21 def hello_world(text: str):
22 print(text)
23 return text
24
25
26 @dsl.pipeline(name='hello-world', description='A simple intro pipeline')
27 def pipeline_parameter_to_consumer(text: str = 'hi there'):
28 '''Pipeline that passes small pipeline parameter string to consumer op'''
29
30 consume_task = hello_world(
31 text
32 ) # Passing pipeline parameter as argument to consumer op
33
34
35 if __name__ == "__main__":
36 # execute only if run as a script
37 compiler.Compiler().compile(
38 pipeline_func=pipeline_parameter_to_consumer,
39 package_path='hello_world_pipeline.json'
40 )
41
[end of samples/v2/hello_world.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/samples/v2/hello_world.py b/samples/v2/hello_world.py
--- a/samples/v2/hello_world.py
+++ b/samples/v2/hello_world.py
@@ -24,7 +24,7 @@
@dsl.pipeline(name='hello-world', description='A simple intro pipeline')
-def pipeline_parameter_to_consumer(text: str = 'hi there'):
+def pipeline_hello_world(text: str = 'hi there'):
'''Pipeline that passes small pipeline parameter string to consumer op'''
consume_task = hello_world(
@@ -35,6 +35,6 @@
if __name__ == "__main__":
# execute only if run as a script
compiler.Compiler().compile(
- pipeline_func=pipeline_parameter_to_consumer,
+ pipeline_func=pipeline_hello_world,
package_path='hello_world_pipeline.json'
)
| {"golden_diff": "diff --git a/samples/v2/hello_world.py b/samples/v2/hello_world.py\n--- a/samples/v2/hello_world.py\n+++ b/samples/v2/hello_world.py\n@@ -24,7 +24,7 @@\n \n \n @dsl.pipeline(name='hello-world', description='A simple intro pipeline')\n-def pipeline_parameter_to_consumer(text: str = 'hi there'):\n+def pipeline_hello_world(text: str = 'hi there'):\n '''Pipeline that passes small pipeline parameter string to consumer op'''\n \n consume_task = hello_world(\n@@ -35,6 +35,6 @@\n if __name__ == \"__main__\":\n # execute only if run as a script\n compiler.Compiler().compile(\n- pipeline_func=pipeline_parameter_to_consumer,\n+ pipeline_func=pipeline_hello_world,\n package_path='hello_world_pipeline.json'\n )\n", "issue": "[pH] v2 sample test - enable kaniko caching\nhttps://cloud.google.com/build/docs/kaniko-cache\r\n\r\nthis will help improve local testing speed\r\nhttps://github.com/kubeflow/pipelines/blob/master/v2/test/components/kaniko.yaml\n", "before_files": [{"content": "# Copyright 2021 The Kubeflow Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom kfp.v2 import dsl\nfrom kfp.v2 import compiler\nfrom kfp.v2 import components\n\n\[email protected]_component_from_func\ndef hello_world(text: str):\n print(text)\n return text\n\n\[email protected](name='hello-world', description='A simple intro pipeline')\ndef pipeline_parameter_to_consumer(text: str = 'hi there'):\n '''Pipeline that passes small pipeline parameter string to consumer op'''\n\n consume_task = hello_world(\n text\n ) # Passing pipeline parameter as argument to consumer op\n\n\nif __name__ == \"__main__\":\n # execute only if run as a script\n compiler.Compiler().compile(\n pipeline_func=pipeline_parameter_to_consumer,\n package_path='hello_world_pipeline.json'\n )\n", "path": "samples/v2/hello_world.py"}]} | 963 | 186 |
gh_patches_debug_57144 | rasdani/github-patches | git_diff | wemake-services__wemake-python-styleguide-188 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Feature: forbid `credits()` builtin function
We should add `credits()` as a forbidden function:
```
» python -c 'credits()'
Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
for supporting Python development. See www.python.org for more information.
```
We need to add it here: https://github.com/wemake-services/wemake-python-styleguide/blob/3cedeb3c13ab6b16980a39edf657ab93d4c1f19e/wemake_python_styleguide/constants.py#L36-L38
</issue>
<code>
[start of wemake_python_styleguide/constants.py]
1 # -*- coding: utf-8 -*-
2
3 """
4 This module contains list of white- and black-listed ``python`` members.
5
6 It contains lists of keywords and built-in functions we discourage to use.
7 It also contains some exceptions that we allow to use in our codebase.
8 """
9
10 import re
11 import sys
12
13 # TODO: use consistent `.` for the `#:` comments
14 # TODO: use consistent names: `*_BLACKLIST` and `*_WHITELIST`
15
16 #: List of functions we forbid to use.
17 BAD_FUNCTIONS = frozenset((
18 # Code generation:
19 'eval',
20 'exec',
21 'compile',
22
23 # Magic:
24 'globals',
25 'locals',
26 'vars',
27 'dir',
28
29 # IO:
30 'input',
31
32 # Attribute access:
33 'hasattr',
34 'delattr',
35
36 # Misc:
37 'copyright',
38 'help',
39
40 # Dynamic imports:
41 '__import__',
42
43 # OOP:
44 'staticmethod',
45 ))
46
47 #: List of module metadata we forbid to use.
48 BAD_MODULE_METADATA_VARIABLES = frozenset((
49 '__author__',
50 '__all__',
51 '__version__',
52 '__about__',
53 ))
54
55
56 _BAD_VARIABLE_NAMES = [
57 # Meaningless words:
58 'data',
59 'result',
60 'results',
61 'item',
62 'items',
63 'value',
64 'values',
65 'val',
66 'vals',
67 'var',
68 'vars',
69 'content',
70 'contents',
71 'info',
72 'handle',
73 'handler',
74 'file',
75 'obj',
76 'objects',
77 'objs',
78 'some',
79
80 # Confusables:
81 'no',
82 'true',
83 'false',
84
85 # Names from examples:
86 'foo',
87 'bar',
88 'baz',
89 ]
90
91 if sys.version_info < (3, 7): # pragma: no cover
92 _BAD_VARIABLE_NAMES.extend([
93 # Compatibility with `python3.7`:
94 'async',
95 'await',
96 ])
97
98 #: List of variable names we forbid to use.
99 BAD_VARIABLE_NAMES = frozenset(_BAD_VARIABLE_NAMES)
100
101 #: List of magic methods that are forbiden to use.
102 BAD_MAGIC_METHODS = frozenset((
103 # Since we don't use `del`:
104 '__del__',
105 '__delitem__',
106 '__delete__',
107
108 '__dir__', # since we don't use `dir()`
109 '__delattr__', # since we don't use `delattr()`
110 ))
111
112 #: List of nested classes' names we allow to use.
113 NESTED_CLASSES_WHITELIST = frozenset((
114 'Meta', # django forms, models, drf, etc
115 'Params', # factoryboy specific
116 ))
117
118 #: List of nested functions' names we allow to use.
119 NESTED_FUNCTIONS_WHITELIST = frozenset((
120 'decorator',
121 'factory',
122 ))
123
124 #: List of allowed ``__future__`` imports.
125 FUTURE_IMPORTS_WHITELIST = frozenset((
126 'annotations',
127 'generator_stop',
128 ))
129
130 #: List of blacklisted module names:
131 BAD_MODULE_NAMES = frozenset((
132 'util',
133 'utils',
134 'utilities',
135 'helpers',
136 ))
137
138 #: List of allowed module magic names:
139 MAGIC_MODULE_NAMES_WHITELIST = frozenset((
140 '__init__',
141 '__main__',
142 ))
143
144 #: Regex pattern to name modules:
145 MODULE_NAME_PATTERN = re.compile(r'^_?_?[a-z][a-z\d_]+[a-z\d](__)?$')
146
147 #: Common numbers that are allowed to be used without being called "magic":
148 MAGIC_NUMBERS_WHITELIST = frozenset((
149 0.5,
150 100,
151 1000,
152 1024, # bytes
153 24, # hours
154 60, # seconds, minutes
155 ))
156
157
158 # Internal variables
159 # They are not publicly documented since they are not used by the end user.
160
161 # This variable is used as a default filename, when it is not passed by flake8:
162 STDIN = 'stdin'
163
164 # TODO: rename to `INIT_MODULE`
165 # This variable is used to specify as a placeholder for `__init__.py`:
166 INIT = '__init__'
167
[end of wemake_python_styleguide/constants.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/wemake_python_styleguide/constants.py b/wemake_python_styleguide/constants.py
--- a/wemake_python_styleguide/constants.py
+++ b/wemake_python_styleguide/constants.py
@@ -36,6 +36,7 @@
# Misc:
'copyright',
'help',
+ 'credits',
# Dynamic imports:
'__import__',
| {"golden_diff": "diff --git a/wemake_python_styleguide/constants.py b/wemake_python_styleguide/constants.py\n--- a/wemake_python_styleguide/constants.py\n+++ b/wemake_python_styleguide/constants.py\n@@ -36,6 +36,7 @@\n # Misc:\n 'copyright',\n 'help',\n+ 'credits',\n \n # Dynamic imports:\n '__import__',\n", "issue": "Feature: forbid `credits()` builtin function\nWe should add `credits()` as a forbidden function:\r\n\r\n```\r\n\u00bb python -c 'credits()'\r\n Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands\r\n for supporting Python development. See www.python.org for more information.\r\n\r\n```\r\n\r\nWe need to add it here: https://github.com/wemake-services/wemake-python-styleguide/blob/3cedeb3c13ab6b16980a39edf657ab93d4c1f19e/wemake_python_styleguide/constants.py#L36-L38\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nThis module contains list of white- and black-listed ``python`` members.\n\nIt contains lists of keywords and built-in functions we discourage to use.\nIt also contains some exceptions that we allow to use in our codebase.\n\"\"\"\n\nimport re\nimport sys\n\n# TODO: use consistent `.` for the `#:` comments\n# TODO: use consistent names: `*_BLACKLIST` and `*_WHITELIST`\n\n#: List of functions we forbid to use.\nBAD_FUNCTIONS = frozenset((\n # Code generation:\n 'eval',\n 'exec',\n 'compile',\n\n # Magic:\n 'globals',\n 'locals',\n 'vars',\n 'dir',\n\n # IO:\n 'input',\n\n # Attribute access:\n 'hasattr',\n 'delattr',\n\n # Misc:\n 'copyright',\n 'help',\n\n # Dynamic imports:\n '__import__',\n\n # OOP:\n 'staticmethod',\n))\n\n#: List of module metadata we forbid to use.\nBAD_MODULE_METADATA_VARIABLES = frozenset((\n '__author__',\n '__all__',\n '__version__',\n '__about__',\n))\n\n\n_BAD_VARIABLE_NAMES = [\n # Meaningless words:\n 'data',\n 'result',\n 'results',\n 'item',\n 'items',\n 'value',\n 'values',\n 'val',\n 'vals',\n 'var',\n 'vars',\n 'content',\n 'contents',\n 'info',\n 'handle',\n 'handler',\n 'file',\n 'obj',\n 'objects',\n 'objs',\n 'some',\n\n # Confusables:\n 'no',\n 'true',\n 'false',\n\n # Names from examples:\n 'foo',\n 'bar',\n 'baz',\n]\n\nif sys.version_info < (3, 7): # pragma: no cover\n _BAD_VARIABLE_NAMES.extend([\n # Compatibility with `python3.7`:\n 'async',\n 'await',\n ])\n\n#: List of variable names we forbid to use.\nBAD_VARIABLE_NAMES = frozenset(_BAD_VARIABLE_NAMES)\n\n#: List of magic methods that are forbiden to use.\nBAD_MAGIC_METHODS = frozenset((\n # Since we don't use `del`:\n '__del__',\n '__delitem__',\n '__delete__',\n\n '__dir__', # since we don't use `dir()`\n '__delattr__', # since we don't use `delattr()`\n))\n\n#: List of nested classes' names we allow to use.\nNESTED_CLASSES_WHITELIST = frozenset((\n 'Meta', # django forms, models, drf, etc\n 'Params', # factoryboy specific\n))\n\n#: List of nested functions' names we allow to use.\nNESTED_FUNCTIONS_WHITELIST = frozenset((\n 'decorator',\n 'factory',\n))\n\n#: List of allowed ``__future__`` imports.\nFUTURE_IMPORTS_WHITELIST = frozenset((\n 'annotations',\n 'generator_stop',\n))\n\n#: List of blacklisted module names:\nBAD_MODULE_NAMES = frozenset((\n 'util',\n 'utils',\n 'utilities',\n 'helpers',\n))\n\n#: List of allowed module magic names:\nMAGIC_MODULE_NAMES_WHITELIST = frozenset((\n '__init__',\n '__main__',\n))\n\n#: Regex pattern to name modules:\nMODULE_NAME_PATTERN = re.compile(r'^_?_?[a-z][a-z\\d_]+[a-z\\d](__)?$')\n\n#: Common numbers that are allowed to be used without being called 
\"magic\":\nMAGIC_NUMBERS_WHITELIST = frozenset((\n 0.5,\n 100,\n 1000,\n 1024, # bytes\n 24, # hours\n 60, # seconds, minutes\n))\n\n\n# Internal variables\n# They are not publicly documented since they are not used by the end user.\n\n# This variable is used as a default filename, when it is not passed by flake8:\nSTDIN = 'stdin'\n\n# TODO: rename to `INIT_MODULE`\n# This variable is used to specify as a placeholder for `__init__.py`:\nINIT = '__init__'\n", "path": "wemake_python_styleguide/constants.py"}]} | 2,011 | 85 |
gh_patches_debug_40576 | rasdani/github-patches | git_diff | Cog-Creators__Red-DiscordBot-2212 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[V3 Config] Config does not reject bad keys
Including a `.` or starting a keyname with `$` needs to be invalid in config because of mongoDB backend support.
Config will work just fine for this on the JSON backend, but not with the MongoDB one, because these characters are not allowed in field names in Mongo: it keeps working with JSON and throws uncaught exceptions on Mongo.
I'd be in favor of limiting key names to alphanumeric characters plus space, underscore, and hyphen, to prevent future issues like this with other possible backends, without breaking cogs twice for this.
</issue>
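
A minimal standalone sketch of the escaping idea raised above (illustrative only, not Red-DiscordBot's implementation): map the characters MongoDB rejects in field names to placeholder sequences on write and reverse the mapping on read. The placeholder strings below are assumptions made for the example.

```python
# Placeholder sequences chosen for illustration only.
_ESCAPES = {".": "\\u002E", "$": "\\u0024"}
_UNESCAPES = {escaped: raw for raw, escaped in _ESCAPES.items()}


def escape_key(key: str) -> str:
    for raw, escaped in _ESCAPES.items():
        key = key.replace(raw, escaped)
    return key


def unescape_key(key: str) -> str:
    for escaped, raw in _UNESCAPES.items():
        key = key.replace(escaped, raw)
    return key


assert escape_key("$set") == "\\u0024set"
assert unescape_key(escape_key("price.usd")) == "price.usd"
```

A simple mapping like this round-trips ordinary keys; keys that already contain the placeholder text would need a second level of escaping.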
<code>
[start of redbot/core/drivers/red_mongo.py]
1 import motor.motor_asyncio
2 from .red_base import BaseDriver
3 from urllib.parse import quote_plus
4
5 __all__ = ["Mongo"]
6
7
8 _conn = None
9
10
11 def _initialize(**kwargs):
12 uri = kwargs.get("URI", "mongodb")
13 host = kwargs["HOST"]
14 port = kwargs["PORT"]
15 admin_user = kwargs["USERNAME"]
16 admin_pass = kwargs["PASSWORD"]
17 db_name = kwargs.get("DB_NAME", "default_db")
18
19 if port is 0:
20 ports = ""
21 else:
22 ports = ":{}".format(port)
23
24 if admin_user is not None and admin_pass is not None:
25 url = "{}://{}:{}@{}{}/{}".format(
26 uri, quote_plus(admin_user), quote_plus(admin_pass), host, ports, db_name
27 )
28 else:
29 url = "{}://{}{}/{}".format(uri, host, ports, db_name)
30
31 global _conn
32 _conn = motor.motor_asyncio.AsyncIOMotorClient(url)
33
34
35 class Mongo(BaseDriver):
36 """
37 Subclass of :py:class:`.red_base.BaseDriver`.
38 """
39
40 def __init__(self, cog_name, identifier, **kwargs):
41 super().__init__(cog_name, identifier)
42
43 if _conn is None:
44 _initialize(**kwargs)
45
46 @property
47 def db(self) -> motor.core.Database:
48 """
49 Gets the mongo database for this cog's name.
50
51 .. warning::
52
53 Right now this will cause a new connection to be made every time the
54 database is accessed. We will want to create a connection pool down the
55 line to limit the number of connections.
56
57 :return:
58 PyMongo Database object.
59 """
60 return _conn.get_database()
61
62 def get_collection(self) -> motor.core.Collection:
63 """
64 Gets a specified collection within the PyMongo database for this cog.
65
66 Unless you are doing custom stuff ``collection_name`` should be one of the class
67 attributes of :py:class:`core.config.Config`.
68
69 :param str collection_name:
70 :return:
71 PyMongo collection object.
72 """
73 return self.db[self.cog_name]
74
75 @staticmethod
76 def _parse_identifiers(identifiers):
77 uuid, identifiers = identifiers[0], identifiers[1:]
78 return uuid, identifiers
79
80 async def get(self, *identifiers: str):
81 mongo_collection = self.get_collection()
82
83 dot_identifiers = ".".join(identifiers)
84
85 partial = await mongo_collection.find_one(
86 filter={"_id": self.unique_cog_identifier}, projection={dot_identifiers: True}
87 )
88
89 if partial is None:
90 raise KeyError("No matching document was found and Config expects a KeyError.")
91
92 for i in identifiers:
93 partial = partial[i]
94 return partial
95
96 async def set(self, *identifiers: str, value=None):
97 dot_identifiers = ".".join(identifiers)
98
99 mongo_collection = self.get_collection()
100
101 await mongo_collection.update_one(
102 {"_id": self.unique_cog_identifier},
103 update={"$set": {dot_identifiers: value}},
104 upsert=True,
105 )
106
107 async def clear(self, *identifiers: str):
108 dot_identifiers = ".".join(identifiers)
109 mongo_collection = self.get_collection()
110
111 if len(identifiers) > 0:
112 await mongo_collection.update_one(
113 {"_id": self.unique_cog_identifier}, update={"$unset": {dot_identifiers: 1}}
114 )
115 else:
116 await mongo_collection.delete_one({"_id": self.unique_cog_identifier})
117
118
119 def get_config_details():
120 uri = None
121 while True:
122 uri = input("Enter URI scheme (mongodb or mongodb+srv): ")
123 if uri is "":
124 uri = "mongodb"
125
126 if uri in ["mongodb", "mongodb+srv"]:
127 break
128 else:
129 print("Invalid URI scheme")
130
131 host = input("Enter host address: ")
132 if uri is "mongodb":
133 port = int(input("Enter host port: "))
134 else:
135 port = 0
136
137 admin_uname = input("Enter login username: ")
138 admin_password = input("Enter login password: ")
139
140 db_name = input("Enter mongodb database name: ")
141
142 if admin_uname == "":
143 admin_uname = admin_password = None
144
145 ret = {
146 "HOST": host,
147 "PORT": port,
148 "USERNAME": admin_uname,
149 "PASSWORD": admin_password,
150 "DB_NAME": db_name,
151 "URI": uri,
152 }
153 return ret
154
[end of redbot/core/drivers/red_mongo.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/redbot/core/drivers/red_mongo.py b/redbot/core/drivers/red_mongo.py
--- a/redbot/core/drivers/red_mongo.py
+++ b/redbot/core/drivers/red_mongo.py
@@ -1,6 +1,11 @@
+import re
+from typing import Match, Pattern
+from urllib.parse import quote_plus
+
+import motor.core
import motor.motor_asyncio
+
from .red_base import BaseDriver
-from urllib.parse import quote_plus
__all__ = ["Mongo"]
@@ -80,6 +85,7 @@
async def get(self, *identifiers: str):
mongo_collection = self.get_collection()
+ identifiers = (*map(self._escape_key, identifiers),)
dot_identifiers = ".".join(identifiers)
partial = await mongo_collection.find_one(
@@ -91,10 +97,14 @@
for i in identifiers:
partial = partial[i]
+ if isinstance(partial, dict):
+ return self._unescape_dict_keys(partial)
return partial
async def set(self, *identifiers: str, value=None):
- dot_identifiers = ".".join(identifiers)
+ dot_identifiers = ".".join(map(self._escape_key, identifiers))
+ if isinstance(value, dict):
+ value = self._escape_dict_keys(value)
mongo_collection = self.get_collection()
@@ -105,7 +115,7 @@
)
async def clear(self, *identifiers: str):
- dot_identifiers = ".".join(identifiers)
+ dot_identifiers = ".".join(map(self._escape_key, identifiers))
mongo_collection = self.get_collection()
if len(identifiers) > 0:
@@ -115,6 +125,62 @@
else:
await mongo_collection.delete_one({"_id": self.unique_cog_identifier})
+ @staticmethod
+ def _escape_key(key: str) -> str:
+ return _SPECIAL_CHAR_PATTERN.sub(_replace_with_escaped, key)
+
+ @staticmethod
+ def _unescape_key(key: str) -> str:
+ return _CHAR_ESCAPE_PATTERN.sub(_replace_with_unescaped, key)
+
+ @classmethod
+ def _escape_dict_keys(cls, data: dict) -> dict:
+ """Recursively escape all keys in a dict."""
+ ret = {}
+ for key, value in data.items():
+ key = cls._escape_key(key)
+ if isinstance(value, dict):
+ value = cls._escape_dict_keys(value)
+ ret[key] = value
+ return ret
+
+ @classmethod
+ def _unescape_dict_keys(cls, data: dict) -> dict:
+ """Recursively unescape all keys in a dict."""
+ ret = {}
+ for key, value in data.items():
+ key = cls._unescape_key(key)
+ if isinstance(value, dict):
+ value = cls._unescape_dict_keys(value)
+ ret[key] = value
+ return ret
+
+
+_SPECIAL_CHAR_PATTERN: Pattern[str] = re.compile(r"([.$]|\\U0000002E|\\U00000024)")
+_SPECIAL_CHARS = {
+ ".": "\\U0000002E",
+ "$": "\\U00000024",
+ "\\U0000002E": "\\U&0000002E",
+ "\\U00000024": "\\U&00000024",
+}
+
+
+def _replace_with_escaped(match: Match[str]) -> str:
+ return _SPECIAL_CHARS[match[0]]
+
+
+_CHAR_ESCAPE_PATTERN: Pattern[str] = re.compile(r"(\\U0000002E|\\U00000024)")
+_CHAR_ESCAPES = {
+ "\\U0000002E": ".",
+ "\\U00000024": "$",
+ "\\U&0000002E": "\\U0000002E",
+ "\\U&00000024": "\\U00000024",
+}
+
+
+def _replace_with_unescaped(match: Match[str]) -> str:
+ return _CHAR_ESCAPES[match[0]]
+
def get_config_details():
uri = None
| {"golden_diff": "diff --git a/redbot/core/drivers/red_mongo.py b/redbot/core/drivers/red_mongo.py\n--- a/redbot/core/drivers/red_mongo.py\n+++ b/redbot/core/drivers/red_mongo.py\n@@ -1,6 +1,11 @@\n+import re\n+from typing import Match, Pattern\n+from urllib.parse import quote_plus\n+\n+import motor.core\n import motor.motor_asyncio\n+\n from .red_base import BaseDriver\n-from urllib.parse import quote_plus\n \n __all__ = [\"Mongo\"]\n \n@@ -80,6 +85,7 @@\n async def get(self, *identifiers: str):\n mongo_collection = self.get_collection()\n \n+ identifiers = (*map(self._escape_key, identifiers),)\n dot_identifiers = \".\".join(identifiers)\n \n partial = await mongo_collection.find_one(\n@@ -91,10 +97,14 @@\n \n for i in identifiers:\n partial = partial[i]\n+ if isinstance(partial, dict):\n+ return self._unescape_dict_keys(partial)\n return partial\n \n async def set(self, *identifiers: str, value=None):\n- dot_identifiers = \".\".join(identifiers)\n+ dot_identifiers = \".\".join(map(self._escape_key, identifiers))\n+ if isinstance(value, dict):\n+ value = self._escape_dict_keys(value)\n \n mongo_collection = self.get_collection()\n \n@@ -105,7 +115,7 @@\n )\n \n async def clear(self, *identifiers: str):\n- dot_identifiers = \".\".join(identifiers)\n+ dot_identifiers = \".\".join(map(self._escape_key, identifiers))\n mongo_collection = self.get_collection()\n \n if len(identifiers) > 0:\n@@ -115,6 +125,62 @@\n else:\n await mongo_collection.delete_one({\"_id\": self.unique_cog_identifier})\n \n+ @staticmethod\n+ def _escape_key(key: str) -> str:\n+ return _SPECIAL_CHAR_PATTERN.sub(_replace_with_escaped, key)\n+\n+ @staticmethod\n+ def _unescape_key(key: str) -> str:\n+ return _CHAR_ESCAPE_PATTERN.sub(_replace_with_unescaped, key)\n+\n+ @classmethod\n+ def _escape_dict_keys(cls, data: dict) -> dict:\n+ \"\"\"Recursively escape all keys in a dict.\"\"\"\n+ ret = {}\n+ for key, value in data.items():\n+ key = cls._escape_key(key)\n+ if isinstance(value, dict):\n+ value = cls._escape_dict_keys(value)\n+ ret[key] = value\n+ return ret\n+\n+ @classmethod\n+ def _unescape_dict_keys(cls, data: dict) -> dict:\n+ \"\"\"Recursively unescape all keys in a dict.\"\"\"\n+ ret = {}\n+ for key, value in data.items():\n+ key = cls._unescape_key(key)\n+ if isinstance(value, dict):\n+ value = cls._unescape_dict_keys(value)\n+ ret[key] = value\n+ return ret\n+\n+\n+_SPECIAL_CHAR_PATTERN: Pattern[str] = re.compile(r\"([.$]|\\\\U0000002E|\\\\U00000024)\")\n+_SPECIAL_CHARS = {\n+ \".\": \"\\\\U0000002E\",\n+ \"$\": \"\\\\U00000024\",\n+ \"\\\\U0000002E\": \"\\\\U&0000002E\",\n+ \"\\\\U00000024\": \"\\\\U&00000024\",\n+}\n+\n+\n+def _replace_with_escaped(match: Match[str]) -> str:\n+ return _SPECIAL_CHARS[match[0]]\n+\n+\n+_CHAR_ESCAPE_PATTERN: Pattern[str] = re.compile(r\"(\\\\U0000002E|\\\\U00000024)\")\n+_CHAR_ESCAPES = {\n+ \"\\\\U0000002E\": \".\",\n+ \"\\\\U00000024\": \"$\",\n+ \"\\\\U&0000002E\": \"\\\\U0000002E\",\n+ \"\\\\U&00000024\": \"\\\\U00000024\",\n+}\n+\n+\n+def _replace_with_unescaped(match: Match[str]) -> str:\n+ return _CHAR_ESCAPES[match[0]]\n+\n \n def get_config_details():\n uri = None\n", "issue": "[V3 Config] Config does not reject bad keys\nIncluding a `.` or starting a keyname with `$` needs to be invalid in config because of mongoDB backend support. \r\n\r\nConfig will work just fine for this on the json backend, but not with the mongodb one because these characters are not allowed in field names for mongo. 
Instead, it works with json, and throws uncaught exceptions on mongo.\r\n\r\nI'd be in favor of limiting keynames to alpha-numeric and space, underscore, and hyphens. to prevent future issues like this with other future possible backends without possibly breaking cogs twice for this.\n", "before_files": [{"content": "import motor.motor_asyncio\nfrom .red_base import BaseDriver\nfrom urllib.parse import quote_plus\n\n__all__ = [\"Mongo\"]\n\n\n_conn = None\n\n\ndef _initialize(**kwargs):\n uri = kwargs.get(\"URI\", \"mongodb\")\n host = kwargs[\"HOST\"]\n port = kwargs[\"PORT\"]\n admin_user = kwargs[\"USERNAME\"]\n admin_pass = kwargs[\"PASSWORD\"]\n db_name = kwargs.get(\"DB_NAME\", \"default_db\")\n\n if port is 0:\n ports = \"\"\n else:\n ports = \":{}\".format(port)\n\n if admin_user is not None and admin_pass is not None:\n url = \"{}://{}:{}@{}{}/{}\".format(\n uri, quote_plus(admin_user), quote_plus(admin_pass), host, ports, db_name\n )\n else:\n url = \"{}://{}{}/{}\".format(uri, host, ports, db_name)\n\n global _conn\n _conn = motor.motor_asyncio.AsyncIOMotorClient(url)\n\n\nclass Mongo(BaseDriver):\n \"\"\"\n Subclass of :py:class:`.red_base.BaseDriver`.\n \"\"\"\n\n def __init__(self, cog_name, identifier, **kwargs):\n super().__init__(cog_name, identifier)\n\n if _conn is None:\n _initialize(**kwargs)\n\n @property\n def db(self) -> motor.core.Database:\n \"\"\"\n Gets the mongo database for this cog's name.\n\n .. warning::\n\n Right now this will cause a new connection to be made every time the\n database is accessed. We will want to create a connection pool down the\n line to limit the number of connections.\n\n :return:\n PyMongo Database object.\n \"\"\"\n return _conn.get_database()\n\n def get_collection(self) -> motor.core.Collection:\n \"\"\"\n Gets a specified collection within the PyMongo database for this cog.\n\n Unless you are doing custom stuff ``collection_name`` should be one of the class\n attributes of :py:class:`core.config.Config`.\n\n :param str collection_name:\n :return:\n PyMongo collection object.\n \"\"\"\n return self.db[self.cog_name]\n\n @staticmethod\n def _parse_identifiers(identifiers):\n uuid, identifiers = identifiers[0], identifiers[1:]\n return uuid, identifiers\n\n async def get(self, *identifiers: str):\n mongo_collection = self.get_collection()\n\n dot_identifiers = \".\".join(identifiers)\n\n partial = await mongo_collection.find_one(\n filter={\"_id\": self.unique_cog_identifier}, projection={dot_identifiers: True}\n )\n\n if partial is None:\n raise KeyError(\"No matching document was found and Config expects a KeyError.\")\n\n for i in identifiers:\n partial = partial[i]\n return partial\n\n async def set(self, *identifiers: str, value=None):\n dot_identifiers = \".\".join(identifiers)\n\n mongo_collection = self.get_collection()\n\n await mongo_collection.update_one(\n {\"_id\": self.unique_cog_identifier},\n update={\"$set\": {dot_identifiers: value}},\n upsert=True,\n )\n\n async def clear(self, *identifiers: str):\n dot_identifiers = \".\".join(identifiers)\n mongo_collection = self.get_collection()\n\n if len(identifiers) > 0:\n await mongo_collection.update_one(\n {\"_id\": self.unique_cog_identifier}, update={\"$unset\": {dot_identifiers: 1}}\n )\n else:\n await mongo_collection.delete_one({\"_id\": self.unique_cog_identifier})\n\n\ndef get_config_details():\n uri = None\n while True:\n uri = input(\"Enter URI scheme (mongodb or mongodb+srv): \")\n if uri is \"\":\n uri = \"mongodb\"\n\n if uri in [\"mongodb\", 
\"mongodb+srv\"]:\n break\n else:\n print(\"Invalid URI scheme\")\n\n host = input(\"Enter host address: \")\n if uri is \"mongodb\":\n port = int(input(\"Enter host port: \"))\n else:\n port = 0\n\n admin_uname = input(\"Enter login username: \")\n admin_password = input(\"Enter login password: \")\n\n db_name = input(\"Enter mongodb database name: \")\n\n if admin_uname == \"\":\n admin_uname = admin_password = None\n\n ret = {\n \"HOST\": host,\n \"PORT\": port,\n \"USERNAME\": admin_uname,\n \"PASSWORD\": admin_password,\n \"DB_NAME\": db_name,\n \"URI\": uri,\n }\n return ret\n", "path": "redbot/core/drivers/red_mongo.py"}]} | 2,011 | 1,001 |
gh_patches_debug_11890 | rasdani/github-patches | git_diff | dbt-labs__dbt-core-1553 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add materializations dirs to include paths for pg/redshift
## Issue
### Issue description
The 0.14.0-a1 pypi distribution of dbt does not include specific macro directories in its `package_data` list. As a result, dbt installed via pip from pypi does not ship with materialization overrides for archival. This manifests as a `merge` statement running for snapshot jobs on pg/redshift projects.
- [package_data](https://github.com/fishtown-analytics/dbt/blob/dev/wilt-chamberlain/plugins/postgres/setup.py#L24-L29)
- macros not included:
- [postgres](https://github.com/fishtown-analytics/dbt/tree/dev/wilt-chamberlain/plugins/postgres/dbt/include/postgres/macros/materializations)
- [redshift](https://github.com/fishtown-analytics/dbt/tree/dev/wilt-chamberlain/plugins/redshift/dbt/include/redshift/macros/materializations)
We should:
1. include these macro directories in `setup.py` (is there anywhere else to add them?)
2. figure out if there's a good way to keep these things synchronized, or understand if they've fallen out of sync. This works great in development, but only fails once the release is cut on pypi. Is there a way for us to find this earlier?
### Results
dbt used the default snapshot materialization (using a `merge`) and not the pg/redshift-specific implementation provided in the respective plugins.
### System information
The output of `dbt --version`:
```
0.14.0-a1
```
The operating system you're running on:
The python version you're using (probably the output of `python --version`)
### Steps to reproduce
```
pip install dbt
dbt snapshot # on redshift
```
</issue>
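
As a rough sketch of fix 1, the `package_data` globs could be widened so the nested `macros/materializations` files ship with the distribution. Whether `**` is expanded recursively depends on the setuptools version, so `macros/*/*.sql` is an equivalent spelling for one extra directory level; the snippet below is illustrative, not the project's actual `setup.py`.

```python
from setuptools import find_packages, setup

setup(
    name="dbt-postgres",
    packages=find_packages(),
    package_data={
        "dbt": [
            "include/postgres/dbt_project.yml",
            "include/postgres/macros/*.sql",
            # also pick up one level deeper, e.g. macros/materializations/*.sql
            "include/postgres/macros/**/*.sql",
        ]
    },
)
```

For fix 2, a packaging test that builds the sdist/wheel and asserts the materialization files are present would surface the mismatch before a release is cut on PyPI.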
<code>
[start of plugins/postgres/setup.py]
1 #!/usr/bin/env python
2 from setuptools import find_packages
3 from distutils.core import setup
4 import os
5
6 package_name = "dbt-postgres"
7 package_version = "0.14.0a1"
8 description = """The postgres adpter plugin for dbt (data build tool)"""
9
10 this_directory = os.path.abspath(os.path.dirname(__file__))
11 with open(os.path.join(this_directory, 'README.md')) as f:
12 long_description = f.read()
13
14 setup(
15 name=package_name,
16 version=package_version,
17 description=description,
18 long_description=description,
19 long_description_content_type='text/markdown',
20 author="Fishtown Analytics",
21 author_email="[email protected]",
22 url="https://github.com/fishtown-analytics/dbt",
23 packages=find_packages(),
24 package_data={
25 'dbt': [
26 'include/postgres/dbt_project.yml',
27 'include/postgres/macros/*.sql',
28 ]
29 },
30 install_requires=[
31 'dbt-core=={}'.format(package_version),
32 'psycopg2>=2.7.5,<2.8',
33 ]
34 )
35
[end of plugins/postgres/setup.py]
[start of plugins/redshift/setup.py]
1 #!/usr/bin/env python
2 from setuptools import find_packages
3 from distutils.core import setup
4 import os
5
6 package_name = "dbt-redshift"
7 package_version = "0.14.0a1"
8 description = """The redshift adapter plugin for dbt (data build tool)"""
9
10 this_directory = os.path.abspath(os.path.dirname(__file__))
11 with open(os.path.join(this_directory, 'README.md')) as f:
12 long_description = f.read()
13
14 setup(
15 name=package_name,
16 version=package_version,
17 description=description,
18 long_description=description,
19 long_description_content_type='text/markdown',
20 author="Fishtown Analytics",
21 author_email="[email protected]",
22 url="https://github.com/fishtown-analytics/dbt",
23 packages=find_packages(),
24 package_data={
25 'dbt': [
26 'include/redshift/dbt_project.yml',
27 'include/redshift/macros/*.sql',
28 ]
29 },
30 install_requires=[
31 'dbt-core=={}'.format(package_version),
32 'dbt-postgres=={}'.format(package_version),
33 'boto3>=1.6.23,<1.10.0',
34 'botocore>=1.9.23,<1.13.0',
35 'psycopg2>=2.7.5,<2.8',
36 ]
37 )
38
[end of plugins/redshift/setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/plugins/postgres/setup.py b/plugins/postgres/setup.py
--- a/plugins/postgres/setup.py
+++ b/plugins/postgres/setup.py
@@ -25,6 +25,7 @@
'dbt': [
'include/postgres/dbt_project.yml',
'include/postgres/macros/*.sql',
+ 'include/postgres/macros/**/*.sql',
]
},
install_requires=[
diff --git a/plugins/redshift/setup.py b/plugins/redshift/setup.py
--- a/plugins/redshift/setup.py
+++ b/plugins/redshift/setup.py
@@ -25,6 +25,7 @@
'dbt': [
'include/redshift/dbt_project.yml',
'include/redshift/macros/*.sql',
+ 'include/redshift/macros/**/*.sql',
]
},
install_requires=[
| {"golden_diff": "diff --git a/plugins/postgres/setup.py b/plugins/postgres/setup.py\n--- a/plugins/postgres/setup.py\n+++ b/plugins/postgres/setup.py\n@@ -25,6 +25,7 @@\n 'dbt': [\n 'include/postgres/dbt_project.yml',\n 'include/postgres/macros/*.sql',\n+ 'include/postgres/macros/**/*.sql',\n ]\n },\n install_requires=[\ndiff --git a/plugins/redshift/setup.py b/plugins/redshift/setup.py\n--- a/plugins/redshift/setup.py\n+++ b/plugins/redshift/setup.py\n@@ -25,6 +25,7 @@\n 'dbt': [\n 'include/redshift/dbt_project.yml',\n 'include/redshift/macros/*.sql',\n+ 'include/redshift/macros/**/*.sql',\n ]\n },\n install_requires=[\n", "issue": "Add materializations dirs to include paths for pg/redshift\n## Issue\r\n\r\n### Issue description\r\nThe 0.14.0-a1 pypi distribution of dbt does not include specific macro directories in its `package_data` list. As a result, dbt installed via pip from pypi does not ship with materialization overrides for archival. This manifests as a `merge` statement running for snapshot jobs on pg/redshift projects.\r\n\r\n- [package_data](https://github.com/fishtown-analytics/dbt/blob/dev/wilt-chamberlain/plugins/postgres/setup.py#L24-L29)\r\n- unincluded macros:\r\n - [postgres](https://github.com/fishtown-analytics/dbt/tree/dev/wilt-chamberlain/plugins/postgres/dbt/include/postgres/macros/materializations)\r\n - [redshift](https://github.com/fishtown-analytics/dbt/tree/dev/wilt-chamberlain/plugins/redshift/dbt/include/redshift/macros/materializations)\r\n\r\nWe should:\r\n1. include these macro directories in `setup.py` (is there anywhere else to add them?)\r\n2. figure out if there's a good way to keep these things synchronized, or understand if they've fallen out of sync. This works great in development, but only fails once the release is cut on pypi. 
Is there a way for us to find this earlier?\r\n\r\n### Results\r\ndbt used the default snapshot materialization (using a `merge`) and not the pg/redshift-specific implementation provided in the respective plugins.\r\n\r\n### System information\r\nThe output of `dbt --version`:\r\n```\r\n0.14.0-a1\r\n```\r\n\r\nThe operating system you're running on:\r\n\r\nThe python version you're using (probably the output of `python --version`)\r\n\r\n### Steps to reproduce\r\n```\r\npip install dbt\r\ndbt snapshot # on redshift\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\nfrom setuptools import find_packages\nfrom distutils.core import setup\nimport os\n\npackage_name = \"dbt-postgres\"\npackage_version = \"0.14.0a1\"\ndescription = \"\"\"The postgres adpter plugin for dbt (data build tool)\"\"\"\n\nthis_directory = os.path.abspath(os.path.dirname(__file__))\nwith open(os.path.join(this_directory, 'README.md')) as f:\n long_description = f.read()\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=description,\n long_description_content_type='text/markdown',\n author=\"Fishtown Analytics\",\n author_email=\"[email protected]\",\n url=\"https://github.com/fishtown-analytics/dbt\",\n packages=find_packages(),\n package_data={\n 'dbt': [\n 'include/postgres/dbt_project.yml',\n 'include/postgres/macros/*.sql',\n ]\n },\n install_requires=[\n 'dbt-core=={}'.format(package_version),\n 'psycopg2>=2.7.5,<2.8',\n ]\n)\n", "path": "plugins/postgres/setup.py"}, {"content": "#!/usr/bin/env python\nfrom setuptools import find_packages\nfrom distutils.core import setup\nimport os\n\npackage_name = \"dbt-redshift\"\npackage_version = \"0.14.0a1\"\ndescription = \"\"\"The redshift adapter plugin for dbt (data build tool)\"\"\"\n\nthis_directory = os.path.abspath(os.path.dirname(__file__))\nwith open(os.path.join(this_directory, 'README.md')) as f:\n long_description = f.read()\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=description,\n long_description_content_type='text/markdown',\n author=\"Fishtown Analytics\",\n author_email=\"[email protected]\",\n url=\"https://github.com/fishtown-analytics/dbt\",\n packages=find_packages(),\n package_data={\n 'dbt': [\n 'include/redshift/dbt_project.yml',\n 'include/redshift/macros/*.sql',\n ]\n },\n install_requires=[\n 'dbt-core=={}'.format(package_version),\n 'dbt-postgres=={}'.format(package_version),\n 'boto3>=1.6.23,<1.10.0',\n 'botocore>=1.9.23,<1.13.0',\n 'psycopg2>=2.7.5,<2.8',\n ]\n)\n", "path": "plugins/redshift/setup.py"}]} | 1,609 | 179 |
gh_patches_debug_3004 | rasdani/github-patches | git_diff | qtile__qtile-1624 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
widget.WindowTabs default selected task indicator produces invalid pango markup
# Issue description
The default _selected task indicator_ (``("<", ">")``) for ``widget.WindowTabs`` produces invalid pango markup and thus the call to ``pango_parse_markup`` fails.
It leads to invalid tag names for single word window names (e.g. ``<terminal>``) or invalid syntax for multiword names (e.g. ``<qtile - Mozilla Firefox>``).
Possible fixes:
- change default to e.g. ``('[', ']')`` or different foreground color
- default to no markup
- at least add a note in the documentation, but defaults should be working
If this is wanted, I'm happy to prepare a PR based on the outcome of the discussion here.
# Qtile version
Qtile version ``0.15.1``. Also [latest revision of libqtile/widget/windowtabs.py](https://github.com/qtile/qtile/blob/d47347ad0f37b4a5735faa8b7061f484e8cf81d9/libqtile/widget/windowtabs.py) (d47347a)
# Configuration
Use default ``widget.WindowTabs()``
</issue>
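
A standalone sketch (not qtile code) of why the default breaks and of one mitigation: escape the window title before wrapping it, so the result stays valid markup whichever indicator pair is configured. `xml.sax.saxutils.escape` is used here only to keep the example dependency-free; inside qtile a Pango/GLib markup escape helper would be the natural choice.

```python
from xml.sax.saxutils import escape


def selected_label(title: str, selected=("[", "]")) -> str:
    # Escaping the title keeps the result parseable even when the indicator
    # itself is markup, e.g. ("<b>", "</b>") for a bold highlight.
    return escape(title).join(selected)


print(selected_label("qtile - Mozilla Firefox"))           # [qtile - Mozilla Firefox]
print(selected_label("a < b", selected=("<b>", "</b>")))   # <b>a &lt; b</b>
```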
<code>
[start of libqtile/widget/windowtabs.py]
1 # Copyright (c) 2012-2013 Craig Barnes
2 # Copyright (c) 2012 roger
3 # Copyright (c) 2012, 2014 Tycho Andersen
4 # Copyright (c) 2014 Sean Vig
5 # Copyright (c) 2014 Adi Sieker
6 #
7 # Permission is hereby granted, free of charge, to any person obtaining a copy
8 # of this software and associated documentation files (the "Software"), to deal
9 # in the Software without restriction, including without limitation the rights
10 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 # copies of the Software, and to permit persons to whom the Software is
12 # furnished to do so, subject to the following conditions:
13 #
14 # The above copyright notice and this permission notice shall be included in
15 # all copies or substantial portions of the Software.
16 #
17 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
23 # SOFTWARE.
24
25 from .. import hook, bar
26 from . import base
27
28
29 class WindowTabs(base._TextBox):
30 """
31 Displays the name of each window in the current group.
32 Contrary to TaskList this is not an interactive widget.
33 The window that currently has focus is highlighted.
34 """
35 orientations = base.ORIENTATION_HORIZONTAL
36 defaults = [
37 ("separator", " | ", "Task separator text."),
38 ("selected", ("<", ">"), "Selected task indicator"),
39 ]
40
41 def __init__(self, **config):
42 base._TextBox.__init__(self, width=bar.STRETCH, **config)
43 self.add_defaults(WindowTabs.defaults)
44 if not isinstance(self.selected, (tuple, list)):
45 self.selected = (self.selected, self.selected)
46
47 def _configure(self, qtile, bar):
48 base._TextBox._configure(self, qtile, bar)
49 hook.subscribe.client_name_updated(self.update)
50 hook.subscribe.focus_change(self.update)
51 hook.subscribe.float_change(self.update)
52
53 def button_press(self, x, y, button):
54 self.bar.screen.group.cmd_next_window()
55
56 def update(self, *args):
57 names = []
58 for w in self.bar.screen.group.windows:
59 state = ''
60 if w is None:
61 pass
62 elif w.maximized:
63 state = '[] '
64 elif w.minimized:
65 state = '_ '
66 elif w.floating:
67 state = 'V '
68 task = "%s%s" % (state, w.name if w and w.name else " ")
69 if w is self.bar.screen.group.current_window:
70 task = task.join(self.selected)
71 names.append(task)
72 self.text = self.separator.join(names)
73 self.bar.draw()
74
[end of libqtile/widget/windowtabs.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/libqtile/widget/windowtabs.py b/libqtile/widget/windowtabs.py
--- a/libqtile/widget/windowtabs.py
+++ b/libqtile/widget/windowtabs.py
@@ -35,7 +35,7 @@
orientations = base.ORIENTATION_HORIZONTAL
defaults = [
("separator", " | ", "Task separator text."),
- ("selected", ("<", ">"), "Selected task indicator"),
+ ("selected", ("<b>", "</b>"), "Selected task indicator"),
]
def __init__(self, **config):
| {"golden_diff": "diff --git a/libqtile/widget/windowtabs.py b/libqtile/widget/windowtabs.py\n--- a/libqtile/widget/windowtabs.py\n+++ b/libqtile/widget/windowtabs.py\n@@ -35,7 +35,7 @@\n orientations = base.ORIENTATION_HORIZONTAL\n defaults = [\n (\"separator\", \" | \", \"Task separator text.\"),\n- (\"selected\", (\"<\", \">\"), \"Selected task indicator\"),\n+ (\"selected\", (\"<b>\", \"</b>\"), \"Selected task indicator\"),\n ]\n \n def __init__(self, **config):\n", "issue": "widget.WindowTabs default selected task indicator produces invalid pango markup\n# Issue description\r\n\r\nThe default _selected task indicator_ (``(\"<\", \">\")``) for ``widget.WindowTabs`` produces invalid pango markup and thus the call to ``pango_parse_markup`` fails.\r\n\r\nIt leads to invalid tag names for single word window names (e.g. ``<terminal>``) or invalid syntax for multiword names (e.g. ``<qtile - Mozilla Firefox>``).\r\n\r\nPossible fixes:\r\n\r\n- change default to e.g. ``('[', ']')`` or different foreground color\r\n- default to no markup\r\n- at least add a note in the documentation, but defaults should be working\r\n\r\nIf this is wanted, I'm happy to prepare a PR based on the outcome of the discussion here.\r\n\r\n# Qtile version\r\n\r\nQtile version ``0.15.1``. Also [latest revision of libqtile/widget/windowtabs.py](https://github.com/qtile/qtile/blob/d47347ad0f37b4a5735faa8b7061f484e8cf81d9/libqtile/widget/windowtabs.py) (d47347a)\r\n\r\n# Configuration\r\n\r\nUse default ``widget.WindowTabs()``\r\n\n", "before_files": [{"content": "# Copyright (c) 2012-2013 Craig Barnes\n# Copyright (c) 2012 roger\n# Copyright (c) 2012, 2014 Tycho Andersen\n# Copyright (c) 2014 Sean Vig\n# Copyright (c) 2014 Adi Sieker\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nfrom .. import hook, bar\nfrom . 
import base\n\n\nclass WindowTabs(base._TextBox):\n \"\"\"\n Displays the name of each window in the current group.\n Contrary to TaskList this is not an interactive widget.\n The window that currently has focus is highlighted.\n \"\"\"\n orientations = base.ORIENTATION_HORIZONTAL\n defaults = [\n (\"separator\", \" | \", \"Task separator text.\"),\n (\"selected\", (\"<\", \">\"), \"Selected task indicator\"),\n ]\n\n def __init__(self, **config):\n base._TextBox.__init__(self, width=bar.STRETCH, **config)\n self.add_defaults(WindowTabs.defaults)\n if not isinstance(self.selected, (tuple, list)):\n self.selected = (self.selected, self.selected)\n\n def _configure(self, qtile, bar):\n base._TextBox._configure(self, qtile, bar)\n hook.subscribe.client_name_updated(self.update)\n hook.subscribe.focus_change(self.update)\n hook.subscribe.float_change(self.update)\n\n def button_press(self, x, y, button):\n self.bar.screen.group.cmd_next_window()\n\n def update(self, *args):\n names = []\n for w in self.bar.screen.group.windows:\n state = ''\n if w is None:\n pass\n elif w.maximized:\n state = '[] '\n elif w.minimized:\n state = '_ '\n elif w.floating:\n state = 'V '\n task = \"%s%s\" % (state, w.name if w and w.name else \" \")\n if w is self.bar.screen.group.current_window:\n task = task.join(self.selected)\n names.append(task)\n self.text = self.separator.join(names)\n self.bar.draw()\n", "path": "libqtile/widget/windowtabs.py"}]} | 1,610 | 121 |
gh_patches_debug_23999 | rasdani/github-patches | git_diff | mabel-dev__opteryx-1488 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
🪲 joins aren't pushing predicates when predicates are on each relation in the join
**Sample Code/Statement** _If you can, please submit the SQL statement or Python code snippet, or a representative example using the sample datasets._
~~~sql
SELECT *
FROM $planets AS p
INNER JOIN $satellites AS s
ON p.id = s.planet_id
WHERE p.name = 'Jupiter'
AND s.radius = 1.0
~~~
Returns the wrong result
</issue>
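
An illustrative sketch of the idea behind the fix: once the conjunction is split, each predicate should be tagged only with the relations it actually references, which is what lets the optimizer push `p.name = 'Jupiter'` and `s.radius = 1.0` below the join. The real planner works on parsed expression nodes; the string matching below is only for demonstration.

```python
import re

# The predicates produced by splitting the WHERE clause above.
predicates = ["p.name = 'Jupiter'", "s.radius = 1.0"]


def referenced_relations(predicate: str) -> set:
    # Naive "alias.column" matcher; numeric literals like 1.0 deliberately do not match.
    return {m.group(1) for m in re.finditer(r"\b([A-Za-z_]\w*)\.[A-Za-z_]\w*", predicate)}


for predicate in predicates:
    print(predicate, "->", referenced_relations(predicate))
# p.name = 'Jupiter' -> {'p'}
# s.radius = 1.0 -> {'s'}
```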
<code>
[start of opteryx/components/cost_based_optimizer/strategies/split_conjunctive_predicates.py]
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 # See the License for the specific language governing permissions and
11 # limitations under the License.
12
13 from orso.tools import random_string
14
15 from opteryx.components.logical_planner import LogicalPlan
16 from opteryx.components.logical_planner import LogicalPlanNode
17 from opteryx.components.logical_planner import LogicalPlanStepType
18 from opteryx.managers.expression import NodeType
19 from opteryx.managers.expression import get_all_nodes_of_type
20
21 from .optimization_strategy import OptimizationStrategy
22 from .optimization_strategy import OptimizerContext
23
24
25 def _inner_split(node):
26 while node.node_type == NodeType.NESTED:
27 node = node.centre
28
29 if node.node_type != NodeType.AND:
30 return [node]
31
32 # get the left and right filters
33 left_nodes = _inner_split(node.left)
34 right_nodes = _inner_split(node.right)
35
36 return left_nodes + right_nodes
37
38
39 class SplitConjunctivePredicatesStrategy(OptimizationStrategy):
40 def visit(self, node: LogicalPlanNode, context: OptimizerContext) -> OptimizerContext:
41 """
42 Conjunctive Predicates (ANDs) can be split and executed in any order to get the
43 same result. This means we can split them into separate steps in the plan.
44
45 The reason for splitting is two-fold:
46
47 1) Smaller expressions are easier to move around the query plan as they have fewer
48 dependencies.
49 2) Executing predicates like this means each runs in turn, filtering out some of
50 the records meaning susequent predicates will be operating on fewer records,
51 which is generally faster. We can also order these predicates to get a faster
52 result, balancing the selectivity (get rid of more records faster) vs cost of
53 the check (a numeric check is faster than a string check)
54 """
55 if node.node_type == LogicalPlanStepType.Filter:
56 split_predicates = _inner_split(node.condition)
57 new_nodes = []
58 for predicate in split_predicates:
59 new_node = LogicalPlanNode(
60 node_type=LogicalPlanStepType.Filter, condition=predicate
61 )
62 new_node.columns = get_all_nodes_of_type(
63 node.condition, select_nodes=(NodeType.IDENTIFIER,)
64 )
65 new_node.relations = node.relations
66 new_nodes.append(new_node)
67 else:
68 new_nodes = [node]
69
70 for i, new_node in enumerate(new_nodes):
71 nid = random_string() if (i + 1) < len(new_nodes) else context.node_id
72 context.optimized_plan.add_node(nid, LogicalPlanNode(**new_node.properties))
73 if context.parent_nid:
74 context.optimized_plan.add_edge(nid, context.parent_nid)
75 context.parent_nid = nid
76
77 return context
78
79 def complete(self, plan: LogicalPlan, context: OptimizerContext) -> LogicalPlan:
80 # No finalization needed for this strategy
81 return plan
82
[end of opteryx/components/cost_based_optimizer/strategies/split_conjunctive_predicates.py]
[start of opteryx/__version__.py]
1 __build__ = 329
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """
16 Store the version here so:
17 1) we don't load dependencies by storing it in __init__.py
18 2) we can import it in setup.py for the same reason
19 """
20 from enum import Enum # isort: skip
21
22
23 class VersionStatus(Enum):
24 ALPHA = "alpha"
25 BETA = "beta"
26 RELEASE = "release"
27
28
29 _major = 0
30 _minor = 13
31 _revision = 4
32 _status = VersionStatus.ALPHA
33
34 __author__ = "@joocer"
35 __version__ = f"{_major}.{_minor}.{_revision}" + (
36 f"-{_status.value}.{__build__}" if _status != VersionStatus.RELEASE else ""
37 )
38
[end of opteryx/__version__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/opteryx/__version__.py b/opteryx/__version__.py
--- a/opteryx/__version__.py
+++ b/opteryx/__version__.py
@@ -1,4 +1,4 @@
-__build__ = 329
+__build__ = 330
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/opteryx/components/cost_based_optimizer/strategies/split_conjunctive_predicates.py b/opteryx/components/cost_based_optimizer/strategies/split_conjunctive_predicates.py
--- a/opteryx/components/cost_based_optimizer/strategies/split_conjunctive_predicates.py
+++ b/opteryx/components/cost_based_optimizer/strategies/split_conjunctive_predicates.py
@@ -56,13 +56,12 @@
split_predicates = _inner_split(node.condition)
new_nodes = []
for predicate in split_predicates:
- new_node = LogicalPlanNode(
- node_type=LogicalPlanStepType.Filter, condition=predicate
- )
+ new_node = LogicalPlanNode(node_type=LogicalPlanStepType.Filter)
+ new_node.condition = predicate
new_node.columns = get_all_nodes_of_type(
- node.condition, select_nodes=(NodeType.IDENTIFIER,)
+ predicate, select_nodes=(NodeType.IDENTIFIER,)
)
- new_node.relations = node.relations
+ new_node.relations = {c.source for c in new_node.columns}
new_nodes.append(new_node)
else:
new_nodes = [node]
| {"golden_diff": "diff --git a/opteryx/__version__.py b/opteryx/__version__.py\n--- a/opteryx/__version__.py\n+++ b/opteryx/__version__.py\n@@ -1,4 +1,4 @@\n-__build__ = 329\n+__build__ = 330\n \n # Licensed under the Apache License, Version 2.0 (the \"License\");\n # you may not use this file except in compliance with the License.\ndiff --git a/opteryx/components/cost_based_optimizer/strategies/split_conjunctive_predicates.py b/opteryx/components/cost_based_optimizer/strategies/split_conjunctive_predicates.py\n--- a/opteryx/components/cost_based_optimizer/strategies/split_conjunctive_predicates.py\n+++ b/opteryx/components/cost_based_optimizer/strategies/split_conjunctive_predicates.py\n@@ -56,13 +56,12 @@\n split_predicates = _inner_split(node.condition)\n new_nodes = []\n for predicate in split_predicates:\n- new_node = LogicalPlanNode(\n- node_type=LogicalPlanStepType.Filter, condition=predicate\n- )\n+ new_node = LogicalPlanNode(node_type=LogicalPlanStepType.Filter)\n+ new_node.condition = predicate\n new_node.columns = get_all_nodes_of_type(\n- node.condition, select_nodes=(NodeType.IDENTIFIER,)\n+ predicate, select_nodes=(NodeType.IDENTIFIER,)\n )\n- new_node.relations = node.relations\n+ new_node.relations = {c.source for c in new_node.columns}\n new_nodes.append(new_node)\n else:\n new_nodes = [node]\n", "issue": "\ud83e\udeb2 joins aren't pushing predicates when predicates are on each relation in the join\n**Sample Code/Statement** _If you can, please submit the SQL statement or Python code snippet, or a representative example using the sample datasets._\r\n\r\n~~~sql\r\nSELECT * \r\n FROM $planets AS p \r\n INNER JOIN $satellites AS s \r\n ON p.id = s.planet_id \r\n WHERE p.name = 'Jupiter' \r\n AND s.radius = 1.0\r\n~~~\r\n\r\nReturns the wrong result\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom orso.tools import random_string\n\nfrom opteryx.components.logical_planner import LogicalPlan\nfrom opteryx.components.logical_planner import LogicalPlanNode\nfrom opteryx.components.logical_planner import LogicalPlanStepType\nfrom opteryx.managers.expression import NodeType\nfrom opteryx.managers.expression import get_all_nodes_of_type\n\nfrom .optimization_strategy import OptimizationStrategy\nfrom .optimization_strategy import OptimizerContext\n\n\ndef _inner_split(node):\n while node.node_type == NodeType.NESTED:\n node = node.centre\n\n if node.node_type != NodeType.AND:\n return [node]\n\n # get the left and right filters\n left_nodes = _inner_split(node.left)\n right_nodes = _inner_split(node.right)\n\n return left_nodes + right_nodes\n\n\nclass SplitConjunctivePredicatesStrategy(OptimizationStrategy):\n def visit(self, node: LogicalPlanNode, context: OptimizerContext) -> OptimizerContext:\n \"\"\"\n Conjunctive Predicates (ANDs) can be split and executed in any order to get the\n same result. 
This means we can split them into separate steps in the plan.\n\n The reason for splitting is two-fold:\n\n 1) Smaller expressions are easier to move around the query plan as they have fewer\n dependencies.\n 2) Executing predicates like this means each runs in turn, filtering out some of\n the records meaning susequent predicates will be operating on fewer records,\n which is generally faster. We can also order these predicates to get a faster\n result, balancing the selectivity (get rid of more records faster) vs cost of\n the check (a numeric check is faster than a string check)\n \"\"\"\n if node.node_type == LogicalPlanStepType.Filter:\n split_predicates = _inner_split(node.condition)\n new_nodes = []\n for predicate in split_predicates:\n new_node = LogicalPlanNode(\n node_type=LogicalPlanStepType.Filter, condition=predicate\n )\n new_node.columns = get_all_nodes_of_type(\n node.condition, select_nodes=(NodeType.IDENTIFIER,)\n )\n new_node.relations = node.relations\n new_nodes.append(new_node)\n else:\n new_nodes = [node]\n\n for i, new_node in enumerate(new_nodes):\n nid = random_string() if (i + 1) < len(new_nodes) else context.node_id\n context.optimized_plan.add_node(nid, LogicalPlanNode(**new_node.properties))\n if context.parent_nid:\n context.optimized_plan.add_edge(nid, context.parent_nid)\n context.parent_nid = nid\n\n return context\n\n def complete(self, plan: LogicalPlan, context: OptimizerContext) -> LogicalPlan:\n # No finalization needed for this strategy\n return plan\n", "path": "opteryx/components/cost_based_optimizer/strategies/split_conjunctive_predicates.py"}, {"content": "__build__ = 329\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nStore the version here so:\n1) we don't load dependencies by storing it in __init__.py\n2) we can import it in setup.py for the same reason\n\"\"\"\nfrom enum import Enum # isort: skip\n\n\nclass VersionStatus(Enum):\n ALPHA = \"alpha\"\n BETA = \"beta\"\n RELEASE = \"release\"\n\n\n_major = 0\n_minor = 13\n_revision = 4\n_status = VersionStatus.ALPHA\n\n__author__ = \"@joocer\"\n__version__ = f\"{_major}.{_minor}.{_revision}\" + (\n f\"-{_status.value}.{__build__}\" if _status != VersionStatus.RELEASE else \"\"\n)\n", "path": "opteryx/__version__.py"}]} | 1,933 | 367 |
gh_patches_debug_8042 | rasdani/github-patches | git_diff | freqtrade__freqtrade-5434 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Running asyncio.get_event_loop() in some thread other than the main thread fails, because asyncio only generates an event loop for the main thread.
OS: Ubuntu 21.04
Branch: develop
Command: freqtrade trade --config config.json --strategy SampleStrategy
....
Error Message:
Exception in thread Thread-1:
Traceback (most recent call last):
File "/usr/lib/python3.9/threading.py", line 954, in _bootstrap_inner
self.run()
File "/usr/lib/python3.9/threading.py", line 892, in run
self._target(*self._args, **self._kwargs)
File "/fredtrade/develop/freqtrade/freqtrade/rpc/api_server/uvicorn_threaded.py", line 36, in run
loop = asyncio.get_event_loop()
File "/usr/lib/python3.9/asyncio/events.py", line 642, in get_event_loop
raise RuntimeError('There is no current event loop in thread %r.'
RuntimeError: There is no current event loop in thread 'Thread-1'.
Solution:
File - ./freqtrade/rpc/api_server/uvicorn_threaded.py
Line - 36
Change:
```
loop = asyncio.get_event_loop()
```
To:
```
try:
loop = asyncio.get_event_loop()
except RuntimeError as ex:
if "There is no current event loop in thread" in str(ex):
asyncio.set_event_loop(asyncio.new_event_loop())
loop = asyncio.get_event_loop()
```
</issue>
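
A standalone demonstration of the failure mode and of the proposed guard (not freqtrade code): `asyncio.get_event_loop()` only auto-creates a loop for the main thread, so a worker thread has to create and register its own.

```python
import asyncio
import threading


def worker():
    try:
        loop = asyncio.get_event_loop()
    except RuntimeError:
        # No event loop exists for this (non-main) thread yet - build one explicitly.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
    print(loop.run_until_complete(asyncio.sleep(0, result="ran in a worker thread")))


t = threading.Thread(target=worker)
t.start()
t.join()
```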
<code>
[start of freqtrade/rpc/api_server/uvicorn_threaded.py]
1 import contextlib
2 import threading
3 import time
4
5 import uvicorn
6
7
8 class UvicornServer(uvicorn.Server):
9 """
10 Multithreaded server - as found in https://github.com/encode/uvicorn/issues/742
11
12 Removed install_signal_handlers() override based on changes from this commit:
13 https://github.com/encode/uvicorn/commit/ce2ef45a9109df8eae038c0ec323eb63d644cbc6
14
15 Cannot rely on asyncio.get_event_loop() to create new event loop because of this check:
16 https://github.com/python/cpython/blob/4d7f11e05731f67fd2c07ec2972c6cb9861d52be/Lib/asyncio/events.py#L638
17
18 Fix by overriding run() and forcing creation of new event loop if uvloop is available
19 """
20
21 def run(self, sockets=None):
22 import asyncio
23
24 """
25 Parent implementation calls self.config.setup_event_loop(),
26 but we need to create uvloop event loop manually
27 """
28 try:
29 import uvloop # noqa
30 except ImportError: # pragma: no cover
31 from uvicorn.loops.asyncio import asyncio_setup
32 asyncio_setup()
33 else:
34 asyncio.set_event_loop(uvloop.new_event_loop())
35
36 loop = asyncio.get_event_loop()
37 loop.run_until_complete(self.serve(sockets=sockets))
38
39 @contextlib.contextmanager
40 def run_in_thread(self):
41 self.thread = threading.Thread(target=self.run)
42 self.thread.start()
43 while not self.started:
44 time.sleep(1e-3)
45
46 def cleanup(self):
47 self.should_exit = True
48 self.thread.join()
49
[end of freqtrade/rpc/api_server/uvicorn_threaded.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/freqtrade/rpc/api_server/uvicorn_threaded.py b/freqtrade/rpc/api_server/uvicorn_threaded.py
--- a/freqtrade/rpc/api_server/uvicorn_threaded.py
+++ b/freqtrade/rpc/api_server/uvicorn_threaded.py
@@ -32,8 +32,11 @@
asyncio_setup()
else:
asyncio.set_event_loop(uvloop.new_event_loop())
-
- loop = asyncio.get_event_loop()
+ try:
+ loop = asyncio.get_event_loop()
+ except RuntimeError:
+ # When running in a thread, we'll not have an eventloop yet.
+ loop = asyncio.new_event_loop()
loop.run_until_complete(self.serve(sockets=sockets))
@contextlib.contextmanager
| {"golden_diff": "diff --git a/freqtrade/rpc/api_server/uvicorn_threaded.py b/freqtrade/rpc/api_server/uvicorn_threaded.py\n--- a/freqtrade/rpc/api_server/uvicorn_threaded.py\n+++ b/freqtrade/rpc/api_server/uvicorn_threaded.py\n@@ -32,8 +32,11 @@\n asyncio_setup()\n else:\n asyncio.set_event_loop(uvloop.new_event_loop())\n-\n- loop = asyncio.get_event_loop()\n+ try:\n+ loop = asyncio.get_event_loop()\n+ except RuntimeError:\n+ # When running in a thread, we'll not have an eventloop yet.\n+ loop = asyncio.new_event_loop()\n loop.run_until_complete(self.serve(sockets=sockets))\n \n @contextlib.contextmanager\n", "issue": "Running asyncio.get_event_loop() in some thread other than the main thread \u2013 however, asyncio only generates an event loop for the main thread.\nOS: Ubuntu 21.04\r\nBranch: develop\r\nCommand: freqtrade trade --config config.json --strategy SampleStrategy\r\n....\r\nError Message: \r\nException in thread Thread-1:\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python3.9/threading.py\", line 954, in _bootstrap_inner\r\n self.run()\r\n File \"/usr/lib/python3.9/threading.py\", line 892, in run\r\n self._target(*self._args, **self._kwargs)\r\n File \"/fredtrade/develop/freqtrade/freqtrade/rpc/api_server/uvicorn_threaded.py\", line 36, in run\r\n loop = asyncio.get_event_loop()\r\n File \"/usr/lib/python3.9/asyncio/events.py\", line 642, in get_event_loop\r\n raise RuntimeError('There is no current event loop in thread %r.'\r\nRuntimeError: There is no current event loop in thread 'Thread-1'.\r\n\r\nSolution: \r\nFile - ./freqtrade/rpc/api_server/uvicorn_threaded.py\r\nLine - 36\r\n\r\nChange:\r\n```\r\n loop = asyncio.get_event_loop()\r\n```\r\nTo:\r\n```\r\n try:\r\n loop = asyncio.get_event_loop()\r\n except RuntimeError as ex:\r\n if \"There is no current event loop in thread\" in str(ex):\r\n asyncio.set_event_loop(asyncio.new_event_loop())\r\n loop = asyncio.get_event_loop() \r\n``` \n", "before_files": [{"content": "import contextlib\nimport threading\nimport time\n\nimport uvicorn\n\n\nclass UvicornServer(uvicorn.Server):\n \"\"\"\n Multithreaded server - as found in https://github.com/encode/uvicorn/issues/742\n\n Removed install_signal_handlers() override based on changes from this commit:\n https://github.com/encode/uvicorn/commit/ce2ef45a9109df8eae038c0ec323eb63d644cbc6\n\n Cannot rely on asyncio.get_event_loop() to create new event loop because of this check:\n https://github.com/python/cpython/blob/4d7f11e05731f67fd2c07ec2972c6cb9861d52be/Lib/asyncio/events.py#L638\n\n Fix by overriding run() and forcing creation of new event loop if uvloop is available\n \"\"\"\n\n def run(self, sockets=None):\n import asyncio\n\n \"\"\"\n Parent implementation calls self.config.setup_event_loop(),\n but we need to create uvloop event loop manually\n \"\"\"\n try:\n import uvloop # noqa\n except ImportError: # pragma: no cover\n from uvicorn.loops.asyncio import asyncio_setup\n asyncio_setup()\n else:\n asyncio.set_event_loop(uvloop.new_event_loop())\n\n loop = asyncio.get_event_loop()\n loop.run_until_complete(self.serve(sockets=sockets))\n\n @contextlib.contextmanager\n def run_in_thread(self):\n self.thread = threading.Thread(target=self.run)\n self.thread.start()\n while not self.started:\n time.sleep(1e-3)\n\n def cleanup(self):\n self.should_exit = True\n self.thread.join()\n", "path": "freqtrade/rpc/api_server/uvicorn_threaded.py"}]} | 1,371 | 173 |
gh_patches_debug_27184 | rasdani/github-patches | git_diff | electricitymaps__electricitymaps-contrib-5693 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Wrong data in Spain<->Morocco connection
## Description
Just as I suspected, the data for the Spain<->Morocco connection is wrong, as can be seen on the [ESIOS webpage](https://www.esios.ree.es/en/analysis/10209) (ignore the lack of data during last night, that's not linked to this).
It is related to the fact that the parser returns MWh for fifteen-minute intervals, while the backend expects MW, so it averages instead of adding.
Apart from changing something in the backend, the easiest solution that I found is forcing the API to return hour intervals. We would have to add a small delay also, to prevent returning incomplete data. (e.g. server only adds 11:00 and 11:15 if the provided date is 11:24)
We could alternatively multiply the 15 minutes interval data by 4 to get the MW. This would increase the granularity but would imply adding specific cases for this exchange, the parser would break if the granularity ever changes, etc. Whereas by forcing the granularity to 1 hour we make sure the granularity is always correct.
What is your opinion?
</issue>
<code>
[start of parsers/ESIOS.py]
1 #!/usr/bin/env python3
2
3 from datetime import datetime, timedelta
4 from logging import Logger, getLogger
5 from typing import Optional
6 from urllib.parse import urlencode
7
8 # The arrow library is used to handle datetimes
9 import arrow
10 import pytz
11 from requests import Response, Session
12
13 from electricitymap.contrib.lib.models.event_lists import ExchangeList
14 from electricitymap.contrib.lib.types import ZoneKey
15
16 from .lib.exceptions import ParserException
17 from .lib.utils import get_token
18
19 TIMEZONE = pytz.timezone("Europe/Madrid")
20
21 # Map each exchange to the ID used in the API
22 EXCHANGE_ID_MAP = {
23 "AD->ES": "10278", # Switch to 10210 when it has data
24 "ES->MA": "10209",
25 }
26
27
28 def format_url(target_datetime: datetime, ID: str):
29 start_date = (target_datetime - timedelta(hours=24)).isoformat()
30 end_date = target_datetime.isoformat()
31 dates = {"start_date": start_date, "end_date": end_date}
32 query = urlencode(dates)
33 return f"https://api.esios.ree.es/indicators/{ID}?{query}"
34
35
36 def fetch_exchange(
37 zone_key1: ZoneKey,
38 zone_key2: ZoneKey,
39 session: Optional[Session] = None,
40 target_datetime: Optional[datetime] = None,
41 logger: Logger = getLogger(__name__),
42 ) -> list:
43
44 # Get ESIOS token
45 token = get_token("ESIOS_TOKEN")
46
47 ses = session or Session()
48 if target_datetime is None:
49 target_datetime = datetime.now(tz=TIMEZONE)
50 # Request headers
51 headers = {
52 "Content-Type": "application/json",
53 "Accept": "application/json; application/vnd.esios-api-v2+json",
54 "x-api-key": token,
55 }
56
57 zone_key = ZoneKey("->".join(sorted([zone_key1, zone_key2])))
58 if zone_key not in EXCHANGE_ID_MAP.keys():
59 raise ParserException(
60 "ESIOS.py",
61 f"This parser cannot parse data between {zone_key1} and {zone_key2}.",
62 )
63 url = format_url(target_datetime, EXCHANGE_ID_MAP[zone_key])
64
65 response: Response = ses.get(url, headers=headers)
66 if response.status_code != 200 or not response.text:
67 raise ParserException(
68 "ESIOS", "Response code: {0}".format(response.status_code)
69 )
70
71 json = response.json()
72 values = json["indicator"]["values"]
73 if not values:
74 raise ParserException("ESIOS", "No values received")
75 exchanges = ExchangeList(logger)
76
77 for value in values:
78 # Get last value in datasource
79 # Datasource negative value is exporting, positive value is importing
80 # If Spain is the first zone invert the values to match Electricity Maps schema
81 net_flow = (
82 -value["value"] if zone_key.partition("->")[0] == "ES" else value["value"]
83 )
84
85 exchanges.append(
86 zoneKey=zone_key,
87 datetime=arrow.get(value["datetime_utc"]).datetime,
88 netFlow=net_flow,
89 source="api.esios.ree.es",
90 )
91
92 return exchanges.to_list()
93
94
95 if __name__ == "__main__":
96 session = Session()
97 print(fetch_exchange(ZoneKey("ES"), ZoneKey("MA"), session))
98 print("fetch_exchange(ES, MA)")
99 print(fetch_exchange(ZoneKey("AD"), ZoneKey("ES"), session))
100 print("fetch_exchange(AD, ES)")
101
[end of parsers/ESIOS.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/parsers/ESIOS.py b/parsers/ESIOS.py
--- a/parsers/ESIOS.py
+++ b/parsers/ESIOS.py
@@ -24,6 +24,13 @@
"ES->MA": "10209",
}
+# Map each exchange to the needed factor to adjust from MWh to MW. Depends on the time granularity of the API for each request
+# E.g ES->MA is 4 because the API returns 15 minutes intervals data (15 min = 1/4 of an hour; P=E/t).
+EXCHANGE_MULTIPLICATION_FACTOR_MAP = {
+ "AD->ES": 1,
+ "ES->MA": 4,
+}
+
def format_url(target_datetime: datetime, ID: str):
start_date = (target_datetime - timedelta(hours=24)).isoformat()
@@ -55,7 +62,10 @@
}
zone_key = ZoneKey("->".join(sorted([zone_key1, zone_key2])))
- if zone_key not in EXCHANGE_ID_MAP.keys():
+ if (
+ zone_key not in EXCHANGE_ID_MAP
+ or zone_key not in EXCHANGE_MULTIPLICATION_FACTOR_MAP
+ ):
raise ParserException(
"ESIOS.py",
f"This parser cannot parse data between {zone_key1} and {zone_key2}.",
@@ -82,6 +92,8 @@
-value["value"] if zone_key.partition("->")[0] == "ES" else value["value"]
)
+ net_flow *= EXCHANGE_MULTIPLICATION_FACTOR_MAP[zone_key]
+
exchanges.append(
zoneKey=zone_key,
datetime=arrow.get(value["datetime_utc"]).datetime,
| {"golden_diff": "diff --git a/parsers/ESIOS.py b/parsers/ESIOS.py\n--- a/parsers/ESIOS.py\n+++ b/parsers/ESIOS.py\n@@ -24,6 +24,13 @@\n \"ES->MA\": \"10209\",\n }\n \n+# Map each exchange to the needed factor to adjust from MWh to MW. Depends on the time granularity of the API for each request\n+# E.g ES->MA is 4 because the API returns 15 minutes intervals data (15 min = 1/4 of an hour; P=E/t).\n+EXCHANGE_MULTIPLICATION_FACTOR_MAP = {\n+ \"AD->ES\": 1,\n+ \"ES->MA\": 4,\n+}\n+\n \n def format_url(target_datetime: datetime, ID: str):\n start_date = (target_datetime - timedelta(hours=24)).isoformat()\n@@ -55,7 +62,10 @@\n }\n \n zone_key = ZoneKey(\"->\".join(sorted([zone_key1, zone_key2])))\n- if zone_key not in EXCHANGE_ID_MAP.keys():\n+ if (\n+ zone_key not in EXCHANGE_ID_MAP\n+ or zone_key not in EXCHANGE_MULTIPLICATION_FACTOR_MAP\n+ ):\n raise ParserException(\n \"ESIOS.py\",\n f\"This parser cannot parse data between {zone_key1} and {zone_key2}.\",\n@@ -82,6 +92,8 @@\n -value[\"value\"] if zone_key.partition(\"->\")[0] == \"ES\" else value[\"value\"]\n )\n \n+ net_flow *= EXCHANGE_MULTIPLICATION_FACTOR_MAP[zone_key]\n+\n exchanges.append(\n zoneKey=zone_key,\n datetime=arrow.get(value[\"datetime_utc\"]).datetime,\n", "issue": "Wrong data in Spain<->Morocco connection\n## Description\r\nJust as I suspected, the data for the Spain<->Morocco connection is wrong, as can be seen on the [ESIOS webpage](https://www.esios.ree.es/en/analysis/10209) (ignore the lack of data during last night, that's not linked to this).\r\nIt is related to the fact that the parser returns MWh of fifteen minutes intervals, while the backend expects MW, so averages instead of adding.\r\nApart from changing something in the backend, the easiest solution that I found is forcing the API to return hour intervals. We would have to add a small delay also, to prevent returning incomplete data. (e.g. server only adds 11:00 and 11:15 if the provided date is 11:24)\r\nWe could alternatively multiply the 15 minutes interval data by 4 to get the MW. This would increase the granularity but would imply adding specific cases for this exchange, the parser would break if the granularity ever changes, etc. 
Whereas by forcing the granularity to 1 hour we make sure the granularity is always correct.\r\nWhat is your opinion?\n", "before_files": [{"content": "#!/usr/bin/env python3\n\nfrom datetime import datetime, timedelta\nfrom logging import Logger, getLogger\nfrom typing import Optional\nfrom urllib.parse import urlencode\n\n# The arrow library is used to handle datetimes\nimport arrow\nimport pytz\nfrom requests import Response, Session\n\nfrom electricitymap.contrib.lib.models.event_lists import ExchangeList\nfrom electricitymap.contrib.lib.types import ZoneKey\n\nfrom .lib.exceptions import ParserException\nfrom .lib.utils import get_token\n\nTIMEZONE = pytz.timezone(\"Europe/Madrid\")\n\n# Map each exchange to the ID used in the API\nEXCHANGE_ID_MAP = {\n \"AD->ES\": \"10278\", # Switch to 10210 when it has data\n \"ES->MA\": \"10209\",\n}\n\n\ndef format_url(target_datetime: datetime, ID: str):\n start_date = (target_datetime - timedelta(hours=24)).isoformat()\n end_date = target_datetime.isoformat()\n dates = {\"start_date\": start_date, \"end_date\": end_date}\n query = urlencode(dates)\n return f\"https://api.esios.ree.es/indicators/{ID}?{query}\"\n\n\ndef fetch_exchange(\n zone_key1: ZoneKey,\n zone_key2: ZoneKey,\n session: Optional[Session] = None,\n target_datetime: Optional[datetime] = None,\n logger: Logger = getLogger(__name__),\n) -> list:\n\n # Get ESIOS token\n token = get_token(\"ESIOS_TOKEN\")\n\n ses = session or Session()\n if target_datetime is None:\n target_datetime = datetime.now(tz=TIMEZONE)\n # Request headers\n headers = {\n \"Content-Type\": \"application/json\",\n \"Accept\": \"application/json; application/vnd.esios-api-v2+json\",\n \"x-api-key\": token,\n }\n\n zone_key = ZoneKey(\"->\".join(sorted([zone_key1, zone_key2])))\n if zone_key not in EXCHANGE_ID_MAP.keys():\n raise ParserException(\n \"ESIOS.py\",\n f\"This parser cannot parse data between {zone_key1} and {zone_key2}.\",\n )\n url = format_url(target_datetime, EXCHANGE_ID_MAP[zone_key])\n\n response: Response = ses.get(url, headers=headers)\n if response.status_code != 200 or not response.text:\n raise ParserException(\n \"ESIOS\", \"Response code: {0}\".format(response.status_code)\n )\n\n json = response.json()\n values = json[\"indicator\"][\"values\"]\n if not values:\n raise ParserException(\"ESIOS\", \"No values received\")\n exchanges = ExchangeList(logger)\n\n for value in values:\n # Get last value in datasource\n # Datasource negative value is exporting, positive value is importing\n # If Spain is the first zone invert the values to match Electricity Maps schema\n net_flow = (\n -value[\"value\"] if zone_key.partition(\"->\")[0] == \"ES\" else value[\"value\"]\n )\n\n exchanges.append(\n zoneKey=zone_key,\n datetime=arrow.get(value[\"datetime_utc\"]).datetime,\n netFlow=net_flow,\n source=\"api.esios.ree.es\",\n )\n\n return exchanges.to_list()\n\n\nif __name__ == \"__main__\":\n session = Session()\n print(fetch_exchange(ZoneKey(\"ES\"), ZoneKey(\"MA\"), session))\n print(\"fetch_exchange(ES, MA)\")\n print(fetch_exchange(ZoneKey(\"AD\"), ZoneKey(\"ES\"), session))\n print(\"fetch_exchange(AD, ES)\")\n", "path": "parsers/ESIOS.py"}]} | 1,756 | 381 |
gh_patches_debug_32109 | rasdani/github-patches | git_diff | freedomofpress__securedrop-5806 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Apparmor denial for ntpd on Focal
## Description
`ntpd` throws grsec denial message.
## Steps to Reproduce
- [ ] Install focal on hardware (I hope the same will show up in vm too)
- [ ] check `/var/log/syslog`
## Expected Behavior
- no grsec error from ntpd
## Actual Behavior
```
Feb 17 03:43:33 app systemd[1]: Starting Network Time Service...
Feb 17 03:43:33 app kernel: [ 202.428911] audit: type=1400 audit(1613533413.416:46): apparmor="DENIED" operation="open" profile="/usr/sbin/ntpd" name="/snap/bin/" pid=3303 comm="ntpd" requested_mask="r" denied_mask="r" fsuid=0 ouid=0
Feb 17 03:43:33 app ntpd[3303]: ntpd [email protected] (1): Starting
Feb 17 03:43:33 app ntpd[3303]: Command line: /usr/sbin/ntpd -p /var/run/ntpd.pid -g -u 112:117
Feb 17 03:43:33 app ntpd[3306]: proto: precision = 0.175 usec (-22)
--
```
## Comments
Suggestions to fix, any other relevant information.
</issue>
<code>
[start of molecule/testinfra/conftest.py]
1 """
2 Configuration for TestInfra test suite for SecureDrop.
3 Handles importing host-specific test vars, so test functions
4 can be reused across multiple hosts, with varied targets.
5
6 Vars should be placed in `testinfra/vars/<hostname>.yml`.
7 """
8
9 import io
10 import os
11 import yaml
12 import testutils
13
14 # The config tests target staging by default. It's possible to override
15 # for e.g. prod, but the associated vars files are not yet ported.
16 target_host = os.environ.get('SECUREDROP_TESTINFRA_TARGET_HOST', 'staging')
17
18
19 def securedrop_import_testinfra_vars(hostname, with_header=False):
20 """
21 Import vars from a YAML file to populate tests with host-specific
22 values used in checks. For instance, the SecureDrop docroot will
23 be under /vagrant in development, but /var/www/securedrop in staging.
24
25 Vars must be stored in `testinfra/vars/<hostname>.yml`.
26 """
27 filepath = os.path.join(os.path.dirname(__file__), "vars", hostname+".yml")
28 with io.open(filepath, 'r') as f:
29 hostvars = yaml.safe_load(f)
30
31 # Testing against both Focal and Xenial must be supported for now in both
32 # staging scenarios, and in prod via `USE_FOCAL=1 ./securedrop-admin verify`
33 testing_focal = False
34 scenario_env = "MOLECULE_SCENARIO_NAME"
35 if scenario_env in os.environ and os.environ.get(scenario_env).endswith("focal"):
36 testing_focal = True
37 if "USE_FOCAL" in os.environ:
38 testing_focal = True
39
40 if testing_focal:
41 hostvars['securedrop_venv_site_packages'] = hostvars["securedrop_venv_site_packages"].format("3.8") # noqa: E501
42 hostvars['python_version'] = "3.8"
43 else:
44 hostvars['securedrop_venv_site_packages'] = hostvars["securedrop_venv_site_packages"].format("3.5") # noqa: E501
45 hostvars['python_version'] = "3.5"
46
47 if with_header:
48 hostvars = dict(securedrop_test_vars=hostvars)
49
50 return hostvars
51
52
53 def lookup_molecule_info():
54 """
55 Molecule automatically writes YAML files documenting dynamic host info
56 such as remote IPs. Read that file and pass back the config dict.
57 """
58 molecule_instance_config_path = os.path.abspath(
59 os.environ['MOLECULE_INSTANCE_CONFIG'])
60 with open(molecule_instance_config_path, 'r') as f:
61 molecule_instance_config = yaml.safe_load(f)
62 return molecule_instance_config
63
64
65 class Myvalues:
66 def __init__(self):
67 pass
68
69
70 value = securedrop_import_testinfra_vars(target_host)
71 res = Myvalues()
72 for key, value in value.items():
73 setattr(res, key, value)
74 testutils.securedrop_test_vars = res
75
[end of molecule/testinfra/conftest.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/molecule/testinfra/conftest.py b/molecule/testinfra/conftest.py
--- a/molecule/testinfra/conftest.py
+++ b/molecule/testinfra/conftest.py
@@ -9,8 +9,11 @@
import io
import os
import yaml
+from typing import Any, Dict
+
import testutils
+
# The config tests target staging by default. It's possible to override
# for e.g. prod, but the associated vars files are not yet ported.
target_host = os.environ.get('SECUREDROP_TESTINFRA_TARGET_HOST', 'staging')
@@ -50,25 +53,30 @@
return hostvars
-def lookup_molecule_info():
- """
- Molecule automatically writes YAML files documenting dynamic host info
- such as remote IPs. Read that file and pass back the config dict.
- """
- molecule_instance_config_path = os.path.abspath(
- os.environ['MOLECULE_INSTANCE_CONFIG'])
- with open(molecule_instance_config_path, 'r') as f:
- molecule_instance_config = yaml.safe_load(f)
- return molecule_instance_config
+class TestVars(dict):
+ managed_attrs = {} # type: Dict[str, Any]
+
+ def __init__(self, initial: Dict[str, Any]) -> None:
+ self.securedrop_target_distribution = os.environ.get("SECUREDROP_TARGET_DISTRIBUTION")
+ self.managed_attrs.update(initial)
+ def __getattr__(self, name: str) -> Any:
+ """
+ If the requested attribute names a dict in managed_attrs and that
+ contains a key with the name of the target distribution,
+ e.g. "focal", return that. Otherwise return the entire item
+ under the requested name.
+ """
+ try:
+ attr = self.managed_attrs[name]
+ if isinstance(attr, dict) and self.securedrop_target_distribution in attr:
+ return attr[self.securedrop_target_distribution]
+ return attr
+ except KeyError:
+ raise AttributeError(name)
-class Myvalues:
- def __init__(self):
- pass
+ def __str__(self) -> str:
+ return str(self.managed_attrs)
-value = securedrop_import_testinfra_vars(target_host)
-res = Myvalues()
-for key, value in value.items():
- setattr(res, key, value)
-testutils.securedrop_test_vars = res
+testutils.securedrop_test_vars = TestVars(securedrop_import_testinfra_vars(target_host))
| {"golden_diff": "diff --git a/molecule/testinfra/conftest.py b/molecule/testinfra/conftest.py\n--- a/molecule/testinfra/conftest.py\n+++ b/molecule/testinfra/conftest.py\n@@ -9,8 +9,11 @@\n import io\n import os\n import yaml\n+from typing import Any, Dict\n+\n import testutils\n \n+\n # The config tests target staging by default. It's possible to override\n # for e.g. prod, but the associated vars files are not yet ported.\n target_host = os.environ.get('SECUREDROP_TESTINFRA_TARGET_HOST', 'staging')\n@@ -50,25 +53,30 @@\n return hostvars\n \n \n-def lookup_molecule_info():\n- \"\"\"\n- Molecule automatically writes YAML files documenting dynamic host info\n- such as remote IPs. Read that file and pass back the config dict.\n- \"\"\"\n- molecule_instance_config_path = os.path.abspath(\n- os.environ['MOLECULE_INSTANCE_CONFIG'])\n- with open(molecule_instance_config_path, 'r') as f:\n- molecule_instance_config = yaml.safe_load(f)\n- return molecule_instance_config\n+class TestVars(dict):\n+ managed_attrs = {} # type: Dict[str, Any]\n+\n+ def __init__(self, initial: Dict[str, Any]) -> None:\n+ self.securedrop_target_distribution = os.environ.get(\"SECUREDROP_TARGET_DISTRIBUTION\")\n+ self.managed_attrs.update(initial)\n \n+ def __getattr__(self, name: str) -> Any:\n+ \"\"\"\n+ If the requested attribute names a dict in managed_attrs and that\n+ contains a key with the name of the target distribution,\n+ e.g. \"focal\", return that. Otherwise return the entire item\n+ under the requested name.\n+ \"\"\"\n+ try:\n+ attr = self.managed_attrs[name]\n+ if isinstance(attr, dict) and self.securedrop_target_distribution in attr:\n+ return attr[self.securedrop_target_distribution]\n+ return attr\n+ except KeyError:\n+ raise AttributeError(name)\n \n-class Myvalues:\n- def __init__(self):\n- pass\n+ def __str__(self) -> str:\n+ return str(self.managed_attrs)\n \n \n-value = securedrop_import_testinfra_vars(target_host)\n-res = Myvalues()\n-for key, value in value.items():\n- setattr(res, key, value)\n-testutils.securedrop_test_vars = res\n+testutils.securedrop_test_vars = TestVars(securedrop_import_testinfra_vars(target_host))\n", "issue": "Apparmor denial for ntpd on Focal\n## Description\r\n\r\n`ntpd` throws grsec denial message.\r\n\r\n## Steps to Reproduce\r\n\r\n- [ ] Install focal on hardware (I hope the same will show up in vm too)\r\n- [ ] check `/var/log/syslog`\r\n\r\n## Expected Behavior\r\n\r\n- no grsec error from ntpd\r\n\r\n## Actual Behavior\r\n\r\n```\r\n\r\nFeb 17 03:43:33 app systemd[1]: Starting Network Time Service... Feb 17 03:43:33 app kernel: [ 202.428911] audit: type=1400 audit(1613533413.416:46): apparmor=\"DENIED\" operation=\"open\" profile=\"/usr/sbin/ntpd\" name=\"/snap/bin/\" pid=3303 comm=\"ntpd\" requested_mask=\"r\" denied_mask=\"r\" fsuid=0 ouid=0 Feb 17 03:43:33 app ntpd[3303]: ntpd [email protected] (1): Starting Feb 17 03:43:33 app ntpd[3303]: Command line: /usr/sbin/ntpd -p /var/run/ntpd.pid -g -u 112:117 Feb 17 03:43:33 app ntpd[3306]: proto: precision = 0.175 usec (-22)\r\n--\r\n```\r\n\r\n## Comments\r\n\r\nSuggestions to fix, any other relevant information.\r\n\n", "before_files": [{"content": "\"\"\"\nConfiguration for TestInfra test suite for SecureDrop.\nHandles importing host-specific test vars, so test functions\ncan be reused across multiple hosts, with varied targets.\n\nVars should be placed in `testinfra/vars/<hostname>.yml`.\n\"\"\"\n\nimport io\nimport os\nimport yaml\nimport testutils\n\n# The config tests target staging by default. 
It's possible to override\n# for e.g. prod, but the associated vars files are not yet ported.\ntarget_host = os.environ.get('SECUREDROP_TESTINFRA_TARGET_HOST', 'staging')\n\n\ndef securedrop_import_testinfra_vars(hostname, with_header=False):\n \"\"\"\n Import vars from a YAML file to populate tests with host-specific\n values used in checks. For instance, the SecureDrop docroot will\n be under /vagrant in development, but /var/www/securedrop in staging.\n\n Vars must be stored in `testinfra/vars/<hostname>.yml`.\n \"\"\"\n filepath = os.path.join(os.path.dirname(__file__), \"vars\", hostname+\".yml\")\n with io.open(filepath, 'r') as f:\n hostvars = yaml.safe_load(f)\n\n # Testing against both Focal and Xenial must be supported for now in both\n # staging scenarios, and in prod via `USE_FOCAL=1 ./securedrop-admin verify`\n testing_focal = False\n scenario_env = \"MOLECULE_SCENARIO_NAME\"\n if scenario_env in os.environ and os.environ.get(scenario_env).endswith(\"focal\"):\n testing_focal = True\n if \"USE_FOCAL\" in os.environ:\n testing_focal = True\n\n if testing_focal:\n hostvars['securedrop_venv_site_packages'] = hostvars[\"securedrop_venv_site_packages\"].format(\"3.8\") # noqa: E501\n hostvars['python_version'] = \"3.8\"\n else:\n hostvars['securedrop_venv_site_packages'] = hostvars[\"securedrop_venv_site_packages\"].format(\"3.5\") # noqa: E501\n hostvars['python_version'] = \"3.5\"\n\n if with_header:\n hostvars = dict(securedrop_test_vars=hostvars)\n\n return hostvars\n\n\ndef lookup_molecule_info():\n \"\"\"\n Molecule automatically writes YAML files documenting dynamic host info\n such as remote IPs. Read that file and pass back the config dict.\n \"\"\"\n molecule_instance_config_path = os.path.abspath(\n os.environ['MOLECULE_INSTANCE_CONFIG'])\n with open(molecule_instance_config_path, 'r') as f:\n molecule_instance_config = yaml.safe_load(f)\n return molecule_instance_config\n\n\nclass Myvalues:\n def __init__(self):\n pass\n\n\nvalue = securedrop_import_testinfra_vars(target_host)\nres = Myvalues()\nfor key, value in value.items():\n setattr(res, key, value)\ntestutils.securedrop_test_vars = res\n", "path": "molecule/testinfra/conftest.py"}]} | 1,697 | 553 |
gh_patches_debug_25996 | rasdani/github-patches | git_diff | facebookresearch__Mephisto-779 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Errors when installing new dependencies
## Overview
Adding a new dependency into requirements.txt causes errors to occur in the github actions workflow.
For example,
When I added the `rich` dependency to the requirements.txt file I got this output for the test:
https://github.com/facebookresearch/Mephisto/runs/7237323897?check_suite_focus=true
The issue is not exclusive to `rich` as I also got this error when trying to add the `detoxify` dependency.
</issue>
<code>
[start of setup.py]
1 #!/usr/bin/env python3
2
3 # Copyright (c) Facebook, Inc. and its affiliates.
4 # This source code is licensed under the MIT license found in the
5 # LICENSE file in the root directory of this source tree.
6
7 from setuptools import setup, find_packages
8 import os
9
10 here = os.path.abspath(os.path.dirname(__file__))
11
12 with open("README.md", encoding="utf8") as f:
13 # strip the header and badges etc
14 readme = f.read()
15
16 with open("requirements.txt") as f:
17 reqs = f.readlines()
18 reqs = [r for r in reqs if "--hash" not in r]
19 reqs = [r.split("\\")[0].split(";")[0].strip() for r in reqs]
20
21 with open(os.path.join(here, "mephisto", "VERSION")) as version_file:
22 version = version_file.read().strip()
23
24 setup(
25 name="mephisto",
26 version=version,
27 description="Crowdsourcing made simpler.",
28 author="Jack Urbanek, Pratik Ringshia",
29 author_email="[email protected]",
30 long_description=readme,
31 long_description_content_type="text/markdown",
32 url="https://github.com/facebookresearch/Mephisto",
33 python_requires=">=3.7",
34 packages=find_packages(include=["mephisto.*", "hydra_plugins.*"]),
35 license="MIT",
36 install_requires=reqs,
37 include_package_data=True,
38 package_data={"mephisto": ["*.yaml", "abstractions/**/*"]},
39 zip_safe=False,
40 entry_points={"console_scripts": "mephisto=mephisto.client.cli:cli"},
41 classifiers=[
42 "Programming Language :: Python :: 3",
43 "Programming Language :: Python :: 3.7",
44 "Programming Language :: Python :: 3.8",
45 "License :: OSI Approved :: MIT License",
46 "Topic :: Scientific/Engineering :: Artificial Intelligence",
47 "Natural Language :: English",
48 ],
49 )
50
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
deleted file mode 100644
--- a/setup.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright (c) Facebook, Inc. and its affiliates.
-# This source code is licensed under the MIT license found in the
-# LICENSE file in the root directory of this source tree.
-
-from setuptools import setup, find_packages
-import os
-
-here = os.path.abspath(os.path.dirname(__file__))
-
-with open("README.md", encoding="utf8") as f:
- # strip the header and badges etc
- readme = f.read()
-
-with open("requirements.txt") as f:
- reqs = f.readlines()
- reqs = [r for r in reqs if "--hash" not in r]
- reqs = [r.split("\\")[0].split(";")[0].strip() for r in reqs]
-
-with open(os.path.join(here, "mephisto", "VERSION")) as version_file:
- version = version_file.read().strip()
-
-setup(
- name="mephisto",
- version=version,
- description="Crowdsourcing made simpler.",
- author="Jack Urbanek, Pratik Ringshia",
- author_email="[email protected]",
- long_description=readme,
- long_description_content_type="text/markdown",
- url="https://github.com/facebookresearch/Mephisto",
- python_requires=">=3.7",
- packages=find_packages(include=["mephisto.*", "hydra_plugins.*"]),
- license="MIT",
- install_requires=reqs,
- include_package_data=True,
- package_data={"mephisto": ["*.yaml", "abstractions/**/*"]},
- zip_safe=False,
- entry_points={"console_scripts": "mephisto=mephisto.client.cli:cli"},
- classifiers=[
- "Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.7",
- "Programming Language :: Python :: 3.8",
- "License :: OSI Approved :: MIT License",
- "Topic :: Scientific/Engineering :: Artificial Intelligence",
- "Natural Language :: English",
- ],
-)
| {"golden_diff": "diff --git a/setup.py b/setup.py\ndeleted file mode 100644\n--- a/setup.py\n+++ /dev/null\n@@ -1,49 +0,0 @@\n-#!/usr/bin/env python3\n-\n-# Copyright (c) Facebook, Inc. and its affiliates.\n-# This source code is licensed under the MIT license found in the\n-# LICENSE file in the root directory of this source tree.\n-\n-from setuptools import setup, find_packages\n-import os\n-\n-here = os.path.abspath(os.path.dirname(__file__))\n-\n-with open(\"README.md\", encoding=\"utf8\") as f:\n- # strip the header and badges etc\n- readme = f.read()\n-\n-with open(\"requirements.txt\") as f:\n- reqs = f.readlines()\n- reqs = [r for r in reqs if \"--hash\" not in r]\n- reqs = [r.split(\"\\\\\")[0].split(\";\")[0].strip() for r in reqs]\n-\n-with open(os.path.join(here, \"mephisto\", \"VERSION\")) as version_file:\n- version = version_file.read().strip()\n-\n-setup(\n- name=\"mephisto\",\n- version=version,\n- description=\"Crowdsourcing made simpler.\",\n- author=\"Jack Urbanek, Pratik Ringshia\",\n- author_email=\"[email protected]\",\n- long_description=readme,\n- long_description_content_type=\"text/markdown\",\n- url=\"https://github.com/facebookresearch/Mephisto\",\n- python_requires=\">=3.7\",\n- packages=find_packages(include=[\"mephisto.*\", \"hydra_plugins.*\"]),\n- license=\"MIT\",\n- install_requires=reqs,\n- include_package_data=True,\n- package_data={\"mephisto\": [\"*.yaml\", \"abstractions/**/*\"]},\n- zip_safe=False,\n- entry_points={\"console_scripts\": \"mephisto=mephisto.client.cli:cli\"},\n- classifiers=[\n- \"Programming Language :: Python :: 3\",\n- \"Programming Language :: Python :: 3.7\",\n- \"Programming Language :: Python :: 3.8\",\n- \"License :: OSI Approved :: MIT License\",\n- \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n- \"Natural Language :: English\",\n- ],\n-)\n", "issue": "Errors when installing new dependencies\n## Overview\r\nAdding a new dependency into requirements.txt causes errors to occur in the github actions workflow.\r\n\r\nFor example, \r\nWhen I added the `rich` dependency to the requirements.txt file I got this output for the test:\r\nhttps://github.com/facebookresearch/Mephisto/runs/7237323897?check_suite_focus=true\r\n\r\nThe issue is not exclusive to `rich` as I also got this error when trying to add the `detoxify` dependency.\n", "before_files": [{"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. 
and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nfrom setuptools import setup, find_packages\nimport os\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\nwith open(\"README.md\", encoding=\"utf8\") as f:\n # strip the header and badges etc\n readme = f.read()\n\nwith open(\"requirements.txt\") as f:\n reqs = f.readlines()\n reqs = [r for r in reqs if \"--hash\" not in r]\n reqs = [r.split(\"\\\\\")[0].split(\";\")[0].strip() for r in reqs]\n\nwith open(os.path.join(here, \"mephisto\", \"VERSION\")) as version_file:\n version = version_file.read().strip()\n\nsetup(\n name=\"mephisto\",\n version=version,\n description=\"Crowdsourcing made simpler.\",\n author=\"Jack Urbanek, Pratik Ringshia\",\n author_email=\"[email protected]\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/facebookresearch/Mephisto\",\n python_requires=\">=3.7\",\n packages=find_packages(include=[\"mephisto.*\", \"hydra_plugins.*\"]),\n license=\"MIT\",\n install_requires=reqs,\n include_package_data=True,\n package_data={\"mephisto\": [\"*.yaml\", \"abstractions/**/*\"]},\n zip_safe=False,\n entry_points={\"console_scripts\": \"mephisto=mephisto.client.cli:cli\"},\n classifiers=[\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"License :: OSI Approved :: MIT License\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n \"Natural Language :: English\",\n ],\n)\n", "path": "setup.py"}]} | 1,158 | 508 |
gh_patches_debug_38444 | rasdani/github-patches | git_diff | biolab__orange3-text-499 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Bag of Words: add option to disable hiding attributes
Sometimes, one would want to inspect words in Box Plot, MDS, Geo Map, Networks. These widgets don't show hidden attributes, which BoW features are by default. Add a checkbox to 'unhide' these attributes.
</issue>
<code>
[start of orangecontrib/text/widgets/utils/owbasevectorizer.py]
1 from AnyQt.QtWidgets import QGroupBox, QHBoxLayout, QVBoxLayout
2
3 from Orange.widgets import gui
4 from Orange.widgets import settings
5 from Orange.widgets.widget import OWWidget, Input, Output
6 from orangecontrib.text.corpus import Corpus
7
8
9 class OWBaseVectorizer(OWWidget):
10 """ A base class for feature extraction methods.
11
12 Notes:
13 Ensure that `create_configuration_layout` and `update_method` are overwritten.
14 """
15 # Input/output
16 class Inputs:
17 corpus = Input("Corpus", Corpus)
18
19 class Outputs:
20 corpus = Output("Corpus", Corpus)
21
22 want_main_area = False
23 resizing_enabled = False
24
25 # Settings
26 autocommit = settings.Setting(True)
27
28 Method = NotImplemented
29
30 def __init__(self):
31 super().__init__()
32 self.corpus = None
33 self.method = None
34
35 box = QGroupBox(title='Options')
36 box.setLayout(self.create_configuration_layout())
37 self.controlArea.layout().addWidget(box)
38
39 buttons_layout = QHBoxLayout()
40 buttons_layout.addSpacing(15)
41 buttons_layout.addWidget(
42 gui.auto_commit(None, self, 'autocommit', 'Commit', box=False)
43 )
44 self.controlArea.layout().addLayout(buttons_layout)
45 self.update_method()
46
47 @Inputs.corpus
48 def set_data(self, data):
49 self.corpus = data
50 self.commit()
51
52 def commit(self):
53 self.apply()
54
55 def apply(self):
56 if self.corpus is not None:
57 new_corpus = self.method.transform(self.corpus)
58 self.Outputs.corpus.send(new_corpus)
59
60 def update_method(self):
61 self.method = self.Method()
62
63 def on_change(self):
64 self.update_method()
65 self.commit()
66
67 def send_report(self):
68 self.report_items(self.method.report())
69
70 def create_configuration_layout(self):
71 return QVBoxLayout()
72
[end of orangecontrib/text/widgets/utils/owbasevectorizer.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/orangecontrib/text/widgets/utils/owbasevectorizer.py b/orangecontrib/text/widgets/utils/owbasevectorizer.py
--- a/orangecontrib/text/widgets/utils/owbasevectorizer.py
+++ b/orangecontrib/text/widgets/utils/owbasevectorizer.py
@@ -1,4 +1,5 @@
-from AnyQt.QtWidgets import QGroupBox, QHBoxLayout, QVBoxLayout
+
+from AnyQt.QtWidgets import QGroupBox, QVBoxLayout
from Orange.widgets import gui
from Orange.widgets import settings
@@ -24,6 +25,7 @@
# Settings
autocommit = settings.Setting(True)
+ hidden_cb = settings.Setting(True)
Method = NotImplemented
@@ -31,38 +33,55 @@
super().__init__()
self.corpus = None
self.method = None
+ self.new_corpus = None
+ self.new_attrs = None
box = QGroupBox(title='Options')
box.setLayout(self.create_configuration_layout())
self.controlArea.layout().addWidget(box)
- buttons_layout = QHBoxLayout()
- buttons_layout.addSpacing(15)
- buttons_layout.addWidget(
- gui.auto_commit(None, self, 'autocommit', 'Commit', box=False)
- )
- self.controlArea.layout().addLayout(buttons_layout)
+ output_layout = gui.hBox(self.controlArea)
+ gui.checkBox(output_layout, self, "hidden_cb", "Hide bow attributes",
+ callback=self.hide_attrs)
+
+ buttons_layout = gui.hBox(self.controlArea)
+ gui.auto_commit(buttons_layout, self, 'autocommit', 'Commit', box=False)
self.update_method()
@Inputs.corpus
def set_data(self, data):
self.corpus = data
- self.commit()
+ self.invalidate()
+
+ def hide_attrs(self):
+ if self.new_corpus:
+ new_domain = self.new_corpus.domain
+ for f in new_domain.attributes:
+ if f.name in self.new_attrs:
+ f.attributes['hidden'] = self.hidden_cb
+ self.new_corpus = self.new_corpus.transform(new_domain)
+ self.commit()
def commit(self):
- self.apply()
+ self.Outputs.corpus.send(self.new_corpus)
def apply(self):
if self.corpus is not None:
- new_corpus = self.method.transform(self.corpus)
- self.Outputs.corpus.send(new_corpus)
+ self.new_corpus = self.method.transform(self.corpus)
+ self.new_attrs = {f.name for f in self.new_corpus.domain.attributes} \
+ - {f.name for f in self.corpus.domain.attributes}
+
+ def invalidate(self):
+ self.apply()
+ self.hide_attrs()
+ self.commit()
def update_method(self):
self.method = self.Method()
def on_change(self):
self.update_method()
- self.commit()
+ self.invalidate()
def send_report(self):
self.report_items(self.method.report())
| {"golden_diff": "diff --git a/orangecontrib/text/widgets/utils/owbasevectorizer.py b/orangecontrib/text/widgets/utils/owbasevectorizer.py\n--- a/orangecontrib/text/widgets/utils/owbasevectorizer.py\n+++ b/orangecontrib/text/widgets/utils/owbasevectorizer.py\n@@ -1,4 +1,5 @@\n-from AnyQt.QtWidgets import QGroupBox, QHBoxLayout, QVBoxLayout\n+\n+from AnyQt.QtWidgets import QGroupBox, QVBoxLayout\n \n from Orange.widgets import gui\n from Orange.widgets import settings\n@@ -24,6 +25,7 @@\n \n # Settings\n autocommit = settings.Setting(True)\n+ hidden_cb = settings.Setting(True)\n \n Method = NotImplemented\n \n@@ -31,38 +33,55 @@\n super().__init__()\n self.corpus = None\n self.method = None\n+ self.new_corpus = None\n+ self.new_attrs = None\n \n box = QGroupBox(title='Options')\n box.setLayout(self.create_configuration_layout())\n self.controlArea.layout().addWidget(box)\n \n- buttons_layout = QHBoxLayout()\n- buttons_layout.addSpacing(15)\n- buttons_layout.addWidget(\n- gui.auto_commit(None, self, 'autocommit', 'Commit', box=False)\n- )\n- self.controlArea.layout().addLayout(buttons_layout)\n+ output_layout = gui.hBox(self.controlArea)\n+ gui.checkBox(output_layout, self, \"hidden_cb\", \"Hide bow attributes\",\n+ callback=self.hide_attrs)\n+\n+ buttons_layout = gui.hBox(self.controlArea)\n+ gui.auto_commit(buttons_layout, self, 'autocommit', 'Commit', box=False)\n self.update_method()\n \n @Inputs.corpus\n def set_data(self, data):\n self.corpus = data\n- self.commit()\n+ self.invalidate()\n+\n+ def hide_attrs(self):\n+ if self.new_corpus:\n+ new_domain = self.new_corpus.domain\n+ for f in new_domain.attributes:\n+ if f.name in self.new_attrs:\n+ f.attributes['hidden'] = self.hidden_cb\n+ self.new_corpus = self.new_corpus.transform(new_domain)\n+ self.commit()\n \n def commit(self):\n- self.apply()\n+ self.Outputs.corpus.send(self.new_corpus)\n \n def apply(self):\n if self.corpus is not None:\n- new_corpus = self.method.transform(self.corpus)\n- self.Outputs.corpus.send(new_corpus)\n+ self.new_corpus = self.method.transform(self.corpus)\n+ self.new_attrs = {f.name for f in self.new_corpus.domain.attributes} \\\n+ - {f.name for f in self.corpus.domain.attributes}\n+\n+ def invalidate(self):\n+ self.apply()\n+ self.hide_attrs()\n+ self.commit()\n \n def update_method(self):\n self.method = self.Method()\n \n def on_change(self):\n self.update_method()\n- self.commit()\n+ self.invalidate()\n \n def send_report(self):\n self.report_items(self.method.report())\n", "issue": "Bag of Words: add option to disable hiding attributes\nSometimes, one would want to inspect words in Box Plot, MDS, Geo Map, Networks. These widgets don't show hidden attributes, which BoW features are by default. 
Add a checkbox to 'unhide' these attributes.\n", "before_files": [{"content": "from AnyQt.QtWidgets import QGroupBox, QHBoxLayout, QVBoxLayout\n\nfrom Orange.widgets import gui\nfrom Orange.widgets import settings\nfrom Orange.widgets.widget import OWWidget, Input, Output\nfrom orangecontrib.text.corpus import Corpus\n\n\nclass OWBaseVectorizer(OWWidget):\n \"\"\" A base class for feature extraction methods.\n\n Notes:\n Ensure that `create_configuration_layout` and `update_method` are overwritten.\n \"\"\"\n # Input/output\n class Inputs:\n corpus = Input(\"Corpus\", Corpus)\n\n class Outputs:\n corpus = Output(\"Corpus\", Corpus)\n\n want_main_area = False\n resizing_enabled = False\n\n # Settings\n autocommit = settings.Setting(True)\n\n Method = NotImplemented\n\n def __init__(self):\n super().__init__()\n self.corpus = None\n self.method = None\n\n box = QGroupBox(title='Options')\n box.setLayout(self.create_configuration_layout())\n self.controlArea.layout().addWidget(box)\n\n buttons_layout = QHBoxLayout()\n buttons_layout.addSpacing(15)\n buttons_layout.addWidget(\n gui.auto_commit(None, self, 'autocommit', 'Commit', box=False)\n )\n self.controlArea.layout().addLayout(buttons_layout)\n self.update_method()\n\n @Inputs.corpus\n def set_data(self, data):\n self.corpus = data\n self.commit()\n\n def commit(self):\n self.apply()\n\n def apply(self):\n if self.corpus is not None:\n new_corpus = self.method.transform(self.corpus)\n self.Outputs.corpus.send(new_corpus)\n\n def update_method(self):\n self.method = self.Method()\n\n def on_change(self):\n self.update_method()\n self.commit()\n\n def send_report(self):\n self.report_items(self.method.report())\n\n def create_configuration_layout(self):\n return QVBoxLayout()\n", "path": "orangecontrib/text/widgets/utils/owbasevectorizer.py"}]} | 1,140 | 651 |
gh_patches_debug_23859 | rasdani/github-patches | git_diff | learningequality__kolibri-1004 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
emphasize message when content renderer is not available
Current behavior is confusing because the error is styled the same as the description:

> Kolibri is unable to render this content
This string should be rendered as an error message so it doesn't look like part of the content description. Recommend that we use a [non-dismissible alert](https://josephuspaye.github.io/Keen-UI/#/ui-alert) error:

</issue>
<code>
[start of kolibri/plugins/html5_app_renderer/kolibri_plugin.py]
1 from __future__ import absolute_import, print_function, unicode_literals
2
3 from kolibri.content import hooks as content_hooks
4 from kolibri.plugins.base import KolibriPluginBase
5
6
7 class HTML5AppPlugin(KolibriPluginBase):
8 pass
9
10
11 class HTML5AppAsset(content_hooks.WebpackBundleHook):
12 unique_slug = "html5_app_renderer_module"
13 src_file = "assets/src/module.js"
14 content_types_file = "assets/src/content_types.json"
15
[end of kolibri/plugins/html5_app_renderer/kolibri_plugin.py]
[start of kolibri/plugins/audio_mp3_render/kolibri_plugin.py]
1 from __future__ import absolute_import, print_function, unicode_literals
2
3 from kolibri.content import hooks as content_hooks
4 from kolibri.plugins.base import KolibriPluginBase
5
6
7 class AudioMP3RenderPlugin(KolibriPluginBase):
8 pass
9
10
11 class AudioMP3RenderAsset(content_hooks.WebpackBundleHook):
12 unique_slug = "audio_mp3_render_module"
13 src_file = "assets/src/module.js"
14 content_types_file = "assets/src/content_types.json"
15
[end of kolibri/plugins/audio_mp3_render/kolibri_plugin.py]
[start of kolibri/plugins/document_pdf_render/kolibri_plugin.py]
1 from __future__ import absolute_import, print_function, unicode_literals
2
3 from kolibri.content import hooks as content_hooks
4 from kolibri.plugins.base import KolibriPluginBase
5
6
7 class DocumentPDFRenderPlugin(KolibriPluginBase):
8 pass
9
10
11 class DocumentPDFRenderAsset(content_hooks.WebpackBundleHook):
12 unique_slug = "document_pdf_render_module"
13 src_file = "assets/src/module.js"
14 content_types_file = "assets/src/content_types.json"
15
[end of kolibri/plugins/document_pdf_render/kolibri_plugin.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kolibri/plugins/audio_mp3_render/kolibri_plugin.py b/kolibri/plugins/audio_mp3_render/kolibri_plugin.py
--- a/kolibri/plugins/audio_mp3_render/kolibri_plugin.py
+++ b/kolibri/plugins/audio_mp3_render/kolibri_plugin.py
@@ -8,7 +8,7 @@
pass
-class AudioMP3RenderAsset(content_hooks.WebpackBundleHook):
+class AudioMP3RenderAsset(content_hooks.ContentRendererHook):
unique_slug = "audio_mp3_render_module"
src_file = "assets/src/module.js"
content_types_file = "assets/src/content_types.json"
diff --git a/kolibri/plugins/document_pdf_render/kolibri_plugin.py b/kolibri/plugins/document_pdf_render/kolibri_plugin.py
--- a/kolibri/plugins/document_pdf_render/kolibri_plugin.py
+++ b/kolibri/plugins/document_pdf_render/kolibri_plugin.py
@@ -8,7 +8,7 @@
pass
-class DocumentPDFRenderAsset(content_hooks.WebpackBundleHook):
+class DocumentPDFRenderAsset(content_hooks.ContentRendererHook):
unique_slug = "document_pdf_render_module"
src_file = "assets/src/module.js"
content_types_file = "assets/src/content_types.json"
diff --git a/kolibri/plugins/html5_app_renderer/kolibri_plugin.py b/kolibri/plugins/html5_app_renderer/kolibri_plugin.py
--- a/kolibri/plugins/html5_app_renderer/kolibri_plugin.py
+++ b/kolibri/plugins/html5_app_renderer/kolibri_plugin.py
@@ -8,7 +8,7 @@
pass
-class HTML5AppAsset(content_hooks.WebpackBundleHook):
+class HTML5AppAsset(content_hooks.ContentRendererHook):
unique_slug = "html5_app_renderer_module"
src_file = "assets/src/module.js"
content_types_file = "assets/src/content_types.json"
| {"golden_diff": "diff --git a/kolibri/plugins/audio_mp3_render/kolibri_plugin.py b/kolibri/plugins/audio_mp3_render/kolibri_plugin.py\n--- a/kolibri/plugins/audio_mp3_render/kolibri_plugin.py\n+++ b/kolibri/plugins/audio_mp3_render/kolibri_plugin.py\n@@ -8,7 +8,7 @@\n pass\n \n \n-class AudioMP3RenderAsset(content_hooks.WebpackBundleHook):\n+class AudioMP3RenderAsset(content_hooks.ContentRendererHook):\n unique_slug = \"audio_mp3_render_module\"\n src_file = \"assets/src/module.js\"\n content_types_file = \"assets/src/content_types.json\"\ndiff --git a/kolibri/plugins/document_pdf_render/kolibri_plugin.py b/kolibri/plugins/document_pdf_render/kolibri_plugin.py\n--- a/kolibri/plugins/document_pdf_render/kolibri_plugin.py\n+++ b/kolibri/plugins/document_pdf_render/kolibri_plugin.py\n@@ -8,7 +8,7 @@\n pass\n \n \n-class DocumentPDFRenderAsset(content_hooks.WebpackBundleHook):\n+class DocumentPDFRenderAsset(content_hooks.ContentRendererHook):\n unique_slug = \"document_pdf_render_module\"\n src_file = \"assets/src/module.js\"\n content_types_file = \"assets/src/content_types.json\"\ndiff --git a/kolibri/plugins/html5_app_renderer/kolibri_plugin.py b/kolibri/plugins/html5_app_renderer/kolibri_plugin.py\n--- a/kolibri/plugins/html5_app_renderer/kolibri_plugin.py\n+++ b/kolibri/plugins/html5_app_renderer/kolibri_plugin.py\n@@ -8,7 +8,7 @@\n pass\n \n \n-class HTML5AppAsset(content_hooks.WebpackBundleHook):\n+class HTML5AppAsset(content_hooks.ContentRendererHook):\n unique_slug = \"html5_app_renderer_module\"\n src_file = \"assets/src/module.js\"\n content_types_file = \"assets/src/content_types.json\"\n", "issue": "emphasize message when content renderer is not available\n\r\nCurrent behavior is confusing because the error is styled the same as the description:\r\n\r\n\r\n\r\n\r\n> Kolibri is unable to render this content\r\n\r\nThis string should be rendered as an error message so it doesn't look like part of the content description. 
Recommend that we use a [non-dismissible alert](https://josephuspaye.github.io/Keen-UI/#/ui-alert) error:\r\n\r\n\r\n\r\n\r\n\n", "before_files": [{"content": "from __future__ import absolute_import, print_function, unicode_literals\n\nfrom kolibri.content import hooks as content_hooks\nfrom kolibri.plugins.base import KolibriPluginBase\n\n\nclass HTML5AppPlugin(KolibriPluginBase):\n pass\n\n\nclass HTML5AppAsset(content_hooks.WebpackBundleHook):\n unique_slug = \"html5_app_renderer_module\"\n src_file = \"assets/src/module.js\"\n content_types_file = \"assets/src/content_types.json\"\n", "path": "kolibri/plugins/html5_app_renderer/kolibri_plugin.py"}, {"content": "from __future__ import absolute_import, print_function, unicode_literals\n\nfrom kolibri.content import hooks as content_hooks\nfrom kolibri.plugins.base import KolibriPluginBase\n\n\nclass AudioMP3RenderPlugin(KolibriPluginBase):\n pass\n\n\nclass AudioMP3RenderAsset(content_hooks.WebpackBundleHook):\n unique_slug = \"audio_mp3_render_module\"\n src_file = \"assets/src/module.js\"\n content_types_file = \"assets/src/content_types.json\"\n", "path": "kolibri/plugins/audio_mp3_render/kolibri_plugin.py"}, {"content": "from __future__ import absolute_import, print_function, unicode_literals\n\nfrom kolibri.content import hooks as content_hooks\nfrom kolibri.plugins.base import KolibriPluginBase\n\n\nclass DocumentPDFRenderPlugin(KolibriPluginBase):\n pass\n\n\nclass DocumentPDFRenderAsset(content_hooks.WebpackBundleHook):\n unique_slug = \"document_pdf_render_module\"\n src_file = \"assets/src/module.js\"\n content_types_file = \"assets/src/content_types.json\"\n", "path": "kolibri/plugins/document_pdf_render/kolibri_plugin.py"}]} | 1,184 | 403 |
gh_patches_debug_34 | rasdani/github-patches | git_diff | airctic__icevision-995 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Fix installation in documentation
• Improve Installation Guide
We need to improve the installation guide for IceVision.
Too many people are getting stuck installing the library.
We need clear instructions for:
* Colab
* MacOS
* Windows (WSL2)
* Ubuntu
</issue>
<code>
[start of setup.py]
1 from setuptools import setup
2
3 if __name__ == "__main__":
4 setup()
5
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,5 @@
from setuptools import setup
+
if __name__ == "__main__":
setup()
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -1,4 +1,5 @@\n from setuptools import setup\n \n+\n if __name__ == \"__main__\":\n setup()\n", "issue": "Fix installation in documentation\n\u2022 Improve Installation Guide\r\n\r\nWe need to improve the installation guide for IceVision.\r\nToo many people are getting stuck installing the library.\r\nWe need clear instructions for:\r\n* Colab\r\n* MacOS\r\n* Windows (WSL2)\r\n* Ubuntu\n", "before_files": [{"content": "from setuptools import setup\n\nif __name__ == \"__main__\":\n setup()\n", "path": "setup.py"}]} | 606 | 48 |
gh_patches_debug_2384 | rasdani/github-patches | git_diff | helmholtz-analytics__heat-406 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Recent CI runs failing with NetCDF: HDF error
**Description**
Recent CI (and local) runs of our tests fail with messages like
```
E RuntimeError: NetCDF: HDF error
netCDF4/_netCDF4.pyx:1887: RuntimeError
During handling of the above exception, another exception occurred:
self = <heat.core.tests.test_io.TestIO testMethod=test_save_netcdf>
def test_save_netcdf(self):
# netcdf support is optional
if not ht.io.supports_netcdf():
return
# local unsplit data
local_data = ht.arange(100)
> ht.save_netcdf(local_data, self.NETCDF_OUT_PATH, self.NETCDF_VARIABLE)
heat/core/tests/test_io.py:373:
```
**To Reproduce**
Steps to reproduce the behavior:
1. Which module/class/function is affected?
heat/core/tests/test_io.py
2. What are the circumstances under which the bug appears?
ANY, just run from current master
3. What is the exact error-message/errorous behavious?
cf. above.
**Expected behavior**
Tests should run successfully.
**Illustrative**
https://travis-ci.com/helmholtz-analytics/heat/builds/135270829
**Version Info**
Topic branch, but master would suffer from a rebuild.
**Additional comments**
The fix will be to pin the NetCDF dependency to <=1.5.2. Problems start to occur with 1.5.3.
</issue>
<code>
[start of setup.py]
1 from setuptools import setup
2 import sys
3
4 sys.path.append("./heat/core")
5 import version
6
7 print(version, dir(version))
8
9 with open("README.md", "r") as handle:
10 long_description = handle.read()
11
12 # with open('./heat/core/version.py') as handle:
13 # exec(handle.read())
14 # print(dir())
15
16 setup(
17 name="heat",
18 packages=["heat", "heat.core", "heat.ml", "heat.ml.cluster"],
19 data_files=["README.md", "LICENSE"],
20 version=version.__version__,
21 description="A framework for high performance data analytics and machine learning.",
22 long_description=long_description,
23 long_description_content_type="text/markdown",
24 author="Helmholtz Association",
25 author_email="[email protected]",
26 url="https://github.com/helmholtz-analytics/heat",
27 keywords=["data", "analytics", "tensors", "distributed", "gpu"],
28 classifiers=[
29 "Development Status :: 2 - Pre-Alpha",
30 "Programming Language :: Python :: 3.5",
31 "License :: OSI Approved :: MIT License",
32 "Intended Audience :: Science/Research",
33 "Topic :: Scientific/Engineering",
34 ],
35 install_requires=["mpi4py>=3.0.0", "numpy>=1.13.0", "torch==1.3.0"],
36 extras_require={
37 "hdf5": ["h5py>=2.8.0"],
38 "netcdf": ["netCDF4>=1.4.0"],
39 "dev": ["pre-commit>=1.18.3"],
40 },
41 )
42
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@
install_requires=["mpi4py>=3.0.0", "numpy>=1.13.0", "torch==1.3.0"],
extras_require={
"hdf5": ["h5py>=2.8.0"],
- "netcdf": ["netCDF4>=1.4.0"],
+ "netcdf": ["netCDF4>=1.4.0,<=1.5.2"],
"dev": ["pre-commit>=1.18.3"],
},
)
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -35,7 +35,7 @@\n install_requires=[\"mpi4py>=3.0.0\", \"numpy>=1.13.0\", \"torch==1.3.0\"],\n extras_require={\n \"hdf5\": [\"h5py>=2.8.0\"],\n- \"netcdf\": [\"netCDF4>=1.4.0\"],\n+ \"netcdf\": [\"netCDF4>=1.4.0,<=1.5.2\"],\n \"dev\": [\"pre-commit>=1.18.3\"],\n },\n )\n", "issue": "Recent CI runs failing with NetCDF: HDF error\n**Description**\r\n\r\nRecent CI (and local) runs of our tests fail with messages like\r\n\r\n```\r\nE RuntimeError: NetCDF: HDF error \r\n \r\nnetCDF4/_netCDF4.pyx:1887: RuntimeError \r\n \r\nDuring handling of the above exception, another exception occurred: \r\n \r\nself = <heat.core.tests.test_io.TestIO testMethod=test_save_netcdf> \r\n \r\n def test_save_netcdf(self): \r\n # netcdf support is optional \r\n if not ht.io.supports_netcdf(): \r\n return \r\n \r\n # local unsplit data \r\n local_data = ht.arange(100) \r\n> ht.save_netcdf(local_data, self.NETCDF_OUT_PATH, self.NETCDF_VARIABLE) \r\n \r\nheat/core/tests/test_io.py:373: \r\n```\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Which module/class/function is affected?\r\nheat/core/tests/test_io.py\r\n2. What are the circumstances under which the bug appears?\r\nANY, just run from current master\r\n3. What is the exact error-message/errorous behavious?\r\ncf. above.\r\n\r\n\r\n**Expected behavior**\r\nTests should run successfully.\r\n\r\n**Illustrative**\r\nhttps://travis-ci.com/helmholtz-analytics/heat/builds/135270829\r\n\r\n**Version Info**\r\nTopic branch, but master would suffer from a rebuild.\r\n\r\n**Additional comments**\r\nThe fix will be to pin the NetCDF dependency to <=1.5.2. Problems start to occur with 1.5.3.\r\n\n", "before_files": [{"content": "from setuptools import setup\nimport sys\n\nsys.path.append(\"./heat/core\")\nimport version\n\nprint(version, dir(version))\n\nwith open(\"README.md\", \"r\") as handle:\n long_description = handle.read()\n\n# with open('./heat/core/version.py') as handle:\n# exec(handle.read())\n# print(dir())\n\nsetup(\n name=\"heat\",\n packages=[\"heat\", \"heat.core\", \"heat.ml\", \"heat.ml.cluster\"],\n data_files=[\"README.md\", \"LICENSE\"],\n version=version.__version__,\n description=\"A framework for high performance data analytics and machine learning.\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n author=\"Helmholtz Association\",\n author_email=\"[email protected]\",\n url=\"https://github.com/helmholtz-analytics/heat\",\n keywords=[\"data\", \"analytics\", \"tensors\", \"distributed\", \"gpu\"],\n classifiers=[\n \"Development Status :: 2 - Pre-Alpha\",\n \"Programming Language :: Python :: 3.5\",\n \"License :: OSI Approved :: MIT License\",\n \"Intended Audience :: Science/Research\",\n \"Topic :: Scientific/Engineering\",\n ],\n install_requires=[\"mpi4py>=3.0.0\", \"numpy>=1.13.0\", \"torch==1.3.0\"],\n extras_require={\n \"hdf5\": [\"h5py>=2.8.0\"],\n \"netcdf\": [\"netCDF4>=1.4.0\"],\n \"dev\": [\"pre-commit>=1.18.3\"],\n },\n)\n", "path": "setup.py"}]} | 1,319 | 146 |
gh_patches_debug_854 | rasdani/github-patches | git_diff | holoviz__holoviews-5436 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Game of Life example needs update
### Package versions
```
panel = 0.13.1
holoviews = 1.15.0
bokeh = 2.4.3
```
### Bug description
In the Game of Life example in the holoviews documentation (https://holoviews.org/gallery/apps/bokeh/game_of_life.html)
I needed to update the second to last line
```python
panel.add_periodic_callback(advance, 50)
```
to
```python
pn.state.add_periodic_callback(advance, period=50) # 50 msec
# note: the `period=` is not necessary, but I think it adds clarity
```
It seems this is due to a change in the `panel` interface.
</issue>
<code>
[start of examples/gallery/apps/bokeh/game_of_life.py]
1 import numpy as np
2 import holoviews as hv
3 import panel as pn
4
5 from holoviews import opts
6 from holoviews.streams import Tap, Counter, DoubleTap
7 from scipy.signal import convolve2d
8
9 hv.extension('bokeh')
10
11 diehard = [[0, 0, 0, 0, 0, 0, 1, 0],
12 [1, 1, 0, 0, 0, 0, 0, 0],
13 [0, 1, 0, 0, 0, 1, 1, 1]]
14
15 boat = [[1, 1, 0],
16 [1, 0, 1],
17 [0, 1, 0]]
18
19 r_pentomino = [[0, 1, 1],
20 [1, 1, 0],
21 [0, 1, 0]]
22
23 beacon = [[0, 0, 1, 1],
24 [0, 0, 1, 1],
25 [1, 1, 0, 0],
26 [1, 1, 0, 0]]
27
28 acorn = [[0, 1, 0, 0, 0, 0, 0],
29 [0, 0, 0, 1, 0, 0, 0],
30 [1, 1, 0, 0, 1, 1, 1]]
31
32 spaceship = [[0, 0, 1, 1, 0],
33 [1, 1, 0, 1, 1],
34 [1, 1, 1, 1, 0],
35 [0, 1, 1, 0, 0]]
36
37 block_switch_engine = [[0, 0, 0, 0, 0, 0, 1, 0],
38 [0, 0, 0, 0, 1, 0, 1, 1],
39 [0, 0, 0, 0, 1, 0, 1, 0],
40 [0, 0, 0, 0, 1, 0, 0, 0],
41 [0, 0, 1, 0, 0, 0, 0, 0],
42 [1, 0, 1, 0, 0, 0, 0, 0]]
43
44 glider = [[1, 0, 0], [0, 1, 1], [1, 1, 0]]
45
46 unbounded = [[1, 1, 1, 0, 1],
47 [1, 0, 0, 0, 0],
48 [0, 0, 0, 1, 1],
49 [0, 1, 1, 0, 1],
50 [1, 0, 1, 0, 1]]
51
52 shapes = {'Glider': glider, 'Block Switch Engine': block_switch_engine,
53 'Spaceship': spaceship, 'Acorn': acorn, 'Beacon': beacon,
54 'Diehard': diehard, 'Unbounded': unbounded}
55
56 def step(X):
57 nbrs_count = convolve2d(X, np.ones((3, 3)), mode='same', boundary='wrap') - X
58 return (nbrs_count == 3) | (X & (nbrs_count == 2))
59
60 def update(pattern, counter, x, y):
61 if x and y:
62 pattern = np.array(shapes[pattern])
63 r, c = pattern.shape
64 y, x = img.sheet2matrixidx(x,y)
65 img.data[y:y+r,x:x+c] = pattern[::-1]
66 else:
67 img.data = step(img.data)
68 return hv.Image(img)
69
70 # Set up plot which advances on counter and adds pattern on tap
71 title = 'Game of Life - Tap to place pattern, Doubletap to clear'
72 img = hv.Image(np.zeros((100, 200), dtype=np.uint8))
73 counter, tap = Counter(transient=True), Tap(transient=True),
74 pattern_dim = hv.Dimension('Pattern', values=sorted(shapes.keys()))
75 dmap = hv.DynamicMap(update, kdims=[pattern_dim], streams=[counter, tap])
76
77 plot = dmap.opts(
78 opts.Image(cmap='gray', clim=(0, 1), toolbar=None, responsive=True,
79 min_height=800, title=title, xaxis=None, yaxis=None)
80 )
81
82 # Add callback to clear on double tap
83 def reset_data(x, y):
84 img.data[:] = 0
85
86 reset = DoubleTap(transient=True, source=plot)
87 reset.add_subscriber(reset_data)
88
89 # Set up Panel app and periodic callback
90 panel = pn.pane.HoloViews(plot, center=True, widget_location='right')
91
92 def advance():
93 counter.event(counter=counter.counter+1)
94 panel.add_periodic_callback(advance, 50)
95
96 panel.servable('Game of Life')
97
[end of examples/gallery/apps/bokeh/game_of_life.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/examples/gallery/apps/bokeh/game_of_life.py b/examples/gallery/apps/bokeh/game_of_life.py
--- a/examples/gallery/apps/bokeh/game_of_life.py
+++ b/examples/gallery/apps/bokeh/game_of_life.py
@@ -91,6 +91,6 @@
def advance():
counter.event(counter=counter.counter+1)
-panel.add_periodic_callback(advance, 50)
+pn.state.add_periodic_callback(advance, period=50, start=False)
panel.servable('Game of Life')
| {"golden_diff": "diff --git a/examples/gallery/apps/bokeh/game_of_life.py b/examples/gallery/apps/bokeh/game_of_life.py\n--- a/examples/gallery/apps/bokeh/game_of_life.py\n+++ b/examples/gallery/apps/bokeh/game_of_life.py\n@@ -91,6 +91,6 @@\n \n def advance():\n counter.event(counter=counter.counter+1)\n-panel.add_periodic_callback(advance, 50)\n+pn.state.add_periodic_callback(advance, period=50, start=False)\n \n panel.servable('Game of Life')\n", "issue": "Game of Life example needs update\n### Package versions\r\n```\r\npanel = 0.13.1\r\nholoviews = 1.15.0\r\nbokeh = 2.4.3\r\n```\r\n\r\n\r\n### Bug description\r\nIn the Game of Life example in the holoviews documentation (https://holoviews.org/gallery/apps/bokeh/game_of_life.html) \r\nI needed to update the second to last line\r\n```python\r\npanel.add_periodic_callback(advance, 50)\r\n```\r\n\r\nto\r\n```python\r\npn.state.add_periodic_callback(advance, period=50) # 50 msec \r\n# note: the `period=` is not necessary, but I think it adds clarity\r\n```\r\nIt seems this is due to a change in the `panel` interface.\r\n\n", "before_files": [{"content": "import numpy as np\nimport holoviews as hv\nimport panel as pn\n\nfrom holoviews import opts\nfrom holoviews.streams import Tap, Counter, DoubleTap\nfrom scipy.signal import convolve2d\n\nhv.extension('bokeh')\n\ndiehard = [[0, 0, 0, 0, 0, 0, 1, 0],\n [1, 1, 0, 0, 0, 0, 0, 0],\n [0, 1, 0, 0, 0, 1, 1, 1]]\n\nboat = [[1, 1, 0],\n [1, 0, 1],\n [0, 1, 0]]\n\nr_pentomino = [[0, 1, 1],\n [1, 1, 0],\n [0, 1, 0]]\n\nbeacon = [[0, 0, 1, 1],\n [0, 0, 1, 1],\n [1, 1, 0, 0],\n [1, 1, 0, 0]]\n\nacorn = [[0, 1, 0, 0, 0, 0, 0],\n [0, 0, 0, 1, 0, 0, 0],\n [1, 1, 0, 0, 1, 1, 1]]\n\nspaceship = [[0, 0, 1, 1, 0],\n [1, 1, 0, 1, 1],\n [1, 1, 1, 1, 0],\n [0, 1, 1, 0, 0]]\n\nblock_switch_engine = [[0, 0, 0, 0, 0, 0, 1, 0],\n [0, 0, 0, 0, 1, 0, 1, 1],\n [0, 0, 0, 0, 1, 0, 1, 0],\n [0, 0, 0, 0, 1, 0, 0, 0],\n [0, 0, 1, 0, 0, 0, 0, 0],\n [1, 0, 1, 0, 0, 0, 0, 0]]\n\nglider = [[1, 0, 0], [0, 1, 1], [1, 1, 0]]\n\nunbounded = [[1, 1, 1, 0, 1],\n [1, 0, 0, 0, 0],\n [0, 0, 0, 1, 1],\n [0, 1, 1, 0, 1],\n [1, 0, 1, 0, 1]]\n\nshapes = {'Glider': glider, 'Block Switch Engine': block_switch_engine,\n 'Spaceship': spaceship, 'Acorn': acorn, 'Beacon': beacon,\n 'Diehard': diehard, 'Unbounded': unbounded}\n\ndef step(X):\n nbrs_count = convolve2d(X, np.ones((3, 3)), mode='same', boundary='wrap') - X\n return (nbrs_count == 3) | (X & (nbrs_count == 2))\n\ndef update(pattern, counter, x, y):\n if x and y:\n pattern = np.array(shapes[pattern])\n r, c = pattern.shape\n y, x = img.sheet2matrixidx(x,y)\n img.data[y:y+r,x:x+c] = pattern[::-1]\n else:\n img.data = step(img.data)\n return hv.Image(img)\n\n# Set up plot which advances on counter and adds pattern on tap\ntitle = 'Game of Life - Tap to place pattern, Doubletap to clear'\nimg = hv.Image(np.zeros((100, 200), dtype=np.uint8))\ncounter, tap = Counter(transient=True), Tap(transient=True),\npattern_dim = hv.Dimension('Pattern', values=sorted(shapes.keys()))\ndmap = hv.DynamicMap(update, kdims=[pattern_dim], streams=[counter, tap])\n\nplot = dmap.opts(\n opts.Image(cmap='gray', clim=(0, 1), toolbar=None, responsive=True,\n min_height=800, title=title, xaxis=None, yaxis=None)\n)\n\n# Add callback to clear on double tap\ndef reset_data(x, y):\n img.data[:] = 0\n\nreset = DoubleTap(transient=True, source=plot)\nreset.add_subscriber(reset_data)\n\n# Set up Panel app and periodic callback\npanel = pn.pane.HoloViews(plot, center=True, widget_location='right')\n\ndef advance():\n 
counter.event(counter=counter.counter+1)\npanel.add_periodic_callback(advance, 50)\n\npanel.servable('Game of Life')\n", "path": "examples/gallery/apps/bokeh/game_of_life.py"}]} | 2,018 | 118 |
gh_patches_debug_6948 | rasdani/github-patches | git_diff | digitalfabrik__integreat-cms-205 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Pages: Separate page and translation attributes in page form
From a user's point of view, it is currently not clear which attributes of the page form refer to the `Page` object and which only apply to `PageTranslation` objects. We should probably separate these two sections from each other.
This is part of #98.
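
As a purely illustrative sketch of the requested split, the page form could be broken into two form classes, one per model; the field names below are placeholders and do not reflect the real integreat models:

```python
# Hypothetical sketch only: separate the Page-level fields from the
# PageTranslation-level fields so the form renders them as two sections.
from django import forms


class PageForm(forms.Form):
    """Attributes that belong to the Page object itself."""
    parent = forms.IntegerField(required=False)
    icon = forms.FileField(required=False)


class PageTranslationForm(forms.Form):
    """Attributes that only affect a single PageTranslation."""
    title = forms.CharField(max_length=250)
    text = forms.CharField(widget=forms.Textarea, required=False)
    status = forms.ChoiceField(choices=[("draft", "Draft"), ("public", "Public")])
```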
</issue>
<code>
[start of backend/cms/models/region.py]
1 """
2 Database model representing an autonomous authority
3 """
4 from django.contrib.postgres.fields import ArrayField
5 from django.db import models
6 from django.http import Http404
7 from django.utils import timezone
8 from django.utils.translation import ugettext_lazy as _
9
10
11 class Region(models.Model):
12 """
13 Class to generate region database objects
14 """
15 ACTIVE = 'acti'
16 HIDDEN = 'hidd'
17 ARCHIVED = 'arch'
18
19 STATUS = (
20 (ACTIVE, _('Active')),
21 (HIDDEN, _('Hidden')),
22 (ARCHIVED, _('Archived')),
23 )
24
25 name = models.CharField(max_length=200)
26 slug = models.SlugField(max_length=200, unique=True, blank=True)
27 status = models.CharField(max_length=4, choices=STATUS)
28
29 events_enabled = models.BooleanField(default=True)
30 push_notifications_enabled = models.BooleanField(default=True)
31 push_notification_channels = ArrayField(models.CharField(max_length=60), blank=True)
32
33 latitude = models.FloatField(null=True)
34 longitude = models.FloatField(null=True)
35 postal_code = models.CharField(max_length=10)
36
37 admin_mail = models.EmailField()
38
39 created_date = models.DateTimeField(default=timezone.now)
40 last_updated = models.DateTimeField(auto_now=True)
41
42 statistics_enabled = models.BooleanField(default=False)
43 matomo_url = models.CharField(max_length=150, blank=True, default='')
44 matomo_token = models.CharField(max_length=150, blank=True, default='')
45 matomo_ssl_verify = models.BooleanField(default=True)
46
47 @property
48 def languages(self):
49 language_tree_nodes = self.language_tree_nodes.select_related('language').all()
50 return [language_tree_node.language for language_tree_node in language_tree_nodes]
51
52 @property
53 def default_language(self):
54 tree_root = self.language_tree_nodes.filter(level=0).first()
55 return tree_root.language if tree_root else None
56
57 @classmethod
58 def get_current_region(cls, request):
59 if not hasattr(request, 'resolver_match'):
60 return None
61 region_slug = request.resolver_match.kwargs.get('region_slug')
62 if not region_slug:
63 return None
64 region = cls.objects.filter(slug=region_slug)
65 if not region.exists():
66 raise Http404
67 return region.first()
68
69 def __str__(self):
70 """Function that provides a string representation of this object
71
72 Returns: String
73 """
74 return self.name
75
76 class Meta:
77 default_permissions = ()
78 permissions = (
79 ('manage_regions', 'Can manage regions'),
80 )
81
[end of backend/cms/models/region.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/backend/cms/models/region.py b/backend/cms/models/region.py
--- a/backend/cms/models/region.py
+++ b/backend/cms/models/region.py
@@ -56,7 +56,8 @@
@classmethod
def get_current_region(cls, request):
- if not hasattr(request, 'resolver_match'):
+ # if rendered url is edit_region, the region slug originates from the region form.
+ if not hasattr(request, 'resolver_match') or request.resolver_match.url_name == 'edit_region':
return None
region_slug = request.resolver_match.kwargs.get('region_slug')
if not region_slug:
| {"golden_diff": "diff --git a/backend/cms/models/region.py b/backend/cms/models/region.py\n--- a/backend/cms/models/region.py\n+++ b/backend/cms/models/region.py\n@@ -56,7 +56,8 @@\n \n @classmethod\n def get_current_region(cls, request):\n- if not hasattr(request, 'resolver_match'):\n+ # if rendered url is edit_region, the region slug originates from the region form.\n+ if not hasattr(request, 'resolver_match') or request.resolver_match.url_name == 'edit_region':\n return None\n region_slug = request.resolver_match.kwargs.get('region_slug')\n if not region_slug:\n", "issue": "Pages: Separate page and translation attributes in page form\nAt the moment, it is not clear which attributes of the page form refer to the `Page` object and which only handle `PageTranslation` objects (from a user's point of view). Probably, we should separate these two sections from each other.\r\n\r\nThis is part of #98.\n", "before_files": [{"content": "\"\"\"\nDatabase model representing an autonomous authority\n\"\"\"\nfrom django.contrib.postgres.fields import ArrayField\nfrom django.db import models\nfrom django.http import Http404\nfrom django.utils import timezone\nfrom django.utils.translation import ugettext_lazy as _\n\n\nclass Region(models.Model):\n \"\"\"\n Class to generate region database objects\n \"\"\"\n ACTIVE = 'acti'\n HIDDEN = 'hidd'\n ARCHIVED = 'arch'\n\n STATUS = (\n (ACTIVE, _('Active')),\n (HIDDEN, _('Hidden')),\n (ARCHIVED, _('Archived')),\n )\n\n name = models.CharField(max_length=200)\n slug = models.SlugField(max_length=200, unique=True, blank=True)\n status = models.CharField(max_length=4, choices=STATUS)\n\n events_enabled = models.BooleanField(default=True)\n push_notifications_enabled = models.BooleanField(default=True)\n push_notification_channels = ArrayField(models.CharField(max_length=60), blank=True)\n\n latitude = models.FloatField(null=True)\n longitude = models.FloatField(null=True)\n postal_code = models.CharField(max_length=10)\n\n admin_mail = models.EmailField()\n\n created_date = models.DateTimeField(default=timezone.now)\n last_updated = models.DateTimeField(auto_now=True)\n\n statistics_enabled = models.BooleanField(default=False)\n matomo_url = models.CharField(max_length=150, blank=True, default='')\n matomo_token = models.CharField(max_length=150, blank=True, default='')\n matomo_ssl_verify = models.BooleanField(default=True)\n\n @property\n def languages(self):\n language_tree_nodes = self.language_tree_nodes.select_related('language').all()\n return [language_tree_node.language for language_tree_node in language_tree_nodes]\n\n @property\n def default_language(self):\n tree_root = self.language_tree_nodes.filter(level=0).first()\n return tree_root.language if tree_root else None\n\n @classmethod\n def get_current_region(cls, request):\n if not hasattr(request, 'resolver_match'):\n return None\n region_slug = request.resolver_match.kwargs.get('region_slug')\n if not region_slug:\n return None\n region = cls.objects.filter(slug=region_slug)\n if not region.exists():\n raise Http404\n return region.first()\n\n def __str__(self):\n \"\"\"Function that provides a string representation of this object\n\n Returns: String\n \"\"\"\n return self.name\n\n class Meta:\n default_permissions = ()\n permissions = (\n ('manage_regions', 'Can manage regions'),\n )\n", "path": "backend/cms/models/region.py"}]} | 1,306 | 140 |
gh_patches_debug_16060 | rasdani/github-patches | git_diff | open-mmlab__mmdetection-3606 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
`benchmark.py`. ModuleNotFoundError: No module named 'tools.fuse_conv_bn'.
Recently, `tools/fuse_conv_bn.py` was removed in #3529, but `benchmark.py` still tries to import from the old script.
[https://github.com/open-mmlab/mmdetection/blob/master/tools/benchmark.py#L8](https://github.com/open-mmlab/mmdetection/blob/master/tools/benchmark.py#L8)
I think this might be causing the error.
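
One way to restore the import is to take the fuse helper from mmcv instead of the removed script; this is only a sketch and assumes the installed mmcv exposes `fuse_conv_bn` in `mmcv.cnn`:

```python
# Sketch: use mmcv's fuse helper instead of the removed tools/fuse_conv_bn.py.
# Assumes mmcv.cnn.fuse_conv_bn is available in the installed mmcv version.
from mmcv.cnn import fuse_conv_bn


def maybe_fuse(model, enabled: bool):
    """Optionally fuse conv+bn layers before benchmarking."""
    return fuse_conv_bn(model) if enabled else model
```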
**Reproduction**
1. What command or script did you run?
```
tools/benchmark.py
```
</issue>
<code>
[start of tools/benchmark.py]
1 import argparse
2 import time
3
4 import torch
5 from mmcv import Config
6 from mmcv.parallel import MMDataParallel
7 from mmcv.runner import load_checkpoint
8 from tools.fuse_conv_bn import fuse_module
9
10 from mmdet.core import wrap_fp16_model
11 from mmdet.datasets import build_dataloader, build_dataset
12 from mmdet.models import build_detector
13
14
15 def parse_args():
16 parser = argparse.ArgumentParser(description='MMDet benchmark a model')
17 parser.add_argument('config', help='test config file path')
18 parser.add_argument('checkpoint', help='checkpoint file')
19 parser.add_argument(
20 '--log-interval', default=50, help='interval of logging')
21 parser.add_argument(
22 '--fuse-conv-bn',
23 action='store_true',
24 help='Whether to fuse conv and bn, this will slightly increase'
25 'the inference speed')
26 args = parser.parse_args()
27 return args
28
29
30 def main():
31 args = parse_args()
32
33 cfg = Config.fromfile(args.config)
34 # set cudnn_benchmark
35 if cfg.get('cudnn_benchmark', False):
36 torch.backends.cudnn.benchmark = True
37 cfg.model.pretrained = None
38 cfg.data.test.test_mode = True
39
40 # build the dataloader
41 # TODO: support multiple images per gpu (only minor changes are needed)
42 dataset = build_dataset(cfg.data.test)
43 data_loader = build_dataloader(
44 dataset,
45 samples_per_gpu=1,
46 workers_per_gpu=cfg.data.workers_per_gpu,
47 dist=False,
48 shuffle=False)
49
50 # build the model and load checkpoint
51 model = build_detector(cfg.model, train_cfg=None, test_cfg=cfg.test_cfg)
52 fp16_cfg = cfg.get('fp16', None)
53 if fp16_cfg is not None:
54 wrap_fp16_model(model)
55 load_checkpoint(model, args.checkpoint, map_location='cpu')
56 if args.fuse_conv_bn:
57 model = fuse_module(model)
58
59 model = MMDataParallel(model, device_ids=[0])
60
61 model.eval()
62
63 # the first several iterations may be very slow so skip them
64 num_warmup = 5
65 pure_inf_time = 0
66
67 # benchmark with 2000 image and take the average
68 for i, data in enumerate(data_loader):
69
70 torch.cuda.synchronize()
71 start_time = time.perf_counter()
72
73 with torch.no_grad():
74 model(return_loss=False, rescale=True, **data)
75
76 torch.cuda.synchronize()
77 elapsed = time.perf_counter() - start_time
78
79 if i >= num_warmup:
80 pure_inf_time += elapsed
81 if (i + 1) % args.log_interval == 0:
82 fps = (i + 1 - num_warmup) / pure_inf_time
83 print(f'Done image [{i + 1:<3}/ 2000], fps: {fps:.1f} img / s')
84
85 if (i + 1) == 2000:
86 pure_inf_time += elapsed
87 fps = (i + 1 - num_warmup) / pure_inf_time
88 print(f'Overall fps: {fps:.1f} img / s')
89 break
90
91
92 if __name__ == '__main__':
93 main()
94
[end of tools/benchmark.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/tools/benchmark.py b/tools/benchmark.py
--- a/tools/benchmark.py
+++ b/tools/benchmark.py
@@ -3,9 +3,9 @@
import torch
from mmcv import Config
+from mmcv.cnn import fuse_conv_bn
from mmcv.parallel import MMDataParallel
from mmcv.runner import load_checkpoint
-from tools.fuse_conv_bn import fuse_module
from mmdet.core import wrap_fp16_model
from mmdet.datasets import build_dataloader, build_dataset
@@ -54,7 +54,7 @@
wrap_fp16_model(model)
load_checkpoint(model, args.checkpoint, map_location='cpu')
if args.fuse_conv_bn:
- model = fuse_module(model)
+ model = fuse_conv_bn(model)
model = MMDataParallel(model, device_ids=[0])
| {"golden_diff": "diff --git a/tools/benchmark.py b/tools/benchmark.py\n--- a/tools/benchmark.py\n+++ b/tools/benchmark.py\n@@ -3,9 +3,9 @@\n \n import torch\n from mmcv import Config\n+from mmcv.cnn import fuse_conv_bn\n from mmcv.parallel import MMDataParallel\n from mmcv.runner import load_checkpoint\n-from tools.fuse_conv_bn import fuse_module\n \n from mmdet.core import wrap_fp16_model\n from mmdet.datasets import build_dataloader, build_dataset\n@@ -54,7 +54,7 @@\n wrap_fp16_model(model)\n load_checkpoint(model, args.checkpoint, map_location='cpu')\n if args.fuse_conv_bn:\n- model = fuse_module(model)\n+ model = fuse_conv_bn(model)\n \n model = MMDataParallel(model, device_ids=[0])\n", "issue": "`benchmark.py`. ModuleNotFoundError: No module named 'tools.fuse_conv_bn'.\nRecently the `tools/fuse_conv_bn.py` was removed in #3529 . But the `benchmark.py` is still trying to use the old script. \r\n\r\n[https://github.com/open-mmlab/mmdetection/blob/master/tools/benchmark.py#L8](https://github.com/open-mmlab/mmdetection/blob/master/tools/benchmark.py#L8)\r\n\r\nI think this might be causing the error.\r\n\r\n**Reproduction**\r\n1. What command or script did you run?\r\n```\r\ntools/benchmark.py\r\n```\n", "before_files": [{"content": "import argparse\nimport time\n\nimport torch\nfrom mmcv import Config\nfrom mmcv.parallel import MMDataParallel\nfrom mmcv.runner import load_checkpoint\nfrom tools.fuse_conv_bn import fuse_module\n\nfrom mmdet.core import wrap_fp16_model\nfrom mmdet.datasets import build_dataloader, build_dataset\nfrom mmdet.models import build_detector\n\n\ndef parse_args():\n parser = argparse.ArgumentParser(description='MMDet benchmark a model')\n parser.add_argument('config', help='test config file path')\n parser.add_argument('checkpoint', help='checkpoint file')\n parser.add_argument(\n '--log-interval', default=50, help='interval of logging')\n parser.add_argument(\n '--fuse-conv-bn',\n action='store_true',\n help='Whether to fuse conv and bn, this will slightly increase'\n 'the inference speed')\n args = parser.parse_args()\n return args\n\n\ndef main():\n args = parse_args()\n\n cfg = Config.fromfile(args.config)\n # set cudnn_benchmark\n if cfg.get('cudnn_benchmark', False):\n torch.backends.cudnn.benchmark = True\n cfg.model.pretrained = None\n cfg.data.test.test_mode = True\n\n # build the dataloader\n # TODO: support multiple images per gpu (only minor changes are needed)\n dataset = build_dataset(cfg.data.test)\n data_loader = build_dataloader(\n dataset,\n samples_per_gpu=1,\n workers_per_gpu=cfg.data.workers_per_gpu,\n dist=False,\n shuffle=False)\n\n # build the model and load checkpoint\n model = build_detector(cfg.model, train_cfg=None, test_cfg=cfg.test_cfg)\n fp16_cfg = cfg.get('fp16', None)\n if fp16_cfg is not None:\n wrap_fp16_model(model)\n load_checkpoint(model, args.checkpoint, map_location='cpu')\n if args.fuse_conv_bn:\n model = fuse_module(model)\n\n model = MMDataParallel(model, device_ids=[0])\n\n model.eval()\n\n # the first several iterations may be very slow so skip them\n num_warmup = 5\n pure_inf_time = 0\n\n # benchmark with 2000 image and take the average\n for i, data in enumerate(data_loader):\n\n torch.cuda.synchronize()\n start_time = time.perf_counter()\n\n with torch.no_grad():\n model(return_loss=False, rescale=True, **data)\n\n torch.cuda.synchronize()\n elapsed = time.perf_counter() - start_time\n\n if i >= num_warmup:\n pure_inf_time += elapsed\n if (i + 1) % args.log_interval == 0:\n fps = (i + 1 - num_warmup) / 
pure_inf_time\n print(f'Done image [{i + 1:<3}/ 2000], fps: {fps:.1f} img / s')\n\n if (i + 1) == 2000:\n pure_inf_time += elapsed\n fps = (i + 1 - num_warmup) / pure_inf_time\n print(f'Overall fps: {fps:.1f} img / s')\n break\n\n\nif __name__ == '__main__':\n main()\n", "path": "tools/benchmark.py"}]} | 1,564 | 185 |
gh_patches_debug_1614 | rasdani/github-patches | git_diff | Nitrate__Nitrate-381 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Mark Nitrate as not zip_safe
Add `zip_safe=False` to `setup.py` because Nitrate cannot run from a zip file directly.
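
For reference, a minimal sketch of the flag in a `setup()` call, trimmed to the relevant arguments:

```python
# Sketch: declare the package as not zip-safe so it is always installed unpacked.
from setuptools import setup, find_packages

setup(
    name="Nitrate",
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,  # Nitrate cannot run directly from a zipped archive
)
```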
</issue>
<code>
[start of setup.py]
1 # -*- coding: utf-8 -*-
2
3 import sys
4
5 from setuptools import setup, find_packages
6
7
8 with open('VERSION.txt', 'r') as f:
9 pkg_version = f.read().strip()
10
11
12 def get_long_description():
13 with open('README.rst', 'r') as f:
14 return f.read()
15
16
17 install_requires = [
18 'beautifulsoup4 >= 4.1.1',
19 'django >= 1.11,<3.0',
20 'django-contrib-comments == 1.8.0',
21 'django-tinymce == 2.7.0',
22 'django-uuslug == 1.1.8',
23 'html2text',
24 'odfpy >= 0.9.6',
25 'python-bugzilla',
26 'six',
27 'xmltodict',
28 'kobo == 0.9.0'
29 ]
30
31 if sys.version_info.major < 3:
32 install_requires += [
33 'enum34',
34 ]
35
36 extras_require = {
37 'mysql': ['PyMySQL == 0.9.2'],
38 'pgsql': ['psycopg2 == 2.7.5'],
39
40 # Required for tcms.core.contrib.auth.backends.KerberosBackend
41 'krbauth': [
42 'kerberos == 1.2.5'
43 ],
44
45 # Packages for building documentation
46 'docs': [
47 'Sphinx >= 1.1.2',
48 'sphinx_rtd_theme',
49 ],
50
51 # Necessary packages for running tests
52 'tests': [
53 'beautifulsoup4',
54 'coverage',
55 'factory_boy',
56 'flake8',
57 'mock',
58 'pytest',
59 'pytest-cov',
60 'pytest-django',
61 ],
62
63 # Contain tools that assists the development
64 'devtools': [
65 'django-debug-toolbar == 1.7',
66 'tox',
67 'django-extensions',
68 'pygraphviz',
69 'future-breakpoint',
70 ],
71
72 # Required packages required to run async tasks
73 'async': [
74 'celery == 4.2.0',
75 ]
76 }
77
78 setup(
79 name='Nitrate',
80 version=pkg_version,
81 description='Test Case Management System',
82 long_description=get_long_description(),
83 author='Nitrate Team',
84 maintainer='Chenxiong Qi',
85 maintainer_email='[email protected]',
86 url='https://github.com/Nitrate/Nitrate/',
87 license='GPLv2+',
88 keywords='test case',
89 install_requires=install_requires,
90 extras_require=extras_require,
91 packages=find_packages(),
92 include_package_data=True,
93 classifiers=[
94 'Framework :: Django',
95 'Framework :: Django :: 1.11',
96 'Framework :: Django :: 2.0',
97 'Framework :: Django :: 2.1',
98 'Intended Audience :: Developers',
99 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
100 'Programming Language :: Python :: 2',
101 'Programming Language :: Python :: 2.7',
102 'Programming Language :: Python :: 3',
103 'Programming Language :: Python :: 3.6',
104 'Programming Language :: Python :: 3.7',
105 'Topic :: Software Development :: Quality Assurance',
106 'Topic :: Software Development :: Testing',
107 ],
108 project_urls={
109 'Issue Tracker': 'https://github.com/Nitrate/Nitrate/issues',
110 'Source Code': 'https://github.com/Nitrate/Nitrate',
111 'Documentation': 'https://nitrate.readthedocs.io/',
112 },
113 )
114
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -90,6 +90,7 @@
extras_require=extras_require,
packages=find_packages(),
include_package_data=True,
+ zip_safe=False,
classifiers=[
'Framework :: Django',
'Framework :: Django :: 1.11',
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -90,6 +90,7 @@\n extras_require=extras_require,\n packages=find_packages(),\n include_package_data=True,\n+ zip_safe=False,\n classifiers=[\n 'Framework :: Django',\n 'Framework :: Django :: 1.11',\n", "issue": "Mark Nitrate as not zip_safe\nAdd `zip_safe=False` to `setup.py` because Nitrate cannot run from a zip file directly.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\nwith open('VERSION.txt', 'r') as f:\n pkg_version = f.read().strip()\n\n\ndef get_long_description():\n with open('README.rst', 'r') as f:\n return f.read()\n\n\ninstall_requires = [\n 'beautifulsoup4 >= 4.1.1',\n 'django >= 1.11,<3.0',\n 'django-contrib-comments == 1.8.0',\n 'django-tinymce == 2.7.0',\n 'django-uuslug == 1.1.8',\n 'html2text',\n 'odfpy >= 0.9.6',\n 'python-bugzilla',\n 'six',\n 'xmltodict',\n 'kobo == 0.9.0'\n]\n\nif sys.version_info.major < 3:\n install_requires += [\n 'enum34',\n ]\n\nextras_require = {\n 'mysql': ['PyMySQL == 0.9.2'],\n 'pgsql': ['psycopg2 == 2.7.5'],\n\n # Required for tcms.core.contrib.auth.backends.KerberosBackend\n 'krbauth': [\n 'kerberos == 1.2.5'\n ],\n\n # Packages for building documentation\n 'docs': [\n 'Sphinx >= 1.1.2',\n 'sphinx_rtd_theme',\n ],\n\n # Necessary packages for running tests\n 'tests': [\n 'beautifulsoup4',\n 'coverage',\n 'factory_boy',\n 'flake8',\n 'mock',\n 'pytest',\n 'pytest-cov',\n 'pytest-django',\n ],\n\n # Contain tools that assists the development\n 'devtools': [\n 'django-debug-toolbar == 1.7',\n 'tox',\n 'django-extensions',\n 'pygraphviz',\n 'future-breakpoint',\n ],\n\n # Required packages required to run async tasks\n 'async': [\n 'celery == 4.2.0',\n ]\n}\n\nsetup(\n name='Nitrate',\n version=pkg_version,\n description='Test Case Management System',\n long_description=get_long_description(),\n author='Nitrate Team',\n maintainer='Chenxiong Qi',\n maintainer_email='[email protected]',\n url='https://github.com/Nitrate/Nitrate/',\n license='GPLv2+',\n keywords='test case',\n install_requires=install_requires,\n extras_require=extras_require,\n packages=find_packages(),\n include_package_data=True,\n classifiers=[\n 'Framework :: Django',\n 'Framework :: Django :: 1.11',\n 'Framework :: Django :: 2.0',\n 'Framework :: Django :: 2.1',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Software Development :: Quality Assurance',\n 'Topic :: Software Development :: Testing',\n ],\n project_urls={\n 'Issue Tracker': 'https://github.com/Nitrate/Nitrate/issues',\n 'Source Code': 'https://github.com/Nitrate/Nitrate',\n 'Documentation': 'https://nitrate.readthedocs.io/',\n },\n)\n", "path": "setup.py"}]} | 1,576 | 78 |
gh_patches_debug_13488 | rasdani/github-patches | git_diff | blakeblackshear__frigate-5021 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[Config Support]: #backchannel=0 argument isn't being passed to go2rtc
### Describe the problem you are having
Looks like the #backchannel=0 argument isn't being passed to go2rtc from the Frigate config file.
I've even added quotes (' ') around the input path, since I'm aware that Frigate treats # as the start of a comment.
This is causing my doorbell button press to not work.
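
To illustrate the idea, the relay URL that Frigate registers with go2rtc would need to keep the suffix explicitly; the helper below is a simplified stand-in for `frigate.util.escape_special_characters`, so treat this as a sketch rather than Frigate's actual code path:

```python
# Sketch: keep the backchannel=0 suffix on the relay URL handed to go2rtc.
def escape_special_characters(url: str) -> str:
    return url  # simplified placeholder for frigate.util.escape_special_characters


def build_relay_path(input_path: str) -> str:
    return f"{escape_special_characters(input_path)}#backchannel=0"


print(build_relay_path("rtsp://admin:[email protected]:554/cam/realmonitor?channel=1&subtype=0"))
```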
### Version
0.12.0-0dbf909
### Frigate config file
```yaml
cameras:
frontdoor:
ffmpeg:
inputs:
- path: 'rtsp://admin:[email protected]:554/cam/realmonitor?channel=1&subtype=0&#backchannel=0'
roles:
- restream
- path: rtsp://localhost:8554/frontdoor
roles:
- record
```
### Relevant log output
```shell
N/A
```
### Frigate stats
_No response_
### Operating system
Debian
### Install method
Docker Compose
### Coral version
CPU (no coral)
### Any other information that may be helpful
_No response_
</issue>
<code>
[start of frigate/restream.py]
1 """Controls go2rtc restream."""
2
3
4 import logging
5 import requests
6
7 from typing import Optional
8
9 from frigate.config import FrigateConfig, RestreamCodecEnum
10 from frigate.const import BIRDSEYE_PIPE
11 from frigate.ffmpeg_presets import (
12 parse_preset_hardware_acceleration_encode,
13 parse_preset_hardware_acceleration_go2rtc_engine,
14 )
15 from frigate.util import escape_special_characters
16
17 logger = logging.getLogger(__name__)
18
19
20 def get_manual_go2rtc_stream(
21 camera_url: str, codec: RestreamCodecEnum, engine: Optional[str]
22 ) -> str:
23 """Get a manual stream for go2rtc."""
24 if codec == RestreamCodecEnum.copy:
25 return f"ffmpeg:{camera_url}#video=copy#audio=aac#audio=opus"
26
27 if engine:
28 return (
29 f"ffmpeg:{camera_url}#video={codec}#hardware={engine}#audio=aac#audio=opus"
30 )
31
32 return f"ffmpeg:{camera_url}#video={codec}#audio=aac#audio=opus"
33
34
35 class RestreamApi:
36 """Control go2rtc relay API."""
37
38 def __init__(self, config: FrigateConfig) -> None:
39 self.config: FrigateConfig = config
40
41 def add_cameras(self) -> None:
42 """Add cameras to go2rtc."""
43 self.relays: dict[str, str] = {}
44
45 for cam_name, camera in self.config.cameras.items():
46 if not camera.restream.enabled:
47 continue
48
49 for input in camera.ffmpeg.inputs:
50 if "restream" in input.roles:
51 if (
52 input.path.startswith("rtsp")
53 and not camera.restream.force_audio
54 ):
55 self.relays[cam_name] = escape_special_characters(input.path)
56 else:
57 # go2rtc only supports rtsp for direct relay, otherwise ffmpeg is used
58 self.relays[cam_name] = get_manual_go2rtc_stream(
59 escape_special_characters(input.path),
60 camera.restream.video_encoding,
61 parse_preset_hardware_acceleration_go2rtc_engine(
62 self.config.ffmpeg.hwaccel_args
63 ),
64 )
65
66 if self.config.restream.birdseye:
67 self.relays[
68 "birdseye"
69 ] = f"exec:{parse_preset_hardware_acceleration_encode(self.config.ffmpeg.hwaccel_args, f'-f rawvideo -pix_fmt yuv420p -video_size {self.config.birdseye.width}x{self.config.birdseye.height} -r 10 -i {BIRDSEYE_PIPE}', '-rtsp_transport tcp -f rtsp {output}')}"
70
71 for name, path in self.relays.items():
72 params = {"src": path, "name": name}
73 requests.put("http://127.0.0.1:1984/api/streams", params=params)
74
[end of frigate/restream.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/frigate/restream.py b/frigate/restream.py
--- a/frigate/restream.py
+++ b/frigate/restream.py
@@ -52,7 +52,9 @@
input.path.startswith("rtsp")
and not camera.restream.force_audio
):
- self.relays[cam_name] = escape_special_characters(input.path)
+ self.relays[
+ cam_name
+ ] = f"{escape_special_characters(input.path)}#backchannel=0"
else:
# go2rtc only supports rtsp for direct relay, otherwise ffmpeg is used
self.relays[cam_name] = get_manual_go2rtc_stream(
| {"golden_diff": "diff --git a/frigate/restream.py b/frigate/restream.py\n--- a/frigate/restream.py\n+++ b/frigate/restream.py\n@@ -52,7 +52,9 @@\n input.path.startswith(\"rtsp\")\n and not camera.restream.force_audio\n ):\n- self.relays[cam_name] = escape_special_characters(input.path)\n+ self.relays[\n+ cam_name\n+ ] = f\"{escape_special_characters(input.path)}#backchannel=0\"\n else:\n # go2rtc only supports rtsp for direct relay, otherwise ffmpeg is used\n self.relays[cam_name] = get_manual_go2rtc_stream(\n", "issue": "[Config Support]: #backchannel=0 argument isn't being passed to go2rtc\n### Describe the problem you are having\n\nLooks like the #backchannel=0 argument isn't being passed to go2rtc from the Frigate config file. \r\nI've even added ' ' around the input as I'm aware that Frigate sees # as comments.\r\n\r\nThis is causing my doorbell button press to not work. \n\n### Version\n\n0.12.0-0dbf909\n\n### Frigate config file\n\n```yaml\ncameras:\r\n frontdoor:\r\n ffmpeg:\r\n inputs:\r\n - path: 'rtsp://admin:[email protected]:554/cam/realmonitor?channel=1&subtype=0&#backchannel=0'\r\n roles:\r\n - restream\r\n - path: rtsp://localhost:8554/frontdoor\r\n roles:\r\n - record\n```\n\n\n### Relevant log output\n\n```shell\nN/A\n```\n\n\n### Frigate stats\n\n_No response_\n\n### Operating system\n\nDebian\n\n### Install method\n\nDocker Compose\n\n### Coral version\n\nCPU (no coral)\n\n### Any other information that may be helpful\n\n_No response_\n", "before_files": [{"content": "\"\"\"Controls go2rtc restream.\"\"\"\n\n\nimport logging\nimport requests\n\nfrom typing import Optional\n\nfrom frigate.config import FrigateConfig, RestreamCodecEnum\nfrom frigate.const import BIRDSEYE_PIPE\nfrom frigate.ffmpeg_presets import (\n parse_preset_hardware_acceleration_encode,\n parse_preset_hardware_acceleration_go2rtc_engine,\n)\nfrom frigate.util import escape_special_characters\n\nlogger = logging.getLogger(__name__)\n\n\ndef get_manual_go2rtc_stream(\n camera_url: str, codec: RestreamCodecEnum, engine: Optional[str]\n) -> str:\n \"\"\"Get a manual stream for go2rtc.\"\"\"\n if codec == RestreamCodecEnum.copy:\n return f\"ffmpeg:{camera_url}#video=copy#audio=aac#audio=opus\"\n\n if engine:\n return (\n f\"ffmpeg:{camera_url}#video={codec}#hardware={engine}#audio=aac#audio=opus\"\n )\n\n return f\"ffmpeg:{camera_url}#video={codec}#audio=aac#audio=opus\"\n\n\nclass RestreamApi:\n \"\"\"Control go2rtc relay API.\"\"\"\n\n def __init__(self, config: FrigateConfig) -> None:\n self.config: FrigateConfig = config\n\n def add_cameras(self) -> None:\n \"\"\"Add cameras to go2rtc.\"\"\"\n self.relays: dict[str, str] = {}\n\n for cam_name, camera in self.config.cameras.items():\n if not camera.restream.enabled:\n continue\n\n for input in camera.ffmpeg.inputs:\n if \"restream\" in input.roles:\n if (\n input.path.startswith(\"rtsp\")\n and not camera.restream.force_audio\n ):\n self.relays[cam_name] = escape_special_characters(input.path)\n else:\n # go2rtc only supports rtsp for direct relay, otherwise ffmpeg is used\n self.relays[cam_name] = get_manual_go2rtc_stream(\n escape_special_characters(input.path),\n camera.restream.video_encoding,\n parse_preset_hardware_acceleration_go2rtc_engine(\n self.config.ffmpeg.hwaccel_args\n ),\n )\n\n if self.config.restream.birdseye:\n self.relays[\n \"birdseye\"\n ] = f\"exec:{parse_preset_hardware_acceleration_encode(self.config.ffmpeg.hwaccel_args, f'-f rawvideo -pix_fmt yuv420p -video_size {self.config.birdseye.width}x{self.config.birdseye.height} -r 10 
-i {BIRDSEYE_PIPE}', '-rtsp_transport tcp -f rtsp {output}')}\"\n\n for name, path in self.relays.items():\n params = {\"src\": path, \"name\": name}\n requests.put(\"http://127.0.0.1:1984/api/streams\", params=params)\n", "path": "frigate/restream.py"}]} | 1,571 | 146 |
gh_patches_debug_1193 | rasdani/github-patches | git_diff | conda__conda-3740 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
conda env create giving ImportError for yaml package
`conda env create` suddenly started giving `"ImportError: No module named 'yaml'"` with the latest Miniconda on my Travis CI builds: https://travis-ci.org/leouieda/website/builds/170917743
I changed nothing significant in my code. Tried rebuilding previous passing builds and started getting the same error.
Is this something from a recent release?
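
If PyYAML itself is what is missing from the environment, one defensive option is to obtain yaml through conda's own helper instead of a top-level import; this is a sketch that assumes `conda.common.yaml.get_yaml()` exists in the installed conda release:

```python
# Sketch: prefer conda's vendored yaml helper, falling back to system PyYAML.
try:
    from conda.common.yaml import get_yaml
    yaml = get_yaml()
except ImportError:
    import yaml  # requires PyYAML to be installed in the environment

print(yaml.dump({"name": "test-env", "dependencies": ["python=3.5"]}))
```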
</issue>
<code>
[start of conda_env/yaml.py]
1 """
2 Wrapper around yaml to ensure that everything is ordered correctly.
3
4 This is based on the answer at http://stackoverflow.com/a/16782282
5 """
6 from __future__ import absolute_import, print_function
7 from collections import OrderedDict
8 import yaml
9
10
11 def represent_ordereddict(dumper, data):
12 value = []
13
14 for item_key, item_value in data.items():
15 node_key = dumper.represent_data(item_key)
16 node_value = dumper.represent_data(item_value)
17
18 value.append((node_key, node_value))
19
20 return yaml.nodes.MappingNode(u'tag:yaml.org,2002:map', value)
21
22 yaml.add_representer(OrderedDict, represent_ordereddict)
23
24 dump = yaml.dump
25 load = yaml.load
26 dict = OrderedDict
27
[end of conda_env/yaml.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/conda_env/yaml.py b/conda_env/yaml.py
--- a/conda_env/yaml.py
+++ b/conda_env/yaml.py
@@ -5,7 +5,9 @@
"""
from __future__ import absolute_import, print_function
from collections import OrderedDict
-import yaml
+
+from conda.common.yaml import get_yaml
+yaml = get_yaml()
def represent_ordereddict(dumper, data):
| {"golden_diff": "diff --git a/conda_env/yaml.py b/conda_env/yaml.py\n--- a/conda_env/yaml.py\n+++ b/conda_env/yaml.py\n@@ -5,7 +5,9 @@\n \"\"\"\n from __future__ import absolute_import, print_function\n from collections import OrderedDict\n-import yaml\n+\n+from conda.common.yaml import get_yaml\n+yaml = get_yaml()\n \n \n def represent_ordereddict(dumper, data):\n", "issue": "conda env create giving ImportError for yaml package\n`conda env create` suddenly started giving `\"ImportError: No module named 'yaml'\"` with latest miniconda on my TravisCI builbs: https://travis-ci.org/leouieda/website/builds/170917743\n\nI changed nothing significant in my code. Tried rebuilding previous passing builds and started getting the same error. \n\nIs this something from a recent release?\n\nconda env create giving ImportError for yaml package\n`conda env create` suddenly started giving `\"ImportError: No module named 'yaml'\"` with latest miniconda on my TravisCI builbs: https://travis-ci.org/leouieda/website/builds/170917743\n\nI changed nothing significant in my code. Tried rebuilding previous passing builds and started getting the same error. \n\nIs this something from a recent release?\n\n", "before_files": [{"content": "\"\"\"\nWrapper around yaml to ensure that everything is ordered correctly.\n\nThis is based on the answer at http://stackoverflow.com/a/16782282\n\"\"\"\nfrom __future__ import absolute_import, print_function\nfrom collections import OrderedDict\nimport yaml\n\n\ndef represent_ordereddict(dumper, data):\n value = []\n\n for item_key, item_value in data.items():\n node_key = dumper.represent_data(item_key)\n node_value = dumper.represent_data(item_value)\n\n value.append((node_key, node_value))\n\n return yaml.nodes.MappingNode(u'tag:yaml.org,2002:map', value)\n\nyaml.add_representer(OrderedDict, represent_ordereddict)\n\ndump = yaml.dump\nload = yaml.load\ndict = OrderedDict\n", "path": "conda_env/yaml.py"}]} | 944 | 94 |
gh_patches_debug_16516 | rasdani/github-patches | git_diff | keras-team__autokeras-1164 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Multi-label classification `predict` method returns probabilities rather than calibrated labels.
### Bug Description
Multi-label classification's `predict()` method returns probabilities rather than calibrated labels.
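
For illustration, calibrating multi-label probabilities into 0/1 labels is typically just a thresholding step; the 0.5 cut-off and the probability values below are made-up examples, not output from the notebook:

```python
# Sketch: turn multi-label probabilities into 0/1 labels with a 0.5 threshold.
import numpy as np

probabilities = np.array([[0.91, 0.12, 0.66],
                          [0.08, 0.55, 0.43]])  # made-up example values
labels = (probabilities > 0.5).astype(int)
print(labels)  # each row is one sample's 0/1 label vector
```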
### Bug Reproduction
https://github.com/datamllab/automl-in-action-notebooks/blob/master/3.3.4-Task-API-Multi-label.ipynb
### Setup Details
Include the details about the versions of:
- OS type and version:
- Python:
- autokeras: 1.0.2
- keras-tuner:
- scikit-learn:
- numpy:
- pandas:
- tensorflow: 2.1.0
</issue>
<code>
[start of autokeras/adapters/output_adapter.py]
1 import numpy as np
2 import pandas as pd
3 import tensorflow as tf
4
5 from autokeras import encoders
6 from autokeras.engine import adapter as adapter_module
7 from autokeras.utils import data_utils
8
9
10 class HeadAdapter(adapter_module.Adapter):
11
12 def __init__(self, name, **kwargs):
13 super().__init__(**kwargs)
14 self.name = name
15
16 def check(self, dataset):
17 supported_types = (tf.data.Dataset, np.ndarray, pd.DataFrame, pd.Series)
18 if not isinstance(dataset, supported_types):
19 raise TypeError('Expect the target data of {name} to be tf.data.Dataset,'
20 ' np.ndarray, pd.DataFrame or pd.Series, but got {type}.'
21 .format(name=self.name, type=type(dataset)))
22
23 def convert_to_dataset(self, dataset):
24 if isinstance(dataset, np.ndarray):
25 if len(dataset.shape) == 1:
26 dataset = dataset.reshape(-1, 1)
27 if isinstance(dataset, pd.DataFrame):
28 dataset = dataset.values
29 if isinstance(dataset, pd.Series):
30 dataset = dataset.values.reshape(-1, 1)
31 return super().convert_to_dataset(dataset)
32
33 def postprocess(self, y):
34 """Postprocess the output of the Keras Model."""
35 return y
36
37 def get_config(self):
38 config = super().get_config()
39 config.update({
40 'name': self.name,
41 })
42 return config
43
44
45 class ClassificationHeadAdapter(HeadAdapter):
46
47 def __init__(self,
48 num_classes=None,
49 multi_label=False,
50 **kwargs):
51 super().__init__(**kwargs)
52 self.num_classes = num_classes
53 self.label_encoder = None
54 self.multi_label = multi_label
55
56 def get_config(self):
57 config = super().get_config()
58 config.update({
59 'encoder': encoders.serialize(self.label_encoder),
60 })
61 return config
62
63 @classmethod
64 def from_config(cls, config):
65 obj = super().from_config(config)
66 obj.label_encoder = encoders.deserialize(config['encoder'])
67
68 def fit_before_convert(self, dataset):
69 """Fit the encoder."""
70 # If in tf.data.Dataset, must be encoded already.
71 if isinstance(dataset, tf.data.Dataset):
72 return
73
74 # Convert the data to np.ndarray.
75 if isinstance(dataset, pd.DataFrame):
76 dataset = dataset.values
77 if isinstance(dataset, pd.Series):
78 dataset = dataset.values.reshape(-1, 1)
79
80 # If encoded.
81 if len(dataset.flatten()) != len(dataset):
82 if self.num_classes:
83 self._check_data_shape(dataset.shape[1:])
84 return
85
86 # Fit encoder.
87 labels = set(dataset.flatten())
88 if len(labels) < 2:
89 raise ValueError('Expect the target data for {name} to have '
90 'at least 2 classes, but got {num_classes}.'
91 .format(name=self.name, num_classes=self.num_classes))
92 if len(labels) == 2 and not self.multi_label:
93 self.label_encoder = encoders.LabelEncoder()
94 else:
95 self.label_encoder = encoders.OneHotEncoder()
96 self.label_encoder.fit(dataset)
97
98 def convert_to_dataset(self, dataset):
99 if self.label_encoder:
100 dataset = self.label_encoder.encode(dataset)
101 return super().convert_to_dataset(dataset)
102
103 def fit(self, dataset):
104 super().fit(dataset)
105 shape = tuple(data_utils.dataset_shape(dataset).as_list()[1:])
106 # Infer the num_classes.
107 if not self.num_classes:
108 # Single column with 0s and 1s.
109 if shape == (1,):
110 self.num_classes = 2
111 else:
112 self.num_classes = shape[0]
113 return
114
115 # Compute expected shape from num_classes.
116 if self.num_classes == 2 and not self.multi_label:
117 expected = (1,)
118 else:
119 expected = (self.num_classes,)
120
121 # Check shape equals expected shape.
122 if shape != expected:
123 raise ValueError('Expect the target data for {name} to have '
124 'shape {expected}, but got {actual}.'
125 .format(name=self.name, expected=expected,
126 actual=shape))
127
128 def postprocess(self, y):
129 if self.label_encoder:
130 y = self.label_encoder.decode(y)
131 return y
132
133
134 class RegressionHeadAdapter(HeadAdapter):
135 pass
136
137
138 class SegmentationHeadAdapter(ClassificationHeadAdapter):
139 pass
140
[end of autokeras/adapters/output_adapter.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/autokeras/adapters/output_adapter.py b/autokeras/adapters/output_adapter.py
--- a/autokeras/adapters/output_adapter.py
+++ b/autokeras/adapters/output_adapter.py
@@ -78,6 +78,7 @@
dataset = dataset.values.reshape(-1, 1)
# If encoded.
+ # TODO: support raw string labels for multi-label.
if len(dataset.flatten()) != len(dataset):
if self.num_classes:
self._check_data_shape(dataset.shape[1:])
@@ -126,6 +127,9 @@
actual=shape))
def postprocess(self, y):
+ if self.multi_label:
+ y[y < 0.5] = 0
+ y[y > 0.5] = 1
if self.label_encoder:
y = self.label_encoder.decode(y)
return y
| {"golden_diff": "diff --git a/autokeras/adapters/output_adapter.py b/autokeras/adapters/output_adapter.py\n--- a/autokeras/adapters/output_adapter.py\n+++ b/autokeras/adapters/output_adapter.py\n@@ -78,6 +78,7 @@\n dataset = dataset.values.reshape(-1, 1)\n \n # If encoded.\n+ # TODO: support raw string labels for multi-label.\n if len(dataset.flatten()) != len(dataset):\n if self.num_classes:\n self._check_data_shape(dataset.shape[1:])\n@@ -126,6 +127,9 @@\n actual=shape))\n \n def postprocess(self, y):\n+ if self.multi_label:\n+ y[y < 0.5] = 0\n+ y[y > 0.5] = 1\n if self.label_encoder:\n y = self.label_encoder.decode(y)\n return y\n", "issue": "Multi-label classification `predict` method return probabilities rather than calibrated labels.\n### Bug Description\r\n<!---\r\nA clear and concise description of what the bug is.\r\n-->\r\n\r\nMulti-label classification `predict()` method return probabilities rather than calibrated labels.\r\n\r\n\r\n### Bug Reproduction\r\nhttps://github.com/datamllab/automl-in-action-notebooks/blob/master/3.3.4-Task-API-Multi-label.ipynb\r\n\r\n\r\n### Setup Details\r\nInclude the details about the versions of:\r\n - OS type and version:\r\n - Python: \r\n - autokeras: 1.0.2\r\n - keras-tuner:\r\n - scikit-learn:\r\n - numpy:\r\n - pandas:\r\n - tensorflow: 2.1.0\r\n\r\n\n", "before_files": [{"content": "import numpy as np\nimport pandas as pd\nimport tensorflow as tf\n\nfrom autokeras import encoders\nfrom autokeras.engine import adapter as adapter_module\nfrom autokeras.utils import data_utils\n\n\nclass HeadAdapter(adapter_module.Adapter):\n\n def __init__(self, name, **kwargs):\n super().__init__(**kwargs)\n self.name = name\n\n def check(self, dataset):\n supported_types = (tf.data.Dataset, np.ndarray, pd.DataFrame, pd.Series)\n if not isinstance(dataset, supported_types):\n raise TypeError('Expect the target data of {name} to be tf.data.Dataset,'\n ' np.ndarray, pd.DataFrame or pd.Series, but got {type}.'\n .format(name=self.name, type=type(dataset)))\n\n def convert_to_dataset(self, dataset):\n if isinstance(dataset, np.ndarray):\n if len(dataset.shape) == 1:\n dataset = dataset.reshape(-1, 1)\n if isinstance(dataset, pd.DataFrame):\n dataset = dataset.values\n if isinstance(dataset, pd.Series):\n dataset = dataset.values.reshape(-1, 1)\n return super().convert_to_dataset(dataset)\n\n def postprocess(self, y):\n \"\"\"Postprocess the output of the Keras Model.\"\"\"\n return y\n\n def get_config(self):\n config = super().get_config()\n config.update({\n 'name': self.name,\n })\n return config\n\n\nclass ClassificationHeadAdapter(HeadAdapter):\n\n def __init__(self,\n num_classes=None,\n multi_label=False,\n **kwargs):\n super().__init__(**kwargs)\n self.num_classes = num_classes\n self.label_encoder = None\n self.multi_label = multi_label\n\n def get_config(self):\n config = super().get_config()\n config.update({\n 'encoder': encoders.serialize(self.label_encoder),\n })\n return config\n\n @classmethod\n def from_config(cls, config):\n obj = super().from_config(config)\n obj.label_encoder = encoders.deserialize(config['encoder'])\n\n def fit_before_convert(self, dataset):\n \"\"\"Fit the encoder.\"\"\"\n # If in tf.data.Dataset, must be encoded already.\n if isinstance(dataset, tf.data.Dataset):\n return\n\n # Convert the data to np.ndarray.\n if isinstance(dataset, pd.DataFrame):\n dataset = dataset.values\n if isinstance(dataset, pd.Series):\n dataset = dataset.values.reshape(-1, 1)\n\n # If encoded.\n if len(dataset.flatten()) != len(dataset):\n if 
self.num_classes:\n self._check_data_shape(dataset.shape[1:])\n return\n\n # Fit encoder.\n labels = set(dataset.flatten())\n if len(labels) < 2:\n raise ValueError('Expect the target data for {name} to have '\n 'at least 2 classes, but got {num_classes}.'\n .format(name=self.name, num_classes=self.num_classes))\n if len(labels) == 2 and not self.multi_label:\n self.label_encoder = encoders.LabelEncoder()\n else:\n self.label_encoder = encoders.OneHotEncoder()\n self.label_encoder.fit(dataset)\n\n def convert_to_dataset(self, dataset):\n if self.label_encoder:\n dataset = self.label_encoder.encode(dataset)\n return super().convert_to_dataset(dataset)\n\n def fit(self, dataset):\n super().fit(dataset)\n shape = tuple(data_utils.dataset_shape(dataset).as_list()[1:])\n # Infer the num_classes.\n if not self.num_classes:\n # Single column with 0s and 1s.\n if shape == (1,):\n self.num_classes = 2\n else:\n self.num_classes = shape[0]\n return\n\n # Compute expected shape from num_classes.\n if self.num_classes == 2 and not self.multi_label:\n expected = (1,)\n else:\n expected = (self.num_classes,)\n\n # Check shape equals expected shape.\n if shape != expected:\n raise ValueError('Expect the target data for {name} to have '\n 'shape {expected}, but got {actual}.'\n .format(name=self.name, expected=expected,\n actual=shape))\n\n def postprocess(self, y):\n if self.label_encoder:\n y = self.label_encoder.decode(y)\n return y\n\n\nclass RegressionHeadAdapter(HeadAdapter):\n pass\n\n\nclass SegmentationHeadAdapter(ClassificationHeadAdapter):\n pass\n", "path": "autokeras/adapters/output_adapter.py"}]} | 1,945 | 201 |
gh_patches_debug_2868 | rasdani/github-patches | git_diff | tobymao__sqlglot-2165 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Spark raw string support, commonly used with regexes
This fails with sqlglot:
```python
import sqlglot
sql = """select regexp_replace('100-200', r'([^0-9])', '')"""
sqlglot.parse_one(sql, read="databricks")
```
**Official Documentation**
https://spark.apache.org/docs/latest/sql-ref-literals.html
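For reference, once raw strings are handled the snippet above should parse and round-trip along these lines (the exact generated SQL is a guess on my part):
```python
import sqlglot

sql = "select regexp_replace('100-200', r'([^0-9])', '')"
expression = sqlglot.parse_one(sql, read="databricks")  # should no longer raise
print(expression.sql(dialect="databricks"))
# e.g. SELECT REGEXP_REPLACE('100-200', '([^0-9])', '')
```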
</issue>
<code>
[start of sqlglot/dialects/spark.py]
1 from __future__ import annotations
2
3 import typing as t
4
5 from sqlglot import exp
6 from sqlglot.dialects.dialect import rename_func
7 from sqlglot.dialects.spark2 import Spark2
8 from sqlglot.helper import seq_get
9
10
11 def _parse_datediff(args: t.List) -> exp.Expression:
12 """
13 Although Spark docs don't mention the "unit" argument, Spark3 added support for
14 it at some point. Databricks also supports this variant (see below).
15
16 For example, in spark-sql (v3.3.1):
17 - SELECT DATEDIFF('2020-01-01', '2020-01-05') results in -4
18 - SELECT DATEDIFF(day, '2020-01-01', '2020-01-05') results in 4
19
20 See also:
21 - https://docs.databricks.com/sql/language-manual/functions/datediff3.html
22 - https://docs.databricks.com/sql/language-manual/functions/datediff.html
23 """
24 unit = None
25 this = seq_get(args, 0)
26 expression = seq_get(args, 1)
27
28 if len(args) == 3:
29 unit = this
30 this = args[2]
31
32 return exp.DateDiff(
33 this=exp.TsOrDsToDate(this=this), expression=exp.TsOrDsToDate(this=expression), unit=unit
34 )
35
36
37 class Spark(Spark2):
38 class Parser(Spark2.Parser):
39 FUNCTIONS = {
40 **Spark2.Parser.FUNCTIONS,
41 "ANY_VALUE": lambda args: exp.AnyValue(
42 this=seq_get(args, 0), ignore_nulls=seq_get(args, 1)
43 ),
44 "DATEDIFF": _parse_datediff,
45 }
46
47 FUNCTION_PARSERS = Spark2.Parser.FUNCTION_PARSERS.copy()
48 FUNCTION_PARSERS.pop("ANY_VALUE")
49
50 class Generator(Spark2.Generator):
51 TYPE_MAPPING = {
52 **Spark2.Generator.TYPE_MAPPING,
53 exp.DataType.Type.MONEY: "DECIMAL(15, 4)",
54 exp.DataType.Type.SMALLMONEY: "DECIMAL(6, 4)",
55 exp.DataType.Type.UNIQUEIDENTIFIER: "STRING",
56 }
57
58 TRANSFORMS = {
59 **Spark2.Generator.TRANSFORMS,
60 exp.StartsWith: rename_func("STARTSWITH"),
61 exp.TimestampAdd: lambda self, e: self.func(
62 "DATEADD", e.args.get("unit") or "DAY", e.expression, e.this
63 ),
64 }
65 TRANSFORMS.pop(exp.AnyValue)
66 TRANSFORMS.pop(exp.DateDiff)
67 TRANSFORMS.pop(exp.Group)
68
69 def anyvalue_sql(self, expression: exp.AnyValue) -> str:
70 return self.function_fallback_sql(expression)
71
72 def datediff_sql(self, expression: exp.DateDiff) -> str:
73 unit = self.sql(expression, "unit")
74 end = self.sql(expression, "this")
75 start = self.sql(expression, "expression")
76
77 if unit:
78 return self.func("DATEDIFF", unit, start, end)
79
80 return self.func("DATEDIFF", end, start)
81
[end of sqlglot/dialects/spark.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/sqlglot/dialects/spark.py b/sqlglot/dialects/spark.py
--- a/sqlglot/dialects/spark.py
+++ b/sqlglot/dialects/spark.py
@@ -35,6 +35,13 @@
class Spark(Spark2):
+ class Tokenizer(Spark2.Tokenizer):
+ RAW_STRINGS = [
+ (prefix + q, q)
+ for q in t.cast(t.List[str], Spark2.Tokenizer.QUOTES)
+ for prefix in ("r", "R")
+ ]
+
class Parser(Spark2.Parser):
FUNCTIONS = {
**Spark2.Parser.FUNCTIONS,
| {"golden_diff": "diff --git a/sqlglot/dialects/spark.py b/sqlglot/dialects/spark.py\n--- a/sqlglot/dialects/spark.py\n+++ b/sqlglot/dialects/spark.py\n@@ -35,6 +35,13 @@\n \n \n class Spark(Spark2):\n+ class Tokenizer(Spark2.Tokenizer):\n+ RAW_STRINGS = [\n+ (prefix + q, q)\n+ for q in t.cast(t.List[str], Spark2.Tokenizer.QUOTES)\n+ for prefix in (\"r\", \"R\")\n+ ]\n+\n class Parser(Spark2.Parser):\n FUNCTIONS = {\n **Spark2.Parser.FUNCTIONS,\n", "issue": "Spark raw String Support, comonly used with regexes\nThis fails with sqlglot:\r\n\r\n```python\r\nimport sqlglot\r\n\r\nsql = \"\"\"select regexp_replace('100-200', r'([^0-9])', '')\"\"\"\r\nsqlglot.parse_one(sql, read=\"databricks\")\r\n```\r\n\r\n**Official Documentation**\r\nhttps://spark.apache.org/docs/latest/sql-ref-literals.html\r\n\n", "before_files": [{"content": "from __future__ import annotations\n\nimport typing as t\n\nfrom sqlglot import exp\nfrom sqlglot.dialects.dialect import rename_func\nfrom sqlglot.dialects.spark2 import Spark2\nfrom sqlglot.helper import seq_get\n\n\ndef _parse_datediff(args: t.List) -> exp.Expression:\n \"\"\"\n Although Spark docs don't mention the \"unit\" argument, Spark3 added support for\n it at some point. Databricks also supports this variant (see below).\n\n For example, in spark-sql (v3.3.1):\n - SELECT DATEDIFF('2020-01-01', '2020-01-05') results in -4\n - SELECT DATEDIFF(day, '2020-01-01', '2020-01-05') results in 4\n\n See also:\n - https://docs.databricks.com/sql/language-manual/functions/datediff3.html\n - https://docs.databricks.com/sql/language-manual/functions/datediff.html\n \"\"\"\n unit = None\n this = seq_get(args, 0)\n expression = seq_get(args, 1)\n\n if len(args) == 3:\n unit = this\n this = args[2]\n\n return exp.DateDiff(\n this=exp.TsOrDsToDate(this=this), expression=exp.TsOrDsToDate(this=expression), unit=unit\n )\n\n\nclass Spark(Spark2):\n class Parser(Spark2.Parser):\n FUNCTIONS = {\n **Spark2.Parser.FUNCTIONS,\n \"ANY_VALUE\": lambda args: exp.AnyValue(\n this=seq_get(args, 0), ignore_nulls=seq_get(args, 1)\n ),\n \"DATEDIFF\": _parse_datediff,\n }\n\n FUNCTION_PARSERS = Spark2.Parser.FUNCTION_PARSERS.copy()\n FUNCTION_PARSERS.pop(\"ANY_VALUE\")\n\n class Generator(Spark2.Generator):\n TYPE_MAPPING = {\n **Spark2.Generator.TYPE_MAPPING,\n exp.DataType.Type.MONEY: \"DECIMAL(15, 4)\",\n exp.DataType.Type.SMALLMONEY: \"DECIMAL(6, 4)\",\n exp.DataType.Type.UNIQUEIDENTIFIER: \"STRING\",\n }\n\n TRANSFORMS = {\n **Spark2.Generator.TRANSFORMS,\n exp.StartsWith: rename_func(\"STARTSWITH\"),\n exp.TimestampAdd: lambda self, e: self.func(\n \"DATEADD\", e.args.get(\"unit\") or \"DAY\", e.expression, e.this\n ),\n }\n TRANSFORMS.pop(exp.AnyValue)\n TRANSFORMS.pop(exp.DateDiff)\n TRANSFORMS.pop(exp.Group)\n\n def anyvalue_sql(self, expression: exp.AnyValue) -> str:\n return self.function_fallback_sql(expression)\n\n def datediff_sql(self, expression: exp.DateDiff) -> str:\n unit = self.sql(expression, \"unit\")\n end = self.sql(expression, \"this\")\n start = self.sql(expression, \"expression\")\n\n if unit:\n return self.func(\"DATEDIFF\", unit, start, end)\n\n return self.func(\"DATEDIFF\", end, start)\n", "path": "sqlglot/dialects/spark.py"}]} | 1,485 | 151 |
gh_patches_debug_22314 | rasdani/github-patches | git_diff | learningequality__kolibri-5140 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
On (at least) one modal, dismissing with enter keypress causes it to come back
### Observed behavior
This was seen with the privacy modal, and is in other places now:
1. Facility > Class Details > Rename Class
1. Privacy link on sign-in page
1. Privacy link on sign-up page
1. Profile page, change password modal
1. "Change" status button for Lessons
It seems to happen for the same reason as #4973 (privacy modal): the "Edit" button keeps focus while the modal is open, so it gets "clicked" again when you dismiss the modal with an Enter keypress.
The common thing between these two cases is that the privacy link and rename use the text-link versions of the Button.
### Expected behavior
Text-type links do not have this side-effect when "clicked" by an enter-keypress
### Context
Kolibri 0.12 latest
</issue>
<code>
[start of kolibri/core/content/signals.py]
1 from django.db.models import F
2 from django.db.models.signals import pre_delete
3 from django.dispatch import receiver
4
5 from .models import ChannelMetadata
6 from .models import ContentNode
7 from kolibri.core.notifications.models import LearnerProgressNotification
8
9
10 @receiver(pre_delete, sender=ContentNode)
11 def cascade_delete_node(sender, instance=None, *args, **kwargs):
12 """
13 For a given node, we delete all notifications
14 objects whose contentnode is the instance's node..
15 """
16 LearnerProgressNotification.objects.filter(contentnode_id=instance.id).delete()
17
18
19 @receiver(pre_delete, sender=ChannelMetadata)
20 def reorder_channels_upon_deletion(sender, instance=None, *args, **kwargs):
21 """
22 For a given channel, decrement the order of all channels that come after this channel.
23 """
24 ChannelMetadata.objects.filter(order__gt=instance.order).update(order=F('order') - 1)
25
[end of kolibri/core/content/signals.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kolibri/core/content/signals.py b/kolibri/core/content/signals.py
--- a/kolibri/core/content/signals.py
+++ b/kolibri/core/content/signals.py
@@ -5,6 +5,7 @@
from .models import ChannelMetadata
from .models import ContentNode
from kolibri.core.notifications.models import LearnerProgressNotification
+from kolibri.core.lessons.models import Lesson
@receiver(pre_delete, sender=ContentNode)
@@ -22,3 +23,15 @@
For a given channel, decrement the order of all channels that come after this channel.
"""
ChannelMetadata.objects.filter(order__gt=instance.order).update(order=F('order') - 1)
+
+
+@receiver(pre_delete, sender=ChannelMetadata)
+def update_lesson_resources_before_delete(sender, instance=None, *args, **kwargs):
+ # Update the resources array of all lessons to ensure they don't have
+ # any deleted content
+ lessons = Lesson.objects.filter(resources__contains=instance.id)
+ for lesson in lessons:
+ updated_resources = [r for r in lesson.resources if r['channel_id'] != instance.id]
+ if len(updated_resources) < len(lesson.resources):
+ lesson.resources = updated_resources
+ lesson.save()
| {"golden_diff": "diff --git a/kolibri/core/content/signals.py b/kolibri/core/content/signals.py\n--- a/kolibri/core/content/signals.py\n+++ b/kolibri/core/content/signals.py\n@@ -5,6 +5,7 @@\n from .models import ChannelMetadata\n from .models import ContentNode\n from kolibri.core.notifications.models import LearnerProgressNotification\n+from kolibri.core.lessons.models import Lesson\n \n \n @receiver(pre_delete, sender=ContentNode)\n@@ -22,3 +23,15 @@\n For a given channel, decrement the order of all channels that come after this channel.\n \"\"\"\n ChannelMetadata.objects.filter(order__gt=instance.order).update(order=F('order') - 1)\n+\n+\n+@receiver(pre_delete, sender=ChannelMetadata)\n+def update_lesson_resources_before_delete(sender, instance=None, *args, **kwargs):\n+ # Update the resources array of all lessons to ensure they don't have\n+ # any deleted content\n+ lessons = Lesson.objects.filter(resources__contains=instance.id)\n+ for lesson in lessons:\n+ updated_resources = [r for r in lesson.resources if r['channel_id'] != instance.id]\n+ if len(updated_resources) < len(lesson.resources):\n+ lesson.resources = updated_resources\n+ lesson.save()\n", "issue": "On (at least) one modal, dismissing with enter keypress causes it to come back\n### Observed behavior\r\n\r\nThis was seen with the privacy modal, and is in other places now:\r\n\r\n1. Facility > Class Details > Rename Class\r\n1. Privacy link on sign-in page\r\n1. Privacy link on sign-up page\r\n1. Profile page, change password modal\r\n1. \"Change\" status button for Lessons\r\n\r\nIt seems to be for the same reasons as #4973 (privacy modal), the \"Edit\" button maintains focus while the modal is on, so it gets \"clicked\" again when you dismiss the modal with an Enter keypress.\r\n\r\nThe common thing between these two cases is that the privacy link and rename use the text-link versions of the Button.\r\n\r\n### Expected behavior\r\n\r\nText-type links do not have this side-effect when \"clicked\" by an enter-keypress\r\n\r\n### Context\r\n\r\nKolibri 0.12 latest\n", "before_files": [{"content": "from django.db.models import F\nfrom django.db.models.signals import pre_delete\nfrom django.dispatch import receiver\n\nfrom .models import ChannelMetadata\nfrom .models import ContentNode\nfrom kolibri.core.notifications.models import LearnerProgressNotification\n\n\n@receiver(pre_delete, sender=ContentNode)\ndef cascade_delete_node(sender, instance=None, *args, **kwargs):\n \"\"\"\n For a given node, we delete all notifications\n objects whose contentnode is the instance's node..\n \"\"\"\n LearnerProgressNotification.objects.filter(contentnode_id=instance.id).delete()\n\n\n@receiver(pre_delete, sender=ChannelMetadata)\ndef reorder_channels_upon_deletion(sender, instance=None, *args, **kwargs):\n \"\"\"\n For a given channel, decrement the order of all channels that come after this channel.\n \"\"\"\n ChannelMetadata.objects.filter(order__gt=instance.order).update(order=F('order') - 1)\n", "path": "kolibri/core/content/signals.py"}]} | 967 | 283 |
gh_patches_debug_1463 | rasdani/github-patches | git_diff | pre-commit__pre-commit-376 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Newly gitignored (but file still exists) files are linted
(they should not be)
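For anyone reproducing this, a quick way to list the offending paths is to ask git which staged files match an ignore rule; a rough standalone helper (not pre-commit's own API, just a diagnostic sketch):
```python
import subprocess

def staged_but_ignored():
    """Staged paths that are matched by .gitignore rules (candidates to skip)."""
    staged = subprocess.run(
        ['git', 'diff', '--staged', '--name-only'],
        capture_output=True, text=True, check=True,
    ).stdout.splitlines()
    return [
        path for path in staged
        if subprocess.run(['git', 'check-ignore', '-q', path]).returncode == 0
    ]
```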
</issue>
<code>
[start of pre_commit/git.py]
1 from __future__ import unicode_literals
2
3 import functools
4 import logging
5 import os
6 import os.path
7 import re
8
9 from pre_commit.errors import FatalError
10 from pre_commit.util import CalledProcessError
11 from pre_commit.util import cmd_output
12 from pre_commit.util import memoize_by_cwd
13
14
15 logger = logging.getLogger('pre_commit')
16
17
18 def get_root():
19 try:
20 return cmd_output('git', 'rev-parse', '--show-toplevel')[1].strip()
21 except CalledProcessError:
22 raise FatalError(
23 'Called from outside of the gits. Please cd to a git repository.'
24 )
25
26
27 def get_git_dir(git_root):
28 return os.path.normpath(os.path.join(
29 git_root,
30 cmd_output('git', 'rev-parse', '--git-dir', cwd=git_root)[1].strip(),
31 ))
32
33
34 def is_in_merge_conflict():
35 git_dir = get_git_dir('.')
36 return (
37 os.path.exists(os.path.join(git_dir, 'MERGE_MSG')) and
38 os.path.exists(os.path.join(git_dir, 'MERGE_HEAD'))
39 )
40
41
42 def parse_merge_msg_for_conflicts(merge_msg):
43 # Conflicted files start with tabs
44 return [
45 line.lstrip('#').strip()
46 for line in merge_msg.splitlines()
47 # '#\t' for git 2.4.1
48 if line.startswith(('\t', '#\t'))
49 ]
50
51
52 @memoize_by_cwd
53 def get_conflicted_files():
54 logger.info('Checking merge-conflict files only.')
55 # Need to get the conflicted files from the MERGE_MSG because they could
56 # have resolved the conflict by choosing one side or the other
57 merge_msg = open(os.path.join(get_git_dir('.'), 'MERGE_MSG')).read()
58 merge_conflict_filenames = parse_merge_msg_for_conflicts(merge_msg)
59
60 # This will get the rest of the changes made after the merge.
61 # If they resolved the merge conflict by choosing a mesh of both sides
62 # this will also include the conflicted files
63 tree_hash = cmd_output('git', 'write-tree')[1].strip()
64 merge_diff_filenames = cmd_output(
65 'git', 'diff', '-m', tree_hash, 'HEAD', 'MERGE_HEAD', '--name-only',
66 )[1].splitlines()
67 return set(merge_conflict_filenames) | set(merge_diff_filenames)
68
69
70 @memoize_by_cwd
71 def get_staged_files():
72 return cmd_output('git', 'diff', '--staged', '--name-only')[1].splitlines()
73
74
75 @memoize_by_cwd
76 def get_all_files():
77 return cmd_output('git', 'ls-files')[1].splitlines()
78
79
80 def get_files_matching(all_file_list_strategy):
81 @functools.wraps(all_file_list_strategy)
82 @memoize_by_cwd
83 def wrapper(include_expr, exclude_expr):
84 include_regex = re.compile(include_expr)
85 exclude_regex = re.compile(exclude_expr)
86 return set(
87 filename
88 for filename in all_file_list_strategy()
89 if (
90 include_regex.search(filename) and
91 not exclude_regex.search(filename) and
92 os.path.lexists(filename)
93 )
94 )
95 return wrapper
96
97
98 get_staged_files_matching = get_files_matching(get_staged_files)
99 get_all_files_matching = get_files_matching(get_all_files)
100 get_conflicted_files_matching = get_files_matching(get_conflicted_files)
101
[end of pre_commit/git.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pre_commit/git.py b/pre_commit/git.py
--- a/pre_commit/git.py
+++ b/pre_commit/git.py
@@ -69,7 +69,11 @@
@memoize_by_cwd
def get_staged_files():
- return cmd_output('git', 'diff', '--staged', '--name-only')[1].splitlines()
+ return cmd_output(
+ 'git', 'diff', '--staged', '--name-only',
+ # Everything except for D
+ '--diff-filter=ACMRTUXB'
+ )[1].splitlines()
@memoize_by_cwd
| {"golden_diff": "diff --git a/pre_commit/git.py b/pre_commit/git.py\n--- a/pre_commit/git.py\n+++ b/pre_commit/git.py\n@@ -69,7 +69,11 @@\n \n @memoize_by_cwd\n def get_staged_files():\n- return cmd_output('git', 'diff', '--staged', '--name-only')[1].splitlines()\n+ return cmd_output(\n+ 'git', 'diff', '--staged', '--name-only',\n+ # Everything except for D\n+ '--diff-filter=ACMRTUXB'\n+ )[1].splitlines()\n \n \n @memoize_by_cwd\n", "issue": "Newly gitignored (but file still exists) files are linted\n(they should not be)\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nimport functools\nimport logging\nimport os\nimport os.path\nimport re\n\nfrom pre_commit.errors import FatalError\nfrom pre_commit.util import CalledProcessError\nfrom pre_commit.util import cmd_output\nfrom pre_commit.util import memoize_by_cwd\n\n\nlogger = logging.getLogger('pre_commit')\n\n\ndef get_root():\n try:\n return cmd_output('git', 'rev-parse', '--show-toplevel')[1].strip()\n except CalledProcessError:\n raise FatalError(\n 'Called from outside of the gits. Please cd to a git repository.'\n )\n\n\ndef get_git_dir(git_root):\n return os.path.normpath(os.path.join(\n git_root,\n cmd_output('git', 'rev-parse', '--git-dir', cwd=git_root)[1].strip(),\n ))\n\n\ndef is_in_merge_conflict():\n git_dir = get_git_dir('.')\n return (\n os.path.exists(os.path.join(git_dir, 'MERGE_MSG')) and\n os.path.exists(os.path.join(git_dir, 'MERGE_HEAD'))\n )\n\n\ndef parse_merge_msg_for_conflicts(merge_msg):\n # Conflicted files start with tabs\n return [\n line.lstrip('#').strip()\n for line in merge_msg.splitlines()\n # '#\\t' for git 2.4.1\n if line.startswith(('\\t', '#\\t'))\n ]\n\n\n@memoize_by_cwd\ndef get_conflicted_files():\n logger.info('Checking merge-conflict files only.')\n # Need to get the conflicted files from the MERGE_MSG because they could\n # have resolved the conflict by choosing one side or the other\n merge_msg = open(os.path.join(get_git_dir('.'), 'MERGE_MSG')).read()\n merge_conflict_filenames = parse_merge_msg_for_conflicts(merge_msg)\n\n # This will get the rest of the changes made after the merge.\n # If they resolved the merge conflict by choosing a mesh of both sides\n # this will also include the conflicted files\n tree_hash = cmd_output('git', 'write-tree')[1].strip()\n merge_diff_filenames = cmd_output(\n 'git', 'diff', '-m', tree_hash, 'HEAD', 'MERGE_HEAD', '--name-only',\n )[1].splitlines()\n return set(merge_conflict_filenames) | set(merge_diff_filenames)\n\n\n@memoize_by_cwd\ndef get_staged_files():\n return cmd_output('git', 'diff', '--staged', '--name-only')[1].splitlines()\n\n\n@memoize_by_cwd\ndef get_all_files():\n return cmd_output('git', 'ls-files')[1].splitlines()\n\n\ndef get_files_matching(all_file_list_strategy):\n @functools.wraps(all_file_list_strategy)\n @memoize_by_cwd\n def wrapper(include_expr, exclude_expr):\n include_regex = re.compile(include_expr)\n exclude_regex = re.compile(exclude_expr)\n return set(\n filename\n for filename in all_file_list_strategy()\n if (\n include_regex.search(filename) and\n not exclude_regex.search(filename) and\n os.path.lexists(filename)\n )\n )\n return wrapper\n\n\nget_staged_files_matching = get_files_matching(get_staged_files)\nget_all_files_matching = get_files_matching(get_all_files)\nget_conflicted_files_matching = get_files_matching(get_conflicted_files)\n", "path": "pre_commit/git.py"}]} | 1,488 | 138 |
gh_patches_debug_14135 | rasdani/github-patches | git_diff | OpenNMT__OpenNMT-py-1206 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ImportError for get_rank function
While running **preprocess.py** I am getting an ImportError for the get_rank function from PyTorch. From what I have found online, it is now deprecated and is inside the deprecated module. Can I send a pull request with a fix for that?
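A workaround that avoids the error at module load time is to defer the import into the function that actually needs it; a minimal sketch (the helper name is mine):
```python
def distributed_rank():
    # Imported lazily so that single-process tools such as preprocess.py
    # never import torch.distributed when it is not needed.
    from torch.distributed import get_rank
    return get_rank()
```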
</issue>
<code>
[start of onmt/utils/statistics.py]
1 """ Statistics calculation utility """
2 from __future__ import division
3 import time
4 import math
5 import sys
6
7 from torch.distributed import get_rank
8 from onmt.utils.distributed import all_gather_list
9 from onmt.utils.logging import logger
10
11
12 class Statistics(object):
13 """
14 Accumulator for loss statistics.
15 Currently calculates:
16
17 * accuracy
18 * perplexity
19 * elapsed time
20 """
21
22 def __init__(self, loss=0, n_words=0, n_correct=0):
23 self.loss = loss
24 self.n_words = n_words
25 self.n_correct = n_correct
26 self.n_src_words = 0
27 self.start_time = time.time()
28
29 @staticmethod
30 def all_gather_stats(stat, max_size=4096):
31 """
32 Gather a `Statistics` object accross multiple process/nodes
33
34 Args:
35 stat(:obj:Statistics): the statistics object to gather
36 accross all processes/nodes
37 max_size(int): max buffer size to use
38
39 Returns:
40 `Statistics`, the update stats object
41 """
42 stats = Statistics.all_gather_stats_list([stat], max_size=max_size)
43 return stats[0]
44
45 @staticmethod
46 def all_gather_stats_list(stat_list, max_size=4096):
47 """
48 Gather a `Statistics` list accross all processes/nodes
49
50 Args:
51 stat_list(list([`Statistics`])): list of statistics objects to
52 gather accross all processes/nodes
53 max_size(int): max buffer size to use
54
55 Returns:
56 our_stats(list([`Statistics`])): list of updated stats
57 """
58 # Get a list of world_size lists with len(stat_list) Statistics objects
59 all_stats = all_gather_list(stat_list, max_size=max_size)
60
61 our_rank = get_rank()
62 our_stats = all_stats[our_rank]
63 for other_rank, stats in enumerate(all_stats):
64 if other_rank == our_rank:
65 continue
66 for i, stat in enumerate(stats):
67 our_stats[i].update(stat, update_n_src_words=True)
68 return our_stats
69
70 def update(self, stat, update_n_src_words=False):
71 """
72 Update statistics by suming values with another `Statistics` object
73
74 Args:
75 stat: another statistic object
76 update_n_src_words(bool): whether to update (sum) `n_src_words`
77 or not
78
79 """
80 self.loss += stat.loss
81 self.n_words += stat.n_words
82 self.n_correct += stat.n_correct
83
84 if update_n_src_words:
85 self.n_src_words += stat.n_src_words
86
87 def accuracy(self):
88 """ compute accuracy """
89 return 100 * (self.n_correct / self.n_words)
90
91 def xent(self):
92 """ compute cross entropy """
93 return self.loss / self.n_words
94
95 def ppl(self):
96 """ compute perplexity """
97 return math.exp(min(self.loss / self.n_words, 100))
98
99 def elapsed_time(self):
100 """ compute elapsed time """
101 return time.time() - self.start_time
102
103 def output(self, step, num_steps, learning_rate, start):
104 """Write out statistics to stdout.
105
106 Args:
107 step (int): current step
108 n_batch (int): total batches
109 start (int): start time of step.
110 """
111 t = self.elapsed_time()
112 logger.info(
113 ("Step %2d/%5d; acc: %6.2f; ppl: %5.2f; xent: %4.2f; " +
114 "lr: %7.5f; %3.0f/%3.0f tok/s; %6.0f sec")
115 % (step, num_steps,
116 self.accuracy(),
117 self.ppl(),
118 self.xent(),
119 learning_rate,
120 self.n_src_words / (t + 1e-5),
121 self.n_words / (t + 1e-5),
122 time.time() - start))
123 sys.stdout.flush()
124
125 def log_tensorboard(self, prefix, writer, learning_rate, step):
126 """ display statistics to tensorboard """
127 t = self.elapsed_time()
128 writer.add_scalar(prefix + "/xent", self.xent(), step)
129 writer.add_scalar(prefix + "/ppl", self.ppl(), step)
130 writer.add_scalar(prefix + "/accuracy", self.accuracy(), step)
131 writer.add_scalar(prefix + "/tgtper", self.n_words / t, step)
132 writer.add_scalar(prefix + "/lr", learning_rate, step)
133
[end of onmt/utils/statistics.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/onmt/utils/statistics.py b/onmt/utils/statistics.py
--- a/onmt/utils/statistics.py
+++ b/onmt/utils/statistics.py
@@ -4,8 +4,6 @@
import math
import sys
-from torch.distributed import get_rank
-from onmt.utils.distributed import all_gather_list
from onmt.utils.logging import logger
@@ -55,6 +53,9 @@
Returns:
our_stats(list([`Statistics`])): list of updated stats
"""
+ from torch.distributed import get_rank
+ from onmt.utils.distributed import all_gather_list
+
# Get a list of world_size lists with len(stat_list) Statistics objects
all_stats = all_gather_list(stat_list, max_size=max_size)
| {"golden_diff": "diff --git a/onmt/utils/statistics.py b/onmt/utils/statistics.py\n--- a/onmt/utils/statistics.py\n+++ b/onmt/utils/statistics.py\n@@ -4,8 +4,6 @@\n import math\n import sys\n \n-from torch.distributed import get_rank\n-from onmt.utils.distributed import all_gather_list\n from onmt.utils.logging import logger\n \n \n@@ -55,6 +53,9 @@\n Returns:\n our_stats(list([`Statistics`])): list of updated stats\n \"\"\"\n+ from torch.distributed import get_rank\n+ from onmt.utils.distributed import all_gather_list\n+\n # Get a list of world_size lists with len(stat_list) Statistics objects\n all_stats = all_gather_list(stat_list, max_size=max_size)\n", "issue": "ImportError for get_rank function\nWhile running the **preprocess.py** I am getting an ImportError of get_rank function from Pytorch. What I have found on the internet that it is now deprecated and is inside the deprecated module. I can send a pull request for a fix of that?\n", "before_files": [{"content": "\"\"\" Statistics calculation utility \"\"\"\nfrom __future__ import division\nimport time\nimport math\nimport sys\n\nfrom torch.distributed import get_rank\nfrom onmt.utils.distributed import all_gather_list\nfrom onmt.utils.logging import logger\n\n\nclass Statistics(object):\n \"\"\"\n Accumulator for loss statistics.\n Currently calculates:\n\n * accuracy\n * perplexity\n * elapsed time\n \"\"\"\n\n def __init__(self, loss=0, n_words=0, n_correct=0):\n self.loss = loss\n self.n_words = n_words\n self.n_correct = n_correct\n self.n_src_words = 0\n self.start_time = time.time()\n\n @staticmethod\n def all_gather_stats(stat, max_size=4096):\n \"\"\"\n Gather a `Statistics` object accross multiple process/nodes\n\n Args:\n stat(:obj:Statistics): the statistics object to gather\n accross all processes/nodes\n max_size(int): max buffer size to use\n\n Returns:\n `Statistics`, the update stats object\n \"\"\"\n stats = Statistics.all_gather_stats_list([stat], max_size=max_size)\n return stats[0]\n\n @staticmethod\n def all_gather_stats_list(stat_list, max_size=4096):\n \"\"\"\n Gather a `Statistics` list accross all processes/nodes\n\n Args:\n stat_list(list([`Statistics`])): list of statistics objects to\n gather accross all processes/nodes\n max_size(int): max buffer size to use\n\n Returns:\n our_stats(list([`Statistics`])): list of updated stats\n \"\"\"\n # Get a list of world_size lists with len(stat_list) Statistics objects\n all_stats = all_gather_list(stat_list, max_size=max_size)\n\n our_rank = get_rank()\n our_stats = all_stats[our_rank]\n for other_rank, stats in enumerate(all_stats):\n if other_rank == our_rank:\n continue\n for i, stat in enumerate(stats):\n our_stats[i].update(stat, update_n_src_words=True)\n return our_stats\n\n def update(self, stat, update_n_src_words=False):\n \"\"\"\n Update statistics by suming values with another `Statistics` object\n\n Args:\n stat: another statistic object\n update_n_src_words(bool): whether to update (sum) `n_src_words`\n or not\n\n \"\"\"\n self.loss += stat.loss\n self.n_words += stat.n_words\n self.n_correct += stat.n_correct\n\n if update_n_src_words:\n self.n_src_words += stat.n_src_words\n\n def accuracy(self):\n \"\"\" compute accuracy \"\"\"\n return 100 * (self.n_correct / self.n_words)\n\n def xent(self):\n \"\"\" compute cross entropy \"\"\"\n return self.loss / self.n_words\n\n def ppl(self):\n \"\"\" compute perplexity \"\"\"\n return math.exp(min(self.loss / self.n_words, 100))\n\n def elapsed_time(self):\n \"\"\" compute elapsed time \"\"\"\n return 
time.time() - self.start_time\n\n def output(self, step, num_steps, learning_rate, start):\n \"\"\"Write out statistics to stdout.\n\n Args:\n step (int): current step\n n_batch (int): total batches\n start (int): start time of step.\n \"\"\"\n t = self.elapsed_time()\n logger.info(\n (\"Step %2d/%5d; acc: %6.2f; ppl: %5.2f; xent: %4.2f; \" +\n \"lr: %7.5f; %3.0f/%3.0f tok/s; %6.0f sec\")\n % (step, num_steps,\n self.accuracy(),\n self.ppl(),\n self.xent(),\n learning_rate,\n self.n_src_words / (t + 1e-5),\n self.n_words / (t + 1e-5),\n time.time() - start))\n sys.stdout.flush()\n\n def log_tensorboard(self, prefix, writer, learning_rate, step):\n \"\"\" display statistics to tensorboard \"\"\"\n t = self.elapsed_time()\n writer.add_scalar(prefix + \"/xent\", self.xent(), step)\n writer.add_scalar(prefix + \"/ppl\", self.ppl(), step)\n writer.add_scalar(prefix + \"/accuracy\", self.accuracy(), step)\n writer.add_scalar(prefix + \"/tgtper\", self.n_words / t, step)\n writer.add_scalar(prefix + \"/lr\", learning_rate, step)\n", "path": "onmt/utils/statistics.py"}]} | 1,870 | 172 |
gh_patches_debug_19889 | rasdani/github-patches | git_diff | inventree__InvenTree-2427 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[BUG] could not serialize access due to concurrent update
could not serialize access due to concurrent update
and then it just crashes.
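This looks like two workers hitting the currency-rate update at the same time; a rough sketch of the guard I had in mind (`backend` stands for an InvenTreeExchange instance, and it assumes the underlying cause is psycopg2's SerializationFailure):
```python
from django.db.utils import OperationalError

def safe_update_rates(backend, base_currency="USD"):
    try:
        backend.update_rates(base_currency=base_currency)
    except OperationalError as exc:
        cause = exc.__cause__.__class__.__name__ if exc.__cause__ else ""
        if "SerializationFailure" not in cause:
            raise
        # Another worker is refreshing the rates; skip quietly and let the
        # scheduled task pick it up on the next run.
```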
</issue>
<code>
[start of InvenTree/InvenTree/exchange.py]
1 from common.settings import currency_code_default, currency_codes
2 from urllib.error import HTTPError, URLError
3
4 from djmoney.contrib.exchange.backends.base import SimpleExchangeBackend
5
6
7 class InvenTreeExchange(SimpleExchangeBackend):
8 """
9 Backend for automatically updating currency exchange rates.
10
11 Uses the exchangerate.host service API
12 """
13
14 name = "InvenTreeExchange"
15
16 def __init__(self):
17 self.url = "https://api.exchangerate.host/latest"
18
19 super().__init__()
20
21 def get_params(self):
22 # No API key is required
23 return {
24 }
25
26 def update_rates(self, base_currency=currency_code_default()):
27
28 symbols = ','.join(currency_codes())
29
30 try:
31 super().update_rates(base=base_currency, symbols=symbols)
32 # catch connection errors
33 except (HTTPError, URLError):
34 print('Encountered connection error while updating')
35
[end of InvenTree/InvenTree/exchange.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/InvenTree/InvenTree/exchange.py b/InvenTree/InvenTree/exchange.py
--- a/InvenTree/InvenTree/exchange.py
+++ b/InvenTree/InvenTree/exchange.py
@@ -2,6 +2,7 @@
from urllib.error import HTTPError, URLError
from djmoney.contrib.exchange.backends.base import SimpleExchangeBackend
+from django.db.utils import OperationalError
class InvenTreeExchange(SimpleExchangeBackend):
@@ -32,3 +33,12 @@
# catch connection errors
except (HTTPError, URLError):
print('Encountered connection error while updating')
+ except OperationalError as e:
+ if 'SerializationFailure' in e.__cause__.__class__.__name__:
+ print('Serialization Failure while updating exchange rates')
+ # We are just going to swallow this exception because the
+ # exchange rates will be updated later by the scheduled task
+ else:
+ # Other operational errors probably are still show stoppers
+ # so reraise them so that the log contains the stacktrace
+ raise
| {"golden_diff": "diff --git a/InvenTree/InvenTree/exchange.py b/InvenTree/InvenTree/exchange.py\n--- a/InvenTree/InvenTree/exchange.py\n+++ b/InvenTree/InvenTree/exchange.py\n@@ -2,6 +2,7 @@\n from urllib.error import HTTPError, URLError\n \n from djmoney.contrib.exchange.backends.base import SimpleExchangeBackend\n+from django.db.utils import OperationalError\n \n \n class InvenTreeExchange(SimpleExchangeBackend):\n@@ -32,3 +33,12 @@\n # catch connection errors\n except (HTTPError, URLError):\n print('Encountered connection error while updating')\n+ except OperationalError as e:\n+ if 'SerializationFailure' in e.__cause__.__class__.__name__:\n+ print('Serialization Failure while updating exchange rates')\n+ # We are just going to swallow this exception because the\n+ # exchange rates will be updated later by the scheduled task\n+ else:\n+ # Other operational errors probably are still show stoppers\n+ # so reraise them so that the log contains the stacktrace\n+ raise\n", "issue": "[BUG] could not serialize access due to concurrent update\ncould not serialize access due to concurrent update\r\n\r\nand just crash....\n", "before_files": [{"content": "from common.settings import currency_code_default, currency_codes\nfrom urllib.error import HTTPError, URLError\n\nfrom djmoney.contrib.exchange.backends.base import SimpleExchangeBackend\n\n\nclass InvenTreeExchange(SimpleExchangeBackend):\n \"\"\"\n Backend for automatically updating currency exchange rates.\n\n Uses the exchangerate.host service API\n \"\"\"\n\n name = \"InvenTreeExchange\"\n\n def __init__(self):\n self.url = \"https://api.exchangerate.host/latest\"\n\n super().__init__()\n\n def get_params(self):\n # No API key is required\n return {\n }\n\n def update_rates(self, base_currency=currency_code_default()):\n\n symbols = ','.join(currency_codes())\n\n try:\n super().update_rates(base=base_currency, symbols=symbols)\n # catch connection errors\n except (HTTPError, URLError):\n print('Encountered connection error while updating')\n", "path": "InvenTree/InvenTree/exchange.py"}]} | 827 | 250 |
gh_patches_debug_9711 | rasdani/github-patches | git_diff | kivy__python-for-android-735 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Libffi recipe fails with "unrecognized options: --enable-shared"
I'm trying to use the packaged `cryptography` recipe, which includes the `Libffi` recipe as a dependency. I'm getting the following errors with `Libffi` while trying to build the apk:
```
configure: WARNING: unrecognized options: --enable-shared
configure: error: cannot find install-sh, install.sh, or shtool in "." "./.." "./../.."
```
I'm using recent versions of the toolchain: p4a (checked out from github a week ago), most recent Android SDK & NDK (25.1.3 and r11c, respectively). I'm invoking p4a as follows:
```
p4a apk --sdk_dir ~/AndroidUpstream/android-sdk-linux \
--ndk_dir ~/AndroidUpstream/android-ndk-r11c \
--android_api 16 --ndk_ver r11c \
--private ~/src/TestCrypto \
--package=org.example.testcrypto \
--name=TestCrypto --version=0.5 \
--bootstrap=sdl2 \
--requirements=sdl2,python2,kivy,cryptography \
--dist_name=remotepython \
--permission=INTERNET
```
I've looked around (on web, and here on github issue tracker), but can't find explanations for this error message. Thanks in advance for any suggestions.
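For what it's worth, a missing install-sh usually means the autotools helper scripts were never generated, so regenerating them before configure should help; a rough sketch using the same sh module the recipes already rely on (the path is illustrative, and this assumes autoconf/automake/libtool are installed on the build host):
```python
import sh

libffi_src = "/path/to/p4a/build/other_builds/libffi"  # illustrative
sh.Command("autoreconf")("-vif", _cwd=libffi_src)
```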
</issue>
<code>
[start of pythonforandroid/recipes/libffi/__init__.py]
1 from pythonforandroid.recipe import Recipe
2 from pythonforandroid.logger import shprint
3 from pythonforandroid.util import current_directory
4 from os.path import exists, join
5 import sh
6 import glob
7
8
9 class LibffiRecipe(Recipe):
10 name = 'libffi'
11 version = 'v3.2.1'
12 url = 'https://github.com/atgreen/libffi/archive/{version}.zip'
13
14 patches = ['remove-version-info.patch']
15
16 def get_host(self, arch):
17 with current_directory(self.get_build_dir(arch.arch)):
18 host = None
19 with open('Makefile') as f:
20 for line in f:
21 if line.startswith('host = '):
22 host = line.strip()[7:]
23 break
24
25 if not host or not exists(host):
26 raise RuntimeError('failed to find build output! ({})'
27 .format(host))
28
29 return host
30
31 def should_build(self, arch):
32 # return not bool(glob.glob(join(self.ctx.get_libs_dir(arch.arch),
33 # 'libffi.so*')))
34 return not exists(join(self.ctx.get_libs_dir(arch.arch), 'libffi.so'))
35 # return not exists(join(self.ctx.get_python_install_dir(), 'lib',
36 # 'libffi.so'))
37
38 def build_arch(self, arch):
39 env = self.get_recipe_env(arch)
40 with current_directory(self.get_build_dir(arch.arch)):
41 if not exists('configure'):
42 shprint(sh.Command('./autogen.sh'), _env=env)
43 shprint(sh.Command('./configure'), '--host=' + arch.toolchain_prefix,
44 '--prefix=' + self.ctx.get_python_install_dir(),
45 '--enable-shared', _env=env)
46 shprint(sh.make, '-j5', 'libffi.la', _env=env)
47
48
49 # dlname = None
50 # with open(join(host, 'libffi.la')) as f:
51 # for line in f:
52 # if line.startswith('dlname='):
53 # dlname = line.strip()[8:-1]
54 # break
55 #
56 # if not dlname or not exists(join(host, '.libs', dlname)):
57 # raise RuntimeError('failed to locate shared object! ({})'
58 # .format(dlname))
59
60 # shprint(sh.sed, '-i', 's/^dlname=.*$/dlname=\'libffi.so\'/', join(host, 'libffi.la'))
61
62 shprint(sh.cp, '-t', self.ctx.get_libs_dir(arch.arch),
63 join(self.get_host(arch), '.libs', 'libffi.so')) #,
64 # join(host, 'libffi.la'))
65
66 def get_include_dirs(self, arch):
67 return [join(self.get_build_dir(arch.arch), self.get_host(arch), 'include')]
68
69
70 recipe = LibffiRecipe()
71
[end of pythonforandroid/recipes/libffi/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pythonforandroid/recipes/libffi/__init__.py b/pythonforandroid/recipes/libffi/__init__.py
--- a/pythonforandroid/recipes/libffi/__init__.py
+++ b/pythonforandroid/recipes/libffi/__init__.py
@@ -40,6 +40,7 @@
with current_directory(self.get_build_dir(arch.arch)):
if not exists('configure'):
shprint(sh.Command('./autogen.sh'), _env=env)
+ shprint(sh.Command('autoreconf -vif'), _env=env)
shprint(sh.Command('./configure'), '--host=' + arch.toolchain_prefix,
'--prefix=' + self.ctx.get_python_install_dir(),
'--enable-shared', _env=env)
| {"golden_diff": "diff --git a/pythonforandroid/recipes/libffi/__init__.py b/pythonforandroid/recipes/libffi/__init__.py\n--- a/pythonforandroid/recipes/libffi/__init__.py\n+++ b/pythonforandroid/recipes/libffi/__init__.py\n@@ -40,6 +40,7 @@\n \t\twith current_directory(self.get_build_dir(arch.arch)):\n \t\t\tif not exists('configure'):\n \t\t\t\tshprint(sh.Command('./autogen.sh'), _env=env)\n+\t\t\tshprint(sh.Command('autoreconf -vif'), _env=env)\n \t\t\tshprint(sh.Command('./configure'), '--host=' + arch.toolchain_prefix,\n \t\t\t '--prefix=' + self.ctx.get_python_install_dir(),\n \t\t\t '--enable-shared', _env=env)\n", "issue": "Libffi recipe fails with \"unrecognized options: --enable-shared\"\nI'm trying to use the packaged `cryptography` recipe, which includes the `Libffi` recipe as a dependency. I'm getting the following errors with `Libffi` while trying to build the apk:\n\n```\nconfigure: WARNING: unrecognized options: --enable-shared\nconfigure: error: cannot find install-sh, install.sh, or shtool in \".\" \"./..\" \"./../..\"\n```\n\nI'm using recent versions of the toolchain: p4a (checked out from github a week ago), most recent Android SDK & NDK (25.1.3 and r11c, respectively). I'm invoking p4a as follows:\n\n```\np4a apk --sdk_dir ~/AndroidUpstream/android-sdk-linux \\\n --ndk_dir ~/AndroidUpstream/android-ndk-r11c \\\n --android_api 16 --ndk_ver r11c \\\n --private ~/src/TestCrypto \\\n --package=org.example.testcrypto \\\n --name=TestCrypto --version=0.5 \\\n --bootstrap=sdl2 \\\n --requirements=sdl2,python2,kivy,cryptography \\\n --dist_name=remotepython \\\n --permission=INTERNET\n```\n\nI've looked around (on web, and here on github issue tracker), but can't find explanations for this error message. Thanks in advance for any suggestions.\n\n", "before_files": [{"content": "from pythonforandroid.recipe import Recipe\nfrom pythonforandroid.logger import shprint\nfrom pythonforandroid.util import current_directory\nfrom os.path import exists, join\nimport sh\nimport glob\n\n\nclass LibffiRecipe(Recipe):\n\tname = 'libffi'\n\tversion = 'v3.2.1'\n\turl = 'https://github.com/atgreen/libffi/archive/{version}.zip'\n\n\tpatches = ['remove-version-info.patch']\n\n\tdef get_host(self, arch):\n\t\twith current_directory(self.get_build_dir(arch.arch)):\n\t\t\thost = None\n\t\t\twith open('Makefile') as f:\n\t\t\t\tfor line in f:\n\t\t\t\t\tif line.startswith('host = '):\n\t\t\t\t\t\thost = line.strip()[7:]\n\t\t\t\t\t\tbreak\n\n\t\t\tif not host or not exists(host):\n\t\t\t\traise RuntimeError('failed to find build output! 
({})'\n\t\t\t\t .format(host))\n\t\t\t\n\t\t\treturn host\n\n\tdef should_build(self, arch):\n\t\t# return not bool(glob.glob(join(self.ctx.get_libs_dir(arch.arch),\n\t\t# 'libffi.so*')))\n\t\treturn not exists(join(self.ctx.get_libs_dir(arch.arch), 'libffi.so'))\n\t\t# return not exists(join(self.ctx.get_python_install_dir(), 'lib',\n\t\t# 'libffi.so'))\n\n\tdef build_arch(self, arch):\n\t\tenv = self.get_recipe_env(arch)\n\t\twith current_directory(self.get_build_dir(arch.arch)):\n\t\t\tif not exists('configure'):\n\t\t\t\tshprint(sh.Command('./autogen.sh'), _env=env)\n\t\t\tshprint(sh.Command('./configure'), '--host=' + arch.toolchain_prefix,\n\t\t\t '--prefix=' + self.ctx.get_python_install_dir(),\n\t\t\t '--enable-shared', _env=env)\n\t\t\tshprint(sh.make, '-j5', 'libffi.la', _env=env)\n\n\n\t\t\t# dlname = None\n\t\t\t# with open(join(host, 'libffi.la')) as f:\n\t\t\t# \tfor line in f:\n\t\t\t# \t\tif line.startswith('dlname='):\n\t\t\t# \t\t\tdlname = line.strip()[8:-1]\n\t\t\t# \t\t\tbreak\n\t\t\t# \n\t\t\t# if not dlname or not exists(join(host, '.libs', dlname)):\n\t\t\t# \traise RuntimeError('failed to locate shared object! ({})'\n\t\t\t# \t .format(dlname))\n\n\t\t\t# shprint(sh.sed, '-i', 's/^dlname=.*$/dlname=\\'libffi.so\\'/', join(host, 'libffi.la'))\n\n\t\t\tshprint(sh.cp, '-t', self.ctx.get_libs_dir(arch.arch),\n\t\t\t join(self.get_host(arch), '.libs', 'libffi.so')) #,\n\t\t\t # join(host, 'libffi.la'))\n\n\tdef get_include_dirs(self, arch):\n\t\treturn [join(self.get_build_dir(arch.arch), self.get_host(arch), 'include')]\n\n\nrecipe = LibffiRecipe()\n", "path": "pythonforandroid/recipes/libffi/__init__.py"}]} | 1,624 | 163 |
gh_patches_debug_12862 | rasdani/github-patches | git_diff | pyro-ppl__numpyro-987 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add required packages at the top of tutorials to run on colab
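Concretely, that would mean opening each tutorial with an install cell along these lines (IPython/Colab cell syntax; the exact package list per tutorial is a guess):
```python
%pip install -q numpyro arviz matplotlib pandas
```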
</issue>
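A minimal sketch of the kind of setup cell the issue asks for, placed at the top of a tutorial notebook so it runs on Colab. The package list here is an assumption for illustration (it should match whatever each notebook actually imports), and notebooks would normally spell this as a `!pip install` cell; it is written as plain Python below so it runs anywhere:

```python
# Hypothetical first cell of a tutorial notebook: install what Colab does not ship with.
import subprocess
import sys

# numpyro itself plus arviz for plotting -- an assumed, not authoritative, package list.
subprocess.check_call([sys.executable, "-m", "pip", "install", "-q", "numpyro", "arviz"])
```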
<code>
[start of setup.py]
1 # Copyright Contributors to the Pyro project.
2 # SPDX-License-Identifier: Apache-2.0
3
4 from __future__ import absolute_import, division, print_function
5
6 import os
7 import sys
8
9 from setuptools import find_packages, setup
10
11 PROJECT_PATH = os.path.dirname(os.path.abspath(__file__))
12
13 # Find version
14 for line in open(os.path.join(PROJECT_PATH, "numpyro", "version.py")):
15 if line.startswith("__version__ = "):
16 version = line.strip().split()[2][1:-1]
17
18 # READ README.md for long description on PyPi.
19 try:
20 long_description = open("README.md", encoding="utf-8").read()
21 except Exception as e:
22 sys.stderr.write("Failed to read README.md:\n {}\n".format(e))
23 sys.stderr.flush()
24 long_description = ""
25
26
27 setup(
28 name="numpyro",
29 version=version,
30 description="Pyro PPL on NumPy",
31 packages=find_packages(include=["numpyro", "numpyro.*"]),
32 url="https://github.com/pyro-ppl/numpyro",
33 author="Uber AI Labs",
34 install_requires=[
35 "jax>=0.2.11",
36 "jaxlib>=0.1.62",
37 "tqdm",
38 ],
39 extras_require={
40 "doc": [
41 "ipython", # sphinx needs this to render codes
42 "nbsphinx",
43 "sphinx",
44 "sphinx_rtd_theme",
45 "sphinx-gallery",
46 ],
47 "test": [
48 "black",
49 "flake8",
50 "isort>=5.0",
51 "pytest>=4.1",
52 "pyro-api>=0.1.1",
53 "scipy>=1.1",
54 ],
55 "dev": [
56 "dm-haiku",
57 "flax",
58 "funsor @ git+https://github.com/pyro-ppl/funsor.git@d5574988665dd822ec64e41f2b54b9dc929959dc",
59 "graphviz",
60 "tensorflow_probability",
61 ],
62 "examples": ["arviz", "jupyter", "matplotlib", "pandas", "seaborn"],
63 },
64 long_description=long_description,
65 long_description_content_type="text/markdown",
66 keywords="probabilistic machine learning bayesian statistics",
67 license="Apache License 2.0",
68 classifiers=[
69 "Intended Audience :: Developers",
70 "Intended Audience :: Education",
71 "Intended Audience :: Science/Research",
72 "License :: OSI Approved :: Apache Software License",
73 "Operating System :: POSIX :: Linux",
74 "Operating System :: MacOS :: MacOS X",
75 "Programming Language :: Python :: 3.6",
76 "Programming Language :: Python :: 3.7",
77 "Programming Language :: Python :: 3.8",
78 "Programming Language :: Python :: 3.9",
79 ],
80 )
81
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -55,9 +55,12 @@
"dev": [
"dm-haiku",
"flax",
+ # TODO: bump funsor version before the release
"funsor @ git+https://github.com/pyro-ppl/funsor.git@d5574988665dd822ec64e41f2b54b9dc929959dc",
"graphviz",
- "tensorflow_probability",
+ # TODO: change this to tensorflow_probability>0.12.1 when the next version
+ # of tfp is released. The current release is not compatible with jax>=0.2.12.
+ "tfp-nightly",
],
"examples": ["arviz", "jupyter", "matplotlib", "pandas", "seaborn"],
},
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -55,9 +55,12 @@\n \"dev\": [\n \"dm-haiku\",\n \"flax\",\n+ # TODO: bump funsor version before the release\n \"funsor @ git+https://github.com/pyro-ppl/funsor.git@d5574988665dd822ec64e41f2b54b9dc929959dc\",\n \"graphviz\",\n- \"tensorflow_probability\",\n+ # TODO: change this to tensorflow_probability>0.12.1 when the next version\n+ # of tfp is released. The current release is not compatible with jax>=0.2.12.\n+ \"tfp-nightly\",\n ],\n \"examples\": [\"arviz\", \"jupyter\", \"matplotlib\", \"pandas\", \"seaborn\"],\n },\n", "issue": "Add required packages at the top of tutorials to run on colab\n\n", "before_files": [{"content": "# Copyright Contributors to the Pyro project.\n# SPDX-License-Identifier: Apache-2.0\n\nfrom __future__ import absolute_import, division, print_function\n\nimport os\nimport sys\n\nfrom setuptools import find_packages, setup\n\nPROJECT_PATH = os.path.dirname(os.path.abspath(__file__))\n\n# Find version\nfor line in open(os.path.join(PROJECT_PATH, \"numpyro\", \"version.py\")):\n if line.startswith(\"__version__ = \"):\n version = line.strip().split()[2][1:-1]\n\n# READ README.md for long description on PyPi.\ntry:\n long_description = open(\"README.md\", encoding=\"utf-8\").read()\nexcept Exception as e:\n sys.stderr.write(\"Failed to read README.md:\\n {}\\n\".format(e))\n sys.stderr.flush()\n long_description = \"\"\n\n\nsetup(\n name=\"numpyro\",\n version=version,\n description=\"Pyro PPL on NumPy\",\n packages=find_packages(include=[\"numpyro\", \"numpyro.*\"]),\n url=\"https://github.com/pyro-ppl/numpyro\",\n author=\"Uber AI Labs\",\n install_requires=[\n \"jax>=0.2.11\",\n \"jaxlib>=0.1.62\",\n \"tqdm\",\n ],\n extras_require={\n \"doc\": [\n \"ipython\", # sphinx needs this to render codes\n \"nbsphinx\",\n \"sphinx\",\n \"sphinx_rtd_theme\",\n \"sphinx-gallery\",\n ],\n \"test\": [\n \"black\",\n \"flake8\",\n \"isort>=5.0\",\n \"pytest>=4.1\",\n \"pyro-api>=0.1.1\",\n \"scipy>=1.1\",\n ],\n \"dev\": [\n \"dm-haiku\",\n \"flax\",\n \"funsor @ git+https://github.com/pyro-ppl/funsor.git@d5574988665dd822ec64e41f2b54b9dc929959dc\",\n \"graphviz\",\n \"tensorflow_probability\",\n ],\n \"examples\": [\"arviz\", \"jupyter\", \"matplotlib\", \"pandas\", \"seaborn\"],\n },\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n keywords=\"probabilistic machine learning bayesian statistics\",\n license=\"Apache License 2.0\",\n classifiers=[\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Education\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n ],\n)\n", "path": "setup.py"}]} | 1,337 | 217 |
gh_patches_debug_18571 | rasdani/github-patches | git_diff | urllib3__urllib3-823 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Cannot run tests on Ubuntu 14.04 LTS + Python 3
#### Repro Steps
```
# (Install Docker from: http://www.docker.com )
# If OS X, start with:
$ docker-machine start default; eval "$(docker-machine env default)"
# If OS X or Linux, continue with:
$ docker run -it ubuntu:trusty # Ubuntu 14.04 LTS
$$ apt-get update
$$ apt-get install git -y
$$ apt-get install python3-pip -y
$$ pip3 install virtualenv
$$ git clone https://github.com/shazow/urllib3
$$ cd urllib3/
$$ virtualenv venv
$$ source venv/bin/activate
$$ make test
```
#### Output
```
python setup.py develop
Traceback (most recent call last):
File "setup.py", line 23, in <module>
long_description=open('README.rst').read() + '\n\n' + open('CHANGES.rst').read(),
File "/urllib3/venv/lib/python3.4/encodings/ascii.py", line 26, in decode
return codecs.ascii_decode(input, self.errors)[0]
UnicodeDecodeError: 'ascii' codec can't decode byte 0xf0 in position 4597: ordinal not in range(128)
make: *** [*.egg-info] Error 1
```
#### Notes
My best guess is that the `read()`s in setup.py should have an encoding specified. Probably UTF-8.
</issue>
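A short sketch of the fix the reporter suggests — reading the long-description files with an explicit encoding so `setup.py` no longer depends on the locale default, which is ASCII in the failing Ubuntu 14.04 container. This only illustrates the idea; the actual patch in this record differs in small details:

```python
import codecs

# Open with an explicit encoding instead of relying on the ASCII locale default.
readme = codecs.open("README.rst", encoding="utf-8").read()
changes = codecs.open("CHANGES.rst", encoding="utf-8").read()
long_description = u"\n\n".join([readme, changes])
```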
<code>
[start of setup.py]
1 #!/usr/bin/env python
2
3 from setuptools import setup
4
5 import os
6 import re
7
8
9 base_path = os.path.dirname(__file__)
10
11 # Get the version (borrowed from SQLAlchemy)
12 fp = open(os.path.join(base_path, 'urllib3', '__init__.py'))
13 VERSION = re.compile(r".*__version__ = '(.*?)'",
14 re.S).match(fp.read()).group(1)
15 fp.close()
16
17
18 version = VERSION
19
20 setup(name='urllib3',
21 version=version,
22 description="HTTP library with thread-safe connection pooling, file post, and more.",
23 long_description=open('README.rst').read() + '\n\n' + open('CHANGES.rst').read(),
24 classifiers=[
25 'Environment :: Web Environment',
26 'Intended Audience :: Developers',
27 'License :: OSI Approved :: MIT License',
28 'Operating System :: OS Independent',
29 'Programming Language :: Python',
30 'Programming Language :: Python :: 2',
31 'Programming Language :: Python :: 3',
32 'Topic :: Internet :: WWW/HTTP',
33 'Topic :: Software Development :: Libraries',
34 ],
35 keywords='urllib httplib threadsafe filepost http https ssl pooling',
36 author='Andrey Petrov',
37 author_email='[email protected]',
38 url='http://urllib3.readthedocs.org/',
39 license='MIT',
40 packages=['urllib3',
41 'urllib3.packages', 'urllib3.packages.ssl_match_hostname',
42 'urllib3.contrib', 'urllib3.util',
43 ],
44 requires=[],
45 tests_require=[
46 # These are a less-specific subset of dev-requirements.txt, for the
47 # convenience of distro package maintainers.
48 'nose',
49 'mock',
50 'tornado',
51 ],
52 test_suite='test',
53 extras_require={
54 'secure': [
55 'pyOpenSSL>=0.13',
56 'ndg-httpsclient',
57 'pyasn1',
58 'certifi',
59 ],
60 'socks': [
61 'PySocks>=1.5.6,<2.0',
62 ]
63 },
64 )
65
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@
import os
import re
-
+import codecs
base_path = os.path.dirname(__file__)
@@ -14,13 +14,14 @@
re.S).match(fp.read()).group(1)
fp.close()
-
+readme = codecs.open('README.rst', encoding='utf-8').read()
+changes = codecs.open('README.rst', encoding='utf-8').read()
version = VERSION
setup(name='urllib3',
version=version,
description="HTTP library with thread-safe connection pooling, file post, and more.",
- long_description=open('README.rst').read() + '\n\n' + open('CHANGES.rst').read(),
+ long_description=u'\n\n'.join([readme, changes]),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -4,7 +4,7 @@\n \n import os\n import re\n-\n+import codecs\n \n base_path = os.path.dirname(__file__)\n \n@@ -14,13 +14,14 @@\n re.S).match(fp.read()).group(1)\n fp.close()\n \n-\n+readme = codecs.open('README.rst', encoding='utf-8').read()\n+changes = codecs.open('README.rst', encoding='utf-8').read()\n version = VERSION\n \n setup(name='urllib3',\n version=version,\n description=\"HTTP library with thread-safe connection pooling, file post, and more.\",\n- long_description=open('README.rst').read() + '\\n\\n' + open('CHANGES.rst').read(),\n+ long_description=u'\\n\\n'.join([readme, changes]),\n classifiers=[\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n", "issue": "Cannot run tests on Ubuntu 14.04 LTS + Python 3\n#### Repro Steps\n\n```\n# (Install Docker from: http://www.docker.com )\n\n# If OS X, start with:\n$ docker-machine start default; eval \"$(docker-machine env default)\"\n\n# If OS X or Linux, continue with:\n$ docker run -it ubuntu:trusty # Ubuntu 14.04 LTS\n$$ apt-get update\n$$ apt-get install git -y\n$$ apt-get install python3-pip -y\n$$ pip3 install virtualenv\n$$ git clone https://github.com/shazow/urllib3\n$$ cd urllib3/\n$$ virtualenv venv\n$$ source venv/bin/activate\n$$ make test\n```\n#### Output\n\n```\npython setup.py develop\nTraceback (most recent call last):\n File \"setup.py\", line 23, in <module>\n long_description=open('README.rst').read() + '\\n\\n' + open('CHANGES.rst').read(),\n File \"/urllib3/venv/lib/python3.4/encodings/ascii.py\", line 26, in decode\n return codecs.ascii_decode(input, self.errors)[0]\nUnicodeDecodeError: 'ascii' codec can't decode byte 0xf0 in position 4597: ordinal not in range(128)\nmake: *** [*.egg-info] Error 1\n```\n#### Notes\n\nMy best guess is that the `read()`s in setup.py should have an encoding specified. 
Probably UTF-8.\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\nfrom setuptools import setup\n\nimport os\nimport re\n\n\nbase_path = os.path.dirname(__file__)\n\n# Get the version (borrowed from SQLAlchemy)\nfp = open(os.path.join(base_path, 'urllib3', '__init__.py'))\nVERSION = re.compile(r\".*__version__ = '(.*?)'\",\n re.S).match(fp.read()).group(1)\nfp.close()\n\n\nversion = VERSION\n\nsetup(name='urllib3',\n version=version,\n description=\"HTTP library with thread-safe connection pooling, file post, and more.\",\n long_description=open('README.rst').read() + '\\n\\n' + open('CHANGES.rst').read(),\n classifiers=[\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 3',\n 'Topic :: Internet :: WWW/HTTP',\n 'Topic :: Software Development :: Libraries',\n ],\n keywords='urllib httplib threadsafe filepost http https ssl pooling',\n author='Andrey Petrov',\n author_email='[email protected]',\n url='http://urllib3.readthedocs.org/',\n license='MIT',\n packages=['urllib3',\n 'urllib3.packages', 'urllib3.packages.ssl_match_hostname',\n 'urllib3.contrib', 'urllib3.util',\n ],\n requires=[],\n tests_require=[\n # These are a less-specific subset of dev-requirements.txt, for the\n # convenience of distro package maintainers.\n 'nose',\n 'mock',\n 'tornado',\n ],\n test_suite='test',\n extras_require={\n 'secure': [\n 'pyOpenSSL>=0.13',\n 'ndg-httpsclient',\n 'pyasn1',\n 'certifi',\n ],\n 'socks': [\n 'PySocks>=1.5.6,<2.0',\n ]\n },\n )\n", "path": "setup.py"}]} | 1,442 | 215 |
gh_patches_debug_9009 | rasdani/github-patches | git_diff | CTFd__CTFd-2091 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Page preview doesn't consider format
Page preview needs to take into account format when previewing
</issue>
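The preview view in the file below builds the page from the posted `content` alone, so whatever format the editor selected is ignored when rendering the preview. A sketch of the adjusted view, mirroring the diff at the end of this record (it reuses the module's existing imports: `request`, `PageSchema`, `render_template`):

```python
def pages_preview():
    # Forward the selected format along with the content, not just the content.
    data = {
        "content": request.form.get("content"),
        "format": request.form.get("format"),
    }
    schema = PageSchema()
    page = schema.load(data)
    return render_template("page.html", content=page.data.html)
```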
<code>
[start of CTFd/admin/pages.py]
1 from flask import render_template, request
2
3 from CTFd.admin import admin
4 from CTFd.models import Pages
5 from CTFd.schemas.pages import PageSchema
6 from CTFd.utils import markdown
7 from CTFd.utils.decorators import admins_only
8
9
10 @admin.route("/admin/pages")
11 @admins_only
12 def pages_listing():
13 pages = Pages.query.all()
14 return render_template("admin/pages.html", pages=pages)
15
16
17 @admin.route("/admin/pages/new")
18 @admins_only
19 def pages_new():
20 return render_template("admin/editor.html")
21
22
23 @admin.route("/admin/pages/preview", methods=["POST"])
24 @admins_only
25 def pages_preview():
26 # We only care about content.
27 # Loading other attributes improperly will cause Marshmallow to incorrectly return a dict
28 data = {"content": request.form.get("content")}
29 schema = PageSchema()
30 page = schema.load(data)
31 return render_template("page.html", content=page.data.html)
32
33
34 @admin.route("/admin/pages/<int:page_id>")
35 @admins_only
36 def pages_detail(page_id):
37 page = Pages.query.filter_by(id=page_id).first_or_404()
38 page_op = request.args.get("operation")
39
40 if request.method == "GET" and page_op == "preview":
41 return render_template("page.html", content=markdown(page.content))
42
43 if request.method == "GET" and page_op == "create":
44 return render_template("admin/editor.html")
45
46 return render_template("admin/editor.html", page=page)
47
[end of CTFd/admin/pages.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/CTFd/admin/pages.py b/CTFd/admin/pages.py
--- a/CTFd/admin/pages.py
+++ b/CTFd/admin/pages.py
@@ -25,7 +25,10 @@
def pages_preview():
# We only care about content.
# Loading other attributes improperly will cause Marshmallow to incorrectly return a dict
- data = {"content": request.form.get("content")}
+ data = {
+ "content": request.form.get("content"),
+ "format": request.form.get("format"),
+ }
schema = PageSchema()
page = schema.load(data)
return render_template("page.html", content=page.data.html)
| {"golden_diff": "diff --git a/CTFd/admin/pages.py b/CTFd/admin/pages.py\n--- a/CTFd/admin/pages.py\n+++ b/CTFd/admin/pages.py\n@@ -25,7 +25,10 @@\n def pages_preview():\n # We only care about content.\n # Loading other attributes improperly will cause Marshmallow to incorrectly return a dict\n- data = {\"content\": request.form.get(\"content\")}\n+ data = {\n+ \"content\": request.form.get(\"content\"),\n+ \"format\": request.form.get(\"format\"),\n+ }\n schema = PageSchema()\n page = schema.load(data)\n return render_template(\"page.html\", content=page.data.html)\n", "issue": "Page preview doesn't consider format\nPage preview needs to take into account format when previewing\n", "before_files": [{"content": "from flask import render_template, request\n\nfrom CTFd.admin import admin\nfrom CTFd.models import Pages\nfrom CTFd.schemas.pages import PageSchema\nfrom CTFd.utils import markdown\nfrom CTFd.utils.decorators import admins_only\n\n\[email protected](\"/admin/pages\")\n@admins_only\ndef pages_listing():\n pages = Pages.query.all()\n return render_template(\"admin/pages.html\", pages=pages)\n\n\[email protected](\"/admin/pages/new\")\n@admins_only\ndef pages_new():\n return render_template(\"admin/editor.html\")\n\n\[email protected](\"/admin/pages/preview\", methods=[\"POST\"])\n@admins_only\ndef pages_preview():\n # We only care about content.\n # Loading other attributes improperly will cause Marshmallow to incorrectly return a dict\n data = {\"content\": request.form.get(\"content\")}\n schema = PageSchema()\n page = schema.load(data)\n return render_template(\"page.html\", content=page.data.html)\n\n\[email protected](\"/admin/pages/<int:page_id>\")\n@admins_only\ndef pages_detail(page_id):\n page = Pages.query.filter_by(id=page_id).first_or_404()\n page_op = request.args.get(\"operation\")\n\n if request.method == \"GET\" and page_op == \"preview\":\n return render_template(\"page.html\", content=markdown(page.content))\n\n if request.method == \"GET\" and page_op == \"create\":\n return render_template(\"admin/editor.html\")\n\n return render_template(\"admin/editor.html\", page=page)\n", "path": "CTFd/admin/pages.py"}]} | 968 | 148 |
gh_patches_debug_34011 | rasdani/github-patches | git_diff | statsmodels__statsmodels-5203 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
web._generate_url returns ValueError--> intentional?
```
return ValueError('Input not understood')
```
seems like it should be `raise` instead of `return`. Am I missing something?
</issue>
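The distinction the reporter is pointing at: `return ValueError(...)` hands back an exception instance as an ordinary value, which the caller then has to check for (as `webdoc` does in the file below), whereas `raise ValueError(...)` propagates the error immediately. A tiny self-contained illustration:

```python
def bad(x):
    if x < 0:
        return ValueError("negative")  # caller receives an exception object as a value
    return x

def good(x):
    if x < 0:
        raise ValueError("negative")   # caller handles a normal exception
    return x

print(bad(-1))        # prints the ValueError instance; nothing is raised
try:
    good(-1)
except ValueError as err:
    print(err)        # "negative"
```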
<code>
[start of statsmodels/tools/web.py]
1 """
2 Provides a function to open the system browser to either search or go directly
3 to a function's reference
4 """
5 import webbrowser
6
7 from statsmodels.compat.python import urlencode
8 from statsmodels import __version__
9
10 BASE_URL = 'https://www.statsmodels.org/'
11
12
13 def _generate_url(arg, stable):
14 """
15 Parse inputs and return a correctly formatted URL or an error if the input
16 is not understandable
17 """
18 url = BASE_URL
19 if stable:
20 url += 'stable/'
21 else:
22 url += 'devel/'
23
24 if arg is None:
25 return url
26 elif type(arg) is str:
27 url += 'search.html?'
28 url += urlencode({'q': arg})
29 url += '&check_keywords=yes&area=default'
30 else:
31 try:
32 func = arg
33 func_name = func.__name__
34 func_module = func.__module__
35 if not func_module.startswith('statsmodels.'):
36 return ValueError('Function must be from statsmodels')
37 url += 'generated/'
38 url += func_module + '.' + func_name + '.html'
39 except:
40 return ValueError('Input not understood')
41 return url
42
43
44 def webdoc(arg=None, stable=None):
45 """
46 Opens a browser and displays online documentation
47
48 Parameters
49 ----------
50 arg, optional : string or statsmodels function
51 Either a string to search the documentation or a function
52 stable, optional : bool
53 Flag indicating whether to use the stable documentation (True) or
54 the development documentation (False). If not provided, opens
55 the stable documentation if the current version of statsmodels is a
56 release
57
58 Examples
59 --------
60 >>> import statsmodels.api as sm
61 >>> sm.webdoc() # Documention site
62 >>> sm.webdoc('glm') # Search for glm in docs
63 >>> sm.webdoc(sm.OLS, stable=False) # Go to generated help for OLS, devel
64
65 Notes
66 -----
67 By default, open stable documentation if the current version of statsmodels
68 is a release. Otherwise opens the development documentation.
69
70 Uses the default system browser.
71 """
72 stable = __version__ if 'dev' not in __version__ else stable
73 url_or_error = _generate_url(arg, stable)
74 if isinstance(url_or_error, ValueError):
75 raise url_or_error
76 webbrowser.open(url_or_error)
77 return None
78
[end of statsmodels/tools/web.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/statsmodels/tools/web.py b/statsmodels/tools/web.py
--- a/statsmodels/tools/web.py
+++ b/statsmodels/tools/web.py
@@ -4,7 +4,7 @@
"""
import webbrowser
-from statsmodels.compat.python import urlencode
+from statsmodels.compat.python import urlencode, string_types
from statsmodels import __version__
BASE_URL = 'https://www.statsmodels.org/'
@@ -12,8 +12,8 @@
def _generate_url(arg, stable):
"""
- Parse inputs and return a correctly formatted URL or an error if the input
- is not understandable
+ Parse inputs and return a correctly formatted URL or raises ValueError
+ if the input is not understandable
"""
url = BASE_URL
if stable:
@@ -23,7 +23,7 @@
if arg is None:
return url
- elif type(arg) is str:
+ elif isinstance(arg, string_types):
url += 'search.html?'
url += urlencode({'q': arg})
url += '&check_keywords=yes&area=default'
@@ -33,11 +33,11 @@
func_name = func.__name__
func_module = func.__module__
if not func_module.startswith('statsmodels.'):
- return ValueError('Function must be from statsmodels')
+ raise ValueError('Function must be from statsmodels')
url += 'generated/'
url += func_module + '.' + func_name + '.html'
- except:
- return ValueError('Input not understood')
+ except AttributeError:
+ raise ValueError('Input not understood')
return url
@@ -71,7 +71,5 @@
"""
stable = __version__ if 'dev' not in __version__ else stable
url_or_error = _generate_url(arg, stable)
- if isinstance(url_or_error, ValueError):
- raise url_or_error
webbrowser.open(url_or_error)
return None
| {"golden_diff": "diff --git a/statsmodels/tools/web.py b/statsmodels/tools/web.py\n--- a/statsmodels/tools/web.py\n+++ b/statsmodels/tools/web.py\n@@ -4,7 +4,7 @@\n \"\"\"\n import webbrowser\n \n-from statsmodels.compat.python import urlencode\n+from statsmodels.compat.python import urlencode, string_types\n from statsmodels import __version__\n \n BASE_URL = 'https://www.statsmodels.org/'\n@@ -12,8 +12,8 @@\n \n def _generate_url(arg, stable):\n \"\"\"\n- Parse inputs and return a correctly formatted URL or an error if the input\n- is not understandable\n+ Parse inputs and return a correctly formatted URL or raises ValueError\n+ if the input is not understandable\n \"\"\"\n url = BASE_URL\n if stable:\n@@ -23,7 +23,7 @@\n \n if arg is None:\n return url\n- elif type(arg) is str:\n+ elif isinstance(arg, string_types):\n url += 'search.html?'\n url += urlencode({'q': arg})\n url += '&check_keywords=yes&area=default'\n@@ -33,11 +33,11 @@\n func_name = func.__name__\n func_module = func.__module__\n if not func_module.startswith('statsmodels.'):\n- return ValueError('Function must be from statsmodels')\n+ raise ValueError('Function must be from statsmodels')\n url += 'generated/'\n url += func_module + '.' + func_name + '.html'\n- except:\n- return ValueError('Input not understood')\n+ except AttributeError:\n+ raise ValueError('Input not understood')\n return url\n \n \n@@ -71,7 +71,5 @@\n \"\"\"\n stable = __version__ if 'dev' not in __version__ else stable\n url_or_error = _generate_url(arg, stable)\n- if isinstance(url_or_error, ValueError):\n- raise url_or_error\n webbrowser.open(url_or_error)\n return None\n", "issue": "web._generate_url returns ValueError--> intentional?\n```\r\n return ValueError('Input not understood')\r\n```\r\nseems like it should be `raise` instead of `return`. Am I missing something?\n", "before_files": [{"content": "\"\"\"\nProvides a function to open the system browser to either search or go directly\nto a function's reference\n\"\"\"\nimport webbrowser\n\nfrom statsmodels.compat.python import urlencode\nfrom statsmodels import __version__\n\nBASE_URL = 'https://www.statsmodels.org/'\n\n\ndef _generate_url(arg, stable):\n \"\"\"\n Parse inputs and return a correctly formatted URL or an error if the input\n is not understandable\n \"\"\"\n url = BASE_URL\n if stable:\n url += 'stable/'\n else:\n url += 'devel/'\n\n if arg is None:\n return url\n elif type(arg) is str:\n url += 'search.html?'\n url += urlencode({'q': arg})\n url += '&check_keywords=yes&area=default'\n else:\n try:\n func = arg\n func_name = func.__name__\n func_module = func.__module__\n if not func_module.startswith('statsmodels.'):\n return ValueError('Function must be from statsmodels')\n url += 'generated/'\n url += func_module + '.' + func_name + '.html'\n except:\n return ValueError('Input not understood')\n return url\n\n\ndef webdoc(arg=None, stable=None):\n \"\"\"\n Opens a browser and displays online documentation\n\n Parameters\n ----------\n arg, optional : string or statsmodels function\n Either a string to search the documentation or a function\n stable, optional : bool\n Flag indicating whether to use the stable documentation (True) or\n the development documentation (False). 
If not provided, opens\n the stable documentation if the current version of statsmodels is a\n release\n\n Examples\n --------\n >>> import statsmodels.api as sm\n >>> sm.webdoc() # Documention site\n >>> sm.webdoc('glm') # Search for glm in docs\n >>> sm.webdoc(sm.OLS, stable=False) # Go to generated help for OLS, devel\n\n Notes\n -----\n By default, open stable documentation if the current version of statsmodels\n is a release. Otherwise opens the development documentation.\n\n Uses the default system browser.\n \"\"\"\n stable = __version__ if 'dev' not in __version__ else stable\n url_or_error = _generate_url(arg, stable)\n if isinstance(url_or_error, ValueError):\n raise url_or_error\n webbrowser.open(url_or_error)\n return None\n", "path": "statsmodels/tools/web.py"}]} | 1,241 | 427 |
gh_patches_debug_21230 | rasdani/github-patches | git_diff | ckan__ckan-6008 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Flask-Babel does not translate zh_TW and zh_CN
### CKAN Version if known (or site URL)
≧ 2.8.0
### Please describe the expected behaviour
When switching to ``zh_TW`` or ``zh_CN`` languages on the pages written in Flask (ex. ``/`` and ``/user``), the pages should be shown in corresponding languages.
### Please describe the actual behaviour
It shows English instead of ``zh_TW`` or ``zh_CN``.
### What steps can be taken to reproduce the issue?
Switch language to Chinese on the above-mentioned pages.
</issue>
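A hedged sketch of one way to handle this, mirroring the diff at the end of this record: keep the legacy `zh_TW` / `zh_CN` URL prefixes working by permanently redirecting them to the locale identifiers the translation catalogs are actually registered under (`zh_Hant_TW`, `zh_Hans_CN`). The `home` blueprint is the one defined in the file below:

```python
from flask import redirect

def redirect_locale(target_locale, path=None):
    # Rebuild the requested URL under the new locale prefix and redirect permanently.
    target = f'/{target_locale}/{path}' if path else f'/{target_locale}'
    return redirect(target, code=308)

# Legacy prefix -> identifier the translations are registered under.
locales_mapping = [('zh_TW', 'zh_Hant_TW'), ('zh_CN', 'zh_Hans_CN')]

for legacy, new in locales_mapping:
    home.add_url_rule(f'/{legacy}/', view_func=redirect_locale,
                      defaults={'target_locale': new})
```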
<code>
[start of ckan/views/home.py]
1 # encoding: utf-8
2
3 from flask import Blueprint, abort
4
5 import ckan.model as model
6 import ckan.logic as logic
7 import ckan.lib.base as base
8 import ckan.lib.search as search
9 import ckan.lib.helpers as h
10
11 from ckan.common import g, config, _
12
13 CACHE_PARAMETERS = [u'__cache', u'__no_cache__']
14
15
16 home = Blueprint(u'home', __name__)
17
18
19 @home.before_request
20 def before_request():
21 u'''set context and check authorization'''
22 try:
23 context = {
24 u'model': model,
25 u'user': g.user,
26 u'auth_user_obj': g.userobj}
27 logic.check_access(u'site_read', context)
28 except logic.NotAuthorized:
29 abort(403)
30
31
32 def index():
33 u'''display home page'''
34 try:
35 context = {u'model': model, u'session': model.Session,
36 u'user': g.user, u'auth_user_obj': g.userobj}
37 data_dict = {u'q': u'*:*',
38 u'facet.field': h.facets(),
39 u'rows': 4,
40 u'start': 0,
41 u'sort': u'view_recent desc',
42 u'fq': u'capacity:"public"'}
43 query = logic.get_action(u'package_search')(context, data_dict)
44 g.search_facets = query['search_facets']
45 g.package_count = query['count']
46 g.datasets = query['results']
47
48 org_label = h.humanize_entity_type(
49 u'organization',
50 h.default_group_type(u'organization'),
51 u'facet label') or _(u'Organizations')
52
53 group_label = h.humanize_entity_type(
54 u'group',
55 h.default_group_type(u'group'),
56 u'facet label') or _(u'Groups')
57
58 g.facet_titles = {
59 u'organization': org_label,
60 u'groups': group_label,
61 u'tags': _(u'Tags'),
62 u'res_format': _(u'Formats'),
63 u'license': _(u'Licenses'),
64 }
65
66 except search.SearchError:
67 g.package_count = 0
68
69 if g.userobj and not g.userobj.email:
70 url = h.url_for(controller=u'user', action=u'edit')
71 msg = _(u'Please <a href="%s">update your profile</a>'
72 u' and add your email address. ') % url + \
73 _(u'%s uses your email address'
74 u' if you need to reset your password.') \
75 % config.get(u'ckan.site_title')
76 h.flash_notice(msg, allow_html=True)
77 return base.render(u'home/index.html', extra_vars={})
78
79
80 def about():
81 u''' display about page'''
82 return base.render(u'home/about.html', extra_vars={})
83
84
85 util_rules = [
86 (u'/', index),
87 (u'/about', about)
88 ]
89 for rule, view_func in util_rules:
90 home.add_url_rule(rule, view_func=view_func)
91
[end of ckan/views/home.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/ckan/views/home.py b/ckan/views/home.py
--- a/ckan/views/home.py
+++ b/ckan/views/home.py
@@ -1,6 +1,6 @@
# encoding: utf-8
-from flask import Blueprint, abort
+from flask import Blueprint, abort, redirect
import ckan.model as model
import ckan.logic as logic
@@ -82,9 +82,36 @@
return base.render(u'home/about.html', extra_vars={})
+def redirect_locale(target_locale, path=None):
+ target = f'/{target_locale}/{path}' if path else f'/{target_locale}'
+ return redirect(target, code=308)
+
+
util_rules = [
(u'/', index),
(u'/about', about)
]
for rule, view_func in util_rules:
home.add_url_rule(rule, view_func=view_func)
+
+locales_mapping = [
+ ('zh_TW', 'zh_Hant_TW'),
+ ('zh_CN', 'zh_Hans_CN'),
+]
+
+for locale in locales_mapping:
+
+ legacy_locale = locale[0]
+ new_locale = locale[1]
+
+ home.add_url_rule(
+ f'/{legacy_locale}/',
+ view_func=redirect_locale,
+ defaults={'target_locale': new_locale}
+ )
+
+ home.add_url_rule(
+ f'/{legacy_locale}/<path:path>',
+ view_func=redirect_locale,
+ defaults={'target_locale': new_locale}
+ )
| {"golden_diff": "diff --git a/ckan/views/home.py b/ckan/views/home.py\n--- a/ckan/views/home.py\n+++ b/ckan/views/home.py\n@@ -1,6 +1,6 @@\n # encoding: utf-8\n \n-from flask import Blueprint, abort\n+from flask import Blueprint, abort, redirect\n \n import ckan.model as model\n import ckan.logic as logic\n@@ -82,9 +82,36 @@\n return base.render(u'home/about.html', extra_vars={})\n \n \n+def redirect_locale(target_locale, path=None):\n+ target = f'/{target_locale}/{path}' if path else f'/{target_locale}'\n+ return redirect(target, code=308)\n+\n+\n util_rules = [\n (u'/', index),\n (u'/about', about)\n ]\n for rule, view_func in util_rules:\n home.add_url_rule(rule, view_func=view_func)\n+\n+locales_mapping = [\n+ ('zh_TW', 'zh_Hant_TW'),\n+ ('zh_CN', 'zh_Hans_CN'),\n+]\n+\n+for locale in locales_mapping:\n+\n+ legacy_locale = locale[0]\n+ new_locale = locale[1]\n+\n+ home.add_url_rule(\n+ f'/{legacy_locale}/',\n+ view_func=redirect_locale,\n+ defaults={'target_locale': new_locale}\n+ )\n+\n+ home.add_url_rule(\n+ f'/{legacy_locale}/<path:path>',\n+ view_func=redirect_locale,\n+ defaults={'target_locale': new_locale}\n+ )\n", "issue": "Flask-Babel does not translate zh_TW and zh_CN\n### CKAN Version if known (or site URL)\r\n\r\n\u2267 2.8.0\r\n\r\n### Please describe the expected behaviour\r\n\r\nWhen switching to ``zh_TW`` or ``zh_CN`` languages on the pages written in Flask (ex. ``/`` and ``/user``), the pages should be shown in corresponding languages.\r\n\r\n### Please describe the actual behaviour\r\n\r\nIt shows English instead of ``zh_TW`` or ``zh_CN``.\r\n\r\n### What steps can be taken to reproduce the issue? \r\n\r\nSwitch language to Chinese on the above-mentioned pages.\n", "before_files": [{"content": "# encoding: utf-8\n\nfrom flask import Blueprint, abort\n\nimport ckan.model as model\nimport ckan.logic as logic\nimport ckan.lib.base as base\nimport ckan.lib.search as search\nimport ckan.lib.helpers as h\n\nfrom ckan.common import g, config, _\n\nCACHE_PARAMETERS = [u'__cache', u'__no_cache__']\n\n\nhome = Blueprint(u'home', __name__)\n\n\[email protected]_request\ndef before_request():\n u'''set context and check authorization'''\n try:\n context = {\n u'model': model,\n u'user': g.user,\n u'auth_user_obj': g.userobj}\n logic.check_access(u'site_read', context)\n except logic.NotAuthorized:\n abort(403)\n\n\ndef index():\n u'''display home page'''\n try:\n context = {u'model': model, u'session': model.Session,\n u'user': g.user, u'auth_user_obj': g.userobj}\n data_dict = {u'q': u'*:*',\n u'facet.field': h.facets(),\n u'rows': 4,\n u'start': 0,\n u'sort': u'view_recent desc',\n u'fq': u'capacity:\"public\"'}\n query = logic.get_action(u'package_search')(context, data_dict)\n g.search_facets = query['search_facets']\n g.package_count = query['count']\n g.datasets = query['results']\n\n org_label = h.humanize_entity_type(\n u'organization',\n h.default_group_type(u'organization'),\n u'facet label') or _(u'Organizations')\n\n group_label = h.humanize_entity_type(\n u'group',\n h.default_group_type(u'group'),\n u'facet label') or _(u'Groups')\n\n g.facet_titles = {\n u'organization': org_label,\n u'groups': group_label,\n u'tags': _(u'Tags'),\n u'res_format': _(u'Formats'),\n u'license': _(u'Licenses'),\n }\n\n except search.SearchError:\n g.package_count = 0\n\n if g.userobj and not g.userobj.email:\n url = h.url_for(controller=u'user', action=u'edit')\n msg = _(u'Please <a href=\"%s\">update your profile</a>'\n u' and add your email address. 
') % url + \\\n _(u'%s uses your email address'\n u' if you need to reset your password.') \\\n % config.get(u'ckan.site_title')\n h.flash_notice(msg, allow_html=True)\n return base.render(u'home/index.html', extra_vars={})\n\n\ndef about():\n u''' display about page'''\n return base.render(u'home/about.html', extra_vars={})\n\n\nutil_rules = [\n (u'/', index),\n (u'/about', about)\n]\nfor rule, view_func in util_rules:\n home.add_url_rule(rule, view_func=view_func)\n", "path": "ckan/views/home.py"}]} | 1,497 | 337 |
gh_patches_debug_6079 | rasdani/github-patches | git_diff | carpentries__amy-2324 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Bug: Internal Server Error: /fiscal/organizations/
Production issue.
```
FieldError at /fiscal/organizations/
Cannot resolve keyword 'membership' into field. Choices are: administered_events, affiliated_organizations, country, domain, fullname, hosted_events, id, latitude, longitude, member, memberships, selforganisedsubmission, sponsored_events, workshopinquiryrequest, workshoprequest
```
https://amy.carpentries.org/fiscal/organizations/?country=&membership__variant=silver&order_by=
</issue>
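The traceback already names the valid relation: the ORM exposes `memberships` (plural), while the filter queries `membership__variant`. A one-line sketch of the corresponding change inside `OrganizationFilter` (reusing the module's imports), matching the diff at the end of this record:

```python
# Sketch: rename the filter so it traverses the existing `memberships` relation.
memberships__variant = django_filters.MultipleChoiceFilter(
    label="Memberships (current or past)",
    choices=Membership.MEMBERSHIP_CHOICES,
    widget=Select2MultipleWidget,
)
```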
<code>
[start of amy/fiscal/filters.py]
1 from datetime import date
2
3 from django.forms import widgets
4 import django_filters
5
6 from workshops.fields import Select2MultipleWidget, Select2Widget
7 from workshops.filters import AllCountriesFilter, AMYFilterSet
8 from workshops.models import Membership, Organization
9
10
11 class OrganizationFilter(AMYFilterSet):
12 country = AllCountriesFilter(widget=Select2Widget)
13
14 membership__variant = django_filters.MultipleChoiceFilter(
15 label="Memberships (current or past)",
16 choices=Membership.MEMBERSHIP_CHOICES,
17 widget=Select2MultipleWidget,
18 )
19
20 order_by = django_filters.OrderingFilter(
21 fields=(
22 "fullname",
23 "domain",
24 ),
25 )
26
27 class Meta:
28 model = Organization
29 fields = [
30 "country",
31 ]
32
33
34 def filter_active_memberships_only(queryset, name, active):
35 """Limit Memberships to only active entries."""
36 if active:
37 today = date.today()
38 return queryset.filter(agreement_start__lte=today, agreement_end__gte=today)
39 else:
40 return queryset
41
42
43 def filter_training_seats_only(queryset, name, seats):
44 """Limit Memberships to only entries with some training seats allowed."""
45 if seats:
46 return queryset.filter(instructor_training_seats_total__gt=0)
47 else:
48 return queryset
49
50
51 def filter_nonpositive_remaining_seats(queryset, name, seats):
52 """Limit Memberships to only entries with negative remaining seats."""
53 if seats:
54 return queryset.filter(instructor_training_seats_remaining__lt=0)
55 else:
56 return queryset
57
58
59 class MembershipFilter(AMYFilterSet):
60 organization_name = django_filters.CharFilter(
61 label="Organisation name",
62 field_name="organizations__fullname",
63 lookup_expr="icontains",
64 )
65
66 MEMBERSHIP_CHOICES = (("", "Any"),) + Membership.MEMBERSHIP_CHOICES
67 variant = django_filters.ChoiceFilter(choices=MEMBERSHIP_CHOICES)
68
69 CONTRIBUTION_CHOICES = (("", "Any"),) + Membership.CONTRIBUTION_CHOICES
70 contribution_type = django_filters.ChoiceFilter(choices=CONTRIBUTION_CHOICES)
71
72 active_only = django_filters.BooleanFilter(
73 label="Only show active memberships",
74 method=filter_active_memberships_only,
75 widget=widgets.CheckboxInput,
76 )
77
78 training_seats_only = django_filters.BooleanFilter(
79 label="Only show memberships with non-zero allowed training seats",
80 method=filter_training_seats_only,
81 widget=widgets.CheckboxInput,
82 )
83
84 nonpositive_remaining_seats_only = django_filters.BooleanFilter(
85 label="Only show memberships with zero or less remaining seats",
86 method=filter_nonpositive_remaining_seats,
87 widget=widgets.CheckboxInput,
88 )
89
90 order_by = django_filters.OrderingFilter(
91 fields=(
92 "agreement_start",
93 "agreement_end",
94 "instructor_training_seats_remaining",
95 ),
96 )
97
98 class Meta:
99 model = Membership
100 fields = [
101 "organization_name",
102 "consortium",
103 "public_status",
104 "variant",
105 "contribution_type",
106 ]
107
108
109 class MembershipTrainingsFilter(AMYFilterSet):
110 organization_name = django_filters.CharFilter(
111 label="Organization name",
112 field_name="organization__fullname",
113 lookup_expr="icontains",
114 )
115
116 active_only = django_filters.BooleanFilter(
117 label="Only show active memberships",
118 method=filter_active_memberships_only,
119 widget=widgets.CheckboxInput,
120 )
121
122 training_seats_only = django_filters.BooleanFilter(
123 label="Only show memberships with non-zero allowed training seats",
124 method=filter_training_seats_only,
125 widget=widgets.CheckboxInput,
126 )
127
128 nonpositive_remaining_seats_only = django_filters.BooleanFilter(
129 label="Only show memberships with zero or less remaining seats",
130 method=filter_nonpositive_remaining_seats,
131 widget=widgets.CheckboxInput,
132 )
133
134 order_by = django_filters.OrderingFilter(
135 fields=(
136 "organization__fullname",
137 "organization__domain",
138 "agreement_start",
139 "agreement_end",
140 "instructor_training_seats_total",
141 "instructor_training_seats_utilized",
142 "instructor_training_seats_remaining",
143 ),
144 )
145
146 class Meta:
147 model = Membership
148 fields = [
149 "organization_name",
150 ]
151
[end of amy/fiscal/filters.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/amy/fiscal/filters.py b/amy/fiscal/filters.py
--- a/amy/fiscal/filters.py
+++ b/amy/fiscal/filters.py
@@ -11,7 +11,7 @@
class OrganizationFilter(AMYFilterSet):
country = AllCountriesFilter(widget=Select2Widget)
- membership__variant = django_filters.MultipleChoiceFilter(
+ memberships__variant = django_filters.MultipleChoiceFilter(
label="Memberships (current or past)",
choices=Membership.MEMBERSHIP_CHOICES,
widget=Select2MultipleWidget,
| {"golden_diff": "diff --git a/amy/fiscal/filters.py b/amy/fiscal/filters.py\n--- a/amy/fiscal/filters.py\n+++ b/amy/fiscal/filters.py\n@@ -11,7 +11,7 @@\n class OrganizationFilter(AMYFilterSet):\n country = AllCountriesFilter(widget=Select2Widget)\n \n- membership__variant = django_filters.MultipleChoiceFilter(\n+ memberships__variant = django_filters.MultipleChoiceFilter(\n label=\"Memberships (current or past)\",\n choices=Membership.MEMBERSHIP_CHOICES,\n widget=Select2MultipleWidget,\n", "issue": "Bug: Internal Server Error: /fiscal/organizations/\nProduction issue.\r\n\r\n```\r\nFieldError at /fiscal/organizations/\r\nCannot resolve keyword 'membership' into field. Choices are: administered_events, affiliated_organizations, country, domain, fullname, hosted_events, id, latitude, longitude, member, memberships, selforganisedsubmission, sponsored_events, workshopinquiryrequest, workshoprequest\r\n```\r\n\r\nhttps://amy.carpentries.org/fiscal/organizations/?country=&membership__variant=silver&order_by=\n", "before_files": [{"content": "from datetime import date\n\nfrom django.forms import widgets\nimport django_filters\n\nfrom workshops.fields import Select2MultipleWidget, Select2Widget\nfrom workshops.filters import AllCountriesFilter, AMYFilterSet\nfrom workshops.models import Membership, Organization\n\n\nclass OrganizationFilter(AMYFilterSet):\n country = AllCountriesFilter(widget=Select2Widget)\n\n membership__variant = django_filters.MultipleChoiceFilter(\n label=\"Memberships (current or past)\",\n choices=Membership.MEMBERSHIP_CHOICES,\n widget=Select2MultipleWidget,\n )\n\n order_by = django_filters.OrderingFilter(\n fields=(\n \"fullname\",\n \"domain\",\n ),\n )\n\n class Meta:\n model = Organization\n fields = [\n \"country\",\n ]\n\n\ndef filter_active_memberships_only(queryset, name, active):\n \"\"\"Limit Memberships to only active entries.\"\"\"\n if active:\n today = date.today()\n return queryset.filter(agreement_start__lte=today, agreement_end__gte=today)\n else:\n return queryset\n\n\ndef filter_training_seats_only(queryset, name, seats):\n \"\"\"Limit Memberships to only entries with some training seats allowed.\"\"\"\n if seats:\n return queryset.filter(instructor_training_seats_total__gt=0)\n else:\n return queryset\n\n\ndef filter_nonpositive_remaining_seats(queryset, name, seats):\n \"\"\"Limit Memberships to only entries with negative remaining seats.\"\"\"\n if seats:\n return queryset.filter(instructor_training_seats_remaining__lt=0)\n else:\n return queryset\n\n\nclass MembershipFilter(AMYFilterSet):\n organization_name = django_filters.CharFilter(\n label=\"Organisation name\",\n field_name=\"organizations__fullname\",\n lookup_expr=\"icontains\",\n )\n\n MEMBERSHIP_CHOICES = ((\"\", \"Any\"),) + Membership.MEMBERSHIP_CHOICES\n variant = django_filters.ChoiceFilter(choices=MEMBERSHIP_CHOICES)\n\n CONTRIBUTION_CHOICES = ((\"\", \"Any\"),) + Membership.CONTRIBUTION_CHOICES\n contribution_type = django_filters.ChoiceFilter(choices=CONTRIBUTION_CHOICES)\n\n active_only = django_filters.BooleanFilter(\n label=\"Only show active memberships\",\n method=filter_active_memberships_only,\n widget=widgets.CheckboxInput,\n )\n\n training_seats_only = django_filters.BooleanFilter(\n label=\"Only show memberships with non-zero allowed training seats\",\n method=filter_training_seats_only,\n widget=widgets.CheckboxInput,\n )\n\n nonpositive_remaining_seats_only = django_filters.BooleanFilter(\n label=\"Only show memberships with zero or less remaining seats\",\n 
method=filter_nonpositive_remaining_seats,\n widget=widgets.CheckboxInput,\n )\n\n order_by = django_filters.OrderingFilter(\n fields=(\n \"agreement_start\",\n \"agreement_end\",\n \"instructor_training_seats_remaining\",\n ),\n )\n\n class Meta:\n model = Membership\n fields = [\n \"organization_name\",\n \"consortium\",\n \"public_status\",\n \"variant\",\n \"contribution_type\",\n ]\n\n\nclass MembershipTrainingsFilter(AMYFilterSet):\n organization_name = django_filters.CharFilter(\n label=\"Organization name\",\n field_name=\"organization__fullname\",\n lookup_expr=\"icontains\",\n )\n\n active_only = django_filters.BooleanFilter(\n label=\"Only show active memberships\",\n method=filter_active_memberships_only,\n widget=widgets.CheckboxInput,\n )\n\n training_seats_only = django_filters.BooleanFilter(\n label=\"Only show memberships with non-zero allowed training seats\",\n method=filter_training_seats_only,\n widget=widgets.CheckboxInput,\n )\n\n nonpositive_remaining_seats_only = django_filters.BooleanFilter(\n label=\"Only show memberships with zero or less remaining seats\",\n method=filter_nonpositive_remaining_seats,\n widget=widgets.CheckboxInput,\n )\n\n order_by = django_filters.OrderingFilter(\n fields=(\n \"organization__fullname\",\n \"organization__domain\",\n \"agreement_start\",\n \"agreement_end\",\n \"instructor_training_seats_total\",\n \"instructor_training_seats_utilized\",\n \"instructor_training_seats_remaining\",\n ),\n )\n\n class Meta:\n model = Membership\n fields = [\n \"organization_name\",\n ]\n", "path": "amy/fiscal/filters.py"}]} | 1,907 | 128 |
gh_patches_debug_14037 | rasdani/github-patches | git_diff | voicepaw__so-vits-svc-fork-557 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
PermissionError: [Errno 13] Permission denied
I'm on Windows 10 WSL2 Ubuntu and this happens when I try to run `svc pre-resample`, what's going on?
Traceback (most recent call last):
File "/home/fab/miniconda3/envs/sovits/bin/svc", line 5, in <module>
from so_vits_svc_fork.__main__ import cli
File "/home/fab/miniconda3/envs/sovits/lib/python3.10/site-packages/so_vits_svc_fork/__init__.py", line 5, in <module>
init_logger()
File "/home/fab/miniconda3/envs/sovits/lib/python3.10/site-packages/so_vits_svc_fork/logger.py", line 31, in init_logger
FileHandler(f"{__name__.split('.')[0]}.log"),
File "/home/fab/miniconda3/envs/sovits/lib/python3.10/logging/__init__.py", line 1169, in __init__
StreamHandler.__init__(self, self._open())
File "/home/fab/miniconda3/envs/sovits/lib/python3.10/logging/__init__.py", line 1201, in _open
return open_func(self.baseFilename, self.mode,
PermissionError: [Errno 13] Permission denied: '/home/fab/sovits/so_vits_svc_fork.log'
</issue>
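What the traceback shows: importing the package calls `init_logger()`, which tries to create `so_vits_svc_fork.log` in the current working directory — here a directory the user cannot write to — so the import itself fails before any command runs. A hedged sketch of one way to make the logger tolerate that (a reshaping of the `init_logger` body in the file below, reusing its imports); the patch at the end of this record takes the simpler route of commenting the `FileHandler` out:

```python
# Sketch: fall back to console-only logging if the log file cannot be created.
handlers = [StreamHandler() if is_notebook() else RichHandler()]
try:
    handlers.append(FileHandler(f"{package_name}.log"))
except OSError:
    pass  # e.g. PermissionError: keep console logging only

basicConfig(
    level=INFO,
    format="%(asctime)s %(message)s",
    datefmt="[%X]",
    handlers=handlers,
)
```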
<code>
[start of src/so_vits_svc_fork/logger.py]
1 import os
2 import sys
3 from logging import (
4 DEBUG,
5 INFO,
6 FileHandler,
7 StreamHandler,
8 basicConfig,
9 captureWarnings,
10 getLogger,
11 )
12 from pathlib import Path
13
14 from rich.logging import RichHandler
15
16 LOGGER_INIT = False
17
18
19 def init_logger() -> None:
20 global LOGGER_INIT
21 if LOGGER_INIT:
22 return
23
24 IS_TEST = "test" in Path.cwd().stem
25 package_name = sys.modules[__name__].__package__
26 basicConfig(
27 level=INFO,
28 format="%(asctime)s %(message)s",
29 datefmt="[%X]",
30 handlers=[
31 StreamHandler() if is_notebook() else RichHandler(),
32 FileHandler(f"{package_name}.log"),
33 ],
34 )
35 if IS_TEST:
36 getLogger(package_name).setLevel(DEBUG)
37 captureWarnings(True)
38 LOGGER_INIT = True
39
40
41 def is_notebook():
42 try:
43 from IPython import get_ipython
44
45 if "IPKernelApp" not in get_ipython().config: # pragma: no cover
46 raise ImportError("console")
47 return False
48 if "VSCODE_PID" in os.environ: # pragma: no cover
49 raise ImportError("vscode")
50 return False
51 except Exception:
52 return False
53 else: # pragma: no cover
54 return True
55
[end of src/so_vits_svc_fork/logger.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/so_vits_svc_fork/logger.py b/src/so_vits_svc_fork/logger.py
--- a/src/so_vits_svc_fork/logger.py
+++ b/src/so_vits_svc_fork/logger.py
@@ -1,14 +1,6 @@
import os
import sys
-from logging import (
- DEBUG,
- INFO,
- FileHandler,
- StreamHandler,
- basicConfig,
- captureWarnings,
- getLogger,
-)
+from logging import DEBUG, INFO, StreamHandler, basicConfig, captureWarnings, getLogger
from pathlib import Path
from rich.logging import RichHandler
@@ -29,7 +21,7 @@
datefmt="[%X]",
handlers=[
StreamHandler() if is_notebook() else RichHandler(),
- FileHandler(f"{package_name}.log"),
+ # FileHandler(f"{package_name}.log"),
],
)
if IS_TEST:
| {"golden_diff": "diff --git a/src/so_vits_svc_fork/logger.py b/src/so_vits_svc_fork/logger.py\n--- a/src/so_vits_svc_fork/logger.py\n+++ b/src/so_vits_svc_fork/logger.py\n@@ -1,14 +1,6 @@\n import os\n import sys\n-from logging import (\n- DEBUG,\n- INFO,\n- FileHandler,\n- StreamHandler,\n- basicConfig,\n- captureWarnings,\n- getLogger,\n-)\n+from logging import DEBUG, INFO, StreamHandler, basicConfig, captureWarnings, getLogger\n from pathlib import Path\n \n from rich.logging import RichHandler\n@@ -29,7 +21,7 @@\n datefmt=\"[%X]\",\n handlers=[\n StreamHandler() if is_notebook() else RichHandler(),\n- FileHandler(f\"{package_name}.log\"),\n+ # FileHandler(f\"{package_name}.log\"),\n ],\n )\n if IS_TEST:\n", "issue": "PermissionError: [Errno 13] Permission denied\nI'm on Windows 10 WSL2 Ubuntu and this happens when I try to run `svc pre-resample`, what's going on?\r\n\r\nTraceback (most recent call last):\r\n File \"/home/fab/miniconda3/envs/sovits/bin/svc\", line 5, in <module>\r\n from so_vits_svc_fork.__main__ import cli\r\n File \"/home/fab/miniconda3/envs/sovits/lib/python3.10/site-packages/so_vits_svc_fork/__init__.py\", line 5, in <module>\r\n init_logger()\r\n File \"/home/fab/miniconda3/envs/sovits/lib/python3.10/site-packages/so_vits_svc_fork/logger.py\", line 31, in init_logger\r\n FileHandler(f\"{__name__.split('.')[0]}.log\"),\r\n File \"/home/fab/miniconda3/envs/sovits/lib/python3.10/logging/__init__.py\", line 1169, in __init__\r\n StreamHandler.__init__(self, self._open())\r\n File \"/home/fab/miniconda3/envs/sovits/lib/python3.10/logging/__init__.py\", line 1201, in _open\r\n return open_func(self.baseFilename, self.mode,\r\nPermissionError: [Errno 13] Permission denied: '/home/fab/sovits/so_vits_svc_fork.log'\n", "before_files": [{"content": "import os\nimport sys\nfrom logging import (\n DEBUG,\n INFO,\n FileHandler,\n StreamHandler,\n basicConfig,\n captureWarnings,\n getLogger,\n)\nfrom pathlib import Path\n\nfrom rich.logging import RichHandler\n\nLOGGER_INIT = False\n\n\ndef init_logger() -> None:\n global LOGGER_INIT\n if LOGGER_INIT:\n return\n\n IS_TEST = \"test\" in Path.cwd().stem\n package_name = sys.modules[__name__].__package__\n basicConfig(\n level=INFO,\n format=\"%(asctime)s %(message)s\",\n datefmt=\"[%X]\",\n handlers=[\n StreamHandler() if is_notebook() else RichHandler(),\n FileHandler(f\"{package_name}.log\"),\n ],\n )\n if IS_TEST:\n getLogger(package_name).setLevel(DEBUG)\n captureWarnings(True)\n LOGGER_INIT = True\n\n\ndef is_notebook():\n try:\n from IPython import get_ipython\n\n if \"IPKernelApp\" not in get_ipython().config: # pragma: no cover\n raise ImportError(\"console\")\n return False\n if \"VSCODE_PID\" in os.environ: # pragma: no cover\n raise ImportError(\"vscode\")\n return False\n except Exception:\n return False\n else: # pragma: no cover\n return True\n", "path": "src/so_vits_svc_fork/logger.py"}]} | 1,261 | 206 |
gh_patches_debug_469 | rasdani/github-patches | git_diff | googleapis__google-api-python-client-293 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Switch from "uritemplate" to "uritemplate.py"
There are at least 2 URI template packages on the PyPI: [uritemplate](https://pypi.python.org/pypi/uritemplate/0.6) and [uritemplate.py](https://pypi.python.org/pypi/uritemplate.py/0.3.0). Both of these packages use `uritemplate` as the package name to be imported, which causes a conflict when both are installed (see https://github.com/sigmavirus24/uritemplate/issues/14).
I propose that this project depend on `uritemplate.py` because:
- `uritemplate.py` is API-compatible with `uritemplate`, but the reverse is not true. I have confirmed that this library works with uritemplate.py.
- Other projects depend on `uritemplate.py`. For example, we are currently working on a project that depends on both `github3.py` (GitHub API client) and `google-api-python-client`. Installing both results in an immediate `ImportError` due to the `uritemplate` conflict.
This is a simple, low-risk change that would aid compatibility with other projects.
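For concreteness, the swap I have in mind would look roughly like this in `setup.py`; the version bounds below are illustrative only, not tested pins:

```python
install_requires = [
    'httplib2>=0.8,<1',
    'oauth2client>=1.5.0,<4.0.0',
    'six>=1.6.1,<2',
    # 'uritemplate>=0.6,<1',   # current pin on the "uritemplate" distribution
    'uritemplate.py>=0.3.0',   # illustrative pin on the API-compatible package
]
```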
Thoughts?
</issue>
<code>
[start of setup.py]
1 # Copyright 2014 Google Inc. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Setup script for Google API Python client.
16
17 Also installs included versions of third party libraries, if those libraries
18 are not already installed.
19 """
20 from __future__ import print_function
21
22 import sys
23
24 if sys.version_info < (2, 6):
25 print('google-api-python-client requires python version >= 2.6.',
26 file=sys.stderr)
27 sys.exit(1)
28 if (3, 1) <= sys.version_info < (3, 3):
29 print('google-api-python-client requires python3 version >= 3.3.',
30 file=sys.stderr)
31 sys.exit(1)
32
33 from setuptools import setup
34 import pkg_resources
35
36 def _DetectBadness():
37 import os
38 if 'SKIP_GOOGLEAPICLIENT_COMPAT_CHECK' in os.environ:
39 return
40 o2c_pkg = None
41 try:
42 o2c_pkg = pkg_resources.get_distribution('oauth2client')
43 except pkg_resources.DistributionNotFound:
44 pass
45 oauth2client = None
46 try:
47 import oauth2client
48 except ImportError:
49 pass
50 if o2c_pkg is None and oauth2client is not None:
51 raise RuntimeError(
52 'Previous version of google-api-python-client detected; due to a '
53 'packaging issue, we cannot perform an in-place upgrade. Please remove '
54 'the old version and re-install this package.'
55 )
56
57 _DetectBadness()
58
59 packages = [
60 'apiclient',
61 'googleapiclient',
62 'googleapiclient/discovery_cache',
63 ]
64
65 install_requires = [
66 'httplib2>=0.8,<1',
67 'oauth2client>=1.5.0,<4.0.0',
68 'six>=1.6.1,<2',
69 'uritemplate>=0.6,<1',
70 ]
71
72 if sys.version_info < (2, 7):
73 install_requires.append('argparse')
74
75 long_desc = """The Google API Client for Python is a client library for
76 accessing the Plus, Moderator, and many other Google APIs."""
77
78 import googleapiclient
79 version = googleapiclient.__version__
80
81 setup(
82 name="google-api-python-client",
83 version=version,
84 description="Google API Client Library for Python",
85 long_description=long_desc,
86 author="Google Inc.",
87 url="http://github.com/google/google-api-python-client/",
88 install_requires=install_requires,
89 packages=packages,
90 package_data={},
91 license="Apache 2.0",
92 keywords="google api client",
93 classifiers=[
94 'Programming Language :: Python :: 2',
95 'Programming Language :: Python :: 2.6',
96 'Programming Language :: Python :: 2.7',
97 'Programming Language :: Python :: 3',
98 'Programming Language :: Python :: 3.3',
99 'Programming Language :: Python :: 3.4',
100 'Development Status :: 5 - Production/Stable',
101 'Intended Audience :: Developers',
102 'License :: OSI Approved :: Apache Software License',
103 'Operating System :: OS Independent',
104 'Topic :: Internet :: WWW/HTTP',
105 ],
106 )
107
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -66,7 +66,7 @@
'httplib2>=0.8,<1',
'oauth2client>=1.5.0,<4.0.0',
'six>=1.6.1,<2',
- 'uritemplate>=0.6,<1',
+ 'uritemplate>=3.0.0,<4',
]
if sys.version_info < (2, 7):
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -66,7 +66,7 @@\n 'httplib2>=0.8,<1',\n 'oauth2client>=1.5.0,<4.0.0',\n 'six>=1.6.1,<2',\n- 'uritemplate>=0.6,<1',\n+ 'uritemplate>=3.0.0,<4',\n ]\n \n if sys.version_info < (2, 7):\n", "issue": "Switch from \"uritemplate\" to \"uritemplate.py\"\nThere are at least 2 URI template packages on the PyPI: [uritemplate](https://pypi.python.org/pypi/uritemplate/0.6) and [uritemplate.py](https://pypi.python.org/pypi/uritemplate.py/0.3.0). Both of these packages use `uritemplate` as the package name to be imported, which causes a conflict when both are installed (see https://github.com/sigmavirus24/uritemplate/issues/14).\n\nI propose that this project depend on `uritemplate.py` because:\n- `uritemplate.py` is API-compatible with `uritemplate`, but the reverse is not true. I have confirmed that this library works with uritemplate.py.\n- Other projects depend on `uritemplate.py`. For example, we are currently working on a project that depends on both `github3.py` (GitHub API client) and `google-api-python-client`. Installing both results in an immediate `ImportError` due to the `uritemplate` conflict.\n\nThis is a simple, low-risk change that would aid compatibility with other projects.\n\nThoughts?\n\n", "before_files": [{"content": "# Copyright 2014 Google Inc. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Setup script for Google API Python client.\n\nAlso installs included versions of third party libraries, if those libraries\nare not already installed.\n\"\"\"\nfrom __future__ import print_function\n\nimport sys\n\nif sys.version_info < (2, 6):\n print('google-api-python-client requires python version >= 2.6.',\n file=sys.stderr)\n sys.exit(1)\nif (3, 1) <= sys.version_info < (3, 3):\n print('google-api-python-client requires python3 version >= 3.3.',\n file=sys.stderr)\n sys.exit(1)\n\nfrom setuptools import setup\nimport pkg_resources\n\ndef _DetectBadness():\n import os\n if 'SKIP_GOOGLEAPICLIENT_COMPAT_CHECK' in os.environ:\n return\n o2c_pkg = None\n try:\n o2c_pkg = pkg_resources.get_distribution('oauth2client')\n except pkg_resources.DistributionNotFound:\n pass\n oauth2client = None\n try:\n import oauth2client\n except ImportError:\n pass\n if o2c_pkg is None and oauth2client is not None:\n raise RuntimeError(\n 'Previous version of google-api-python-client detected; due to a '\n 'packaging issue, we cannot perform an in-place upgrade. 
Please remove '\n 'the old version and re-install this package.'\n )\n\n_DetectBadness()\n\npackages = [\n 'apiclient',\n 'googleapiclient',\n 'googleapiclient/discovery_cache',\n]\n\ninstall_requires = [\n 'httplib2>=0.8,<1',\n 'oauth2client>=1.5.0,<4.0.0',\n 'six>=1.6.1,<2',\n 'uritemplate>=0.6,<1',\n]\n\nif sys.version_info < (2, 7):\n install_requires.append('argparse')\n\nlong_desc = \"\"\"The Google API Client for Python is a client library for\naccessing the Plus, Moderator, and many other Google APIs.\"\"\"\n\nimport googleapiclient\nversion = googleapiclient.__version__\n\nsetup(\n name=\"google-api-python-client\",\n version=version,\n description=\"Google API Client Library for Python\",\n long_description=long_desc,\n author=\"Google Inc.\",\n url=\"http://github.com/google/google-api-python-client/\",\n install_requires=install_requires,\n packages=packages,\n package_data={},\n license=\"Apache 2.0\",\n keywords=\"google api client\",\n classifiers=[\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: OS Independent',\n 'Topic :: Internet :: WWW/HTTP',\n ],\n)\n", "path": "setup.py"}]} | 1,794 | 116 |
gh_patches_debug_21178 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-1983 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Distros are coupled with instrumentations
As mentioned [here](https://github.com/open-telemetry/opentelemetry-python/discussions/2005#discussion-3489738).
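One possible direction, sketched below with purely illustrative entry point group names, is to let arbitrary hooks run before and after instrumentor loading so that a distro no longer has to own those steps:

```python
# Sketch only: the entry point group names are placeholders, not a spec.
from pkg_resources import iter_entry_points

def _load_instrumentors(distro):
    # hooks that run before any instrumentor is loaded, independent of the distro
    for entry_point in iter_entry_points("opentelemetry_pre_instrument"):
        entry_point.load()()

    # (dependency-conflict checks omitted here for brevity)
    for entry_point in iter_entry_points("opentelemetry_instrumentor"):
        distro.load_instrumentor(entry_point, skip_dep_check=True)

    # hooks that run after all instrumentors have been loaded
    for entry_point in iter_entry_points("opentelemetry_post_instrument"):
        entry_point.load()()
```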
</issue>
<code>
[start of opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py]
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import sys
16 from logging import getLogger
17 from os import environ, path
18 from os.path import abspath, dirname, pathsep
19 from re import sub
20
21 from pkg_resources import iter_entry_points
22
23 from opentelemetry.environment_variables import (
24 OTEL_PYTHON_DISABLED_INSTRUMENTATIONS,
25 )
26 from opentelemetry.instrumentation.dependencies import (
27 get_dist_dependency_conflicts,
28 )
29 from opentelemetry.instrumentation.distro import BaseDistro, DefaultDistro
30
31 logger = getLogger(__file__)
32
33
34 def _load_distros() -> BaseDistro:
35 for entry_point in iter_entry_points("opentelemetry_distro"):
36 try:
37 distro = entry_point.load()()
38 if not isinstance(distro, BaseDistro):
39 logger.debug(
40 "%s is not an OpenTelemetry Distro. Skipping",
41 entry_point.name,
42 )
43 continue
44 logger.debug(
45 "Distribution %s will be configured", entry_point.name
46 )
47 return distro
48 except Exception as exc: # pylint: disable=broad-except
49 logger.exception(
50 "Distribution %s configuration failed", entry_point.name
51 )
52 raise exc
53 return DefaultDistro()
54
55
56 def _load_instrumentors(distro):
57 package_to_exclude = environ.get(OTEL_PYTHON_DISABLED_INSTRUMENTATIONS, [])
58 if isinstance(package_to_exclude, str):
59 package_to_exclude = package_to_exclude.split(",")
60 # to handle users entering "requests , flask" or "requests, flask" with spaces
61 package_to_exclude = [x.strip() for x in package_to_exclude]
62
63 for entry_point in iter_entry_points("opentelemetry_instrumentor"):
64 if entry_point.name in package_to_exclude:
65 logger.debug(
66 "Instrumentation skipped for library %s", entry_point.name
67 )
68 continue
69
70 try:
71 conflict = get_dist_dependency_conflicts(entry_point.dist)
72 if conflict:
73 logger.debug(
74 "Skipping instrumentation %s: %s",
75 entry_point.name,
76 conflict,
77 )
78 continue
79
80 # tell instrumentation to not run dep checks again as we already did it above
81 distro.load_instrumentor(entry_point, skip_dep_check=True)
82 logger.debug("Instrumented %s", entry_point.name)
83 except Exception as exc: # pylint: disable=broad-except
84 logger.exception("Instrumenting of %s failed", entry_point.name)
85 raise exc
86
87
88 def _load_configurators():
89 configured = None
90 for entry_point in iter_entry_points("opentelemetry_configurator"):
91 if configured is not None:
92 logger.warning(
93 "Configuration of %s not loaded, %s already loaded",
94 entry_point.name,
95 configured,
96 )
97 continue
98 try:
99 entry_point.load()().configure() # type: ignore
100 configured = entry_point.name
101 except Exception as exc: # pylint: disable=broad-except
102 logger.exception("Configuration of %s failed", entry_point.name)
103 raise exc
104
105
106 def initialize():
107 try:
108 distro = _load_distros()
109 distro.configure()
110 _load_configurators()
111 _load_instrumentors(distro)
112 except Exception: # pylint: disable=broad-except
113 logger.exception("Failed to auto initialize opentelemetry")
114 finally:
115 environ["PYTHONPATH"] = sub(
116 r"{}{}?".format(dirname(abspath(__file__)), pathsep),
117 "",
118 environ["PYTHONPATH"],
119 )
120
121
122 if (
123 hasattr(sys, "argv")
124 and sys.argv[0].split(path.sep)[-1] == "celery"
125 and "worker" in sys.argv[1:]
126 ):
127 from celery.signals import worker_process_init # pylint:disable=E0401
128
129 @worker_process_init.connect(weak=False)
130 def init_celery(*args, **kwargs):
131 initialize()
132
133
134 else:
135 initialize()
136
[end of opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py
+++ b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py
@@ -60,6 +60,9 @@
# to handle users entering "requests , flask" or "requests, flask" with spaces
package_to_exclude = [x.strip() for x in package_to_exclude]
+ for entry_point in iter_entry_points("opentelemetry_pre_instrument"):
+ entry_point.load()()
+
for entry_point in iter_entry_points("opentelemetry_instrumentor"):
if entry_point.name in package_to_exclude:
logger.debug(
@@ -84,6 +87,9 @@
logger.exception("Instrumenting of %s failed", entry_point.name)
raise exc
+ for entry_point in iter_entry_points("opentelemetry_post_instrument"):
+ entry_point.load()()
+
def _load_configurators():
configured = None
| {"golden_diff": "diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py\n--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py\n+++ b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py\n@@ -60,6 +60,9 @@\n # to handle users entering \"requests , flask\" or \"requests, flask\" with spaces\n package_to_exclude = [x.strip() for x in package_to_exclude]\n \n+ for entry_point in iter_entry_points(\"opentelemetry_pre_instrument\"):\n+ entry_point.load()()\n+\n for entry_point in iter_entry_points(\"opentelemetry_instrumentor\"):\n if entry_point.name in package_to_exclude:\n logger.debug(\n@@ -84,6 +87,9 @@\n logger.exception(\"Instrumenting of %s failed\", entry_point.name)\n raise exc\n \n+ for entry_point in iter_entry_points(\"opentelemetry_post_instrument\"):\n+ entry_point.load()()\n+\n \n def _load_configurators():\n configured = None\n", "issue": "Distros are coupled with instrumentations\nAs mentioned [here](https://github.com/open-telemetry/opentelemetry-python/discussions/2005#discussion-3489738).\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport sys\nfrom logging import getLogger\nfrom os import environ, path\nfrom os.path import abspath, dirname, pathsep\nfrom re import sub\n\nfrom pkg_resources import iter_entry_points\n\nfrom opentelemetry.environment_variables import (\n OTEL_PYTHON_DISABLED_INSTRUMENTATIONS,\n)\nfrom opentelemetry.instrumentation.dependencies import (\n get_dist_dependency_conflicts,\n)\nfrom opentelemetry.instrumentation.distro import BaseDistro, DefaultDistro\n\nlogger = getLogger(__file__)\n\n\ndef _load_distros() -> BaseDistro:\n for entry_point in iter_entry_points(\"opentelemetry_distro\"):\n try:\n distro = entry_point.load()()\n if not isinstance(distro, BaseDistro):\n logger.debug(\n \"%s is not an OpenTelemetry Distro. 
Skipping\",\n entry_point.name,\n )\n continue\n logger.debug(\n \"Distribution %s will be configured\", entry_point.name\n )\n return distro\n except Exception as exc: # pylint: disable=broad-except\n logger.exception(\n \"Distribution %s configuration failed\", entry_point.name\n )\n raise exc\n return DefaultDistro()\n\n\ndef _load_instrumentors(distro):\n package_to_exclude = environ.get(OTEL_PYTHON_DISABLED_INSTRUMENTATIONS, [])\n if isinstance(package_to_exclude, str):\n package_to_exclude = package_to_exclude.split(\",\")\n # to handle users entering \"requests , flask\" or \"requests, flask\" with spaces\n package_to_exclude = [x.strip() for x in package_to_exclude]\n\n for entry_point in iter_entry_points(\"opentelemetry_instrumentor\"):\n if entry_point.name in package_to_exclude:\n logger.debug(\n \"Instrumentation skipped for library %s\", entry_point.name\n )\n continue\n\n try:\n conflict = get_dist_dependency_conflicts(entry_point.dist)\n if conflict:\n logger.debug(\n \"Skipping instrumentation %s: %s\",\n entry_point.name,\n conflict,\n )\n continue\n\n # tell instrumentation to not run dep checks again as we already did it above\n distro.load_instrumentor(entry_point, skip_dep_check=True)\n logger.debug(\"Instrumented %s\", entry_point.name)\n except Exception as exc: # pylint: disable=broad-except\n logger.exception(\"Instrumenting of %s failed\", entry_point.name)\n raise exc\n\n\ndef _load_configurators():\n configured = None\n for entry_point in iter_entry_points(\"opentelemetry_configurator\"):\n if configured is not None:\n logger.warning(\n \"Configuration of %s not loaded, %s already loaded\",\n entry_point.name,\n configured,\n )\n continue\n try:\n entry_point.load()().configure() # type: ignore\n configured = entry_point.name\n except Exception as exc: # pylint: disable=broad-except\n logger.exception(\"Configuration of %s failed\", entry_point.name)\n raise exc\n\n\ndef initialize():\n try:\n distro = _load_distros()\n distro.configure()\n _load_configurators()\n _load_instrumentors(distro)\n except Exception: # pylint: disable=broad-except\n logger.exception(\"Failed to auto initialize opentelemetry\")\n finally:\n environ[\"PYTHONPATH\"] = sub(\n r\"{}{}?\".format(dirname(abspath(__file__)), pathsep),\n \"\",\n environ[\"PYTHONPATH\"],\n )\n\n\nif (\n hasattr(sys, \"argv\")\n and sys.argv[0].split(path.sep)[-1] == \"celery\"\n and \"worker\" in sys.argv[1:]\n):\n from celery.signals import worker_process_init # pylint:disable=E0401\n\n @worker_process_init.connect(weak=False)\n def init_celery(*args, **kwargs):\n initialize()\n\n\nelse:\n initialize()\n", "path": "opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py"}]} | 1,871 | 266 |
gh_patches_debug_36968 | rasdani/github-patches | git_diff | ESMCI__cime-1436 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add --component option to preview_namelists
It would be useful if users could specify a single component when running `preview_namelists` rather than building namelists for all components when the script is run from the command line in a case directory.
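A rough sketch of what the plumbing could look like (the argument name and wiring are illustrative only):

```python
# Hypothetical sketch: let create_namelists() target a single component class.
def create_namelists(case, component=None):
    """Create component namelists; if 'component' is given, only that one."""
    models = case.get_values("COMP_CLASSES")
    models += [models.pop(0)]  # keep cpl last, as the current code does
    for model in models:
        model_str = model.lower()
        if component is not None and component != model_str:
            continue  # a single component was requested; skip the others
        # ... run this component's buildnml exactly as the existing loop does ...
```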
</issue>
<code>
[start of scripts/lib/CIME/preview_namelists.py]
1 """
2 API for preview namelist
3 """
4
5 from CIME.XML.standard_module_setup import *
6
7 import glob, shutil, imp
8 logger = logging.getLogger(__name__)
9
10 def create_dirs(case):
11 """
12 Make necessary directories for case
13 """
14 # Get data from XML
15 exeroot = case.get_value("EXEROOT")
16 libroot = case.get_value("LIBROOT")
17 incroot = case.get_value("INCROOT")
18 rundir = case.get_value("RUNDIR")
19 caseroot = case.get_value("CASEROOT")
20
21 docdir = os.path.join(caseroot, "CaseDocs")
22 dirs_to_make = []
23 models = case.get_values("COMP_CLASSES")
24 for model in models:
25 dirname = model.lower()
26 dirs_to_make.append(os.path.join(exeroot, dirname, "obj"))
27
28 dirs_to_make.extend([exeroot, libroot, incroot, rundir, docdir])
29
30 for dir_to_make in dirs_to_make:
31 if (not os.path.isdir(dir_to_make)):
32 try:
33 logger.debug("Making dir '%s'" % dir_to_make)
34 os.makedirs(dir_to_make)
35 except OSError as e:
36 expect(False, "Could not make directory '%s', error: %s" % (dir_to_make, e))
37
38 # As a convenience write the location of the case directory in the bld and run directories
39 for dir_ in (exeroot, rundir):
40 with open(os.path.join(dir_,"CASEROOT"),"w+") as fd:
41 fd.write(caseroot+"\n")
42
43 def create_namelists(case):
44 """
45 Create component namelists
46 """
47 case.flush()
48
49 create_dirs(case)
50
51 casebuild = case.get_value("CASEBUILD")
52 caseroot = case.get_value("CASEROOT")
53 rundir = case.get_value("RUNDIR")
54
55 docdir = os.path.join(caseroot, "CaseDocs")
56
57 # Load modules
58 case.load_env()
59
60 logger.info("Creating component namelists")
61
62 # Create namelists - must have cpl last in the list below
63 # Note - cpl must be last in the loop below so that in generating its namelist,
64 # it can use xml vars potentially set by other component's buildnml scripts
65 models = case.get_values("COMP_CLASSES")
66 models += [models.pop(0)]
67 for model in models:
68 model_str = model.lower()
69 config_file = case.get_value("CONFIG_%s_FILE" % model_str.upper())
70 config_dir = os.path.dirname(config_file)
71 if model_str == "cpl":
72 compname = "drv"
73 else:
74 compname = case.get_value("COMP_%s" % model_str.upper())
75
76 cmd = os.path.join(config_dir, "buildnml")
77 do_run_cmd = False
78 # This code will try to import and run each buildnml as a subroutine
79 # if that fails it will run it as a program in a seperate shell
80 try:
81 with open(cmd, 'r') as f:
82 first_line = f.readline()
83 if "python" in first_line:
84 mod = imp.load_source("buildnml", cmd)
85 logger.info(" Calling %s buildnml"%compname)
86 mod.buildnml(case, caseroot, compname)
87 else:
88 raise SyntaxError
89 except SyntaxError as detail:
90 if 'python' in first_line:
91 expect(False, detail)
92 else:
93 do_run_cmd = True
94 except AttributeError:
95 do_run_cmd = True
96 except:
97 raise
98
99 if do_run_cmd:
100 logger.info(" Running %s buildnml"%compname)
101 case.flush()
102 output = run_cmd_no_fail("%s %s" % (cmd, caseroot), verbose=False)
103 logger.info(output)
104 # refresh case xml object from file
105 case.read_xml()
106
107 logger.info("Finished creating component namelists")
108
109 # Save namelists to docdir
110 if (not os.path.isdir(docdir)):
111 os.makedirs(docdir)
112 try:
113 with open(os.path.join(docdir, "README"), "w") as fd:
114 fd.write(" CESM Resolved Namelist Files\n For documentation only DO NOT MODIFY\n")
115 except (OSError, IOError) as e:
116 expect(False, "Failed to write %s/README: %s" % (docdir, e))
117
118 for cpglob in ["*_in_[0-9]*", "*modelio*", "*_in",
119 "*streams*txt*", "*stxt", "*maps.rc", "*cism.config*"]:
120 for file_to_copy in glob.glob(os.path.join(rundir, cpglob)):
121 logger.debug("Copy file from '%s' to '%s'" % (file_to_copy, docdir))
122 shutil.copy2(file_to_copy, docdir)
123
124 # Copy over chemistry mechanism docs if they exist
125 if (os.path.isdir(os.path.join(casebuild, "camconf"))):
126 for file_to_copy in glob.glob(os.path.join(casebuild, "camconf", "*chem_mech*")):
127 shutil.copy2(file_to_copy, docdir)
128
[end of scripts/lib/CIME/preview_namelists.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/scripts/lib/CIME/preview_namelists.py b/scripts/lib/CIME/preview_namelists.py
--- a/scripts/lib/CIME/preview_namelists.py
+++ b/scripts/lib/CIME/preview_namelists.py
@@ -40,7 +40,7 @@
with open(os.path.join(dir_,"CASEROOT"),"w+") as fd:
fd.write(caseroot+"\n")
-def create_namelists(case):
+def create_namelists(case, component=None):
"""
Create component namelists
"""
@@ -73,36 +73,37 @@
else:
compname = case.get_value("COMP_%s" % model_str.upper())
- cmd = os.path.join(config_dir, "buildnml")
- do_run_cmd = False
- # This code will try to import and run each buildnml as a subroutine
- # if that fails it will run it as a program in a seperate shell
- try:
- with open(cmd, 'r') as f:
- first_line = f.readline()
- if "python" in first_line:
- mod = imp.load_source("buildnml", cmd)
- logger.info(" Calling %s buildnml"%compname)
- mod.buildnml(case, caseroot, compname)
- else:
- raise SyntaxError
- except SyntaxError as detail:
- if 'python' in first_line:
- expect(False, detail)
- else:
+ if component is None or component == model_str:
+ cmd = os.path.join(config_dir, "buildnml")
+ do_run_cmd = False
+ # This code will try to import and run each buildnml as a subroutine
+ # if that fails it will run it as a program in a seperate shell
+ try:
+ with open(cmd, 'r') as f:
+ first_line = f.readline()
+ if "python" in first_line:
+ mod = imp.load_source("buildnml", cmd)
+ logger.info(" Calling %s buildnml"%compname)
+ mod.buildnml(case, caseroot, compname)
+ else:
+ raise SyntaxError
+ except SyntaxError as detail:
+ if 'python' in first_line:
+ expect(False, detail)
+ else:
+ do_run_cmd = True
+ except AttributeError:
do_run_cmd = True
- except AttributeError:
- do_run_cmd = True
- except:
- raise
-
- if do_run_cmd:
- logger.info(" Running %s buildnml"%compname)
- case.flush()
- output = run_cmd_no_fail("%s %s" % (cmd, caseroot), verbose=False)
- logger.info(output)
- # refresh case xml object from file
- case.read_xml()
+ except:
+ raise
+
+ if do_run_cmd:
+ logger.info(" Running %s buildnml"%compname)
+ case.flush()
+ output = run_cmd_no_fail("%s %s" % (cmd, caseroot), verbose=False)
+ logger.info(output)
+ # refresh case xml object from file
+ case.read_xml()
logger.info("Finished creating component namelists")
| {"golden_diff": "diff --git a/scripts/lib/CIME/preview_namelists.py b/scripts/lib/CIME/preview_namelists.py\n--- a/scripts/lib/CIME/preview_namelists.py\n+++ b/scripts/lib/CIME/preview_namelists.py\n@@ -40,7 +40,7 @@\n with open(os.path.join(dir_,\"CASEROOT\"),\"w+\") as fd:\n fd.write(caseroot+\"\\n\")\n \n-def create_namelists(case):\n+def create_namelists(case, component=None):\n \"\"\"\n Create component namelists\n \"\"\"\n@@ -73,36 +73,37 @@\n else:\n compname = case.get_value(\"COMP_%s\" % model_str.upper())\n \n- cmd = os.path.join(config_dir, \"buildnml\")\n- do_run_cmd = False\n- # This code will try to import and run each buildnml as a subroutine\n- # if that fails it will run it as a program in a seperate shell\n- try:\n- with open(cmd, 'r') as f:\n- first_line = f.readline()\n- if \"python\" in first_line:\n- mod = imp.load_source(\"buildnml\", cmd)\n- logger.info(\" Calling %s buildnml\"%compname)\n- mod.buildnml(case, caseroot, compname)\n- else:\n- raise SyntaxError\n- except SyntaxError as detail:\n- if 'python' in first_line:\n- expect(False, detail)\n- else:\n+ if component is None or component == model_str:\n+ cmd = os.path.join(config_dir, \"buildnml\")\n+ do_run_cmd = False\n+ # This code will try to import and run each buildnml as a subroutine\n+ # if that fails it will run it as a program in a seperate shell\n+ try:\n+ with open(cmd, 'r') as f:\n+ first_line = f.readline()\n+ if \"python\" in first_line:\n+ mod = imp.load_source(\"buildnml\", cmd)\n+ logger.info(\" Calling %s buildnml\"%compname)\n+ mod.buildnml(case, caseroot, compname)\n+ else:\n+ raise SyntaxError\n+ except SyntaxError as detail:\n+ if 'python' in first_line:\n+ expect(False, detail)\n+ else:\n+ do_run_cmd = True\n+ except AttributeError:\n do_run_cmd = True\n- except AttributeError:\n- do_run_cmd = True\n- except:\n- raise\n-\n- if do_run_cmd:\n- logger.info(\" Running %s buildnml\"%compname)\n- case.flush()\n- output = run_cmd_no_fail(\"%s %s\" % (cmd, caseroot), verbose=False)\n- logger.info(output)\n- # refresh case xml object from file\n- case.read_xml()\n+ except:\n+ raise\n+\n+ if do_run_cmd:\n+ logger.info(\" Running %s buildnml\"%compname)\n+ case.flush()\n+ output = run_cmd_no_fail(\"%s %s\" % (cmd, caseroot), verbose=False)\n+ logger.info(output)\n+ # refresh case xml object from file\n+ case.read_xml()\n \n logger.info(\"Finished creating component namelists\")\n", "issue": "Add --component option to preview_namelists\nIt would be useful if users could specify a single component when running `preview_namelists` rather than building namelists for all components when the script is run from the command line in a case directory.\n", "before_files": [{"content": "\"\"\"\nAPI for preview namelist\n\"\"\"\n\nfrom CIME.XML.standard_module_setup import *\n\nimport glob, shutil, imp\nlogger = logging.getLogger(__name__)\n\ndef create_dirs(case):\n \"\"\"\n Make necessary directories for case\n \"\"\"\n # Get data from XML\n exeroot = case.get_value(\"EXEROOT\")\n libroot = case.get_value(\"LIBROOT\")\n incroot = case.get_value(\"INCROOT\")\n rundir = case.get_value(\"RUNDIR\")\n caseroot = case.get_value(\"CASEROOT\")\n\n docdir = os.path.join(caseroot, \"CaseDocs\")\n dirs_to_make = []\n models = case.get_values(\"COMP_CLASSES\")\n for model in models:\n dirname = model.lower()\n dirs_to_make.append(os.path.join(exeroot, dirname, \"obj\"))\n\n dirs_to_make.extend([exeroot, libroot, incroot, rundir, docdir])\n\n for dir_to_make in dirs_to_make:\n if (not os.path.isdir(dir_to_make)):\n try:\n 
logger.debug(\"Making dir '%s'\" % dir_to_make)\n os.makedirs(dir_to_make)\n except OSError as e:\n expect(False, \"Could not make directory '%s', error: %s\" % (dir_to_make, e))\n\n # As a convenience write the location of the case directory in the bld and run directories\n for dir_ in (exeroot, rundir):\n with open(os.path.join(dir_,\"CASEROOT\"),\"w+\") as fd:\n fd.write(caseroot+\"\\n\")\n\ndef create_namelists(case):\n \"\"\"\n Create component namelists\n \"\"\"\n case.flush()\n\n create_dirs(case)\n\n casebuild = case.get_value(\"CASEBUILD\")\n caseroot = case.get_value(\"CASEROOT\")\n rundir = case.get_value(\"RUNDIR\")\n\n docdir = os.path.join(caseroot, \"CaseDocs\")\n\n # Load modules\n case.load_env()\n\n logger.info(\"Creating component namelists\")\n\n # Create namelists - must have cpl last in the list below\n # Note - cpl must be last in the loop below so that in generating its namelist,\n # it can use xml vars potentially set by other component's buildnml scripts\n models = case.get_values(\"COMP_CLASSES\")\n models += [models.pop(0)]\n for model in models:\n model_str = model.lower()\n config_file = case.get_value(\"CONFIG_%s_FILE\" % model_str.upper())\n config_dir = os.path.dirname(config_file)\n if model_str == \"cpl\":\n compname = \"drv\"\n else:\n compname = case.get_value(\"COMP_%s\" % model_str.upper())\n\n cmd = os.path.join(config_dir, \"buildnml\")\n do_run_cmd = False\n # This code will try to import and run each buildnml as a subroutine\n # if that fails it will run it as a program in a seperate shell\n try:\n with open(cmd, 'r') as f:\n first_line = f.readline()\n if \"python\" in first_line:\n mod = imp.load_source(\"buildnml\", cmd)\n logger.info(\" Calling %s buildnml\"%compname)\n mod.buildnml(case, caseroot, compname)\n else:\n raise SyntaxError\n except SyntaxError as detail:\n if 'python' in first_line:\n expect(False, detail)\n else:\n do_run_cmd = True\n except AttributeError:\n do_run_cmd = True\n except:\n raise\n\n if do_run_cmd:\n logger.info(\" Running %s buildnml\"%compname)\n case.flush()\n output = run_cmd_no_fail(\"%s %s\" % (cmd, caseroot), verbose=False)\n logger.info(output)\n # refresh case xml object from file\n case.read_xml()\n\n logger.info(\"Finished creating component namelists\")\n\n # Save namelists to docdir\n if (not os.path.isdir(docdir)):\n os.makedirs(docdir)\n try:\n with open(os.path.join(docdir, \"README\"), \"w\") as fd:\n fd.write(\" CESM Resolved Namelist Files\\n For documentation only DO NOT MODIFY\\n\")\n except (OSError, IOError) as e:\n expect(False, \"Failed to write %s/README: %s\" % (docdir, e))\n\n for cpglob in [\"*_in_[0-9]*\", \"*modelio*\", \"*_in\",\n \"*streams*txt*\", \"*stxt\", \"*maps.rc\", \"*cism.config*\"]:\n for file_to_copy in glob.glob(os.path.join(rundir, cpglob)):\n logger.debug(\"Copy file from '%s' to '%s'\" % (file_to_copy, docdir))\n shutil.copy2(file_to_copy, docdir)\n\n # Copy over chemistry mechanism docs if they exist\n if (os.path.isdir(os.path.join(casebuild, \"camconf\"))):\n for file_to_copy in glob.glob(os.path.join(casebuild, \"camconf\", \"*chem_mech*\")):\n shutil.copy2(file_to_copy, docdir)\n", "path": "scripts/lib/CIME/preview_namelists.py"}]} | 2,003 | 736 |
gh_patches_debug_1633 | rasdani/github-patches | git_diff | microsoft__botbuilder-python-2050 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
botbuilder support for regex==2022 and above
Description:
I'm currently working on building a chatbot using Azure Bot Builder SDK in conjunction with OpenAI. In my project, I'm relying on the OpenAIEmbedding class from the langchain package, which utilizes Tiktoken. However, I've run into an issue due to dependency conflicts with Tiktoken. Specifically, Tiktoken requires regex version 2022 or higher, while the Bot Builder package supports only up to regex version 2019.
Feature Request:
I kindly request adding support for Tiktoken's regex version 2022 or higher in the OpenAIEmbedding class within the langchain package. This update would resolve the dependency conflicts and enable smoother integration of OpenAI into projects using Azure Bot Builder SDK.
Additional Information:
Current Behavior: Currently, the OpenAIEmbedding class in langchain relies on Tiktoken, which necessitates a regex version that is not compatible with the Bot Builder SDK's regex version support.
Desired Behavior: The botbuilder classes should be updated to support Tiktoken's dependency on regex version 2022 or higher.
Impact of the Feature:
This feature would benefit developers working on chatbot projects that use Azure Bot Builder SDK and OpenAI. It would eliminate dependency conflicts, allowing for a seamless integration experience.
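Concretely, the conflict traces back to the pinned requirement in the `botbuilder-dialogs` `setup.py`; a relaxed pin could look roughly like this (the exact lower bound is only an example, to be agreed on):

```python
REQUIRES = [
    # "regex<=2019.08.19",   # current pin, blocks tiktoken's regex requirement
    "regex>=2022.1.18",      # example of a relaxed pin
    "emoji==1.7.0",
    # ... remaining pins unchanged ...
]
```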
</issue>
<code>
[start of libraries/botbuilder-dialogs/setup.py]
1 # Copyright (c) Microsoft Corporation. All rights reserved.
2 # Licensed under the MIT License.
3
4 import os
5 from setuptools import setup
6
7 REQUIRES = [
8 "regex<=2019.08.19",
9 "emoji==1.7.0",
10 "recognizers-text-date-time>=1.0.2a1",
11 "recognizers-text-number-with-unit>=1.0.2a1",
12 "recognizers-text-number>=1.0.2a1",
13 "recognizers-text>=1.0.2a1",
14 "recognizers-text-choice>=1.0.2a1",
15 "babel==2.9.1",
16 "botbuilder-schema==4.15.0",
17 "botframework-connector==4.15.0",
18 "botbuilder-core==4.15.0",
19 ]
20
21 TEST_REQUIRES = ["aiounittest==1.3.0"]
22
23 root = os.path.abspath(os.path.dirname(__file__))
24
25 with open(os.path.join(root, "botbuilder", "dialogs", "about.py")) as f:
26 package_info = {}
27 info = f.read()
28 exec(info, package_info)
29
30 with open(os.path.join(root, "README.rst"), encoding="utf-8") as f:
31 long_description = f.read()
32
33 setup(
34 name=package_info["__title__"],
35 version=package_info["__version__"],
36 url=package_info["__uri__"],
37 author=package_info["__author__"],
38 description=package_info["__description__"],
39 keywords=["BotBuilderDialogs", "bots", "ai", "botframework", "botbuilder"],
40 long_description=long_description,
41 long_description_content_type="text/x-rst",
42 license=package_info["__license__"],
43 packages=[
44 "botbuilder.dialogs",
45 "botbuilder.dialogs.prompts",
46 "botbuilder.dialogs.choices",
47 "botbuilder.dialogs.skills",
48 "botbuilder.dialogs.memory",
49 "botbuilder.dialogs.memory.path_resolvers",
50 "botbuilder.dialogs.memory.scopes",
51 ],
52 install_requires=REQUIRES + TEST_REQUIRES,
53 tests_require=TEST_REQUIRES,
54 include_package_data=True,
55 classifiers=[
56 "Programming Language :: Python :: 3.7",
57 "Intended Audience :: Developers",
58 "License :: OSI Approved :: MIT License",
59 "Operating System :: OS Independent",
60 "Development Status :: 5 - Production/Stable",
61 "Topic :: Scientific/Engineering :: Artificial Intelligence",
62 ],
63 )
64
[end of libraries/botbuilder-dialogs/setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/libraries/botbuilder-dialogs/setup.py b/libraries/botbuilder-dialogs/setup.py
--- a/libraries/botbuilder-dialogs/setup.py
+++ b/libraries/botbuilder-dialogs/setup.py
@@ -5,7 +5,7 @@
from setuptools import setup
REQUIRES = [
- "regex<=2019.08.19",
+ "regex>=2022.1.18",
"emoji==1.7.0",
"recognizers-text-date-time>=1.0.2a1",
"recognizers-text-number-with-unit>=1.0.2a1",
| {"golden_diff": "diff --git a/libraries/botbuilder-dialogs/setup.py b/libraries/botbuilder-dialogs/setup.py\n--- a/libraries/botbuilder-dialogs/setup.py\n+++ b/libraries/botbuilder-dialogs/setup.py\n@@ -5,7 +5,7 @@\n from setuptools import setup\n \n REQUIRES = [\n- \"regex<=2019.08.19\",\n+ \"regex>=2022.1.18\",\n \"emoji==1.7.0\",\n \"recognizers-text-date-time>=1.0.2a1\",\n \"recognizers-text-number-with-unit>=1.0.2a1\",\n", "issue": "botbuidler support for regex== 2022 and above\nDescription:\r\n\r\nI'm currently working on building a chatbot using Azure Bot Builder SDK in conjunction with OpenAI. In my project, I'm relying on the OpenAIEmbedding class from the langchain package, which utilizes Tiktoken. However, I've run into an issue due to dependency conflicts with Tiktoken. Specifically, Tiktoken requires regex version 2022 or higher, while the Bot Builder package supports only up to regex version 2019.\r\n\r\nFeature Request:\r\n\r\nI kindly request adding support for Tiktoken's regex version 2022 or higher in the OpenAIEmbedding class within the langchain package. This update would resolve the dependency conflicts and enable smoother integration of OpenAI into projects using Azure Bot Builder SDK.\r\n\r\nAdditional Information:\r\n\r\nCurrent Behavior: Currently, the OpenAIEmbedding class in langchain relies on Tiktoken, which necessitates a regex version that is not compatible with the Bot Builder SDK's regex version support.\r\n\r\nDesired Behavior: The botbuilder classes should be updated to support Tiktoken's dependency on regex version 2022 or higher t\r\n\r\nImpact of the Feature:\r\n\r\nThis feature would benefit developers working on chatbot projects that use Azure Bot Builder SDK and OpenAI. It would eliminate dependency conflicts, allowing for a seamless integration experience.\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. 
All rights reserved.\n# Licensed under the MIT License.\n\nimport os\nfrom setuptools import setup\n\nREQUIRES = [\n \"regex<=2019.08.19\",\n \"emoji==1.7.0\",\n \"recognizers-text-date-time>=1.0.2a1\",\n \"recognizers-text-number-with-unit>=1.0.2a1\",\n \"recognizers-text-number>=1.0.2a1\",\n \"recognizers-text>=1.0.2a1\",\n \"recognizers-text-choice>=1.0.2a1\",\n \"babel==2.9.1\",\n \"botbuilder-schema==4.15.0\",\n \"botframework-connector==4.15.0\",\n \"botbuilder-core==4.15.0\",\n]\n\nTEST_REQUIRES = [\"aiounittest==1.3.0\"]\n\nroot = os.path.abspath(os.path.dirname(__file__))\n\nwith open(os.path.join(root, \"botbuilder\", \"dialogs\", \"about.py\")) as f:\n package_info = {}\n info = f.read()\n exec(info, package_info)\n\nwith open(os.path.join(root, \"README.rst\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nsetup(\n name=package_info[\"__title__\"],\n version=package_info[\"__version__\"],\n url=package_info[\"__uri__\"],\n author=package_info[\"__author__\"],\n description=package_info[\"__description__\"],\n keywords=[\"BotBuilderDialogs\", \"bots\", \"ai\", \"botframework\", \"botbuilder\"],\n long_description=long_description,\n long_description_content_type=\"text/x-rst\",\n license=package_info[\"__license__\"],\n packages=[\n \"botbuilder.dialogs\",\n \"botbuilder.dialogs.prompts\",\n \"botbuilder.dialogs.choices\",\n \"botbuilder.dialogs.skills\",\n \"botbuilder.dialogs.memory\",\n \"botbuilder.dialogs.memory.path_resolvers\",\n \"botbuilder.dialogs.memory.scopes\",\n ],\n install_requires=REQUIRES + TEST_REQUIRES,\n tests_require=TEST_REQUIRES,\n include_package_data=True,\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 5 - Production/Stable\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n)\n", "path": "libraries/botbuilder-dialogs/setup.py"}]} | 1,483 | 142 |
gh_patches_debug_21491 | rasdani/github-patches | git_diff | plone__Products.CMFPlone-2793 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
verifydb fails with debug in some cases
When using `./bin/instance verifydb -D` to check the `Data.fs`, it fails in some cases:
```
Traceback (most recent call last):
File "/Users/pbauer/.cache/buildout/eggs/Products.CMFPlone-5.2rc1-py3.7.egg/Products/CMFPlone/_scripts/verifydb.py", line 68, in verify_record
class_info = unpickler.load()
File "/Users/pbauer/.cache/buildout/eggs/ZODB-5.5.1-py3.7.egg/ZODB/_compat.py", line 62, in find_class
return super(Unpickler, self).find_class(modulename, name)
ModuleNotFoundError: No module named 'Products.Archetypes'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./bin/instance", line 262, in <module>
+ sys.argv[1:]))
File "/Users/pbauer/.cache/buildout/eggs/plone.recipe.zope2instance-6.1.3-py3.7.egg/plone/recipe/zope2instance/ctl.py", line 937, in main
c.onecmd(' '.join(options.args))
File "/usr/local/Cellar/python/3.7.2_2/Frameworks/Python.framework/Versions/3.7/lib/python3.7/cmd.py", line 217, in onecmd
return func(arg)
File "/Users/pbauer/.cache/buildout/eggs/Products.CMFPlone-5.2rc1-py3.7.egg/Products/CMFPlone/_scripts/verifydb.py", line 31, in zopectl_entry
verify_zodb(app, debug=options.debug)
File "/Users/pbauer/.cache/buildout/eggs/Products.CMFPlone-5.2rc1-py3.7.egg/Products/CMFPlone/_scripts/verifydb.py", line 50, in verify_zodb
success = verify_record(oid, data, debug)
File "/Users/pbauer/.cache/buildout/eggs/Products.CMFPlone-5.2rc1-py3.7.egg/Products/CMFPlone/_scripts/verifydb.py", line 82, in verify_record
pickletools.dis(pickle[pos:])
UnboundLocalError: local variable 'pos' referenced before assignment
```
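From the traceback, `pos` is only assigned after the first `unpickler.load()` succeeds, so when that load raises (here because `Products.Archetypes` is not importable) the debug branch hits an unbound name. A minimal sketch of a guard, purely illustrative and reusing the module's existing imports:

```python
def verify_record(oid, data, debug=False):
    input_file = io.BytesIO(data)
    unpickler = PersistentUnpickler(None, persistent_load, input_file)
    class_info = 'unknown'
    pos = None  # defined up front so the except branch can always test it
    try:
        class_info = unpickler.load()
        pos = input_file.tell()
        unpickler.load()
    except Exception:
        logger.info('Could not process %s record %s:', class_info, repr(oid))
        logger.info(traceback.format_exc())
        if debug and pos is not None:
            pickletools.dis(data[pos:])  # only disassemble if the first load worked
        if debug:
            pdb.set_trace()
        return False
    return True
```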
</issue>
<code>
[start of Products/CMFPlone/_scripts/verifydb.py]
1 # -*- coding: utf-8 -*-
2 from Zope2.Startup.run import make_wsgi_app
3 from ZODB.interfaces import IStorageCurrentRecordIteration
4 from ZODB.serialize import PersistentUnpickler
5
6 import argparse
7 import io
8 import logging
9 import pdb
10 import pickletools
11 import sys
12 import traceback
13 import Zope2
14
15 logger = logging.getLogger('zodbverify')
16
17
18 def zopectl_entry(self, arg):
19 parser = argparse.ArgumentParser(
20 prog=sys.argv[0] + ' verifydb',
21 description='Verifies that all records in the database can be loaded.',
22 )
23 parser.add_argument(
24 '-D', '--debug', action='store_true', dest='debug',
25 help='pause to debug broken pickles')
26 options = parser.parse_args(arg.split(' ') if arg else [])
27
28 logging.basicConfig(level=logging.INFO)
29 make_wsgi_app({}, self.options.configfile)
30 app = Zope2.app()
31 verify_zodb(app, debug=options.debug)
32
33
34 def verify_zodb(obj, debug=False):
35 storage = obj._p_jar._db._storage
36 if not IStorageCurrentRecordIteration.providedBy(storage):
37 raise TypeError(
38 'ZODB storage {} does not implement record_iternext'.format(
39 storage))
40
41 logger.info('Scanning ZODB...')
42
43 next_ = None
44 count = 0
45 errors = 0
46 while True:
47 count += 1
48 oid, tid, data, next_ = storage.record_iternext(next_)
49 logger.debug('Verifying {}'.format(oid))
50 success = verify_record(oid, data, debug)
51 if not success:
52 errors += 1
53 if next_ is None:
54 break
55
56 logger.info(
57 'Done! Scanned {} records. '
58 'Found {} records that could not be loaded.'.format(
59 count, errors)
60 )
61
62
63 def verify_record(oid, data, debug=False):
64 input_file = io.BytesIO(data)
65 unpickler = PersistentUnpickler(None, persistent_load, input_file)
66 class_info = 'unknown'
67 try:
68 class_info = unpickler.load()
69 pos = input_file.tell()
70 unpickler.load()
71 except Exception:
72 input_file.seek(0)
73 pickle = input_file.read()
74 logger.info('\nCould not process {} record {}:'.format(
75 class_info,
76 repr(oid),
77 ))
78 logger.info(repr(pickle))
79 logger.info(traceback.format_exc())
80 if debug:
81 try:
82 pickletools.dis(pickle[pos:])
83 finally:
84 pdb.set_trace()
85 return False
86 return True
87
88
89 def persistent_load(ref):
90 pass
91
[end of Products/CMFPlone/_scripts/verifydb.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/Products/CMFPlone/_scripts/verifydb.py b/Products/CMFPlone/_scripts/verifydb.py
--- a/Products/CMFPlone/_scripts/verifydb.py
+++ b/Products/CMFPlone/_scripts/verifydb.py
@@ -64,6 +64,7 @@
input_file = io.BytesIO(data)
unpickler = PersistentUnpickler(None, persistent_load, input_file)
class_info = 'unknown'
+ pos = None
try:
class_info = unpickler.load()
pos = input_file.tell()
@@ -77,11 +78,17 @@
))
logger.info(repr(pickle))
logger.info(traceback.format_exc())
- if debug:
+ if debug and pos is not None:
try:
pickletools.dis(pickle[pos:])
+ except Exception:
+ # ignore exceptions while disassembling the pickle since the
+ # real issue is that it references a unavailable module
+ pass
finally:
pdb.set_trace()
+ elif debug and pos is None:
+ pdb.set_trace()
return False
return True
| {"golden_diff": "diff --git a/Products/CMFPlone/_scripts/verifydb.py b/Products/CMFPlone/_scripts/verifydb.py\n--- a/Products/CMFPlone/_scripts/verifydb.py\n+++ b/Products/CMFPlone/_scripts/verifydb.py\n@@ -64,6 +64,7 @@\n input_file = io.BytesIO(data)\n unpickler = PersistentUnpickler(None, persistent_load, input_file)\n class_info = 'unknown'\n+ pos = None\n try:\n class_info = unpickler.load()\n pos = input_file.tell()\n@@ -77,11 +78,17 @@\n ))\n logger.info(repr(pickle))\n logger.info(traceback.format_exc())\n- if debug:\n+ if debug and pos is not None:\n try:\n pickletools.dis(pickle[pos:])\n+ except Exception:\n+ # ignore exceptions while disassembling the pickle since the\n+ # real issue is that it references a unavailable module\n+ pass\n finally:\n pdb.set_trace()\n+ elif debug and pos is None:\n+ pdb.set_trace()\n return False\n return True\n", "issue": "verifydb fails with debug in some cases\nUsing `./bin/instance verifydb -D` to check the `Data.fs` it fails in some cases:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"/Users/pbauer/.cache/buildout/eggs/Products.CMFPlone-5.2rc1-py3.7.egg/Products/CMFPlone/_scripts/verifydb.py\", line 68, in verify_record\r\n class_info = unpickler.load()\r\n File \"/Users/pbauer/.cache/buildout/eggs/ZODB-5.5.1-py3.7.egg/ZODB/_compat.py\", line 62, in find_class\r\n return super(Unpickler, self).find_class(modulename, name)\r\nModuleNotFoundError: No module named 'Products.Archetypes'\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"./bin/instance\", line 262, in <module>\r\n + sys.argv[1:]))\r\n File \"/Users/pbauer/.cache/buildout/eggs/plone.recipe.zope2instance-6.1.3-py3.7.egg/plone/recipe/zope2instance/ctl.py\", line 937, in main\r\n c.onecmd(' '.join(options.args))\r\n File \"/usr/local/Cellar/python/3.7.2_2/Frameworks/Python.framework/Versions/3.7/lib/python3.7/cmd.py\", line 217, in onecmd\r\n return func(arg)\r\n File \"/Users/pbauer/.cache/buildout/eggs/Products.CMFPlone-5.2rc1-py3.7.egg/Products/CMFPlone/_scripts/verifydb.py\", line 31, in zopectl_entry\r\n verify_zodb(app, debug=options.debug)\r\n File \"/Users/pbauer/.cache/buildout/eggs/Products.CMFPlone-5.2rc1-py3.7.egg/Products/CMFPlone/_scripts/verifydb.py\", line 50, in verify_zodb\r\n success = verify_record(oid, data, debug)\r\n File \"/Users/pbauer/.cache/buildout/eggs/Products.CMFPlone-5.2rc1-py3.7.egg/Products/CMFPlone/_scripts/verifydb.py\", line 82, in verify_record\r\n pickletools.dis(pickle[pos:])\r\nUnboundLocalError: local variable 'pos' referenced before assignment\r\n```\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom Zope2.Startup.run import make_wsgi_app\nfrom ZODB.interfaces import IStorageCurrentRecordIteration\nfrom ZODB.serialize import PersistentUnpickler\n\nimport argparse\nimport io\nimport logging\nimport pdb\nimport pickletools\nimport sys\nimport traceback\nimport Zope2\n\nlogger = logging.getLogger('zodbverify')\n\n\ndef zopectl_entry(self, arg):\n parser = argparse.ArgumentParser(\n prog=sys.argv[0] + ' verifydb',\n description='Verifies that all records in the database can be loaded.',\n )\n parser.add_argument(\n '-D', '--debug', action='store_true', dest='debug',\n help='pause to debug broken pickles')\n options = parser.parse_args(arg.split(' ') if arg else [])\n\n logging.basicConfig(level=logging.INFO)\n make_wsgi_app({}, self.options.configfile)\n app = Zope2.app()\n verify_zodb(app, debug=options.debug)\n\n\ndef verify_zodb(obj, 
debug=False):\n storage = obj._p_jar._db._storage\n if not IStorageCurrentRecordIteration.providedBy(storage):\n raise TypeError(\n 'ZODB storage {} does not implement record_iternext'.format(\n storage))\n\n logger.info('Scanning ZODB...')\n\n next_ = None\n count = 0\n errors = 0\n while True:\n count += 1\n oid, tid, data, next_ = storage.record_iternext(next_)\n logger.debug('Verifying {}'.format(oid))\n success = verify_record(oid, data, debug)\n if not success:\n errors += 1\n if next_ is None:\n break\n\n logger.info(\n 'Done! Scanned {} records. '\n 'Found {} records that could not be loaded.'.format(\n count, errors)\n )\n\n\ndef verify_record(oid, data, debug=False):\n input_file = io.BytesIO(data)\n unpickler = PersistentUnpickler(None, persistent_load, input_file)\n class_info = 'unknown'\n try:\n class_info = unpickler.load()\n pos = input_file.tell()\n unpickler.load()\n except Exception:\n input_file.seek(0)\n pickle = input_file.read()\n logger.info('\\nCould not process {} record {}:'.format(\n class_info,\n repr(oid),\n ))\n logger.info(repr(pickle))\n logger.info(traceback.format_exc())\n if debug:\n try:\n pickletools.dis(pickle[pos:])\n finally:\n pdb.set_trace()\n return False\n return True\n\n\ndef persistent_load(ref):\n pass\n", "path": "Products/CMFPlone/_scripts/verifydb.py"}]} | 1,856 | 260 |
gh_patches_debug_20175 | rasdani/github-patches | git_diff | bridgecrewio__checkov-3750 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
CKV_AZURE_9 & CKV_AZURE_10 - Scan fails if protocol value is a wildcard
**Describe the issue**
CKV_AZURE_9 & CKV_AZURE_10
When scanning Bicep files, the checks only look for a protocol value of `tcp` and fail to catch the case where `*` is used.
**Examples**
The following bicep code fails to produce a finding for CKV_AZURE_9 & CKV_AZURE_10
```
resource nsg 'Microsoft.Network/networkSecurityGroups@2021-05-01' = {
name: nsgName
location: nsgLocation
properties: {
securityRules: [
{
name: 'badrule'
properties: {
access: 'Allow'
destinationAddressPrefix: '*'
destinationPortRange: '*'
direction: 'Inbound'
priority: 100
protocol: '*'
sourceAddressPrefix: '*'
sourcePortRange: '*'
}
}
]
}
}
```
While this works as expected:
```
resource nsg 'Microsoft.Network/networkSecurityGroups@2021-05-01' = {
name: nsgName
location: nsgLocation
properties: {
securityRules: [
{
name: 'badrule'
properties: {
access: 'Allow'
destinationAddressPrefix: '*'
destinationPortRange: '*'
direction: 'Inbound'
priority: 100
protocol: 'tcp'
sourceAddressPrefix: '*'
sourcePortRange: '*'
}
}
]
}
}
```
**Version (please complete the following information):**
- docker container 2.2.0
**Additional context**
A similar problem existed for Terraform that was previously fixed (see https://github.com/bridgecrewio/checkov/issues/601)
I believe the relevant line is:
https://github.com/bridgecrewio/checkov/blob/master/checkov/arm/checks/resource/NSGRulePortAccessRestricted.py#LL48C4-L48C117
</issue>
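Before the source listing below, here is a minimal sketch of the evaluation the issue is asking for, with `*` treated the same as `tcp`. It is a standalone illustration built around a simplified rule dictionary taken from the Bicep examples above, not checkov's actual check class, and the port handling is deliberately reduced to exact matches and `*`.
```python
INTERNET_ADDRESSES = {"*", "0.0.0.0", "internet", "any"}


def rule_is_open_to_internet(rule, port=22):
    props = {k.lower(): str(v).lower() for k, v in rule.items()}
    if props.get("access") != "allow" or props.get("direction") != "inbound":
        return False
    # '*' has to count as TCP here, otherwise wildcard rules slip through.
    if props.get("protocol") not in ("tcp", "*"):
        return False
    port_open = props.get("destinationportrange") in (str(port), "*")
    return port_open and props.get("sourceaddressprefix") in INTERNET_ADDRESSES


bad_rule = {
    "access": "Allow",
    "direction": "Inbound",
    "protocol": "*",
    "destinationPortRange": "*",
    "sourceAddressPrefix": "*",
}
print(rule_is_open_to_internet(bad_rule))  # True -> the rule should be flagged
```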
<code>
[start of checkov/arm/checks/resource/NSGRulePortAccessRestricted.py]
1 import re
2 from typing import Union, Dict, Any
3
4 from checkov.common.models.enums import CheckResult, CheckCategories
5 from checkov.arm.base_resource_check import BaseResourceCheck
6
7 # https://docs.microsoft.com/en-us/azure/templates/microsoft.network/networksecuritygroups
8 # https://docs.microsoft.com/en-us/azure/templates/microsoft.network/networksecuritygroups/securityrules
9
10 INTERNET_ADDRESSES = ["*", "0.0.0.0", "<nw>/0", "/0", "internet", "any"] # nosec
11 PORT_RANGE = re.compile(r"\d+-\d+")
12
13
14 class NSGRulePortAccessRestricted(BaseResourceCheck):
15 def __init__(self, name: str, check_id: str, port: int) -> None:
16 supported_resources = (
17 "Microsoft.Network/networkSecurityGroups",
18 "Microsoft.Network/networkSecurityGroups/securityRules",
19 )
20 categories = (CheckCategories.NETWORKING,)
21 super().__init__(name=name, id=check_id, categories=categories, supported_resources=supported_resources)
22 self.port = port
23
24 def is_port_in_range(self, port_range: Union[int, str]) -> bool:
25 if re.match(PORT_RANGE, str(port_range)):
26 start, end = int(port_range.split("-")[0]), int(port_range.split("-")[1])
27 if start <= self.port <= end:
28 return True
29 if port_range in (str(self.port), "*"):
30 return True
31 return False
32
33 def scan_resource_conf(self, conf: Dict[str, Any]) -> CheckResult:
34 if "properties" in conf:
35 securityRules = []
36 if self.entity_type == "Microsoft.Network/networkSecurityGroups":
37 if "securityRules" in conf["properties"]:
38 securityRules.extend(conf["properties"]["securityRules"])
39 if self.entity_type == "Microsoft.Network/networkSecurityGroups/securityRules":
40 securityRules.append(conf)
41
42 for rule in securityRules:
43 portRanges = []
44 sourcePrefixes = []
45 if "properties" in rule:
46 if "access" in rule["properties"] and rule["properties"]["access"].lower() == "allow":
47 if "direction" in rule["properties"] and rule["properties"]["direction"].lower() == "inbound":
48 if "protocol" in rule["properties"] and rule["properties"]["protocol"].lower() == "tcp":
49 if "destinationPortRanges" in rule["properties"]:
50 portRanges.extend(rule["properties"]["destinationPortRanges"])
51 if "destinationPortRange" in rule["properties"]:
52 portRanges.append(rule["properties"]["destinationPortRange"])
53
54 if "sourceAddressPrefixes" in rule["properties"]:
55 sourcePrefixes.extend(rule["properties"]["sourceAddressPrefixes"])
56 if "sourceAddressPrefix" in rule["properties"]:
57 sourcePrefixes.append(rule["properties"]["sourceAddressPrefix"])
58
59 for portRange in portRanges:
60 if self.is_port_in_range(portRange):
61 for prefix in sourcePrefixes:
62 if prefix in INTERNET_ADDRESSES:
63 return CheckResult.FAILED
64
65 return CheckResult.PASSED
66
[end of checkov/arm/checks/resource/NSGRulePortAccessRestricted.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/checkov/arm/checks/resource/NSGRulePortAccessRestricted.py b/checkov/arm/checks/resource/NSGRulePortAccessRestricted.py
--- a/checkov/arm/checks/resource/NSGRulePortAccessRestricted.py
+++ b/checkov/arm/checks/resource/NSGRulePortAccessRestricted.py
@@ -45,7 +45,7 @@
if "properties" in rule:
if "access" in rule["properties"] and rule["properties"]["access"].lower() == "allow":
if "direction" in rule["properties"] and rule["properties"]["direction"].lower() == "inbound":
- if "protocol" in rule["properties"] and rule["properties"]["protocol"].lower() == "tcp":
+ if "protocol" in rule["properties"] and rule["properties"]["protocol"].lower() in ("tcp", "*"):
if "destinationPortRanges" in rule["properties"]:
portRanges.extend(rule["properties"]["destinationPortRanges"])
if "destinationPortRange" in rule["properties"]:
| {"golden_diff": "diff --git a/checkov/arm/checks/resource/NSGRulePortAccessRestricted.py b/checkov/arm/checks/resource/NSGRulePortAccessRestricted.py\n--- a/checkov/arm/checks/resource/NSGRulePortAccessRestricted.py\n+++ b/checkov/arm/checks/resource/NSGRulePortAccessRestricted.py\n@@ -45,7 +45,7 @@\n if \"properties\" in rule:\n if \"access\" in rule[\"properties\"] and rule[\"properties\"][\"access\"].lower() == \"allow\":\n if \"direction\" in rule[\"properties\"] and rule[\"properties\"][\"direction\"].lower() == \"inbound\":\n- if \"protocol\" in rule[\"properties\"] and rule[\"properties\"][\"protocol\"].lower() == \"tcp\":\n+ if \"protocol\" in rule[\"properties\"] and rule[\"properties\"][\"protocol\"].lower() in (\"tcp\", \"*\"):\n if \"destinationPortRanges\" in rule[\"properties\"]:\n portRanges.extend(rule[\"properties\"][\"destinationPortRanges\"])\n if \"destinationPortRange\" in rule[\"properties\"]:\n", "issue": "CKV_AZURE_9 & CKV_AZURE_10 - Scan fails if protocol value is a wildcard\n**Describe the issue**\r\nCKV_AZURE_9 & CKV_AZURE_10\r\n\r\nWhen scanning Bicep files the checks are looking for a protocol value of `tcp` and fail to catch when `*` is used.\r\n\r\n**Examples**\r\n\r\nThe following bicep code fails to produce a finding for CKV_AZURE_9 & CKV_AZURE_10\r\n```\r\nresource nsg 'Microsoft.Network/networkSecurityGroups@2021-05-01' = {\r\n name: nsgName\r\n location: nsgLocation\r\n properties: {\r\n securityRules: [\r\n {\r\n name: 'badrule'\r\n properties: {\r\n access: 'Allow'\r\n destinationAddressPrefix: '*'\r\n destinationPortRange: '*'\r\n direction: 'Inbound'\r\n priority: 100\r\n protocol: '*'\r\n sourceAddressPrefix: '*'\r\n sourcePortRange: '*'\r\n }\r\n }\r\n ]\r\n }\r\n}\r\n```\r\n\r\nWhile this works as expected:\r\n```\r\nresource nsg 'Microsoft.Network/networkSecurityGroups@2021-05-01' = {\r\n name: nsgName\r\n location: nsgLocation\r\n properties: {\r\n securityRules: [\r\n {\r\n name: 'badrule'\r\n properties: {\r\n access: 'Allow'\r\n destinationAddressPrefix: '*'\r\n destinationPortRange: '*'\r\n direction: 'Inbound'\r\n priority: 100\r\n protocol: 'tcp'\r\n sourceAddressPrefix: '*'\r\n sourcePortRange: '*'\r\n }\r\n }\r\n ]\r\n }\r\n}\r\n```\r\n\r\n**Version (please complete the following information):**\r\n - docker container 2.2.0\r\n\r\n**Additional context**\r\nA similar problem existed for Terraform that was previously fixed (see https://github.com/bridgecrewio/checkov/issues/601) \r\n\r\nI believe the relevant lines is: \r\nhttps://github.com/bridgecrewio/checkov/blob/master/checkov/arm/checks/resource/NSGRulePortAccessRestricted.py#LL48C4-L48C117\r\n\r\n\n", "before_files": [{"content": "import re\nfrom typing import Union, Dict, Any\n\nfrom checkov.common.models.enums import CheckResult, CheckCategories\nfrom checkov.arm.base_resource_check import BaseResourceCheck\n\n# https://docs.microsoft.com/en-us/azure/templates/microsoft.network/networksecuritygroups\n# https://docs.microsoft.com/en-us/azure/templates/microsoft.network/networksecuritygroups/securityrules\n\nINTERNET_ADDRESSES = [\"*\", \"0.0.0.0\", \"<nw>/0\", \"/0\", \"internet\", \"any\"] # nosec\nPORT_RANGE = re.compile(r\"\\d+-\\d+\")\n\n\nclass NSGRulePortAccessRestricted(BaseResourceCheck):\n def __init__(self, name: str, check_id: str, port: int) -> None:\n supported_resources = (\n \"Microsoft.Network/networkSecurityGroups\",\n \"Microsoft.Network/networkSecurityGroups/securityRules\",\n )\n categories = (CheckCategories.NETWORKING,)\n super().__init__(name=name, 
id=check_id, categories=categories, supported_resources=supported_resources)\n self.port = port\n\n def is_port_in_range(self, port_range: Union[int, str]) -> bool:\n if re.match(PORT_RANGE, str(port_range)):\n start, end = int(port_range.split(\"-\")[0]), int(port_range.split(\"-\")[1])\n if start <= self.port <= end:\n return True\n if port_range in (str(self.port), \"*\"):\n return True\n return False\n\n def scan_resource_conf(self, conf: Dict[str, Any]) -> CheckResult:\n if \"properties\" in conf:\n securityRules = []\n if self.entity_type == \"Microsoft.Network/networkSecurityGroups\":\n if \"securityRules\" in conf[\"properties\"]:\n securityRules.extend(conf[\"properties\"][\"securityRules\"])\n if self.entity_type == \"Microsoft.Network/networkSecurityGroups/securityRules\":\n securityRules.append(conf)\n\n for rule in securityRules:\n portRanges = []\n sourcePrefixes = []\n if \"properties\" in rule:\n if \"access\" in rule[\"properties\"] and rule[\"properties\"][\"access\"].lower() == \"allow\":\n if \"direction\" in rule[\"properties\"] and rule[\"properties\"][\"direction\"].lower() == \"inbound\":\n if \"protocol\" in rule[\"properties\"] and rule[\"properties\"][\"protocol\"].lower() == \"tcp\":\n if \"destinationPortRanges\" in rule[\"properties\"]:\n portRanges.extend(rule[\"properties\"][\"destinationPortRanges\"])\n if \"destinationPortRange\" in rule[\"properties\"]:\n portRanges.append(rule[\"properties\"][\"destinationPortRange\"])\n\n if \"sourceAddressPrefixes\" in rule[\"properties\"]:\n sourcePrefixes.extend(rule[\"properties\"][\"sourceAddressPrefixes\"])\n if \"sourceAddressPrefix\" in rule[\"properties\"]:\n sourcePrefixes.append(rule[\"properties\"][\"sourceAddressPrefix\"])\n\n for portRange in portRanges:\n if self.is_port_in_range(portRange):\n for prefix in sourcePrefixes:\n if prefix in INTERNET_ADDRESSES:\n return CheckResult.FAILED\n\n return CheckResult.PASSED\n", "path": "checkov/arm/checks/resource/NSGRulePortAccessRestricted.py"}]} | 1,802 | 222 |
gh_patches_debug_15632 | rasdani/github-patches | git_diff | getredash__redash-3362 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Clickhouse: password is optional but we try to access it anyway
For Clickhouse type data sources, we don't require a password. But the code does require it by trying to directly access the value in the options dictionary, instead of using `get`:
https://github.com/getredash/redash/blob/823e4ccdd6fcfee5d0df0d919d87af3100876549/redash/query_runner/clickhouse.py#L77
</issue>
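A minimal sketch of the difference described above, using a plain dictionary in place of Redash's configuration object; the fallback values are assumptions for illustration, not necessarily the defaults Redash uses. The affected source follows below.
```python
configuration = {"url": "http://127.0.0.1:8123", "user": "default", "dbname": "default"}

# Direct indexing raises KeyError when the optional password is absent:
try:
    password = configuration["password"]
except KeyError:
    password = None
print(password)  # None

# Using .get() with a fallback keeps optional settings truly optional:
params = {
    "user": configuration.get("user", "default"),
    "password": configuration.get("password", ""),  # empty string when unset
    "database": configuration["dbname"],            # still required
}
print(params)
```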
<code>
[start of redash/query_runner/clickhouse.py]
1 import logging
2 import re
3
4 import requests
5
6 from redash.query_runner import *
7 from redash.utils import json_dumps, json_loads
8
9 logger = logging.getLogger(__name__)
10
11
12 class ClickHouse(BaseSQLQueryRunner):
13 noop_query = "SELECT 1"
14
15 @classmethod
16 def configuration_schema(cls):
17 return {
18 "type": "object",
19 "properties": {
20 "url": {
21 "type": "string",
22 "default": "http://127.0.0.1:8123"
23 },
24 "user": {
25 "type": "string",
26 "default": "default"
27 },
28 "password": {
29 "type": "string"
30 },
31 "dbname": {
32 "type": "string",
33 "title": "Database Name"
34 },
35 "timeout": {
36 "type": "number",
37 "title": "Request Timeout",
38 "default": 30
39 }
40 },
41 "required": ["dbname"],
42 "secret": ["password"]
43 }
44
45 @classmethod
46 def type(cls):
47 return "clickhouse"
48
49 def _get_tables(self, schema):
50 query = "SELECT database, table, name FROM system.columns WHERE database NOT IN ('system')"
51
52 results, error = self.run_query(query, None)
53
54 if error is not None:
55 raise Exception("Failed getting schema.")
56
57 results = json_loads(results)
58
59 for row in results['rows']:
60 table_name = '{}.{}'.format(row['database'], row['table'])
61
62 if table_name not in schema:
63 schema[table_name] = {'name': table_name, 'columns': []}
64
65 schema[table_name]['columns'].append(row['name'])
66
67 return schema.values()
68
69 def _send_query(self, data, stream=False):
70 r = requests.post(
71 self.configuration['url'],
72 data=data.encode("utf-8"),
73 stream=stream,
74 timeout=self.configuration.get('timeout', 30),
75 params={
76 'user': self.configuration['user'],
77 'password': self.configuration['password'],
78 'database': self.configuration['dbname']
79 }
80 )
81 if r.status_code != 200:
82 raise Exception(r.text)
83 # logging.warning(r.json())
84 return r.json()
85
86 @staticmethod
87 def _define_column_type(column):
88 c = column.lower()
89 f = re.search(r'^nullable\((.*)\)$', c)
90 if f is not None:
91 c = f.group(1)
92 if c.startswith('int') or c.startswith('uint'):
93 return TYPE_INTEGER
94 elif c.startswith('float'):
95 return TYPE_FLOAT
96 elif c == 'datetime':
97 return TYPE_DATETIME
98 elif c == 'date':
99 return TYPE_DATE
100 else:
101 return TYPE_STRING
102
103 def _clickhouse_query(self, query):
104 query += '\nFORMAT JSON'
105 result = self._send_query(query)
106 columns = []
107 columns_int64 = [] # db converts value to string if its type equals UInt64
108 columns_totals = {}
109
110 for r in result['meta']:
111 column_name = r['name']
112 column_type = self._define_column_type(r['type'])
113
114 if r['type'] in ('Int64', 'UInt64', 'Nullable(Int64)', 'Nullable(UInt64)'):
115 columns_int64.append(column_name)
116 else:
117 columns_totals[column_name] = 'Total' if column_type == TYPE_STRING else None
118
119 columns.append({'name': column_name, 'friendly_name': column_name, 'type': column_type})
120
121 rows = result['data']
122 for row in rows:
123 for column in columns_int64:
124 try:
125 row[column] = int(row[column])
126 except TypeError:
127 row[column] = None
128
129 if 'totals' in result:
130 totals = result['totals']
131 for column, value in columns_totals.iteritems():
132 totals[column] = value
133 rows.append(totals)
134
135 return {'columns': columns, 'rows': rows}
136
137 def run_query(self, query, user):
138 logger.debug("Clickhouse is about to execute query: %s", query)
139 if query == "":
140 json_data = None
141 error = "Query is empty"
142 return json_data, error
143 try:
144 q = self._clickhouse_query(query)
145 data = json_dumps(q)
146 error = None
147 except Exception as e:
148 data = None
149 logging.exception(e)
150 error = unicode(e)
151 return data, error
152
153 register(ClickHouse)
154
[end of redash/query_runner/clickhouse.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/redash/query_runner/clickhouse.py b/redash/query_runner/clickhouse.py
--- a/redash/query_runner/clickhouse.py
+++ b/redash/query_runner/clickhouse.py
@@ -68,13 +68,13 @@
def _send_query(self, data, stream=False):
r = requests.post(
- self.configuration['url'],
+ self.configuration.get('url', "http://127.0.0.1:8123"),
data=data.encode("utf-8"),
stream=stream,
timeout=self.configuration.get('timeout', 30),
params={
- 'user': self.configuration['user'],
- 'password': self.configuration['password'],
+ 'user': self.configuration.get('user', "default"),
+ 'password': self.configuration.get('password', ""),
'database': self.configuration['dbname']
}
)
| {"golden_diff": "diff --git a/redash/query_runner/clickhouse.py b/redash/query_runner/clickhouse.py\n--- a/redash/query_runner/clickhouse.py\n+++ b/redash/query_runner/clickhouse.py\n@@ -68,13 +68,13 @@\n \n def _send_query(self, data, stream=False):\n r = requests.post(\n- self.configuration['url'],\n+ self.configuration.get('url', \"http://127.0.0.1:8123\"),\n data=data.encode(\"utf-8\"),\n stream=stream,\n timeout=self.configuration.get('timeout', 30),\n params={\n- 'user': self.configuration['user'],\n- 'password': self.configuration['password'],\n+ 'user': self.configuration.get('user', \"default\"),\n+ 'password': self.configuration.get('password', \"\"),\n 'database': self.configuration['dbname']\n }\n )\n", "issue": "Clickhouse: password is optional but we try to access it anyway\nFor Clickhouse type data sources, we don't require a password. But the code does require it by trying to directly access the value in the options dictionary, instead of using `get`:\r\n\r\nhttps://github.com/getredash/redash/blob/823e4ccdd6fcfee5d0df0d919d87af3100876549/redash/query_runner/clickhouse.py#L77\n", "before_files": [{"content": "import logging\nimport re\n\nimport requests\n\nfrom redash.query_runner import *\nfrom redash.utils import json_dumps, json_loads\n\nlogger = logging.getLogger(__name__)\n\n\nclass ClickHouse(BaseSQLQueryRunner):\n noop_query = \"SELECT 1\"\n\n @classmethod\n def configuration_schema(cls):\n return {\n \"type\": \"object\",\n \"properties\": {\n \"url\": {\n \"type\": \"string\",\n \"default\": \"http://127.0.0.1:8123\"\n },\n \"user\": {\n \"type\": \"string\",\n \"default\": \"default\"\n },\n \"password\": {\n \"type\": \"string\"\n },\n \"dbname\": {\n \"type\": \"string\",\n \"title\": \"Database Name\"\n },\n \"timeout\": {\n \"type\": \"number\",\n \"title\": \"Request Timeout\",\n \"default\": 30\n }\n },\n \"required\": [\"dbname\"],\n \"secret\": [\"password\"]\n }\n\n @classmethod\n def type(cls):\n return \"clickhouse\"\n\n def _get_tables(self, schema):\n query = \"SELECT database, table, name FROM system.columns WHERE database NOT IN ('system')\"\n\n results, error = self.run_query(query, None)\n\n if error is not None:\n raise Exception(\"Failed getting schema.\")\n\n results = json_loads(results)\n\n for row in results['rows']:\n table_name = '{}.{}'.format(row['database'], row['table'])\n\n if table_name not in schema:\n schema[table_name] = {'name': table_name, 'columns': []}\n\n schema[table_name]['columns'].append(row['name'])\n\n return schema.values()\n\n def _send_query(self, data, stream=False):\n r = requests.post(\n self.configuration['url'],\n data=data.encode(\"utf-8\"),\n stream=stream,\n timeout=self.configuration.get('timeout', 30),\n params={\n 'user': self.configuration['user'],\n 'password': self.configuration['password'],\n 'database': self.configuration['dbname']\n }\n )\n if r.status_code != 200:\n raise Exception(r.text)\n # logging.warning(r.json())\n return r.json()\n\n @staticmethod\n def _define_column_type(column):\n c = column.lower()\n f = re.search(r'^nullable\\((.*)\\)$', c)\n if f is not None:\n c = f.group(1)\n if c.startswith('int') or c.startswith('uint'):\n return TYPE_INTEGER\n elif c.startswith('float'):\n return TYPE_FLOAT\n elif c == 'datetime':\n return TYPE_DATETIME\n elif c == 'date':\n return TYPE_DATE\n else:\n return TYPE_STRING\n\n def _clickhouse_query(self, query):\n query += '\\nFORMAT JSON'\n result = self._send_query(query)\n columns = []\n columns_int64 = [] # db converts value to string if its 
type equals UInt64\n columns_totals = {}\n\n for r in result['meta']:\n column_name = r['name']\n column_type = self._define_column_type(r['type'])\n\n if r['type'] in ('Int64', 'UInt64', 'Nullable(Int64)', 'Nullable(UInt64)'):\n columns_int64.append(column_name)\n else:\n columns_totals[column_name] = 'Total' if column_type == TYPE_STRING else None\n\n columns.append({'name': column_name, 'friendly_name': column_name, 'type': column_type})\n\n rows = result['data']\n for row in rows:\n for column in columns_int64:\n try:\n row[column] = int(row[column])\n except TypeError:\n row[column] = None\n\n if 'totals' in result:\n totals = result['totals']\n for column, value in columns_totals.iteritems():\n totals[column] = value\n rows.append(totals)\n\n return {'columns': columns, 'rows': rows}\n\n def run_query(self, query, user):\n logger.debug(\"Clickhouse is about to execute query: %s\", query)\n if query == \"\":\n json_data = None\n error = \"Query is empty\"\n return json_data, error\n try:\n q = self._clickhouse_query(query)\n data = json_dumps(q)\n error = None\n except Exception as e:\n data = None\n logging.exception(e)\n error = unicode(e)\n return data, error\n\nregister(ClickHouse)\n", "path": "redash/query_runner/clickhouse.py"}]} | 2,000 | 201 |
gh_patches_debug_17760 | rasdani/github-patches | git_diff | joke2k__faker-105 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Default locale to language if no territory given.
It would be great if faker, when initialized with only a language and no territory, used a sensible default.
For example, I currently have to do the following when using something such as "en" instead of "en_US".
``` py
from faker import Factory
from faker import AVAILABLE_LOCALES
locale = 'en'
if locale not in AVAILABLE_LOCALES:
locale = next(l for l in AVAILABLE_LOCALES if l.startswith(locale))
factory = Factory.create(locale)
```
This happens when using dynamic mock data in local development where django sets the locale to "en" because we do not define territories.
</issue>
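A minimal sketch of the normalisation being requested, using the standard-library `locale` module (the merged patch shown later in this record takes the same approach). The territory a bare language code maps to comes from Python's locale alias table, so treat the printed results as typical rather than guaranteed; the source listing follows below.
```python
import locale

for alias in ("en", "de", "en_US", "pt-br"):
    normalized = locale.normalize(alias.replace("-", "_")).split(".")[0]
    print(alias, "->", normalized)
# Typically: en -> en_US, de -> de_DE, en_US -> en_US, pt-br -> pt_BR
```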
<code>
[start of faker/factory.py]
1 from __future__ import unicode_literals
2 from __future__ import absolute_import
3 import sys
4 from faker import DEFAULT_LOCALE, DEFAULT_PROVIDERS, AVAILABLE_LOCALES
5 from faker import Generator
6 from faker import providers as providers_mod
7
8
9 class Factory(object):
10
11 @classmethod
12 def create(cls, locale=None, providers=None, generator=None, **config):
13
14 # fix locale to package name
15 locale = locale.replace('-', '_') if locale else DEFAULT_LOCALE
16 if '_' in locale:
17 locale = locale[:2] + locale[2:].upper()
18 if locale not in AVAILABLE_LOCALES:
19 raise AttributeError('Invalid configuration for faker locale "{0}"'.format(locale))
20
21 providers = providers or DEFAULT_PROVIDERS
22
23 faker = generator or Generator(**config)
24 faker.add_provider(providers_mod.BaseProvider)
25 for provider_name in providers:
26
27 provider_class, lang_found = cls._get_provider_class(provider_name, locale)
28 provider = provider_class(faker)
29 provider.__provider__ = provider_name
30 provider.__lang__ = lang_found
31 faker.add_provider(provider)
32
33 return faker
34
35 @classmethod
36 def _get_provider_class(cls, provider, locale=''):
37
38 provider_class = cls._find_provider_class(provider, locale)
39
40 if provider_class:
41 return provider_class, locale
42
43 if locale and locale != DEFAULT_LOCALE:
44 # fallback to default locale
45 provider_class = cls._find_provider_class(provider, DEFAULT_LOCALE)
46 if provider_class:
47 return provider_class, DEFAULT_LOCALE
48
49 # fallback to no locale
50 provider_class = cls._find_provider_class(provider)
51 if provider_class:
52 return provider_class, None
53
54 raise ValueError('Unable to find provider "{0}" with locale "{1}"'.format(provider, locale))
55
56 @classmethod
57 def _find_provider_class(cls, provider, locale=''):
58
59 path = "{providers}{lang}.{provider}".format(
60 providers=providers_mod.__package__ or providers_mod.__name__,
61 lang='.' + locale if locale else '',
62 provider=provider
63 )
64
65 try:
66 __import__(path)
67 except ImportError:
68 return None
69
70 return sys.modules[path].Provider
71
[end of faker/factory.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/faker/factory.py b/faker/factory.py
--- a/faker/factory.py
+++ b/faker/factory.py
@@ -1,5 +1,6 @@
from __future__ import unicode_literals
from __future__ import absolute_import
+import locale as pylocale
import sys
from faker import DEFAULT_LOCALE, DEFAULT_PROVIDERS, AVAILABLE_LOCALES
from faker import Generator
@@ -13,8 +14,7 @@
# fix locale to package name
locale = locale.replace('-', '_') if locale else DEFAULT_LOCALE
- if '_' in locale:
- locale = locale[:2] + locale[2:].upper()
+ locale = pylocale.normalize(locale).split('.')[0]
if locale not in AVAILABLE_LOCALES:
raise AttributeError('Invalid configuration for faker locale "{0}"'.format(locale))
| {"golden_diff": "diff --git a/faker/factory.py b/faker/factory.py\n--- a/faker/factory.py\n+++ b/faker/factory.py\n@@ -1,5 +1,6 @@\n from __future__ import unicode_literals\n from __future__ import absolute_import\n+import locale as pylocale\n import sys\n from faker import DEFAULT_LOCALE, DEFAULT_PROVIDERS, AVAILABLE_LOCALES\n from faker import Generator\n@@ -13,8 +14,7 @@\n \n # fix locale to package name\n locale = locale.replace('-', '_') if locale else DEFAULT_LOCALE\n- if '_' in locale:\n- locale = locale[:2] + locale[2:].upper()\n+ locale = pylocale.normalize(locale).split('.')[0]\n if locale not in AVAILABLE_LOCALES:\n raise AttributeError('Invalid configuration for faker locale \"{0}\"'.format(locale))\n", "issue": "Default locale to language if no territory given.\nIt would be great that if faker was initialized with only a locale and no territory, that it would use a sensible default.\n\nFor example I currently have to do the following if using something such as \"en\" instead of \"en_US\". \n\n``` py\nfrom faker import Factory\nfrom faker import AVAILABLE_LOCALES\n\nlocale = 'en'\nif locale not in AVAILABLE_LOCALES:\n locale = next(l for l in AVAILABLE_LOCALES if l.startswith(locale))\n\nfactory = Factory.create(locale)\n```\n\nThis happens when using dynamic mock data in local development where django sets the locale to \"en\" because we do not define territories.\n\n", "before_files": [{"content": "from __future__ import unicode_literals\nfrom __future__ import absolute_import\nimport sys\nfrom faker import DEFAULT_LOCALE, DEFAULT_PROVIDERS, AVAILABLE_LOCALES\nfrom faker import Generator\nfrom faker import providers as providers_mod\n\n\nclass Factory(object):\n\n @classmethod\n def create(cls, locale=None, providers=None, generator=None, **config):\n\n # fix locale to package name\n locale = locale.replace('-', '_') if locale else DEFAULT_LOCALE\n if '_' in locale:\n locale = locale[:2] + locale[2:].upper()\n if locale not in AVAILABLE_LOCALES:\n raise AttributeError('Invalid configuration for faker locale \"{0}\"'.format(locale))\n\n providers = providers or DEFAULT_PROVIDERS\n\n faker = generator or Generator(**config)\n faker.add_provider(providers_mod.BaseProvider)\n for provider_name in providers:\n\n provider_class, lang_found = cls._get_provider_class(provider_name, locale)\n provider = provider_class(faker)\n provider.__provider__ = provider_name\n provider.__lang__ = lang_found\n faker.add_provider(provider)\n\n return faker\n\n @classmethod\n def _get_provider_class(cls, provider, locale=''):\n\n provider_class = cls._find_provider_class(provider, locale)\n\n if provider_class:\n return provider_class, locale\n\n if locale and locale != DEFAULT_LOCALE:\n # fallback to default locale\n provider_class = cls._find_provider_class(provider, DEFAULT_LOCALE)\n if provider_class:\n return provider_class, DEFAULT_LOCALE\n\n # fallback to no locale\n provider_class = cls._find_provider_class(provider)\n if provider_class:\n return provider_class, None\n\n raise ValueError('Unable to find provider \"{0}\" with locale \"{1}\"'.format(provider, locale))\n\n @classmethod\n def _find_provider_class(cls, provider, locale=''):\n\n path = \"{providers}{lang}.{provider}\".format(\n providers=providers_mod.__package__ or providers_mod.__name__,\n lang='.' + locale if locale else '',\n provider=provider\n )\n\n try:\n __import__(path)\n except ImportError:\n return None\n\n return sys.modules[path].Provider\n", "path": "faker/factory.py"}]} | 1,278 | 187 |
gh_patches_debug_20593 | rasdani/github-patches | git_diff | pyload__pyload-1385 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Feature Request: [UploadedNet] Detect Maintenance Mode
Please update the UploadedNet plugin so it detects Uploaded's maintenance mode.
Adding a timer that re-checks indefinitely every 5 minutes or so might be necessary.
Otherwise all links in the queue are skipped as offline and have to be restarted manually.
Here is the HTML code served while Uploaded is in maintenance:
http://paste2.org/OaBy4vZ6
EDIT: A check for the head title "uploaded.net - Maintenance - Wartungsarbeiten" should suffice, I guess.
</issue>
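A minimal sketch of the detection the request describes: recognise the maintenance page by its title and classify the link as temporarily offline instead of dead. It is illustrative only; the patch later in this record wires this into the plugin as a `TEMP_OFFLINE_PATTERN` class attribute rather than a helper function, and the re-check interval here is an assumption. The plugin source follows below.
```python
import re

# Title reported for uploaded.net's maintenance page (per the issue text).
TEMP_OFFLINE_PATTERN = r"<title>uploaded\.net - Maintenance - Wartungsarbeiten</title>"
RECHECK_SECONDS = 5 * 60  # assumed re-check interval


def classify(html):
    """Return 'temp_offline' for the maintenance page instead of 'offline'."""
    if re.search(TEMP_OFFLINE_PATTERN, html, re.I):
        return "temp_offline"  # caller should retry after RECHECK_SECONDS
    return "online"


maintenance_html = (
    "<html><head><title>uploaded.net - Maintenance - Wartungsarbeiten</title></head></html>"
)
print(classify(maintenance_html))          # temp_offline
print(classify("<title>My File</title>"))  # online
```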
<code>
[start of module/plugins/hoster/UploadedTo.py]
1 # -*- coding: utf-8 -*-
2
3 import re
4 import time
5
6 from module.network.RequestFactory import getURL
7 from module.plugins.internal.CaptchaService import ReCaptcha
8 from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
9
10
11 class UploadedTo(SimpleHoster):
12 __name__ = "UploadedTo"
13 __type__ = "hoster"
14 __version__ = "0.86"
15
16 __pattern__ = r'https?://(?:www\.)?(uploaded\.(to|net)|ul\.to)(/file/|/?\?id=|.*?&id=|/)(?P<ID>\w+)'
17 __config__ = [("use_premium", "bool", "Use premium account if available", True)]
18
19 __description__ = """Uploaded.net hoster plugin"""
20 __license__ = "GPLv3"
21 __authors__ = [("Walter Purcaro", "[email protected]")]
22
23
24 DISPOSITION = False
25
26 API_KEY = "lhF2IeeprweDfu9ccWlxXVVypA5nA3EL"
27
28 URL_REPLACEMENTS = [(__pattern__ + ".*", r'http://uploaded.net/file/\g<ID>')]
29
30 LINK_PREMIUM_PATTERN = r'<div class="tfree".*\s*<form method="post" action="(.+?)"'
31
32 WAIT_PATTERN = r'Current waiting period: <span>(\d+)'
33 DL_LIMIT_ERROR = r'You have reached the max. number of possible free downloads for this hour'
34
35
36 @classmethod
37 def apiInfo(cls, url="", get={}, post={}):
38 info = super(UploadedTo, cls).apiInfo(url)
39
40 for _i in xrange(5):
41 html = getURL("http://uploaded.net/api/filemultiple",
42 get={"apikey": cls.API_KEY, 'id_0': re.match(cls.__pattern__, url).group('ID')},
43 decode=True)
44
45 if html != "can't find request":
46 api = html.split(",", 4)
47 if api[0] == "online":
48 info.update({'name': api[4].strip(), 'size': api[2], 'status': 2})
49 else:
50 info['status'] = 1
51 break
52 else:
53 time.sleep(3)
54
55 return info
56
57
58 def setup(self):
59 self.multiDL = self.resumeDownload = self.premium
60 self.chunkLimit = 1 # critical problems with more chunks
61
62
63 def checkErrors(self):
64 if 'var free_enabled = false;' in self.html:
65 self.logError(_("Free-download capacities exhausted"))
66 self.retry(24, 5 * 60)
67
68 elif "limit-size" in self.html:
69 self.fail(_("File too big for free download"))
70
71 elif "limit-slot" in self.html: # Temporary restriction so just wait a bit
72 self.wait(30 * 60, True)
73 self.retry()
74
75 elif "limit-parallel" in self.html:
76 self.fail(_("Cannot download in parallel"))
77
78 elif "limit-dl" in self.html or self.DL_LIMIT_ERROR in self.html: # limit-dl
79 self.wait(3 * 60 * 60, True)
80 self.retry()
81
82 elif '"err":"captcha"' in self.html:
83 self.invalidCaptcha()
84
85 else:
86 m = re.search(self.WAIT_PATTERN, self.html)
87 if m:
88 self.wait(m.group(1))
89
90
91 def handleFree(self, pyfile):
92 self.load("http://uploaded.net/language/en", just_header=True)
93
94 self.html = self.load("http://uploaded.net/js/download.js", decode=True)
95
96 recaptcha = ReCaptcha(self)
97 response, challenge = recaptcha.challenge()
98
99 self.html = self.load("http://uploaded.net/io/ticket/captcha/%s" % self.info['pattern']['ID'],
100 post={'recaptcha_challenge_field': challenge,
101 'recaptcha_response_field' : response})
102
103 if "type:'download'" in self.html:
104 self.correctCaptcha()
105 try:
106 self.link = re.search("url:'(.+?)'", self.html).group(1)
107
108 except Exception:
109 pass
110
111 self.checkErrors()
112
113
114 def checkFile(self, rules={}):
115 if self.checkDownload({'limit-dl': self.DL_LIMIT_ERROR}):
116 self.wait(3 * 60 * 60, True)
117 self.retry()
118
119 return super(UploadedTo, self).checkFile(rules)
120
121
122 getInfo = create_getInfo(UploadedTo)
123
[end of module/plugins/hoster/UploadedTo.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/module/plugins/hoster/UploadedTo.py b/module/plugins/hoster/UploadedTo.py
--- a/module/plugins/hoster/UploadedTo.py
+++ b/module/plugins/hoster/UploadedTo.py
@@ -11,7 +11,7 @@
class UploadedTo(SimpleHoster):
__name__ = "UploadedTo"
__type__ = "hoster"
- __version__ = "0.86"
+ __version__ = "0.87"
__pattern__ = r'https?://(?:www\.)?(uploaded\.(to|net)|ul\.to)(/file/|/?\?id=|.*?&id=|/)(?P<ID>\w+)'
__config__ = [("use_premium", "bool", "Use premium account if available", True)]
@@ -27,6 +27,8 @@
URL_REPLACEMENTS = [(__pattern__ + ".*", r'http://uploaded.net/file/\g<ID>')]
+ TEMP_OFFLINE_PATTERN = r'<title>uploaded\.net - Maintenance - Wartungsarbeiten</title>'
+
LINK_PREMIUM_PATTERN = r'<div class="tfree".*\s*<form method="post" action="(.+?)"'
WAIT_PATTERN = r'Current waiting period: <span>(\d+)'
| {"golden_diff": "diff --git a/module/plugins/hoster/UploadedTo.py b/module/plugins/hoster/UploadedTo.py\n--- a/module/plugins/hoster/UploadedTo.py\n+++ b/module/plugins/hoster/UploadedTo.py\n@@ -11,7 +11,7 @@\n class UploadedTo(SimpleHoster):\n __name__ = \"UploadedTo\"\n __type__ = \"hoster\"\n- __version__ = \"0.86\"\n+ __version__ = \"0.87\"\n \n __pattern__ = r'https?://(?:www\\.)?(uploaded\\.(to|net)|ul\\.to)(/file/|/?\\?id=|.*?&id=|/)(?P<ID>\\w+)'\n __config__ = [(\"use_premium\", \"bool\", \"Use premium account if available\", True)]\n@@ -27,6 +27,8 @@\n \n URL_REPLACEMENTS = [(__pattern__ + \".*\", r'http://uploaded.net/file/\\g<ID>')]\n \n+ TEMP_OFFLINE_PATTERN = r'<title>uploaded\\.net - Maintenance - Wartungsarbeiten</title>'\n+\n LINK_PREMIUM_PATTERN = r'<div class=\"tfree\".*\\s*<form method=\"post\" action=\"(.+?)\"'\n \n WAIT_PATTERN = r'Current waiting period: <span>(\\d+)'\n", "issue": "Feature Request: [UploadedNet] Detect Maintenance Mode\nPlease update the UploadedNet plugin so it detects uploaded maintenance mode.\n\nAdding a timer to re-check indefinitely every 5 minutes or so might be necessary.\n\nElse all links in the queue are skipped as offline and have to be restarted manually.\n\nHere is the html code if Uploaded is in maintenance\n\nhttp://paste2.org/OaBy4vZ6\n\nEDiT: A check for the head title \"uploaded.net - Maintenance - Wartungsarbeiten\" should suffice, I guess..\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nimport re\nimport time\n\nfrom module.network.RequestFactory import getURL\nfrom module.plugins.internal.CaptchaService import ReCaptcha\nfrom module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo\n\n\nclass UploadedTo(SimpleHoster):\n __name__ = \"UploadedTo\"\n __type__ = \"hoster\"\n __version__ = \"0.86\"\n\n __pattern__ = r'https?://(?:www\\.)?(uploaded\\.(to|net)|ul\\.to)(/file/|/?\\?id=|.*?&id=|/)(?P<ID>\\w+)'\n __config__ = [(\"use_premium\", \"bool\", \"Use premium account if available\", True)]\n\n __description__ = \"\"\"Uploaded.net hoster plugin\"\"\"\n __license__ = \"GPLv3\"\n __authors__ = [(\"Walter Purcaro\", \"[email protected]\")]\n\n\n DISPOSITION = False\n\n API_KEY = \"lhF2IeeprweDfu9ccWlxXVVypA5nA3EL\"\n\n URL_REPLACEMENTS = [(__pattern__ + \".*\", r'http://uploaded.net/file/\\g<ID>')]\n\n LINK_PREMIUM_PATTERN = r'<div class=\"tfree\".*\\s*<form method=\"post\" action=\"(.+?)\"'\n\n WAIT_PATTERN = r'Current waiting period: <span>(\\d+)'\n DL_LIMIT_ERROR = r'You have reached the max. 
number of possible free downloads for this hour'\n\n\n @classmethod\n def apiInfo(cls, url=\"\", get={}, post={}):\n info = super(UploadedTo, cls).apiInfo(url)\n\n for _i in xrange(5):\n html = getURL(\"http://uploaded.net/api/filemultiple\",\n get={\"apikey\": cls.API_KEY, 'id_0': re.match(cls.__pattern__, url).group('ID')},\n decode=True)\n\n if html != \"can't find request\":\n api = html.split(\",\", 4)\n if api[0] == \"online\":\n info.update({'name': api[4].strip(), 'size': api[2], 'status': 2})\n else:\n info['status'] = 1\n break\n else:\n time.sleep(3)\n\n return info\n\n\n def setup(self):\n self.multiDL = self.resumeDownload = self.premium\n self.chunkLimit = 1 # critical problems with more chunks\n\n\n def checkErrors(self):\n if 'var free_enabled = false;' in self.html:\n self.logError(_(\"Free-download capacities exhausted\"))\n self.retry(24, 5 * 60)\n\n elif \"limit-size\" in self.html:\n self.fail(_(\"File too big for free download\"))\n\n elif \"limit-slot\" in self.html: # Temporary restriction so just wait a bit\n self.wait(30 * 60, True)\n self.retry()\n\n elif \"limit-parallel\" in self.html:\n self.fail(_(\"Cannot download in parallel\"))\n\n elif \"limit-dl\" in self.html or self.DL_LIMIT_ERROR in self.html: # limit-dl\n self.wait(3 * 60 * 60, True)\n self.retry()\n\n elif '\"err\":\"captcha\"' in self.html:\n self.invalidCaptcha()\n\n else:\n m = re.search(self.WAIT_PATTERN, self.html)\n if m:\n self.wait(m.group(1))\n\n\n def handleFree(self, pyfile):\n self.load(\"http://uploaded.net/language/en\", just_header=True)\n\n self.html = self.load(\"http://uploaded.net/js/download.js\", decode=True)\n\n recaptcha = ReCaptcha(self)\n response, challenge = recaptcha.challenge()\n\n self.html = self.load(\"http://uploaded.net/io/ticket/captcha/%s\" % self.info['pattern']['ID'],\n post={'recaptcha_challenge_field': challenge,\n 'recaptcha_response_field' : response})\n\n if \"type:'download'\" in self.html:\n self.correctCaptcha()\n try:\n self.link = re.search(\"url:'(.+?)'\", self.html).group(1)\n\n except Exception:\n pass\n\n self.checkErrors()\n\n\n def checkFile(self, rules={}):\n if self.checkDownload({'limit-dl': self.DL_LIMIT_ERROR}):\n self.wait(3 * 60 * 60, True)\n self.retry()\n\n return super(UploadedTo, self).checkFile(rules)\n\n\ngetInfo = create_getInfo(UploadedTo)\n", "path": "module/plugins/hoster/UploadedTo.py"}]} | 1,931 | 299 |
gh_patches_debug_9143 | rasdani/github-patches | git_diff | google-deepmind__dm-haiku-168 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Examples are distributed as part of the package
Hi,
I think the examples folder is currently packaged as part of the package on PyPI. This means that installing haiku will also install the examples as the package `examples`. Should these be excluded from the distribution?
JAX also has examples in its repo, but those are excluded from packaging in
https://github.com/google/jax/blob/main/setup.py#L33
</issue>
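A minimal sketch of the packaging change being requested: exclude the top-level `examples` directory when discovering packages. The toy directory tree below is an illustration, not the project's layout, and the exclude patterns are one reasonable choice rather than the exact ones the maintainers might pick; the project's `setup.py` follows below.
```python
import pathlib
import tempfile

from setuptools import find_namespace_packages

# Toy source tree: the real package plus a top-level examples/ directory.
root = pathlib.Path(tempfile.mkdtemp())
for name in ("haiku", "examples"):
    (root / name).mkdir()
    (root / name / "__init__.py").touch()

print(sorted(find_namespace_packages(where=str(root))))
# ['examples', 'haiku']  -> examples would be installed as its own package
print(sorted(find_namespace_packages(where=str(root), exclude=["examples", "examples.*"])))
# ['haiku']
```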
<code>
[start of setup.py]
1 # Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 # ==============================================================================
15 """Setup for pip package."""
16
17 from setuptools import find_namespace_packages
18 from setuptools import setup
19
20
21 def _get_version():
22 with open('haiku/__init__.py') as fp:
23 for line in fp:
24 if line.startswith('__version__'):
25 g = {}
26 exec(line, g) # pylint: disable=exec-used
27 return g['__version__']
28 raise ValueError('`__version__` not defined in `haiku/__init__.py`')
29
30
31 def _parse_requirements(requirements_txt_path):
32 with open(requirements_txt_path) as fp:
33 return fp.read().splitlines()
34
35
36 _VERSION = _get_version()
37
38 EXTRA_PACKAGES = {
39 'jax': ['jax>=0.1.71'],
40 'jaxlib': ['jaxlib>=0.1.49'],
41 }
42
43 setup(
44 name='dm-haiku',
45 version=_VERSION,
46 url='https://github.com/deepmind/dm-haiku',
47 license='Apache 2.0',
48 author='DeepMind',
49 description='Haiku is a library for building neural networks in JAX.',
50 long_description=open('README.md').read(),
51 long_description_content_type='text/markdown',
52 author_email='[email protected]',
53 # Contained modules and scripts.
54 packages=find_namespace_packages(exclude=['*_test.py']),
55 install_requires=_parse_requirements('requirements.txt'),
56 extras_require=EXTRA_PACKAGES,
57 tests_require=_parse_requirements('requirements-test.txt'),
58 requires_python='>=3.7',
59 include_package_data=True,
60 zip_safe=False,
61 # PyPI package information.
62 classifiers=[
63 'Development Status :: 4 - Beta',
64 'Intended Audience :: Developers',
65 'Intended Audience :: Education',
66 'Intended Audience :: Science/Research',
67 'License :: OSI Approved :: Apache Software License',
68 'Programming Language :: Python :: 3',
69 'Programming Language :: Python :: 3.7',
70 'Programming Language :: Python :: 3.8',
71 'Topic :: Scientific/Engineering :: Mathematics',
72 'Topic :: Software Development :: Libraries :: Python Modules',
73 'Topic :: Software Development :: Libraries',
74 ],
75 )
76
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -51,7 +51,7 @@
long_description_content_type='text/markdown',
author_email='[email protected]',
# Contained modules and scripts.
- packages=find_namespace_packages(exclude=['*_test.py']),
+ packages=find_namespace_packages(exclude=['*_test.py', 'examples']),
install_requires=_parse_requirements('requirements.txt'),
extras_require=EXTRA_PACKAGES,
tests_require=_parse_requirements('requirements-test.txt'),
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -51,7 +51,7 @@\n long_description_content_type='text/markdown',\n author_email='[email protected]',\n # Contained modules and scripts.\n- packages=find_namespace_packages(exclude=['*_test.py']),\n+ packages=find_namespace_packages(exclude=['*_test.py', 'examples']),\n install_requires=_parse_requirements('requirements.txt'),\n extras_require=EXTRA_PACKAGES,\n tests_require=_parse_requirements('requirements-test.txt'),\n", "issue": "Examples are distributed as part of the package\nHi, \r\n\r\nI think the examples are folder are currently packaged as part of the package on PyPI. This means that installing haiku will also install the examples as the package `examples`. Should these be excluded from the distribution?\r\n\r\nJAX also has examples in their repo, but those are excluded from packaging in \r\n\r\nhttps://github.com/google/jax/blob/main/setup.py#L33\r\n\r\n\n", "before_files": [{"content": "# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Setup for pip package.\"\"\"\n\nfrom setuptools import find_namespace_packages\nfrom setuptools import setup\n\n\ndef _get_version():\n with open('haiku/__init__.py') as fp:\n for line in fp:\n if line.startswith('__version__'):\n g = {}\n exec(line, g) # pylint: disable=exec-used\n return g['__version__']\n raise ValueError('`__version__` not defined in `haiku/__init__.py`')\n\n\ndef _parse_requirements(requirements_txt_path):\n with open(requirements_txt_path) as fp:\n return fp.read().splitlines()\n\n\n_VERSION = _get_version()\n\nEXTRA_PACKAGES = {\n 'jax': ['jax>=0.1.71'],\n 'jaxlib': ['jaxlib>=0.1.49'],\n}\n\nsetup(\n name='dm-haiku',\n version=_VERSION,\n url='https://github.com/deepmind/dm-haiku',\n license='Apache 2.0',\n author='DeepMind',\n description='Haiku is a library for building neural networks in JAX.',\n long_description=open('README.md').read(),\n long_description_content_type='text/markdown',\n author_email='[email protected]',\n # Contained modules and scripts.\n packages=find_namespace_packages(exclude=['*_test.py']),\n install_requires=_parse_requirements('requirements.txt'),\n extras_require=EXTRA_PACKAGES,\n tests_require=_parse_requirements('requirements-test.txt'),\n requires_python='>=3.7',\n include_package_data=True,\n zip_safe=False,\n # PyPI package information.\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Topic :: Scientific/Engineering :: Mathematics',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Software Development :: Libraries',\n 
],\n)\n", "path": "setup.py"}]} | 1,370 | 122 |
gh_patches_debug_2119 | rasdani/github-patches | git_diff | qtile__qtile-1578 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
qtile error to load
</issue>
<code>
[start of libqtile/scripts/qtile.py]
1 # Copyright (c) 2008, Aldo Cortesi. All rights reserved.
2 # Copyright (c) 2011, Florian Mounier
3 #
4 # Permission is hereby granted, free of charge, to any person obtaining a copy
5 # of this software and associated documentation files (the "Software"), to deal
6 # in the Software without restriction, including without limitation the rights
7 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 # copies of the Software, and to permit persons to whom the Software is
9 # furnished to do so, subject to the following conditions:
10 #
11 # The above copyright notice and this permission notice shall be included in
12 # all copies or substantial portions of the Software.
13 #
14 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20 # SOFTWARE.
21
22 # Set the locale before any widgets or anything are imported, so any widget
23 # whose defaults depend on a reasonable locale sees something reasonable.
24 import locale
25 import logging
26 from os import path, getenv, makedirs
27
28 from libqtile.log_utils import init_log, logger
29 from libqtile import confreader
30 from libqtile.backend.x11 import xcore
31
32 locale.setlocale(locale.LC_ALL, locale.getdefaultlocale()) # type: ignore
33
34 try:
35 import pkg_resources
36 VERSION = pkg_resources.require("qtile")[0].version
37 except (pkg_resources.DistributionNotFound, ImportError):
38 VERSION = 'dev'
39
40
41 def rename_process():
42 """
43 Try to rename the qtile process if py-setproctitle is installed:
44
45 http://code.google.com/p/py-setproctitle/
46
47 Will fail silently if it's not installed. Setting the title lets you do
48 stuff like "killall qtile".
49 """
50 try:
51 import setproctitle
52 setproctitle.setproctitle("qtile")
53 except ImportError:
54 pass
55
56
57 def make_qtile():
58 from argparse import ArgumentParser
59 parser = ArgumentParser(
60 description='A full-featured, pure-Python tiling window manager.',
61 prog='qtile',
62 )
63 parser.add_argument(
64 '--version',
65 action='version',
66 version=VERSION,
67 )
68 parser.add_argument(
69 "-c", "--config",
70 action="store",
71 default=path.expanduser(path.join(
72 getenv('XDG_CONFIG_HOME', '~/.config'), 'qtile', 'config.py')),
73 dest="configfile",
74 help='Use the specified configuration file',
75 )
76 parser.add_argument(
77 "-s", "--socket",
78 action="store",
79 default=None,
80 dest="socket",
81 help='Path of the Qtile IPC socket.'
82 )
83 parser.add_argument(
84 "-n", "--no-spawn",
85 action="store_true",
86 default=False,
87 dest="no_spawn",
88 help='Avoid spawning apps. (Used for restart)'
89 )
90 parser.add_argument(
91 '-l', '--log-level',
92 default='WARNING',
93 dest='log_level',
94 choices=('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'),
95 help='Set qtile log level'
96 )
97 parser.add_argument(
98 '--with-state',
99 default=None,
100 dest='state',
101 help='Pickled QtileState object (typically used only internally)',
102 )
103 options = parser.parse_args()
104 log_level = getattr(logging, options.log_level)
105 init_log(log_level=log_level)
106
107 kore = xcore.XCore()
108 try:
109 if not path.isfile(options.configfile):
110 try:
111 makedirs(path.dirname(options.configfile), exist_ok=True)
112 from shutil import copyfile
113 default_config_path = path.join(path.dirname(__file__),
114 "..",
115 "resources",
116 "default_config.py")
117 copyfile(default_config_path, options.configfile)
118 logger.info('Copied default_config.py to %s', options.configfile)
119 except Exception as e:
120 logger.exception('Failed to copy default_config.py to %s: (%s)',
121 options.configfile, e)
122
123 config = confreader.Config.from_file(kore, options.configfile)
124 except Exception as e:
125 logger.exception('Error while reading config file (%s)', e)
126 config = confreader.Config()
127 from libqtile.widget import TextBox
128 widgets = config.screens[0].bottom.widgets
129 widgets.insert(0, TextBox('Config Err!'))
130
131 # XXX: the import is here because we need to call init_log
132 # before start importing stuff
133 from libqtile.core import session_manager
134 return session_manager.SessionManager(
135 kore,
136 config,
137 fname=options.socket,
138 no_spawn=options.no_spawn,
139 state=options.state,
140 )
141
142
143 def main():
144 rename_process()
145 q = make_qtile()
146 try:
147 q.loop()
148 except Exception:
149 logger.exception('Qtile crashed')
150 logger.info('Exiting...')
151
[end of libqtile/scripts/qtile.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/libqtile/scripts/qtile.py b/libqtile/scripts/qtile.py
--- a/libqtile/scripts/qtile.py
+++ b/libqtile/scripts/qtile.py
@@ -29,7 +29,11 @@
from libqtile import confreader
from libqtile.backend.x11 import xcore
-locale.setlocale(locale.LC_ALL, locale.getdefaultlocale()) # type: ignore
+try:
+ locale.setlocale(locale.LC_ALL, locale.getdefaultlocale()) # type: ignore
+except locale.Error:
+ pass
+
try:
import pkg_resources
| {"golden_diff": "diff --git a/libqtile/scripts/qtile.py b/libqtile/scripts/qtile.py\n--- a/libqtile/scripts/qtile.py\n+++ b/libqtile/scripts/qtile.py\n@@ -29,7 +29,11 @@\n from libqtile import confreader\n from libqtile.backend.x11 import xcore\n \n-locale.setlocale(locale.LC_ALL, locale.getdefaultlocale()) # type: ignore\n+try:\n+ locale.setlocale(locale.LC_ALL, locale.getdefaultlocale()) # type: ignore\n+except locale.Error:\n+ pass\n+\n \n try:\n import pkg_resources\n", "issue": "qtile error to load\n\n", "before_files": [{"content": "# Copyright (c) 2008, Aldo Cortesi. All rights reserved.\n# Copyright (c) 2011, Florian Mounier\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\n# Set the locale before any widgets or anything are imported, so any widget\n# whose defaults depend on a reasonable locale sees something reasonable.\nimport locale\nimport logging\nfrom os import path, getenv, makedirs\n\nfrom libqtile.log_utils import init_log, logger\nfrom libqtile import confreader\nfrom libqtile.backend.x11 import xcore\n\nlocale.setlocale(locale.LC_ALL, locale.getdefaultlocale()) # type: ignore\n\ntry:\n import pkg_resources\n VERSION = pkg_resources.require(\"qtile\")[0].version\nexcept (pkg_resources.DistributionNotFound, ImportError):\n VERSION = 'dev'\n\n\ndef rename_process():\n \"\"\"\n Try to rename the qtile process if py-setproctitle is installed:\n\n http://code.google.com/p/py-setproctitle/\n\n Will fail silently if it's not installed. Setting the title lets you do\n stuff like \"killall qtile\".\n \"\"\"\n try:\n import setproctitle\n setproctitle.setproctitle(\"qtile\")\n except ImportError:\n pass\n\n\ndef make_qtile():\n from argparse import ArgumentParser\n parser = ArgumentParser(\n description='A full-featured, pure-Python tiling window manager.',\n prog='qtile',\n )\n parser.add_argument(\n '--version',\n action='version',\n version=VERSION,\n )\n parser.add_argument(\n \"-c\", \"--config\",\n action=\"store\",\n default=path.expanduser(path.join(\n getenv('XDG_CONFIG_HOME', '~/.config'), 'qtile', 'config.py')),\n dest=\"configfile\",\n help='Use the specified configuration file',\n )\n parser.add_argument(\n \"-s\", \"--socket\",\n action=\"store\",\n default=None,\n dest=\"socket\",\n help='Path of the Qtile IPC socket.'\n )\n parser.add_argument(\n \"-n\", \"--no-spawn\",\n action=\"store_true\",\n default=False,\n dest=\"no_spawn\",\n help='Avoid spawning apps. 
(Used for restart)'\n )\n parser.add_argument(\n '-l', '--log-level',\n default='WARNING',\n dest='log_level',\n choices=('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'),\n help='Set qtile log level'\n )\n parser.add_argument(\n '--with-state',\n default=None,\n dest='state',\n help='Pickled QtileState object (typically used only internally)',\n )\n options = parser.parse_args()\n log_level = getattr(logging, options.log_level)\n init_log(log_level=log_level)\n\n kore = xcore.XCore()\n try:\n if not path.isfile(options.configfile):\n try:\n makedirs(path.dirname(options.configfile), exist_ok=True)\n from shutil import copyfile\n default_config_path = path.join(path.dirname(__file__),\n \"..\",\n \"resources\",\n \"default_config.py\")\n copyfile(default_config_path, options.configfile)\n logger.info('Copied default_config.py to %s', options.configfile)\n except Exception as e:\n logger.exception('Failed to copy default_config.py to %s: (%s)',\n options.configfile, e)\n\n config = confreader.Config.from_file(kore, options.configfile)\n except Exception as e:\n logger.exception('Error while reading config file (%s)', e)\n config = confreader.Config()\n from libqtile.widget import TextBox\n widgets = config.screens[0].bottom.widgets\n widgets.insert(0, TextBox('Config Err!'))\n\n # XXX: the import is here because we need to call init_log\n # before start importing stuff\n from libqtile.core import session_manager\n return session_manager.SessionManager(\n kore,\n config,\n fname=options.socket,\n no_spawn=options.no_spawn,\n state=options.state,\n )\n\n\ndef main():\n rename_process()\n q = make_qtile()\n try:\n q.loop()\n except Exception:\n logger.exception('Qtile crashed')\n logger.info('Exiting...')\n", "path": "libqtile/scripts/qtile.py"}]} | 2,019 | 136 |
gh_patches_debug_11280 | rasdani/github-patches | git_diff | scverse__scanpy-1856 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Increase visibility of ecosystem page
As discussed at last meeting
- [ ] Document process for adding entries (note this on top of ecosystem page)
- [ ] Link from external
- [ ] Clarify goals/ differences b/w ecosystem and external
</issue>
<code>
[start of scanpy/external/__init__.py]
1 from . import tl
2 from . import pl
3 from . import pp
4 from . import exporting
5
6 import sys
7 from .. import _utils
8
9 _utils.annotate_doc_types(sys.modules[__name__], 'scanpy')
10 del sys, _utils
11
12
13 __doc__ = """\
14 External API
15 ============
16
17
18 Import Scanpy's wrappers to external tools as::
19
20 import scanpy.external as sce
21
22 If you'd like to see your tool included here, please open a `pull request <https://github.com/theislab/scanpy>`_!
23
24 Preprocessing: PP
25 ------------------
26
27 Data integration
28 ~~~~~~~~~~~~~~~~
29
30 .. autosummary::
31 :toctree: .
32
33 pp.bbknn
34 pp.harmony_integrate
35 pp.mnn_correct
36 pp.scanorama_integrate
37
38
39 Sample demultiplexing, Doublet detection
40 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
41
42 .. autosummary::
43 :toctree: .
44
45 pp.scrublet
46 pp.scrublet_simulate_doublets
47 pl.scrublet_score_distribution
48 pp.hashsolo
49
50 Imputation
51 ~~~~~~~~~~
52
53 Note that the fundamental limitations of imputation are still under `debate
54 <https://github.com/theislab/scanpy/issues/189>`__.
55
56 .. autosummary::
57 :toctree: .
58
59 pp.dca
60 pp.magic
61
62
63 Tools: TL
64 ----------
65
66 Embeddings
67 ~~~~~~~~~~
68
69 .. autosummary::
70 :toctree: .
71
72 tl.phate
73 tl.palantir
74 tl.trimap
75 tl.sam
76
77 Clustering and trajectory inference
78 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
79
80 .. autosummary::
81 :toctree: .
82
83 tl.phenograph
84 tl.harmony_timeseries
85 tl.wishbone
86 tl.palantir
87 tl.palantir_results
88
89 Gene scores, Cell cycle
90 ~~~~~~~~~~~~~~~~~~~~~~~
91
92 .. autosummary::
93 :toctree: .
94
95 tl.sandbag
96 tl.cyclone
97
98
99 Plotting: PL
100 ------------
101
102 .. autosummary::
103 :toctree: .
104
105 pl.phate
106 pl.trimap
107 pl.sam
108 pl.wishbone_marker_trajectory
109
110 Exporting
111 ---------
112
113 .. autosummary::
114 :toctree: .
115
116 exporting.spring_project
117 exporting.cellbrowser
118 """
119
[end of scanpy/external/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/scanpy/external/__init__.py b/scanpy/external/__init__.py
--- a/scanpy/external/__init__.py
+++ b/scanpy/external/__init__.py
@@ -14,12 +14,15 @@
External API
============
+.. note::
+ More tools that integrate well with scanpy and anndata can be found on the :doc:`ecosystem page <../ecosystem>`.
Import Scanpy's wrappers to external tools as::
import scanpy.external as sce
-If you'd like to see your tool included here, please open a `pull request <https://github.com/theislab/scanpy>`_!
+If you'd like to include a tool here, consider making a pull request (:doc:`instructions <../dev/external-tools>`).
+If the tool already uses `scanpy` or `anndata`, it may fit better in the :doc:`ecosystem page <../ecosystem>`.
Preprocessing: PP
------------------
| {"golden_diff": "diff --git a/scanpy/external/__init__.py b/scanpy/external/__init__.py\n--- a/scanpy/external/__init__.py\n+++ b/scanpy/external/__init__.py\n@@ -14,12 +14,15 @@\n External API\n ============\n \n+.. note::\n+ More tools that integrate well with scanpy and anndata can be found on the :doc:`ecosystem page <../ecosystem>`.\n \n Import Scanpy's wrappers to external tools as::\n \n import scanpy.external as sce\n \n-If you'd like to see your tool included here, please open a `pull request <https://github.com/theislab/scanpy>`_!\n+If you'd like to include a tool here, consider making a pull request (:doc:`instructions <../dev/external-tools>`).\n+If the tool already uses `scanpy` or `anndata`, it may fit better in the :doc:`ecosystem page <../ecosystem>`.\n \n Preprocessing: PP\n ------------------\n", "issue": "Increase visibility of ecosystem page\nAs discussed at last meeting\r\n\r\n- [ ] Document process for adding entries (note this on top of ecosystem page)\r\n- [ ] Link from external\r\n- [ ] Clarify goals/ differences b/w ecosystem and external\n", "before_files": [{"content": "from . import tl\nfrom . import pl\nfrom . import pp\nfrom . import exporting\n\nimport sys\nfrom .. import _utils\n\n_utils.annotate_doc_types(sys.modules[__name__], 'scanpy')\ndel sys, _utils\n\n\n__doc__ = \"\"\"\\\nExternal API\n============\n\n\nImport Scanpy's wrappers to external tools as::\n\n import scanpy.external as sce\n\nIf you'd like to see your tool included here, please open a `pull request <https://github.com/theislab/scanpy>`_!\n\nPreprocessing: PP\n------------------\n\nData integration\n~~~~~~~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n pp.bbknn\n pp.harmony_integrate\n pp.mnn_correct\n pp.scanorama_integrate\n\n\nSample demultiplexing, Doublet detection\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n pp.scrublet\n pp.scrublet_simulate_doublets\n pl.scrublet_score_distribution\n pp.hashsolo\n\nImputation\n~~~~~~~~~~\n\nNote that the fundamental limitations of imputation are still under `debate\n<https://github.com/theislab/scanpy/issues/189>`__.\n\n.. autosummary::\n :toctree: .\n\n pp.dca\n pp.magic\n\n\nTools: TL\n----------\n\nEmbeddings\n~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n tl.phate\n tl.palantir\n tl.trimap\n tl.sam\n\nClustering and trajectory inference\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n tl.phenograph\n tl.harmony_timeseries\n tl.wishbone\n tl.palantir\n tl.palantir_results\n\nGene scores, Cell cycle\n~~~~~~~~~~~~~~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n tl.sandbag\n tl.cyclone\n\n\nPlotting: PL\n------------\n\n.. autosummary::\n :toctree: .\n\n pl.phate\n pl.trimap\n pl.sam\n pl.wishbone_marker_trajectory\n\nExporting\n---------\n\n.. autosummary::\n :toctree: .\n\n exporting.spring_project\n exporting.cellbrowser\n\"\"\"\n", "path": "scanpy/external/__init__.py"}]} | 1,367 | 228 |
gh_patches_debug_18187 | rasdani/github-patches | git_diff | praw-dev__praw-782 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Explain "PRAW is not thread safe" in the documentation.
It would be great to provide an example of why it is not thread safe.
Relevant comment:
https://www.reddit.com/r/redditdev/comments/63ugl5/praw_mulitprocessing_handler_prawhandler_is_not_a/dfx9oet/?context=3
</issue>
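The request above asks for a concrete illustration. A minimal sketch of the commonly recommended pattern, assuming standard `praw.Reddit` usage — the credential values and subreddit names are placeholders, and the per-thread-instance approach reflects PRAW's general guidance rather than code from this repository:

```python
import threading
import praw


def fetch_hot_titles(subreddit_name):
    # Create a separate Reddit instance per thread: a single shared instance
    # holds mutable state (session, rate-limit bookkeeping) that is not
    # synchronized, which is the usual reason PRAW is described as not thread safe.
    reddit = praw.Reddit(client_id="...", client_secret="...",
                         user_agent="thread-safety example")
    return [s.title for s in reddit.subreddit(subreddit_name).hot(limit=5)]


threads = [threading.Thread(target=fetch_hot_titles, args=(name,))
           for name in ("redditdev", "learnpython")]
for t in threads:
    t.start()
for t in threads:
    t.join()
```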
<code>
[start of setup.py]
1 """praw setup.py"""
2
3 import re
4 from codecs import open
5 from os import path
6 from setuptools import find_packages, setup
7
8
9 PACKAGE_NAME = 'praw'
10 HERE = path.abspath(path.dirname(__file__))
11 with open(path.join(HERE, 'README.rst'), encoding='utf-8') as fp:
12 README = fp.read()
13 with open(path.join(HERE, PACKAGE_NAME, 'const.py'),
14 encoding='utf-8') as fp:
15 VERSION = re.search("__version__ = '([^']+)'", fp.read()).group(1)
16
17
18 setup(name=PACKAGE_NAME,
19 author='Bryce Boe',
20 author_email='[email protected]',
21 classifiers=[
22 'Development Status :: 5 - Production/Stable',
23 'Environment :: Console',
24 'Intended Audience :: Developers',
25 'License :: OSI Approved :: BSD License',
26 'Natural Language :: English',
27 'Operating System :: OS Independent',
28 'Programming Language :: Python',
29 'Programming Language :: Python :: 2.7',
30 'Programming Language :: Python :: 3',
31 'Programming Language :: Python :: 3.3',
32 'Programming Language :: Python :: 3.4',
33 'Programming Language :: Python :: 3.5',
34 'Programming Language :: Python :: 3.6',
35 'Programming Language :: Python :: Implementation :: CPython',
36 'Topic :: Utilities'],
37 description=('PRAW, an acronym for `Python Reddit API Wrapper`, is a '
38 'python package that allows for simple access to '
39 'reddit\'s API.'),
40 install_requires=['prawcore >=0.9.0, <0.10',
41 'update_checker >=0.16'],
42 keywords='reddit api wrapper',
43 license='Simplified BSD License',
44 long_description=README,
45 package_data={'': ['LICENSE.txt'], PACKAGE_NAME: ['*.ini']},
46 packages=find_packages(exclude=['tests', 'tests.*']),
47 setup_requires=['pytest-runner >=2.1'],
48 tests_require=['betamax >=0.8, <0.9',
49 'betamax-matchers >=0.3.0, <0.4',
50 'betamax-serializers >=0.2, <0.3',
51 'mock >=0.8',
52 'pytest >=2.7.3',
53 'six >=1.10'],
54 test_suite='tests',
55 url='https://praw.readthedocs.org/',
56 version=VERSION)
57
[end of setup.py]
[start of docs/conf.py]
1 import os
2 import sys
3 sys.path.insert(0, '..')
4
5 from praw import __version__
6
7 copyright = '2016, Bryce Boe'
8 exclude_patterns = ['_build']
9 extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
10 html_static_path = ['_static']
11 html_theme = 'sphinx_rtd_theme'
12 html_theme_options = {
13 'collapse_navigation': True
14 }
15 html_use_smartypants = True
16 htmlhelp_basename = 'PRAW'
17 intersphinx_mapping = {'python': ('https://docs.python.org/3.6', None)}
18 master_doc = 'index'
19 nitpicky = True
20 project = 'PRAW'
21 pygments_style = 'sphinx'
22 release = __version__
23 source_suffix = '.rst'
24 suppress_warnings = ['image.nonlocal_uri']
25 version = '.'.join(__version__.split('.', 2)[:2])
26
27
28 # Use RTD theme locally
29 if not os.environ.get('READTHEDOCS'):
30 import sphinx_rtd_theme
31 html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
32
33
34 def skip(app, what, name, obj, skip, options):
35 if name in {'__call__', '__contains__', '__getitem__', '__init__',
36 '__iter__', '__len__'}:
37 return False
38 return skip
39
40
41 def setup(app):
42 app.connect('autodoc-skip-member', skip)
43
[end of docs/conf.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -4,7 +4,7 @@
from praw import __version__
-copyright = '2016, Bryce Boe'
+copyright = '2017, Bryce Boe'
exclude_patterns = ['_build']
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_static_path = ['_static']
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -37,7 +37,7 @@
description=('PRAW, an acronym for `Python Reddit API Wrapper`, is a '
'python package that allows for simple access to '
'reddit\'s API.'),
- install_requires=['prawcore >=0.9.0, <0.10',
+ install_requires=['prawcore >=0.10.1, <0.11',
'update_checker >=0.16'],
keywords='reddit api wrapper',
license='Simplified BSD License',
| {"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -4,7 +4,7 @@\n \n from praw import __version__\n \n-copyright = '2016, Bryce Boe'\n+copyright = '2017, Bryce Boe'\n exclude_patterns = ['_build']\n extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']\n html_static_path = ['_static']\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -37,7 +37,7 @@\n description=('PRAW, an acronym for `Python Reddit API Wrapper`, is a '\n 'python package that allows for simple access to '\n 'reddit\\'s API.'),\n- install_requires=['prawcore >=0.9.0, <0.10',\n+ install_requires=['prawcore >=0.10.1, <0.11',\n 'update_checker >=0.16'],\n keywords='reddit api wrapper',\n license='Simplified BSD License',\n", "issue": "Explain \"PRAW is not thread safe\" in the documentation.\nIt would be great to provide an example of why it is not thread safe.\r\n\r\nRelevant comment:\r\n\r\nhttps://www.reddit.com/r/redditdev/comments/63ugl5/praw_mulitprocessing_handler_prawhandler_is_not_a/dfx9oet/?context=3\n", "before_files": [{"content": "\"\"\"praw setup.py\"\"\"\n\nimport re\nfrom codecs import open\nfrom os import path\nfrom setuptools import find_packages, setup\n\n\nPACKAGE_NAME = 'praw'\nHERE = path.abspath(path.dirname(__file__))\nwith open(path.join(HERE, 'README.rst'), encoding='utf-8') as fp:\n README = fp.read()\nwith open(path.join(HERE, PACKAGE_NAME, 'const.py'),\n encoding='utf-8') as fp:\n VERSION = re.search(\"__version__ = '([^']+)'\", fp.read()).group(1)\n\n\nsetup(name=PACKAGE_NAME,\n author='Bryce Boe',\n author_email='[email protected]',\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Natural Language :: English',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Topic :: Utilities'],\n description=('PRAW, an acronym for `Python Reddit API Wrapper`, is a '\n 'python package that allows for simple access to '\n 'reddit\\'s API.'),\n install_requires=['prawcore >=0.9.0, <0.10',\n 'update_checker >=0.16'],\n keywords='reddit api wrapper',\n license='Simplified BSD License',\n long_description=README,\n package_data={'': ['LICENSE.txt'], PACKAGE_NAME: ['*.ini']},\n packages=find_packages(exclude=['tests', 'tests.*']),\n setup_requires=['pytest-runner >=2.1'],\n tests_require=['betamax >=0.8, <0.9',\n 'betamax-matchers >=0.3.0, <0.4',\n 'betamax-serializers >=0.2, <0.3',\n 'mock >=0.8',\n 'pytest >=2.7.3',\n 'six >=1.10'],\n test_suite='tests',\n url='https://praw.readthedocs.org/',\n version=VERSION)\n", "path": "setup.py"}, {"content": "import os\nimport sys\nsys.path.insert(0, '..')\n\nfrom praw import __version__\n\ncopyright = '2016, Bryce Boe'\nexclude_patterns = ['_build']\nextensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']\nhtml_static_path = ['_static']\nhtml_theme = 'sphinx_rtd_theme'\nhtml_theme_options = {\n 'collapse_navigation': True\n}\nhtml_use_smartypants = True\nhtmlhelp_basename = 'PRAW'\nintersphinx_mapping = {'python': ('https://docs.python.org/3.6', None)}\nmaster_doc = 'index'\nnitpicky = True\nproject = 
'PRAW'\npygments_style = 'sphinx'\nrelease = __version__\nsource_suffix = '.rst'\nsuppress_warnings = ['image.nonlocal_uri']\nversion = '.'.join(__version__.split('.', 2)[:2])\n\n\n# Use RTD theme locally\nif not os.environ.get('READTHEDOCS'):\n import sphinx_rtd_theme\n html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\n\ndef skip(app, what, name, obj, skip, options):\n if name in {'__call__', '__contains__', '__getitem__', '__init__',\n '__iter__', '__len__'}:\n return False\n return skip\n\n\ndef setup(app):\n app.connect('autodoc-skip-member', skip)\n", "path": "docs/conf.py"}]} | 1,623 | 238 |
gh_patches_debug_24913 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-6564 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
bupa_gb spider is including closed branches
The bupa_gb.py spider is returning a number of closed practices, as these still have web pages and are still listed in the sitemap. Current examples include:
https://www.bupa.co.uk/dental/dental-care/practices/harleston
https://www.bupa.co.uk/dental/dental-care/practices/leckhampton
I think these can be reliably detected by checking if the name ends (case-insensitively) with "closed". So I'd suggest we drop any whose name matches /closed$/i. There are about 12 of these in total out of 388 branches.
(Sorry, I can no longer run the code on my computer to implement and test this myself, since I'm unable to install a recent enough version of Python.)
</issue>
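A minimal sketch of the check suggested in the report above (not part of the original issue); the pattern mirrors the proposed `/closed$/i`, and where it would hook into the spider is an assumption:

```python
import re

CLOSED_RE = re.compile(r"closed$", re.IGNORECASE)


def is_closed_branch(name: str) -> bool:
    # Matches names such as "Harleston - Closed" regardless of case.
    return bool(CLOSED_RE.search(name.strip()))

# e.g. inside post_process_item: if is_closed_branch(item["name"]): return
```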
<code>
[start of locations/items.py]
1 # Define here the models for your scraped items
2 #
3 # See documentation in:
4 # http://doc.scrapy.org/en/latest/topics/items.html
5
6 import scrapy
7
8
9 class Feature(scrapy.Item):
10 lat = scrapy.Field()
11 lon = scrapy.Field()
12 geometry = scrapy.Field()
13 name = scrapy.Field()
14 branch = scrapy.Field()
15 addr_full = scrapy.Field()
16 housenumber = scrapy.Field()
17 street = scrapy.Field()
18 street_address = scrapy.Field()
19 city = scrapy.Field()
20 state = scrapy.Field()
21 postcode = scrapy.Field()
22 country = scrapy.Field()
23 phone = scrapy.Field()
24 email = scrapy.Field()
25 website = scrapy.Field()
26 twitter = scrapy.Field()
27 facebook = scrapy.Field()
28 opening_hours = scrapy.Field()
29 image = scrapy.Field()
30 ref = scrapy.Field()
31 brand = scrapy.Field()
32 brand_wikidata = scrapy.Field()
33 operator = scrapy.Field()
34 operator_wikidata = scrapy.Field()
35 located_in = scrapy.Field()
36 located_in_wikidata = scrapy.Field()
37 nsi_id = scrapy.Field()
38 extras = scrapy.Field()
39
40 def __init__(self, *args, **kwargs):
41 super().__init__(*args, **kwargs)
42 if not self._values.get("extras"):
43 self.__setitem__("extras", {})
44
45
46 def get_lat_lon(item: Feature) -> (float, float):
47 if geometry := item.get("geometry"):
48 if isinstance(geometry, dict):
49 if geometry.get("type") == "Point":
50 if coords := geometry.get("coordinates"):
51 try:
52 return float(coords[1]), float(coords[0])
53 except (TypeError, ValueError):
54 item["geometry"] = None
55 else:
56 try:
57 return float(item.get("lat")), float(item.get("lon"))
58 except (TypeError, ValueError):
59 pass
60 return None
61
62
63 def set_lat_lon(item: Feature, lat: float, lon: float):
64 item.pop("lat", None)
65 item.pop("lon", None)
66 if lat and lon:
67 item["geometry"] = {
68 "type": "Point",
69 "coordinates": [lon, lat],
70 }
71 else:
72 item["geometry"] = None
73
74
75 def add_social_media(item: Feature, service: str, account: str):
76 service = service.lower()
77 if service in item.fields:
78 item[service] = account
79 else:
80 item["extras"][f"contact:{service}"] = account
81
[end of locations/items.py]
[start of locations/spiders/bupa_gb.py]
1 from scrapy.spiders import SitemapSpider
2
3 from locations.categories import Categories
4 from locations.structured_data_spider import StructuredDataSpider
5
6
7 class BupaGBSpider(SitemapSpider, StructuredDataSpider):
8 name = "bupa_gb"
9 item_attributes = {"brand": "Bupa", "brand_wikidata": "Q931628", "extras": Categories.DENTIST.value}
10 sitemap_urls = ["https://www.bupa.co.uk/robots.txt"]
11 sitemap_rules = [(r"/practices/([-\w]+)$", "parse_sd")]
12
13 def post_process_item(self, item, response, ld_data, **kwargs):
14 if "Total Dental Care" in item["name"]:
15 item["brand"] = "Total Dental Care"
16 yield item
17
[end of locations/spiders/bupa_gb.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/locations/items.py b/locations/items.py
--- a/locations/items.py
+++ b/locations/items.py
@@ -2,6 +2,7 @@
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
+from datetime import datetime
import scrapy
@@ -78,3 +79,7 @@
item[service] = account
else:
item["extras"][f"contact:{service}"] = account
+
+
+def set_closed(item: Feature, end_date: datetime = None):
+ item["extras"]["end_date"] = end_date.strftime("%Y-%m-%d") if end_date else "yes"
diff --git a/locations/spiders/bupa_gb.py b/locations/spiders/bupa_gb.py
--- a/locations/spiders/bupa_gb.py
+++ b/locations/spiders/bupa_gb.py
@@ -1,6 +1,7 @@
from scrapy.spiders import SitemapSpider
from locations.categories import Categories
+from locations.items import set_closed
from locations.structured_data_spider import StructuredDataSpider
@@ -13,4 +14,8 @@
def post_process_item(self, item, response, ld_data, **kwargs):
if "Total Dental Care" in item["name"]:
item["brand"] = "Total Dental Care"
+
+ if item["name"].lower().endswith(" - closed"):
+ set_closed(item)
+
yield item
| {"golden_diff": "diff --git a/locations/items.py b/locations/items.py\n--- a/locations/items.py\n+++ b/locations/items.py\n@@ -2,6 +2,7 @@\n #\n # See documentation in:\n # http://doc.scrapy.org/en/latest/topics/items.html\n+from datetime import datetime\n \n import scrapy\n \n@@ -78,3 +79,7 @@\n item[service] = account\n else:\n item[\"extras\"][f\"contact:{service}\"] = account\n+\n+\n+def set_closed(item: Feature, end_date: datetime = None):\n+ item[\"extras\"][\"end_date\"] = end_date.strftime(\"%Y-%m-%d\") if end_date else \"yes\"\ndiff --git a/locations/spiders/bupa_gb.py b/locations/spiders/bupa_gb.py\n--- a/locations/spiders/bupa_gb.py\n+++ b/locations/spiders/bupa_gb.py\n@@ -1,6 +1,7 @@\n from scrapy.spiders import SitemapSpider\n \n from locations.categories import Categories\n+from locations.items import set_closed\n from locations.structured_data_spider import StructuredDataSpider\n \n \n@@ -13,4 +14,8 @@\n def post_process_item(self, item, response, ld_data, **kwargs):\n if \"Total Dental Care\" in item[\"name\"]:\n item[\"brand\"] = \"Total Dental Care\"\n+\n+ if item[\"name\"].lower().endswith(\" - closed\"):\n+ set_closed(item)\n+\n yield item\n", "issue": "bupa_gb spider is including closed branches\nThe bupa_gb.py spider is returning a number of closed practices, as these still have web pages and are still listed in the sitemap. Current examples include:\r\n\r\nhttps://www.bupa.co.uk/dental/dental-care/practices/harleston\r\nhttps://www.bupa.co.uk/dental/dental-care/practices/leckhampton\r\n\r\nI think these can be reliably detected by checking if the name ends (case-insensitively) with \"closed\". So I'd suggest we drop any whose name matches /closed$/i. There are about 12 of these in total out of 388 branches.\r\n\r\n(Sorry, I can no longer run the code on my computer to implement and test this myself, since I'm unable to install a recent enough version of Python.)\n", "before_files": [{"content": "# Define here the models for your scraped items\n#\n# See documentation in:\n# http://doc.scrapy.org/en/latest/topics/items.html\n\nimport scrapy\n\n\nclass Feature(scrapy.Item):\n lat = scrapy.Field()\n lon = scrapy.Field()\n geometry = scrapy.Field()\n name = scrapy.Field()\n branch = scrapy.Field()\n addr_full = scrapy.Field()\n housenumber = scrapy.Field()\n street = scrapy.Field()\n street_address = scrapy.Field()\n city = scrapy.Field()\n state = scrapy.Field()\n postcode = scrapy.Field()\n country = scrapy.Field()\n phone = scrapy.Field()\n email = scrapy.Field()\n website = scrapy.Field()\n twitter = scrapy.Field()\n facebook = scrapy.Field()\n opening_hours = scrapy.Field()\n image = scrapy.Field()\n ref = scrapy.Field()\n brand = scrapy.Field()\n brand_wikidata = scrapy.Field()\n operator = scrapy.Field()\n operator_wikidata = scrapy.Field()\n located_in = scrapy.Field()\n located_in_wikidata = scrapy.Field()\n nsi_id = scrapy.Field()\n extras = scrapy.Field()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n if not self._values.get(\"extras\"):\n self.__setitem__(\"extras\", {})\n\n\ndef get_lat_lon(item: Feature) -> (float, float):\n if geometry := item.get(\"geometry\"):\n if isinstance(geometry, dict):\n if geometry.get(\"type\") == \"Point\":\n if coords := geometry.get(\"coordinates\"):\n try:\n return float(coords[1]), float(coords[0])\n except (TypeError, ValueError):\n item[\"geometry\"] = None\n else:\n try:\n return float(item.get(\"lat\")), float(item.get(\"lon\"))\n except (TypeError, ValueError):\n pass\n return 
None\n\n\ndef set_lat_lon(item: Feature, lat: float, lon: float):\n item.pop(\"lat\", None)\n item.pop(\"lon\", None)\n if lat and lon:\n item[\"geometry\"] = {\n \"type\": \"Point\",\n \"coordinates\": [lon, lat],\n }\n else:\n item[\"geometry\"] = None\n\n\ndef add_social_media(item: Feature, service: str, account: str):\n service = service.lower()\n if service in item.fields:\n item[service] = account\n else:\n item[\"extras\"][f\"contact:{service}\"] = account\n", "path": "locations/items.py"}, {"content": "from scrapy.spiders import SitemapSpider\n\nfrom locations.categories import Categories\nfrom locations.structured_data_spider import StructuredDataSpider\n\n\nclass BupaGBSpider(SitemapSpider, StructuredDataSpider):\n name = \"bupa_gb\"\n item_attributes = {\"brand\": \"Bupa\", \"brand_wikidata\": \"Q931628\", \"extras\": Categories.DENTIST.value}\n sitemap_urls = [\"https://www.bupa.co.uk/robots.txt\"]\n sitemap_rules = [(r\"/practices/([-\\w]+)$\", \"parse_sd\")]\n\n def post_process_item(self, item, response, ld_data, **kwargs):\n if \"Total Dental Care\" in item[\"name\"]:\n item[\"brand\"] = \"Total Dental Care\"\n yield item\n", "path": "locations/spiders/bupa_gb.py"}]} | 1,596 | 319 |
gh_patches_debug_32 | rasdani/github-patches | git_diff | mlflow__mlflow-2797 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[SETUP-BUG] ResolvePackageNotFound - python=3.5.2
Thank you for submitting an issue. Please refer to our [issue policy](https://www.github.com/mlflow/mlflow/blob/master/ISSUE_POLICY.md)
for information on what types of issues we address.
Please fill in this template and do not delete it unless you are sure your issue is outside its scope.
### System information
- **OS Platform and Distribution (e.g., Linux Ubuntu 16.04)**: Ubuntu 16.04
- **MLflow installed from (source or binary)**: binary (pip install mlflow)
- **MLflow version (run ``mlflow --version``)**: 1.2.0
- **Python version**: 3.5.2
- **Exact command to reproduce**: mlflow models build-docker -m /path/to/model -n "my-model"
### Describe the problem
mlflow models build-docker -m /path/to/model -n "my-model"
### Other info / logs
Warning: you have pip-installed dependencies in your environment file, but you do not list pip itself as one of your conda dependencies. Conda may not use the correct pip to install your packages, and they may end up in the wrong place. Please add an explicit pip dependency. I'm adding one for you, but still nagging you.
Collecting package metadata (repodata.json): ...working... done
Solving environment: ...working... failed
ResolvePackageNotFound:
- python=3.5.2
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/miniconda/lib/python3.7/site-packages/mlflow/models/container/__init__.py", line 102, in _install_pyfunc_deps
raise Exception("Failed to create model environment.")
Exception: Failed to create model environment.
creating and activating custom environment
The command '/bin/sh -c python -c 'from mlflow.models.container import _install_pyfunc_deps; _install_pyfunc_deps("/opt/ml/model", install_mlflow=False)'' returned a non-zero code: 1
</issue>
<code>
[start of mlflow/utils/environment.py]
1 import yaml
2
3 from mlflow.utils import PYTHON_VERSION
4
5 _conda_header = """\
6 name: mlflow-env
7 channels:
8 - defaults
9 """
10
11
12 def _mlflow_conda_env(path=None, additional_conda_deps=None, additional_pip_deps=None,
13 additional_conda_channels=None, install_mlflow=True):
14 """
15 Creates a Conda environment with the specified package channels and dependencies. If there are
16 any pip dependencies, including from the install_mlflow parameter, then pip will be added to
17 the conda dependencies. This is done to ensure that the pip inside the conda environment is
18 used to install the pip dependencies.
19
20 :param path: Local filesystem path where the conda env file is to be written. If unspecified,
21 the conda env will not be written to the filesystem; it will still be returned
22 in dictionary format.
23 :param additional_conda_deps: List of additional conda dependencies passed as strings.
24 :param additional_pip_deps: List of additional pip dependencies passed as strings.
25 :param additional_conda_channels: List of additional conda channels to search when resolving
26 packages.
27 :return: ``None`` if ``path`` is specified. Otherwise, the a dictionary representation of the
28 Conda environment.
29 """
30 pip_deps = (["mlflow"] if install_mlflow else []) + (
31 additional_pip_deps if additional_pip_deps else [])
32 conda_deps = (additional_conda_deps if additional_conda_deps else []) + (
33 ["pip"] if pip_deps else [])
34
35 env = yaml.safe_load(_conda_header)
36 env["dependencies"] = ["python={}".format(PYTHON_VERSION)]
37 if conda_deps is not None:
38 env["dependencies"] += conda_deps
39 env["dependencies"].append({"pip": pip_deps})
40 if additional_conda_channels is not None:
41 env["channels"] += additional_conda_channels
42
43 if path is not None:
44 with open(path, "w") as out:
45 yaml.safe_dump(env, stream=out, default_flow_style=False)
46 return None
47 else:
48 return env
49
[end of mlflow/utils/environment.py]
</code>
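To connect the listing above to the error in the report, here is a rough trace of what `_mlflow_conda_env` produces; the extra pip dependency is a made-up example, and the interpretation of the failure is an inference from the report and not a statement of MLflow internals:

```python
from mlflow.utils.environment import _mlflow_conda_env

env = _mlflow_conda_env(additional_pip_deps=["scikit-learn"])
# With PYTHON_VERSION == "3.5.2" this returns roughly:
# {'name': 'mlflow-env',
#  'channels': ['defaults'],
#  'dependencies': ['python=3.5.2', 'pip', {'pip': ['mlflow', 'scikit-learn']}]}
# Conda then has to resolve python=3.5.2 from the listed channels, which is
# where ResolvePackageNotFound appears when no channel provides that exact
# interpreter version.
```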
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mlflow/utils/environment.py b/mlflow/utils/environment.py
--- a/mlflow/utils/environment.py
+++ b/mlflow/utils/environment.py
@@ -6,6 +6,7 @@
name: mlflow-env
channels:
- defaults
+ - conda-forge
"""
| {"golden_diff": "diff --git a/mlflow/utils/environment.py b/mlflow/utils/environment.py\n--- a/mlflow/utils/environment.py\n+++ b/mlflow/utils/environment.py\n@@ -6,6 +6,7 @@\n name: mlflow-env\n channels:\n - defaults\n+ - conda-forge\n \"\"\"\n", "issue": "[SETUP-BUG] ResolvePackageNotFound - python=3.5.2\nThank you for submitting an issue. Please refer to our [issue policy](https://www.github.com/mlflow/mlflow/blob/master/ISSUE_POLICY.md)\r\nfor information on what types of issues we address.\r\n \r\nPlease fill in this template and do not delete it unless you are sure your issue is outside its scope.\r\n\r\n### System information\r\n- **OS Platform and Distribution (e.g., Linux Ubuntu 16.04)**: Ubuntu 16.04\r\n- **MLflow installed from (source or binary)**: binary (pip install mlflow)\r\n- **MLflow version (run ``mlflow --version``)**: 1.2.0\r\n- **Python version**: 3.5.2\r\n- **Exact command to reproduce**: mlflow models build-docker -m /path/to/model -n \"my-model\" \r\n\r\n### Describe the problem\r\nmlflow models build-docker -m /path/to/model -n \"my-model\"\r\n\r\n### Other info / logs\r\nWarning: you have pip-installed dependencies in your environment file, but you do not list pip itself as one of your condadependencies. Conda may not use the correct pip to install your packages, and they may end up in the wrong place. Pleaseadd an explicit pip dependency. I'm adding one for you, but still nagging you.\r\nCollecting package metadata (repodata.json): ...working... done\r\nSolving environment: ...working... failed\r\n\r\nResolvePackageNotFound:\r\n - python=3.5.2\r\n\r\nTraceback (most recent call last):\r\n File \"<string>\", line 1, in <module>\r\n File \"/miniconda/lib/python3.7/site-packages/mlflow/models/container/__init__.py\", line 102, in _install_pyfunc_deps\r\n raise Exception(\"Failed to create model environment.\")\r\nException: Failed to create model environment.\r\ncreating and activating custom environment\r\nThe command '/bin/sh -c python -c 'from mlflow.models.container import _install_pyfunc_deps; _install_pyfunc_deps(\"/opt/ml/model\", install_mlflow=False)'' returned a non-zero code: 1\r\n\n", "before_files": [{"content": "import yaml\n\nfrom mlflow.utils import PYTHON_VERSION\n\n_conda_header = \"\"\"\\\nname: mlflow-env\nchannels:\n - defaults\n\"\"\"\n\n\ndef _mlflow_conda_env(path=None, additional_conda_deps=None, additional_pip_deps=None,\n additional_conda_channels=None, install_mlflow=True):\n \"\"\"\n Creates a Conda environment with the specified package channels and dependencies. If there are\n any pip dependencies, including from the install_mlflow parameter, then pip will be added to\n the conda dependencies. This is done to ensure that the pip inside the conda environment is\n used to install the pip dependencies.\n\n :param path: Local filesystem path where the conda env file is to be written. If unspecified,\n the conda env will not be written to the filesystem; it will still be returned\n in dictionary format.\n :param additional_conda_deps: List of additional conda dependencies passed as strings.\n :param additional_pip_deps: List of additional pip dependencies passed as strings.\n :param additional_conda_channels: List of additional conda channels to search when resolving\n packages.\n :return: ``None`` if ``path`` is specified. 
Otherwise, the a dictionary representation of the\n Conda environment.\n \"\"\"\n pip_deps = ([\"mlflow\"] if install_mlflow else []) + (\n additional_pip_deps if additional_pip_deps else [])\n conda_deps = (additional_conda_deps if additional_conda_deps else []) + (\n [\"pip\"] if pip_deps else [])\n\n env = yaml.safe_load(_conda_header)\n env[\"dependencies\"] = [\"python={}\".format(PYTHON_VERSION)]\n if conda_deps is not None:\n env[\"dependencies\"] += conda_deps\n env[\"dependencies\"].append({\"pip\": pip_deps})\n if additional_conda_channels is not None:\n env[\"channels\"] += additional_conda_channels\n\n if path is not None:\n with open(path, \"w\") as out:\n yaml.safe_dump(env, stream=out, default_flow_style=False)\n return None\n else:\n return env\n", "path": "mlflow/utils/environment.py"}]} | 1,541 | 64 |
gh_patches_debug_37559 | rasdani/github-patches | git_diff | yt-dlp__yt-dlp-1202 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[Site Request] 7plus add login/pass to access 720p resolution
<!--
######################################################################
WARNING!
IGNORING THE FOLLOWING TEMPLATE WILL RESULT IN ISSUE CLOSED AS INCOMPLETE
######################################################################
-->
## Checklist
<!--
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
- First of, make sure you are using the latest version of yt-dlp. Run `yt-dlp --version` and ensure your version is 2021.09.25. If it's not, see https://github.com/yt-dlp/yt-dlp on how to update. Issues with outdated version will be REJECTED.
- Search the bugtracker for similar site feature requests: https://github.com/yt-dlp/yt-dlp. DO NOT post duplicates.
- Finally, put x into all relevant boxes like this [x] (Dont forget to delete the empty space)
-->
- [x] I'm reporting a site feature request
- [x] I've verified that I'm running yt-dlp version **2021.09.25**
- [x] I've searched the bugtracker for similar site feature requests including closed ones
## Description
<!--
Provide an explanation of your site feature request in an arbitrary form. Please make sure the description is worded well enough to be understood, see https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient. Provide any additional information, suggested solution and as much context and examples as possible.
-->
7Plus has 720p resolution available behind a login/pass. Current site functionality only pulls 540p.
</issue>
<code>
[start of yt_dlp/extractor/sevenplus.py]
1 # coding: utf-8
2 from __future__ import unicode_literals
3
4 import re
5
6 from .brightcove import BrightcoveNewIE
7 from ..compat import (
8 compat_HTTPError,
9 compat_str,
10 )
11 from ..utils import (
12 ExtractorError,
13 try_get,
14 update_url_query,
15 )
16
17
18 class SevenPlusIE(BrightcoveNewIE):
19 IE_NAME = '7plus'
20 _VALID_URL = r'https?://(?:www\.)?7plus\.com\.au/(?P<path>[^?]+\?.*?\bepisode-id=(?P<id>[^&#]+))'
21 _TESTS = [{
22 'url': 'https://7plus.com.au/MTYS?episode-id=MTYS7-003',
23 'info_dict': {
24 'id': 'MTYS7-003',
25 'ext': 'mp4',
26 'title': 'S7 E3 - Wind Surf',
27 'description': 'md5:29c6a69f21accda7601278f81b46483d',
28 'uploader_id': '5303576322001',
29 'upload_date': '20171201',
30 'timestamp': 1512106377,
31 'series': 'Mighty Ships',
32 'season_number': 7,
33 'episode_number': 3,
34 'episode': 'Wind Surf',
35 },
36 'params': {
37 'format': 'bestvideo',
38 'skip_download': True,
39 }
40 }, {
41 'url': 'https://7plus.com.au/UUUU?episode-id=AUMS43-001',
42 'only_matching': True,
43 }]
44
45 def _real_extract(self, url):
46 path, episode_id = self._match_valid_url(url).groups()
47
48 try:
49 media = self._download_json(
50 'https://videoservice.swm.digital/playback', episode_id, query={
51 'appId': '7plus',
52 'deviceType': 'web',
53 'platformType': 'web',
54 'accountId': 5303576322001,
55 'referenceId': 'ref:' + episode_id,
56 'deliveryId': 'csai',
57 'videoType': 'vod',
58 })['media']
59 except ExtractorError as e:
60 if isinstance(e.cause, compat_HTTPError) and e.cause.code == 403:
61 raise ExtractorError(self._parse_json(
62 e.cause.read().decode(), episode_id)[0]['error_code'], expected=True)
63 raise
64
65 for source in media.get('sources', {}):
66 src = source.get('src')
67 if not src:
68 continue
69 source['src'] = update_url_query(src, {'rule': ''})
70
71 info = self._parse_brightcove_metadata(media, episode_id)
72
73 content = self._download_json(
74 'https://component-cdn.swm.digital/content/' + path,
75 episode_id, headers={
76 'market-id': 4,
77 }, fatal=False) or {}
78 for item in content.get('items', {}):
79 if item.get('componentData', {}).get('componentType') == 'infoPanel':
80 for src_key, dst_key in [('title', 'title'), ('shortSynopsis', 'description')]:
81 value = item.get(src_key)
82 if value:
83 info[dst_key] = value
84 info['series'] = try_get(
85 item, lambda x: x['seriesLogo']['name'], compat_str)
86 mobj = re.search(r'^S(\d+)\s+E(\d+)\s+-\s+(.+)$', info['title'])
87 if mobj:
88 info.update({
89 'season_number': int(mobj.group(1)),
90 'episode_number': int(mobj.group(2)),
91 'episode': mobj.group(3),
92 })
93
94 return info
95
[end of yt_dlp/extractor/sevenplus.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/yt_dlp/extractor/sevenplus.py b/yt_dlp/extractor/sevenplus.py
--- a/yt_dlp/extractor/sevenplus.py
+++ b/yt_dlp/extractor/sevenplus.py
@@ -1,6 +1,7 @@
# coding: utf-8
from __future__ import unicode_literals
+import json
import re
from .brightcove import BrightcoveNewIE
@@ -42,9 +43,52 @@
'only_matching': True,
}]
+ def _real_initialize(self):
+ self.token = None
+
+ cookies = self._get_cookies('https://7plus.com.au')
+ api_key = next((x for x in cookies if x.startswith('glt_')), '')[4:]
+ if not api_key: # Cookies are signed out, skip login
+ return
+
+ login_resp = self._download_json(
+ 'https://login.7plus.com.au/accounts.getJWT', None, 'Logging in', fatal=False,
+ query={
+ 'APIKey': api_key,
+ 'sdk': 'js_latest',
+ 'login_token': cookies[f'glt_{api_key}'].value,
+ 'authMode': 'cookie',
+ 'pageURL': 'https://7plus.com.au/',
+ 'sdkBuild': '12471',
+ 'format': 'json',
+ }) or {}
+
+ if 'errorMessage' in login_resp:
+ self.report_warning(f'Unable to login: 7plus said: {login_resp["errorMessage"]}')
+ return
+ id_token = login_resp.get('id_token')
+ if not id_token:
+ self.report_warning('Unable to login: Could not extract id token')
+ return
+
+ token_resp = self._download_json(
+ 'https://7plus.com.au/auth/token', None, 'Getting auth token', fatal=False,
+ headers={'Content-Type': 'application/json'}, data=json.dumps({
+ 'idToken': id_token,
+ 'platformId': 'web',
+ 'regSource': '7plus',
+ }).encode('utf-8')) or {}
+ self.token = token_resp.get('token')
+ if not self.token:
+ self.report_warning('Unable to log in: Could not extract auth token')
+
def _real_extract(self, url):
path, episode_id = self._match_valid_url(url).groups()
+ headers = {}
+ if self.token:
+ headers['Authorization'] = f'Bearer {self.token}'
+
try:
media = self._download_json(
'https://videoservice.swm.digital/playback', episode_id, query={
@@ -55,7 +99,7 @@
'referenceId': 'ref:' + episode_id,
'deliveryId': 'csai',
'videoType': 'vod',
- })['media']
+ }, headers=headers)['media']
except ExtractorError as e:
if isinstance(e.cause, compat_HTTPError) and e.cause.code == 403:
raise ExtractorError(self._parse_json(
| {"golden_diff": "diff --git a/yt_dlp/extractor/sevenplus.py b/yt_dlp/extractor/sevenplus.py\n--- a/yt_dlp/extractor/sevenplus.py\n+++ b/yt_dlp/extractor/sevenplus.py\n@@ -1,6 +1,7 @@\n # coding: utf-8\n from __future__ import unicode_literals\n \n+import json\n import re\n \n from .brightcove import BrightcoveNewIE\n@@ -42,9 +43,52 @@\n 'only_matching': True,\n }]\n \n+ def _real_initialize(self):\n+ self.token = None\n+\n+ cookies = self._get_cookies('https://7plus.com.au')\n+ api_key = next((x for x in cookies if x.startswith('glt_')), '')[4:]\n+ if not api_key: # Cookies are signed out, skip login\n+ return\n+\n+ login_resp = self._download_json(\n+ 'https://login.7plus.com.au/accounts.getJWT', None, 'Logging in', fatal=False,\n+ query={\n+ 'APIKey': api_key,\n+ 'sdk': 'js_latest',\n+ 'login_token': cookies[f'glt_{api_key}'].value,\n+ 'authMode': 'cookie',\n+ 'pageURL': 'https://7plus.com.au/',\n+ 'sdkBuild': '12471',\n+ 'format': 'json',\n+ }) or {}\n+\n+ if 'errorMessage' in login_resp:\n+ self.report_warning(f'Unable to login: 7plus said: {login_resp[\"errorMessage\"]}')\n+ return\n+ id_token = login_resp.get('id_token')\n+ if not id_token:\n+ self.report_warning('Unable to login: Could not extract id token')\n+ return\n+\n+ token_resp = self._download_json(\n+ 'https://7plus.com.au/auth/token', None, 'Getting auth token', fatal=False,\n+ headers={'Content-Type': 'application/json'}, data=json.dumps({\n+ 'idToken': id_token,\n+ 'platformId': 'web',\n+ 'regSource': '7plus',\n+ }).encode('utf-8')) or {}\n+ self.token = token_resp.get('token')\n+ if not self.token:\n+ self.report_warning('Unable to log in: Could not extract auth token')\n+\n def _real_extract(self, url):\n path, episode_id = self._match_valid_url(url).groups()\n \n+ headers = {}\n+ if self.token:\n+ headers['Authorization'] = f'Bearer {self.token}'\n+\n try:\n media = self._download_json(\n 'https://videoservice.swm.digital/playback', episode_id, query={\n@@ -55,7 +99,7 @@\n 'referenceId': 'ref:' + episode_id,\n 'deliveryId': 'csai',\n 'videoType': 'vod',\n- })['media']\n+ }, headers=headers)['media']\n except ExtractorError as e:\n if isinstance(e.cause, compat_HTTPError) and e.cause.code == 403:\n raise ExtractorError(self._parse_json(\n", "issue": "[Site Request] 7plus add login/pass to access 720p resolution\n<!--\r\n\r\n######################################################################\r\n WARNING!\r\n IGNORING THE FOLLOWING TEMPLATE WILL RESULT IN ISSUE CLOSED AS INCOMPLETE\r\n######################################################################\r\n\r\n-->\r\n\r\n\r\n## Checklist\r\n\r\n<!--\r\nCarefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:\r\n- First of, make sure you are using the latest version of yt-dlp. Run `yt-dlp --version` and ensure your version is 2021.09.25. If it's not, see https://github.com/yt-dlp/yt-dlp on how to update. Issues with outdated version will be REJECTED.\r\n- Search the bugtracker for similar site feature requests: https://github.com/yt-dlp/yt-dlp. DO NOT post duplicates.\r\n- Finally, put x into all relevant boxes like this [x] (Dont forget to delete the empty space)\r\n-->\r\n\r\n- [x] I'm reporting a site feature request\r\n- [x] I've verified that I'm running yt-dlp version **2021.09.25**\r\n- [x] I've searched the bugtracker for similar site feature requests including closed ones\r\n\r\n\r\n## Description\r\n\r\n<!--\r\nProvide an explanation of your site feature request in an arbitrary form. 
Please make sure the description is worded well enough to be understood, see https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient. Provide any additional information, suggested solution and as much context and examples as possible.\r\n-->\r\n\r\n7Plus has 720p resolution available behind a login/pass. Current site functionality only pulls 540p. \n", "before_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nimport re\n\nfrom .brightcove import BrightcoveNewIE\nfrom ..compat import (\n compat_HTTPError,\n compat_str,\n)\nfrom ..utils import (\n ExtractorError,\n try_get,\n update_url_query,\n)\n\n\nclass SevenPlusIE(BrightcoveNewIE):\n IE_NAME = '7plus'\n _VALID_URL = r'https?://(?:www\\.)?7plus\\.com\\.au/(?P<path>[^?]+\\?.*?\\bepisode-id=(?P<id>[^&#]+))'\n _TESTS = [{\n 'url': 'https://7plus.com.au/MTYS?episode-id=MTYS7-003',\n 'info_dict': {\n 'id': 'MTYS7-003',\n 'ext': 'mp4',\n 'title': 'S7 E3 - Wind Surf',\n 'description': 'md5:29c6a69f21accda7601278f81b46483d',\n 'uploader_id': '5303576322001',\n 'upload_date': '20171201',\n 'timestamp': 1512106377,\n 'series': 'Mighty Ships',\n 'season_number': 7,\n 'episode_number': 3,\n 'episode': 'Wind Surf',\n },\n 'params': {\n 'format': 'bestvideo',\n 'skip_download': True,\n }\n }, {\n 'url': 'https://7plus.com.au/UUUU?episode-id=AUMS43-001',\n 'only_matching': True,\n }]\n\n def _real_extract(self, url):\n path, episode_id = self._match_valid_url(url).groups()\n\n try:\n media = self._download_json(\n 'https://videoservice.swm.digital/playback', episode_id, query={\n 'appId': '7plus',\n 'deviceType': 'web',\n 'platformType': 'web',\n 'accountId': 5303576322001,\n 'referenceId': 'ref:' + episode_id,\n 'deliveryId': 'csai',\n 'videoType': 'vod',\n })['media']\n except ExtractorError as e:\n if isinstance(e.cause, compat_HTTPError) and e.cause.code == 403:\n raise ExtractorError(self._parse_json(\n e.cause.read().decode(), episode_id)[0]['error_code'], expected=True)\n raise\n\n for source in media.get('sources', {}):\n src = source.get('src')\n if not src:\n continue\n source['src'] = update_url_query(src, {'rule': ''})\n\n info = self._parse_brightcove_metadata(media, episode_id)\n\n content = self._download_json(\n 'https://component-cdn.swm.digital/content/' + path,\n episode_id, headers={\n 'market-id': 4,\n }, fatal=False) or {}\n for item in content.get('items', {}):\n if item.get('componentData', {}).get('componentType') == 'infoPanel':\n for src_key, dst_key in [('title', 'title'), ('shortSynopsis', 'description')]:\n value = item.get(src_key)\n if value:\n info[dst_key] = value\n info['series'] = try_get(\n item, lambda x: x['seriesLogo']['name'], compat_str)\n mobj = re.search(r'^S(\\d+)\\s+E(\\d+)\\s+-\\s+(.+)$', info['title'])\n if mobj:\n info.update({\n 'season_number': int(mobj.group(1)),\n 'episode_number': int(mobj.group(2)),\n 'episode': mobj.group(3),\n })\n\n return info\n", "path": "yt_dlp/extractor/sevenplus.py"}]} | 1,961 | 699 |
gh_patches_debug_25239 | rasdani/github-patches | git_diff | learningequality__kolibri-1604 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
DateTimeTzFields are serialized with incorrect times
For example, this time was saved in a `ChannelMetaDataCache` object (at around 14:00 local time):
`2017-06-06 14:44:12.582879(America/Los_Angeles)`
But it gets serialized as
`2017-06-06T07:44:12.582-07:00`
Expected is
`2017-06-06T14:44:12.582-07:00`
</issue>
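The seven-hour shift described above is the classic symptom of a naive local timestamp being re-interpreted as UTC before it is converted to the target zone. A minimal sketch of the two interpretations — not code from the repository, assuming only `pytz` and the America/Los_Angeles zone quoted in the report:

```python
# Sketch only; reproduces the reported symptom, independent of Kolibri itself.
import datetime
import pytz

tz = pytz.timezone("America/Los_Angeles")
naive_local = datetime.datetime(2017, 6, 6, 14, 44, 12, 582879)

# Re-interpreting the naive value as UTC (what the buggy path effectively does):
as_utc = pytz.utc.localize(naive_local).astimezone(tz)
print(as_utc.isoformat())    # 2017-06-06T07:44:12.582879-07:00  -> the observed output

# Treating it as local wall-clock time (the expected behaviour):
as_local = tz.localize(naive_local)
print(as_local.isoformat())  # 2017-06-06T14:44:12.582879-07:00  -> the expected output
```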
<code>
[start of kolibri/core/serializers.py]
1 from django.utils import timezone
2 from rest_framework.serializers import DateTimeField, ModelSerializer
3 from .fields import DateTimeTzField as DjangoDateTimeTzField
4
5 class DateTimeTzField(DateTimeField):
6
7 def to_internal_value(self, data):
8 data = super(DateTimeTzField, self).to_internal_value(data)
9 tz = timezone.get_current_timezone()
10 return data.astimezone(tz)
11
12
13 serializer_field_mapping = {
14 DjangoDateTimeTzField: DateTimeTzField,
15 }
16
17 serializer_field_mapping.update(ModelSerializer.serializer_field_mapping)
18
19 class KolibriModelSerializer(ModelSerializer):
20
21 serializer_field_mapping = serializer_field_mapping
22
[end of kolibri/core/serializers.py]
[start of kolibri/core/fields.py]
1 import datetime
2 import re
3
4 import pytz
5 from django.db.models.fields import Field
6 from django.utils import timezone
7
8 date_time_format = "%Y-%m-%d %H:%M:%S.%f"
9 tz_format = "({tz})"
10 tz_regex = re.compile("\(([^\)]+)\)")
11 db_storage_string = "{date_time_string}{tz_string}"
12
13 def parse_timezonestamp(value):
14 if tz_regex.search(value):
15 tz = pytz.timezone(tz_regex.search(value).groups()[0])
16 else:
17 tz = timezone.get_current_timezone()
18 utc_value = tz_regex.sub('', value)
19 value = datetime.datetime.strptime(utc_value, date_time_format)
20 value = timezone.make_aware(value, pytz.utc)
21 return value.astimezone(tz)
22
23 def create_timezonestamp(value):
24 if value.tzinfo:
25 tz = value.tzinfo.zone
26 else:
27 tz = timezone.get_current_timezone().zone
28 date_time_string = value.strftime(date_time_format)
29 tz_string = tz_format.format(tz=tz)
30 value = db_storage_string.format(date_time_string=date_time_string, tz_string=tz_string)
31 return value
32
33 class DateTimeTzField(Field):
34 """
35 A field that stores datetime information as a char in this format:
36
37 %Y-%m-%d %H:%M:%S.%f(<tzinfo>)
38
39 It reads a timezone aware datetime object, and extracts the timezone zone information
40 then parses the datetime into the format above with the timezone information appended.
41
42 As this is ISO formatted, alphabetic sorting should still allow for proper queries
43 against this in the database. Mostly engineered for SQLite usage.
44 """
45
46 def db_type(self, connection):
47 return "varchar"
48
49 def from_db_value(self, value, expression, connection, context):
50 if value is None:
51 return value
52 return parse_timezonestamp(value)
53
54 def to_python(self, value):
55 if isinstance(value, datetime.datetime):
56 return value
57
58 if value is None:
59 return value
60
61 return parse_timezonestamp(value)
62
63 def get_prep_value(self, value):
64 # Casts datetimes into the format expected by the backend
65 if value is None:
66 return value
67 return create_timezonestamp(value)
68
69 def get_db_prep_value(self, value, connection, prepared=False):
70 if not prepared:
71 value = self.get_prep_value(value)
72 return value
73
[end of kolibri/core/fields.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kolibri/core/fields.py b/kolibri/core/fields.py
--- a/kolibri/core/fields.py
+++ b/kolibri/core/fields.py
@@ -25,7 +25,8 @@
tz = value.tzinfo.zone
else:
tz = timezone.get_current_timezone().zone
- date_time_string = value.strftime(date_time_format)
+ value = timezone.make_aware(value, timezone.get_current_timezone())
+ date_time_string = value.astimezone(pytz.utc).strftime(date_time_format)
tz_string = tz_format.format(tz=tz)
value = db_storage_string.format(date_time_string=date_time_string, tz_string=tz_string)
return value
diff --git a/kolibri/core/serializers.py b/kolibri/core/serializers.py
--- a/kolibri/core/serializers.py
+++ b/kolibri/core/serializers.py
@@ -1,3 +1,5 @@
+import pytz
+
from django.utils import timezone
from rest_framework.serializers import DateTimeField, ModelSerializer
from .fields import DateTimeTzField as DjangoDateTimeTzField
@@ -7,6 +9,8 @@
def to_internal_value(self, data):
data = super(DateTimeTzField, self).to_internal_value(data)
tz = timezone.get_current_timezone()
+ if not data.tzinfo:
+ data = timezone.make_aware(data, pytz.utc)
return data.astimezone(tz)
| {"golden_diff": "diff --git a/kolibri/core/fields.py b/kolibri/core/fields.py\n--- a/kolibri/core/fields.py\n+++ b/kolibri/core/fields.py\n@@ -25,7 +25,8 @@\n tz = value.tzinfo.zone\n else:\n tz = timezone.get_current_timezone().zone\n- date_time_string = value.strftime(date_time_format)\n+ value = timezone.make_aware(value, timezone.get_current_timezone())\n+ date_time_string = value.astimezone(pytz.utc).strftime(date_time_format)\n tz_string = tz_format.format(tz=tz)\n value = db_storage_string.format(date_time_string=date_time_string, tz_string=tz_string)\n return value\ndiff --git a/kolibri/core/serializers.py b/kolibri/core/serializers.py\n--- a/kolibri/core/serializers.py\n+++ b/kolibri/core/serializers.py\n@@ -1,3 +1,5 @@\n+import pytz\n+\n from django.utils import timezone\n from rest_framework.serializers import DateTimeField, ModelSerializer\n from .fields import DateTimeTzField as DjangoDateTimeTzField\n@@ -7,6 +9,8 @@\n def to_internal_value(self, data):\n data = super(DateTimeTzField, self).to_internal_value(data)\n tz = timezone.get_current_timezone()\n+ if not data.tzinfo:\n+ data = timezone.make_aware(data, pytz.utc)\n return data.astimezone(tz)\n", "issue": "DateTimeTzFields are serialized with incorrect times\nExample saved this time in a `ChannelMetaDataCache` object (at around 14:00 local time)\r\n\r\n`2017-06-06 14:44:12.582879(America/Los_Angeles)`\r\n\r\nBut it gets serialized as\r\n\r\n`2017-06-06T07:44:12.582-07:00`\r\n\r\nExpected is\r\n\r\n`2017-06-06T14:44:12.582-07:00`\r\n\n", "before_files": [{"content": "from django.utils import timezone\nfrom rest_framework.serializers import DateTimeField, ModelSerializer\nfrom .fields import DateTimeTzField as DjangoDateTimeTzField\n\nclass DateTimeTzField(DateTimeField):\n\n def to_internal_value(self, data):\n data = super(DateTimeTzField, self).to_internal_value(data)\n tz = timezone.get_current_timezone()\n return data.astimezone(tz)\n\n\nserializer_field_mapping = {\n DjangoDateTimeTzField: DateTimeTzField,\n}\n\nserializer_field_mapping.update(ModelSerializer.serializer_field_mapping)\n\nclass KolibriModelSerializer(ModelSerializer):\n\n serializer_field_mapping = serializer_field_mapping\n", "path": "kolibri/core/serializers.py"}, {"content": "import datetime\nimport re\n\nimport pytz\nfrom django.db.models.fields import Field\nfrom django.utils import timezone\n\ndate_time_format = \"%Y-%m-%d %H:%M:%S.%f\"\ntz_format = \"({tz})\"\ntz_regex = re.compile(\"\\(([^\\)]+)\\)\")\ndb_storage_string = \"{date_time_string}{tz_string}\"\n\ndef parse_timezonestamp(value):\n if tz_regex.search(value):\n tz = pytz.timezone(tz_regex.search(value).groups()[0])\n else:\n tz = timezone.get_current_timezone()\n utc_value = tz_regex.sub('', value)\n value = datetime.datetime.strptime(utc_value, date_time_format)\n value = timezone.make_aware(value, pytz.utc)\n return value.astimezone(tz)\n\ndef create_timezonestamp(value):\n if value.tzinfo:\n tz = value.tzinfo.zone\n else:\n tz = timezone.get_current_timezone().zone\n date_time_string = value.strftime(date_time_format)\n tz_string = tz_format.format(tz=tz)\n value = db_storage_string.format(date_time_string=date_time_string, tz_string=tz_string)\n return value\n\nclass DateTimeTzField(Field):\n \"\"\"\n A field that stores datetime information as a char in this format:\n\n %Y-%m-%d %H:%M:%S.%f(<tzinfo>)\n\n It reads a timezone aware datetime object, and extracts the timezone zone information\n then parses the datetime into the format above with the timezone information 
appended.\n\n As this is ISO formatted, alphabetic sorting should still allow for proper queries\n against this in the database. Mostly engineered for SQLite usage.\n \"\"\"\n\n def db_type(self, connection):\n return \"varchar\"\n\n def from_db_value(self, value, expression, connection, context):\n if value is None:\n return value\n return parse_timezonestamp(value)\n\n def to_python(self, value):\n if isinstance(value, datetime.datetime):\n return value\n\n if value is None:\n return value\n\n return parse_timezonestamp(value)\n\n def get_prep_value(self, value):\n # Casts datetimes into the format expected by the backend\n if value is None:\n return value\n return create_timezonestamp(value)\n\n def get_db_prep_value(self, value, connection, prepared=False):\n if not prepared:\n value = self.get_prep_value(value)\n return value\n", "path": "kolibri/core/fields.py"}]} | 1,537 | 326 |
gh_patches_debug_2535 | rasdani/github-patches | git_diff | python__peps-2229 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Don't auto-add inline links to ref section & rm if empty, per #2130
First step to implementing #2130 , as agreed with @gvanrossum and the PEP editor team.
When building, don't add redundant footnotes and references entries for URLs that are already directly linked inline. This avoids an unnecessary, potentially confusing footnote for each link, and taking up additional space in the references section for no real benefit, plus simplifies the build code and should improve build time, especially for Sphinx. Furthermore, if the references section is empty (not including now-invisible link targets), remove it, as we did before (but in a more robust manner). This allows past and future PEPs to still use inline references with targets stored in the references section, while avoiding showing a now-empty references section.
These are both implemented for Sphinx and the legacy builder, and I visually inspected a variety of PEPs with various cases with both builders to ensure the desired results were achieved, and there were no obvious side effects from this change.
Following merging this PR, following the plan outlined in #2130 , I'll proceed with one updating the meta-PEP docs in PEP 0, PEP 1 and PEP 12 to reflect the revised policy of using standard reST links (inline or with separate targets) rather than the references section, and follow that with a PR updating the relative handful of references in the other active meta-PEPs, for consistency.
</issue>
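The eventual fix (see the diff at the end of this record) simply defers the footer transform: docutils applies transforms in ascending `default_priority` order, so giving the footer a large priority means the References section and its link targets are only examined — and possibly removed — after the other transforms are done with them. A schematic sketch of a deliberately late transform, illustrative only and not the project code:

```python
# Illustrative sketch of a late-running docutils transform.
from docutils import transforms


class LateFooter(transforms.Transform):
    # Lower numbers run first, so 999 pushes this after reference resolution.
    default_priority = 999

    def apply(self):
        pass  # footer/cleanup work would go here
```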
<code>
[start of pep_sphinx_extensions/pep_processor/transforms/pep_footer.py]
1 import datetime
2 from pathlib import Path
3 import subprocess
4
5 from docutils import nodes
6 from docutils import transforms
7
8
9 class PEPFooter(transforms.Transform):
10 """Footer transforms for PEPs.
11
12 - Removes the References section if it is empty when rendered.
13 - Creates a link to the (GitHub) source text.
14
15 Source Link:
16 Create the link to the source file from the document source path,
17 and append the text to the end of the document.
18
19 """
20
21 # Uses same priority as docutils.transforms.TargetNotes
22 default_priority = 520
23
24 def apply(self) -> None:
25 pep_source_path = Path(self.document["source"])
26 if not pep_source_path.match("pep-*"):
27 return # not a PEP file, exit early
28
29 # Iterate through sections from the end of the document
30 for section in reversed(self.document[0]):
31 if not isinstance(section, nodes.section):
32 continue
33 title_words = section[0].astext().lower().split()
34 if "references" in title_words:
35 # Remove references section if there are no displayed
36 # footnotes (it only has title & link target nodes)
37 if all(isinstance(ref_node, (nodes.title, nodes.target))
38 for ref_node in section):
39 section.parent.remove(section)
40 break
41
42 # Add link to source text and last modified date
43 if pep_source_path.stem != "pep-0000":
44 self.document += _add_source_link(pep_source_path)
45 self.document += _add_commit_history_info(pep_source_path)
46
47
48 def _add_source_link(pep_source_path: Path) -> nodes.paragraph:
49 """Add link to source text on VCS (GitHub)"""
50 source_link = f"https://github.com/python/peps/blob/main/{pep_source_path.name}"
51 link_node = nodes.reference("", source_link, refuri=source_link)
52 return nodes.paragraph("", "Source: ", link_node)
53
54
55 def _add_commit_history_info(pep_source_path: Path) -> nodes.paragraph:
56 """Use local git history to find last modified date."""
57 try:
58 since_epoch = LAST_MODIFIED_TIMES[pep_source_path.name]
59 except KeyError:
60 return nodes.paragraph()
61
62 iso_time = datetime.datetime.utcfromtimestamp(since_epoch).isoformat(sep=" ")
63 commit_link = f"https://github.com/python/peps/commits/main/{pep_source_path.name}"
64 link_node = nodes.reference("", f"{iso_time} GMT", refuri=commit_link)
65 return nodes.paragraph("", "Last modified: ", link_node)
66
67
68 def _get_last_modified_timestamps():
69 # get timestamps and changed files from all commits (without paging results)
70 args = ["git", "--no-pager", "log", "--format=#%at", "--name-only"]
71 with subprocess.Popen(args, stdout=subprocess.PIPE) as process:
72 all_modified = process.stdout.read().decode("utf-8")
73 process.stdout.close()
74 if process.wait(): # non-zero return code
75 return {}
76
77 # set up the dictionary with the *current* files
78 last_modified = {path.name: 0 for path in Path().glob("pep-*") if path.suffix in {".txt", ".rst"}}
79
80 # iterate through newest to oldest, updating per file timestamps
81 change_sets = all_modified.removeprefix("#").split("#")
82 for change_set in change_sets:
83 timestamp, files = change_set.split("\n", 1)
84 for file in files.strip().split("\n"):
85 if file.startswith("pep-") and file[-3:] in {"txt", "rst"}:
86 if last_modified.get(file) == 0:
87 try:
88 last_modified[file] = float(timestamp)
89 except ValueError:
90 pass # if float conversion fails
91
92 return last_modified
93
94
95 LAST_MODIFIED_TIMES = _get_last_modified_timestamps()
96
[end of pep_sphinx_extensions/pep_processor/transforms/pep_footer.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pep_sphinx_extensions/pep_processor/transforms/pep_footer.py b/pep_sphinx_extensions/pep_processor/transforms/pep_footer.py
--- a/pep_sphinx_extensions/pep_processor/transforms/pep_footer.py
+++ b/pep_sphinx_extensions/pep_processor/transforms/pep_footer.py
@@ -18,8 +18,8 @@
"""
- # Uses same priority as docutils.transforms.TargetNotes
- default_priority = 520
+ # Set low priority so ref targets aren't removed before they are needed
+ default_priority = 999
def apply(self) -> None:
pep_source_path = Path(self.document["source"])
| {"golden_diff": "diff --git a/pep_sphinx_extensions/pep_processor/transforms/pep_footer.py b/pep_sphinx_extensions/pep_processor/transforms/pep_footer.py\n--- a/pep_sphinx_extensions/pep_processor/transforms/pep_footer.py\n+++ b/pep_sphinx_extensions/pep_processor/transforms/pep_footer.py\n@@ -18,8 +18,8 @@\n \n \"\"\"\n \n- # Uses same priority as docutils.transforms.TargetNotes\n- default_priority = 520\n+ # Set low priority so ref targets aren't removed before they are needed\n+ default_priority = 999\n \n def apply(self) -> None:\n pep_source_path = Path(self.document[\"source\"])\n", "issue": "Don't auto-add inline links to ref section & rm if empty, per #2130\nFirst step to implementing #2130 , as agreed with @gvanrossum and the PEP editor team.\r\n\r\nWhen building, don't add redundant footnotes and references entries for URLs that are already directly linked inline. This avoids an unnecessary, potentially confusing footnote for each link, and taking up additional space in the references section for no real benefit, plus simplifies the build code and should improve build time, especially for Sphinx. Furthermore, if the references section is empty (not including now-invisible link targets), remove it, as we did before (but in a more robust manner). This allows past and future PEPs to still use inline references with targets stored in the references section, while avoiding showing a now-empty references section.\r\n\r\nThese are both implemented for Sphinx and the legacy builder, and I visually inspected a variety of PEPs with various cases with both builders to ensure the desired results were achieved, and there were no obvious side effects from this change.\r\n\r\nFollowing merging this PR, following the plan outlined in #2130 , I'll proceed with one updating the meta-PEP docs in PEP 0, PEP 1 and PEP 12 to reflect the revised policy of using standard reST links (inline or with separate targets) rather than the references section, and follow that with a PR updating the relative handful of references in the other active meta-PEPs, for consistency.\n", "before_files": [{"content": "import datetime\nfrom pathlib import Path\nimport subprocess\n\nfrom docutils import nodes\nfrom docutils import transforms\n\n\nclass PEPFooter(transforms.Transform):\n \"\"\"Footer transforms for PEPs.\n\n - Removes the References section if it is empty when rendered.\n - Creates a link to the (GitHub) source text.\n\n Source Link:\n Create the link to the source file from the document source path,\n and append the text to the end of the document.\n\n \"\"\"\n\n # Uses same priority as docutils.transforms.TargetNotes\n default_priority = 520\n\n def apply(self) -> None:\n pep_source_path = Path(self.document[\"source\"])\n if not pep_source_path.match(\"pep-*\"):\n return # not a PEP file, exit early\n\n # Iterate through sections from the end of the document\n for section in reversed(self.document[0]):\n if not isinstance(section, nodes.section):\n continue\n title_words = section[0].astext().lower().split()\n if \"references\" in title_words:\n # Remove references section if there are no displayed\n # footnotes (it only has title & link target nodes)\n if all(isinstance(ref_node, (nodes.title, nodes.target))\n for ref_node in section):\n section.parent.remove(section)\n break\n\n # Add link to source text and last modified date\n if pep_source_path.stem != \"pep-0000\":\n self.document += _add_source_link(pep_source_path)\n self.document += _add_commit_history_info(pep_source_path)\n\n\ndef 
_add_source_link(pep_source_path: Path) -> nodes.paragraph:\n \"\"\"Add link to source text on VCS (GitHub)\"\"\"\n source_link = f\"https://github.com/python/peps/blob/main/{pep_source_path.name}\"\n link_node = nodes.reference(\"\", source_link, refuri=source_link)\n return nodes.paragraph(\"\", \"Source: \", link_node)\n\n\ndef _add_commit_history_info(pep_source_path: Path) -> nodes.paragraph:\n \"\"\"Use local git history to find last modified date.\"\"\"\n try:\n since_epoch = LAST_MODIFIED_TIMES[pep_source_path.name]\n except KeyError:\n return nodes.paragraph()\n\n iso_time = datetime.datetime.utcfromtimestamp(since_epoch).isoformat(sep=\" \")\n commit_link = f\"https://github.com/python/peps/commits/main/{pep_source_path.name}\"\n link_node = nodes.reference(\"\", f\"{iso_time} GMT\", refuri=commit_link)\n return nodes.paragraph(\"\", \"Last modified: \", link_node)\n\n\ndef _get_last_modified_timestamps():\n # get timestamps and changed files from all commits (without paging results)\n args = [\"git\", \"--no-pager\", \"log\", \"--format=#%at\", \"--name-only\"]\n with subprocess.Popen(args, stdout=subprocess.PIPE) as process:\n all_modified = process.stdout.read().decode(\"utf-8\")\n process.stdout.close()\n if process.wait(): # non-zero return code\n return {}\n\n # set up the dictionary with the *current* files\n last_modified = {path.name: 0 for path in Path().glob(\"pep-*\") if path.suffix in {\".txt\", \".rst\"}}\n\n # iterate through newest to oldest, updating per file timestamps\n change_sets = all_modified.removeprefix(\"#\").split(\"#\")\n for change_set in change_sets:\n timestamp, files = change_set.split(\"\\n\", 1)\n for file in files.strip().split(\"\\n\"):\n if file.startswith(\"pep-\") and file[-3:] in {\"txt\", \"rst\"}:\n if last_modified.get(file) == 0:\n try:\n last_modified[file] = float(timestamp)\n except ValueError:\n pass # if float conversion fails\n\n return last_modified\n\n\nLAST_MODIFIED_TIMES = _get_last_modified_timestamps()\n", "path": "pep_sphinx_extensions/pep_processor/transforms/pep_footer.py"}]} | 1,890 | 166 |
gh_patches_debug_10524 | rasdani/github-patches | git_diff | Gallopsled__pwntools-2191 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
readthedocs.org builds are broken
The docs have been failing to build for a while, but differently since https://github.com/Gallopsled/pwntools/pull/2179. They are still built using Python 2.7, which had problems installing rpyc and is now missing the toml package before running setup.py.
https://readthedocs.org/projects/pwntools/builds/
You could switch to Python 3 or, as a first step, try adding the `toml; python_version<'3.4'` requirement to the `docs/requirements.txt` file. I think only a maintainer with access to the configs can fix this.
</issue>
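One of the imports the eventual fix guards is `colored_traceback`, which `pwn/toplevel.py` (shown below) imports unconditionally; the pattern involved is a plain optional-import guard. A sketch of that pattern — the project's actual change appears in the diff at the end of this record:

```python
# Optional-import guard, sketched; the adopted change is in the diff below.
try:
    import colored_traceback
except ImportError:          # e.g. not installed on the docs build host
    colored_traceback = None

if colored_traceback is not None:
    colored_traceback.add_hook()
```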
<code>
[start of pwn/toplevel.py]
1 # Get all the modules from pwnlib
2 import collections
3 import logging
4 import math
5 import operator
6 import os
7 import platform
8 import re
9 import socks
10 import signal
11 import string
12 import struct
13 import subprocess
14 import sys
15 import tempfile
16 import threading
17 import time
18
19 import colored_traceback
20 from pprint import pprint
21
22 import pwnlib
23 from pwnlib import *
24 from pwnlib.asm import *
25 from pwnlib.context import Thread
26 from pwnlib.context import context, LocalContext
27 from pwnlib.dynelf import DynELF
28 from pwnlib.encoders import *
29 from pwnlib.elf.corefile import Core, Corefile, Coredump
30 from pwnlib.elf.elf import ELF, load
31 from pwnlib.encoders import *
32 from pwnlib.exception import PwnlibException
33 from pwnlib.gdb import attach, debug_assembly, debug_shellcode
34 from pwnlib.filepointer import *
35 from pwnlib.filesystem import *
36 from pwnlib.flag import *
37 from pwnlib.fmtstr import FmtStr, fmtstr_payload, fmtstr_split
38 from pwnlib.log import getLogger
39 from pwnlib.memleak import MemLeak, RelativeMemLeak
40 from pwnlib.regsort import *
41 from pwnlib.replacements import *
42 from pwnlib.rop import ROP
43 from pwnlib.rop.call import AppendedArgument
44 from pwnlib.rop.srop import SigreturnFrame
45 from pwnlib.rop.ret2dlresolve import Ret2dlresolvePayload
46 from pwnlib.runner import *
47 from pwnlib.term.readline import str_input
48 from pwnlib.timeout import Timeout
49 from pwnlib.tubes.listen import listen
50 from pwnlib.tubes.process import process, PTY, PIPE, STDOUT
51 from pwnlib.tubes.remote import remote, tcp, udp, connect
52 from pwnlib.tubes.serialtube import serialtube
53 from pwnlib.tubes.server import server
54 from pwnlib.tubes.ssh import ssh
55 from pwnlib.tubes.tube import tube
56 from pwnlib.ui import *
57 from pwnlib.util import crc
58 from pwnlib.util import iters
59 from pwnlib.util import net
60 from pwnlib.util import proc
61 from pwnlib.util import safeeval
62 from pwnlib.util.crc import BitPolynom
63 from pwnlib.util.cyclic import *
64 from pwnlib.util.fiddling import *
65 from pwnlib.util.getdents import *
66 from pwnlib.util.hashes import *
67 from pwnlib.util.lists import *
68 from pwnlib.util.misc import *
69 from pwnlib.util.packing import *
70 from pwnlib.util.proc import pidof
71 from pwnlib.util.sh_string import sh_string, sh_prepare, sh_command_with
72 from pwnlib.util.splash import *
73 from pwnlib.util.web import *
74
75 # Promote these modules, so that "from pwn import *" will let you access them
76
77 from six.moves import cPickle as pickle, cStringIO as StringIO
78 from six import BytesIO
79
80 log = getLogger("pwnlib.exploit")
81 error = log.error
82 warning = log.warning
83 warn = log.warning
84 info = log.info
85 debug = log.debug
86 success = log.success
87
88 colored_traceback.add_hook()
89
90 # Equivalence with the default behavior of "from import *"
91 # __all__ = [x for x in tuple(globals()) if not x.startswith('_')]
92
[end of pwn/toplevel.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pwn/toplevel.py b/pwn/toplevel.py
--- a/pwn/toplevel.py
+++ b/pwn/toplevel.py
@@ -16,7 +16,6 @@
import threading
import time
-import colored_traceback
from pprint import pprint
import pwnlib
@@ -85,7 +84,12 @@
debug = log.debug
success = log.success
-colored_traceback.add_hook()
+try:
+ import colored_traceback
+except ImportError:
+ pass
+else:
+ colored_traceback.add_hook()
# Equivalence with the default behavior of "from import *"
# __all__ = [x for x in tuple(globals()) if not x.startswith('_')]
| {"golden_diff": "diff --git a/pwn/toplevel.py b/pwn/toplevel.py\n--- a/pwn/toplevel.py\n+++ b/pwn/toplevel.py\n@@ -16,7 +16,6 @@\n import threading\n import time\n \n-import colored_traceback\n from pprint import pprint\n \n import pwnlib\n@@ -85,7 +84,12 @@\n debug = log.debug\n success = log.success\n \n-colored_traceback.add_hook()\n+try:\n+ import colored_traceback\n+except ImportError:\n+ pass\n+else:\n+ colored_traceback.add_hook()\n \n # Equivalence with the default behavior of \"from import *\"\n # __all__ = [x for x in tuple(globals()) if not x.startswith('_')]\n", "issue": "readthedocs.org builds are broken\nThe docs fail to build for a while, but differently since https://github.com/Gallopsled/pwntools/pull/2179. They're still built using Python 2.7, which had problems installing rpyc and is now missing the toml package before running the setup.py.\r\n\r\nhttps://readthedocs.org/projects/pwntools/builds/\r\n\r\nYou could switch to Python 3 or try adding the `toml; python_version<'3.4'` package to the `docs/requirements.txt` file for a starter? I think only a maintainer with access to the configs can fix this.\n", "before_files": [{"content": "# Get all the modules from pwnlib\nimport collections\nimport logging\nimport math\nimport operator\nimport os\nimport platform\nimport re\nimport socks\nimport signal\nimport string\nimport struct\nimport subprocess\nimport sys\nimport tempfile\nimport threading\nimport time\n\nimport colored_traceback\nfrom pprint import pprint\n\nimport pwnlib\nfrom pwnlib import *\nfrom pwnlib.asm import *\nfrom pwnlib.context import Thread\nfrom pwnlib.context import context, LocalContext\nfrom pwnlib.dynelf import DynELF\nfrom pwnlib.encoders import *\nfrom pwnlib.elf.corefile import Core, Corefile, Coredump\nfrom pwnlib.elf.elf import ELF, load\nfrom pwnlib.encoders import *\nfrom pwnlib.exception import PwnlibException\nfrom pwnlib.gdb import attach, debug_assembly, debug_shellcode\nfrom pwnlib.filepointer import *\nfrom pwnlib.filesystem import *\nfrom pwnlib.flag import *\nfrom pwnlib.fmtstr import FmtStr, fmtstr_payload, fmtstr_split\nfrom pwnlib.log import getLogger\nfrom pwnlib.memleak import MemLeak, RelativeMemLeak\nfrom pwnlib.regsort import *\nfrom pwnlib.replacements import *\nfrom pwnlib.rop import ROP\nfrom pwnlib.rop.call import AppendedArgument\nfrom pwnlib.rop.srop import SigreturnFrame\nfrom pwnlib.rop.ret2dlresolve import Ret2dlresolvePayload\nfrom pwnlib.runner import *\nfrom pwnlib.term.readline import str_input\nfrom pwnlib.timeout import Timeout\nfrom pwnlib.tubes.listen import listen\nfrom pwnlib.tubes.process import process, PTY, PIPE, STDOUT\nfrom pwnlib.tubes.remote import remote, tcp, udp, connect\nfrom pwnlib.tubes.serialtube import serialtube\nfrom pwnlib.tubes.server import server\nfrom pwnlib.tubes.ssh import ssh\nfrom pwnlib.tubes.tube import tube\nfrom pwnlib.ui import *\nfrom pwnlib.util import crc\nfrom pwnlib.util import iters\nfrom pwnlib.util import net\nfrom pwnlib.util import proc\nfrom pwnlib.util import safeeval\nfrom pwnlib.util.crc import BitPolynom\nfrom pwnlib.util.cyclic import *\nfrom pwnlib.util.fiddling import *\nfrom pwnlib.util.getdents import *\nfrom pwnlib.util.hashes import *\nfrom pwnlib.util.lists import *\nfrom pwnlib.util.misc import *\nfrom pwnlib.util.packing import *\nfrom pwnlib.util.proc import pidof\nfrom pwnlib.util.sh_string import sh_string, sh_prepare, sh_command_with\nfrom pwnlib.util.splash import *\nfrom pwnlib.util.web import *\n\n# Promote these modules, so that \"from pwn 
import *\" will let you access them\n\nfrom six.moves import cPickle as pickle, cStringIO as StringIO\nfrom six import BytesIO\n\nlog = getLogger(\"pwnlib.exploit\")\nerror = log.error\nwarning = log.warning\nwarn = log.warning\ninfo = log.info\ndebug = log.debug\nsuccess = log.success\n\ncolored_traceback.add_hook()\n\n# Equivalence with the default behavior of \"from import *\"\n# __all__ = [x for x in tuple(globals()) if not x.startswith('_')]\n", "path": "pwn/toplevel.py"}]} | 1,591 | 159 |
gh_patches_debug_11600 | rasdani/github-patches | git_diff | nautobot__nautobot-4260 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Set NAUTOBOT_INSTALLATION_METRICS_ENABLED=false in the dockerfile dev stage
<!--
NOTE: This template is for use by maintainers only. Please do not submit
an issue using this template unless you have been specifically asked to
do so.
-->
### Proposed Changes
This should propagate to all of the app dev instances, since they are based on the nautobot dev images. Make sure to unset this in the final stage.
<!-- Provide justification for the proposed change(s). -->
### Justification
We don't want to send metrics for dev environments
</issue>
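The settings side of the toggle is a one-liner that reads the variable back with the `is_truthy` helper already imported in `development/nautobot_config.py`; the committed settings line is visible in the diff at the end of this record, while the exact `ENV` placement in the Dockerfile dev stage is left as an assumption here:

```python
# The dev image would export the flag, e.g. in the Dockerfile dev stage
# (exact location is an assumption):
#   ENV NAUTOBOT_INSTALLATION_METRICS_ENABLED=false
# and the development settings read it back, defaulting metrics to off:
import os

from nautobot.core.settings_funcs import is_truthy

INSTALLATION_METRICS_ENABLED = is_truthy(
    os.getenv("NAUTOBOT_INSTALLATION_METRICS_ENABLED", "False")
)
```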
<code>
[start of development/nautobot_config.py]
1 """Nautobot development configuration file."""
2 import os
3
4 from nautobot.core.settings import * # noqa: F403
5 from nautobot.core.settings_funcs import is_truthy
6
7 #
8 # Debugging defaults to True rather than False for the development environment
9 #
10 DEBUG = is_truthy(os.getenv("NAUTOBOT_DEBUG", "True"))
11
12 # Django Debug Toolbar - enabled only when debugging
13 if DEBUG:
14 if "debug_toolbar" not in INSTALLED_APPS: # noqa: F405
15 INSTALLED_APPS.append("debug_toolbar") # noqa: F405
16 if "debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE: # noqa: F405
17 MIDDLEWARE.insert(0, "debug_toolbar.middleware.DebugToolbarMiddleware") # noqa: F405
18 # By default the toolbar only displays when the request is coming from one of INTERNAL_IPS.
19 # For the Docker dev environment, we don't know in advance what that IP may be, so override to skip that check
20 DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda _request: DEBUG}
21
22 #
23 # Logging for the development environment, taking into account the redefinition of DEBUG above
24 #
25
26 LOG_LEVEL = "DEBUG" if DEBUG else "INFO"
27 LOGGING["loggers"]["nautobot"]["handlers"] = ["verbose_console" if DEBUG else "normal_console"] # noqa: F405
28 LOGGING["loggers"]["nautobot"]["level"] = LOG_LEVEL # noqa: F405
29
30 #
31 # Plugins
32 #
33
34 PLUGINS = [
35 "example_plugin",
36 ]
37
38
39 #
40 # Development Environment for SSO
41 # Configure `invoke.yml` based on example for SSO development environment
42 #
43
44 # OIDC Dev ENV
45 if is_truthy(os.getenv("ENABLE_OIDC", "False")):
46 import requests
47
48 AUTHENTICATION_BACKENDS = (
49 "social_core.backends.keycloak.KeycloakOAuth2",
50 "nautobot.core.authentication.ObjectPermissionBackend",
51 )
52 SOCIAL_AUTH_KEYCLOAK_KEY = "nautobot"
53 SOCIAL_AUTH_KEYCLOAK_SECRET = "7b1c3527-8702-4742-af69-2b74ee5742e8"
54 SOCIAL_AUTH_KEYCLOAK_PUBLIC_KEY = requests.get("http://keycloak:8087/realms/nautobot/", timeout=15).json()[
55 "public_key"
56 ]
57 SOCIAL_AUTH_KEYCLOAK_AUTHORIZATION_URL = "http://localhost:8087/realms/nautobot/protocol/openid-connect/auth"
58 SOCIAL_AUTH_KEYCLOAK_ACCESS_TOKEN_URL = "http://keycloak:8087/realms/nautobot/protocol/openid-connect/token"
59 SOCIAL_AUTH_KEYCLOAK_VERIFY_SSL = False
60
61 METRICS_ENABLED = True
62
63 CELERY_WORKER_PROMETHEUS_PORTS = [8080]
64
[end of development/nautobot_config.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/development/nautobot_config.py b/development/nautobot_config.py
--- a/development/nautobot_config.py
+++ b/development/nautobot_config.py
@@ -19,6 +19,9 @@
# For the Docker dev environment, we don't know in advance what that IP may be, so override to skip that check
DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda _request: DEBUG}
+# Do *not* send anonymized install metrics when post_upgrade or send_installation_metrics management commands are run
+INSTALLATION_METRICS_ENABLED = is_truthy(os.getenv("NAUTOBOT_INSTALLATION_METRICS_ENABLED", "False"))
+
#
# Logging for the development environment, taking into account the redefinition of DEBUG above
#
| {"golden_diff": "diff --git a/development/nautobot_config.py b/development/nautobot_config.py\n--- a/development/nautobot_config.py\n+++ b/development/nautobot_config.py\n@@ -19,6 +19,9 @@\n # For the Docker dev environment, we don't know in advance what that IP may be, so override to skip that check\n DEBUG_TOOLBAR_CONFIG = {\"SHOW_TOOLBAR_CALLBACK\": lambda _request: DEBUG}\n \n+# Do *not* send anonymized install metrics when post_upgrade or send_installation_metrics management commands are run\n+INSTALLATION_METRICS_ENABLED = is_truthy(os.getenv(\"NAUTOBOT_INSTALLATION_METRICS_ENABLED\", \"False\"))\n+\n #\n # Logging for the development environment, taking into account the redefinition of DEBUG above\n #\n", "issue": "Set NAUTOBOT_INSTALLATION_METRICS_ENABLED=false in the dockerfile dev stage\n<!--\r\n NOTE: This template is for use by maintainers only. Please do not submit\r\n an issue using this template unless you have been specifically asked to\r\n do so.\r\n-->\r\n### Proposed Changes\r\n\r\nThis should propagate to all of the app dev instances since they base off of nautobot dev images. Make sure to unset this in the final stage\r\n\r\n<!-- Provide justification for the proposed change(s). -->\r\n### Justification\r\n\r\nWe don't want to send metrics for dev environments\n", "before_files": [{"content": "\"\"\"Nautobot development configuration file.\"\"\"\nimport os\n\nfrom nautobot.core.settings import * # noqa: F403\nfrom nautobot.core.settings_funcs import is_truthy\n\n#\n# Debugging defaults to True rather than False for the development environment\n#\nDEBUG = is_truthy(os.getenv(\"NAUTOBOT_DEBUG\", \"True\"))\n\n# Django Debug Toolbar - enabled only when debugging\nif DEBUG:\n if \"debug_toolbar\" not in INSTALLED_APPS: # noqa: F405\n INSTALLED_APPS.append(\"debug_toolbar\") # noqa: F405\n if \"debug_toolbar.middleware.DebugToolbarMiddleware\" not in MIDDLEWARE: # noqa: F405\n MIDDLEWARE.insert(0, \"debug_toolbar.middleware.DebugToolbarMiddleware\") # noqa: F405\n # By default the toolbar only displays when the request is coming from one of INTERNAL_IPS.\n # For the Docker dev environment, we don't know in advance what that IP may be, so override to skip that check\n DEBUG_TOOLBAR_CONFIG = {\"SHOW_TOOLBAR_CALLBACK\": lambda _request: DEBUG}\n\n#\n# Logging for the development environment, taking into account the redefinition of DEBUG above\n#\n\nLOG_LEVEL = \"DEBUG\" if DEBUG else \"INFO\"\nLOGGING[\"loggers\"][\"nautobot\"][\"handlers\"] = [\"verbose_console\" if DEBUG else \"normal_console\"] # noqa: F405\nLOGGING[\"loggers\"][\"nautobot\"][\"level\"] = LOG_LEVEL # noqa: F405\n\n#\n# Plugins\n#\n\nPLUGINS = [\n \"example_plugin\",\n]\n\n\n#\n# Development Environment for SSO\n# Configure `invoke.yml` based on example for SSO development environment\n#\n\n# OIDC Dev ENV\nif is_truthy(os.getenv(\"ENABLE_OIDC\", \"False\")):\n import requests\n\n AUTHENTICATION_BACKENDS = (\n \"social_core.backends.keycloak.KeycloakOAuth2\",\n \"nautobot.core.authentication.ObjectPermissionBackend\",\n )\n SOCIAL_AUTH_KEYCLOAK_KEY = \"nautobot\"\n SOCIAL_AUTH_KEYCLOAK_SECRET = \"7b1c3527-8702-4742-af69-2b74ee5742e8\"\n SOCIAL_AUTH_KEYCLOAK_PUBLIC_KEY = requests.get(\"http://keycloak:8087/realms/nautobot/\", timeout=15).json()[\n \"public_key\"\n ]\n SOCIAL_AUTH_KEYCLOAK_AUTHORIZATION_URL = \"http://localhost:8087/realms/nautobot/protocol/openid-connect/auth\"\n SOCIAL_AUTH_KEYCLOAK_ACCESS_TOKEN_URL = \"http://keycloak:8087/realms/nautobot/protocol/openid-connect/token\"\n 
SOCIAL_AUTH_KEYCLOAK_VERIFY_SSL = False\n\nMETRICS_ENABLED = True\n\nCELERY_WORKER_PROMETHEUS_PORTS = [8080]\n", "path": "development/nautobot_config.py"}]} | 1,413 | 167 |
gh_patches_debug_14541 | rasdani/github-patches | git_diff | pyinstaller__pyinstaller-5006 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Impossible to build with matplotlib 3.3 on Python 3.7 and 3.8
## Description of the issue
Trying a simple program main.py
```python
import matplotlib.pyplot as plt
plt.plot([0,1,2], [0,1,1])
```
Running the application created with pyinstaller --hiddenimport='pkg_resources.py2_warn' main.py
I get:
```
Traceback (most recent call last):
  File "main.py", line 3, in <module>
  File "<frozen importlib._bootstrap>", line 983, in _find_and_load
  File "<frozen importlib._bootstrap>", line 967, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 677, in _load_unlocked
  File "c:\users\***\appdata\local\programs\python\python37\lib\site-packages\PyInstaller\loader\pyimod03_importers.py", line 623, in exec_module
  File "site-packages\matplotlib\__init__.py", line 898, in <module>
  File "site-packages\matplotlib\cbook\__init__.py", line 480, in _get_data_path
  File "site-packages\matplotlib\__init__.py", line 239, in wrapper
  File "site-packages\matplotlib\__init__.py", line 534, in get_data_path
  File "site-packages\matplotlib\__init__.py", line 239, in wrapper
  File "site-packages\matplotlib\__init__.py", line 563, in _get_data_path
NameError: name 'defaultParams' is not defined
[3064] Failed to execute script main
```
## Context information (for bug reports)
* Output of `pyinstaller --version`: ```3.6 and 4.0-dev0```
* Version of Python: 3.7 and 3.8
* Platform: Windows (english 10)
* Did you also try this on another platform? No
## What solves it
Reverting to matplotlib 3.2.2 resolves the issue (temporarily).
</issue>
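A quick way to see where the installed matplotlib resolves its bundled data (and therefore what the hook has to ship) is to probe it directly; this is a debugging aid, not part of the hook itself:

```python
# Debugging probe, independent of the PyInstaller hook: run it both in the
# source environment and inside the frozen app to compare data paths.
import matplotlib

print(matplotlib.get_data_path())     # e.g. .../site-packages/matplotlib/mpl-data
print(matplotlib.matplotlib_fname())  # the matplotlibrc file actually in use
```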
<code>
[start of PyInstaller/hooks/hook-matplotlib.py]
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2013-2020, PyInstaller Development Team.
3 #
4 # Distributed under the terms of the GNU General Public License (version 2
5 # or later) with exception for distributing the bootloader.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #
9 # SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
10 #-----------------------------------------------------------------------------
11
12
13 from PyInstaller.utils.hooks import exec_statement
14
15 mpl_data_dir = exec_statement(
16 "import matplotlib; print(matplotlib._get_data_path())")
17
18 datas = [
19 (mpl_data_dir, "mpl-data"),
20 ]
21
[end of PyInstaller/hooks/hook-matplotlib.py]
[start of PyInstaller/hooks/rthooks/pyi_rth_mpldata.py]
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2013-2020, PyInstaller Development Team.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #
9 # SPDX-License-Identifier: Apache-2.0
10 #-----------------------------------------------------------------------------
11
12
13 import os
14 import sys
15
16 os.environ["MATPLOTLIBDATA"] = os.path.join(sys._MEIPASS, "mpl-data")
17
[end of PyInstaller/hooks/rthooks/pyi_rth_mpldata.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/PyInstaller/hooks/hook-matplotlib.py b/PyInstaller/hooks/hook-matplotlib.py
--- a/PyInstaller/hooks/hook-matplotlib.py
+++ b/PyInstaller/hooks/hook-matplotlib.py
@@ -16,5 +16,5 @@
"import matplotlib; print(matplotlib._get_data_path())")
datas = [
- (mpl_data_dir, "mpl-data"),
+ (mpl_data_dir, "matplotlib/mpl-data"),
]
diff --git a/PyInstaller/hooks/rthooks/pyi_rth_mpldata.py b/PyInstaller/hooks/rthooks/pyi_rth_mpldata.py
deleted file mode 100644
--- a/PyInstaller/hooks/rthooks/pyi_rth_mpldata.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013-2020, PyInstaller Development Team.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#
-# SPDX-License-Identifier: Apache-2.0
-#-----------------------------------------------------------------------------
-
-
-import os
-import sys
-
-os.environ["MATPLOTLIBDATA"] = os.path.join(sys._MEIPASS, "mpl-data")
| {"golden_diff": "diff --git a/PyInstaller/hooks/hook-matplotlib.py b/PyInstaller/hooks/hook-matplotlib.py\n--- a/PyInstaller/hooks/hook-matplotlib.py\n+++ b/PyInstaller/hooks/hook-matplotlib.py\n@@ -16,5 +16,5 @@\n \"import matplotlib; print(matplotlib._get_data_path())\")\n \n datas = [\n- (mpl_data_dir, \"mpl-data\"),\n+ (mpl_data_dir, \"matplotlib/mpl-data\"),\n ]\ndiff --git a/PyInstaller/hooks/rthooks/pyi_rth_mpldata.py b/PyInstaller/hooks/rthooks/pyi_rth_mpldata.py\ndeleted file mode 100644\n--- a/PyInstaller/hooks/rthooks/pyi_rth_mpldata.py\n+++ /dev/null\n@@ -1,16 +0,0 @@\n-#-----------------------------------------------------------------------------\n-# Copyright (c) 2013-2020, PyInstaller Development Team.\n-#\n-# Licensed under the Apache License, Version 2.0 (the \"License\");\n-# you may not use this file except in compliance with the License.\n-#\n-# The full license is in the file COPYING.txt, distributed with this software.\n-#\n-# SPDX-License-Identifier: Apache-2.0\n-#-----------------------------------------------------------------------------\n-\n-\n-import os\n-import sys\n-\n-os.environ[\"MATPLOTLIBDATA\"] = os.path.join(sys._MEIPASS, \"mpl-data\")\n", "issue": "Impossible to build with matplotlib 3.3 on Python 3.7 and 3.8\n## Description of the issue\r\n\r\nTrying a simple program main.py\r\n```python\r\nimport matplotlib.pyplot as plt\r\n\r\nplt.plot([0,1,2], [0,1,1])\r\n```\r\n\r\nRunning the application created with pyinstaller --hiddenimport='pkg_resources.py2_warn' main.py\r\n\r\nI get:\r\n\r\n```\r\nTraceback (most recent call last): File \"main.py\", line 3, in <module> File \"<frozen importlib._bootstrap>\", line 983, in _find_and_load File \"<frozen importlib._bootstrap>\", line 967, in _find_and_load_unlocked File \"<frozen importlib._bootstrap>\", line 677, in _load_unlocked File \"c:\\users\\***\\appdata\\local\\programs\\python\\python37\\lib\\site-packages\\PyInstaller\\loader\\pyimod03_importers.py\", line 623, in exec_module File \"site-packages\\matplotlib\\__init__.py\", line 898, in <module> File \"site-packages\\matplotlib\\cbook\\__init__.py\", line 480, in _get_data_path File \"site-packages\\matplotlib\\__init__.py\", line 239, in wrapper File \"site-packages\\matplotlib\\__init__.py\", line 534, in get_data_path File \"site-packages\\matplotlib\\__init__.py\", line 239, in wrapper File \"site-packages\\matplotlib\\__init__.py\", line 563, in _get_data_path NameError: name 'defaultParams' is not defined [3064] Failed to execute script main\r\n```\r\n## Context information (for bug reports)\r\n\r\n* Output of `pyinstaller --version`: ```3.6 and 4.0-dev0```\r\n* Version of Python: 3.7 and 3.8\r\n* Platform: Windows (english 10)\r\n* Did you also try this on another platform? 
No\r\n\r\n## What solves it\r\n\r\nReverting to matplotlib 3.2.2 resolve the issue (temporarily).\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2013-2020, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License (version 2\n# or later) with exception for distributing the bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#\n# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)\n#-----------------------------------------------------------------------------\n\n\nfrom PyInstaller.utils.hooks import exec_statement\n\nmpl_data_dir = exec_statement(\n \"import matplotlib; print(matplotlib._get_data_path())\")\n\ndatas = [\n (mpl_data_dir, \"mpl-data\"),\n]\n", "path": "PyInstaller/hooks/hook-matplotlib.py"}, {"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2013-2020, PyInstaller Development Team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#\n# SPDX-License-Identifier: Apache-2.0\n#-----------------------------------------------------------------------------\n\n\nimport os\nimport sys\n\nos.environ[\"MATPLOTLIBDATA\"] = os.path.join(sys._MEIPASS, \"mpl-data\")\n", "path": "PyInstaller/hooks/rthooks/pyi_rth_mpldata.py"}]} | 1,404 | 318 |
gh_patches_debug_16772 | rasdani/github-patches | git_diff | saulpw__visidata-1584 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ZSH completion fails with invalid option definition
I'm using v2.10.2 installed via the [latest Arch package](https://archlinux.org/packages/community/any/visidata/). Visidata shell completion in my ZSH is currently broken.
```
$ vd <tab>
_arguments:comparguments:327: invalid option definition: --fmt_expand_list[format str to use for names of columns expanded from list (colname, index) (default: %s[%s])]:str
```
</issue>
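zsh rejects the spec because the `[` and `]` that come from the option's default value (`%s[%s]`) are never escaped: in the generator shown below, the help string is escaped first and the default is appended afterwards. A standalone sketch of the ordering problem, using the strings from the error message above:

```python
# Sketch of the escaping-order problem behind the _arguments error above.
def escape(s):
    return s.replace("[", "\\[").replace("]", "\\]")

helpstr = "format str to use for names of columns expanded from list (colname, index)"
default = "%s[%s]"

bad = escape(helpstr) + f" (default: {default})"    # brackets from the default survive
good = escape(helpstr + f" (default: {default})")   # every bracket is escaped

print(bad)
print(good)
```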
<code>
[start of dev/zsh-completion.py]
1 #!/usr/bin/env python
2 from __future__ import unicode_literals
3
4 import os
5 from os.path import dirname as dirn
6 import sys
7 import re
8
9 sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
10 from visidata import vd
11 from visidata.main import option_aliases
12
13 ZSH_COMPLETION_FILE = "_visidata"
14 ZSH_COMPLETION_TEMPLATE = "dev/zsh-completion.in"
15 pat_class = re.compile("'(.*)'")
16 pat_select = re.compile("^\([^)]*\)")
17
18
19 def generate_completion(opt):
20 prefix = "--" + opt.name
21 shortnames = [key for key, value in option_aliases.items() if value[0] == opt.name]
22 if len(shortnames):
23 if len(shortnames[0]) == 1:
24 shortname = "-" + shortnames[0]
25 else:
26 shortname = "--" + shortnames[0]
27 prefix = "{" + f"{shortname},{prefix}" + "}"
28 if isinstance(opt.value, bool):
29 completion = ""
30 else:
31 completion = ":" + pat_class.findall(str(opt.value.__class__))[0]
32 if opt.name in ["play", "output", "visidata_dir", "config"]:
33 completion += ":_files"
34 elif opt.name in ["plugins_url", "motd_url"]:
35 completion += ":_urls"
36 helpstr = opt.helpstr.replace("[", "\\[").replace("]", "\\]")
37 selections = pat_select.findall(helpstr)
38 if len(selections):
39 completion += f":{selections[0].replace('/', ' ')}"
40 # TODO: use `zstyle ':completion:*' extra-verbose true`
41 # to control the display of default value
42 helpstr = helpstr + f" (default: {opt.value})"
43 return f"{prefix}'[{helpstr}]{completion}'"
44
45
46 flags = [generate_completion(vd._options[opt]["default"]) for opt in vd._options]
47
48 with open(ZSH_COMPLETION_TEMPLATE) as f:
49 template = f.read()
50
51 template = template.replace("{{flags}}", " \\\n ".join(flags))
52
53 with open(ZSH_COMPLETION_FILE, "w") as f:
54 f.write(template)
55
[end of dev/zsh-completion.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/dev/zsh-completion.py b/dev/zsh-completion.py
--- a/dev/zsh-completion.py
+++ b/dev/zsh-completion.py
@@ -33,13 +33,14 @@
completion += ":_files"
elif opt.name in ["plugins_url", "motd_url"]:
completion += ":_urls"
- helpstr = opt.helpstr.replace("[", "\\[").replace("]", "\\]")
+ helpstr = opt.helpstr
selections = pat_select.findall(helpstr)
if len(selections):
completion += f":{selections[0].replace('/', ' ')}"
# TODO: use `zstyle ':completion:*' extra-verbose true`
# to control the display of default value
helpstr = helpstr + f" (default: {opt.value})"
+ helpstr = helpstr.replace("[", "\\[").replace("]", "\\]")
return f"{prefix}'[{helpstr}]{completion}'"
| {"golden_diff": "diff --git a/dev/zsh-completion.py b/dev/zsh-completion.py\n--- a/dev/zsh-completion.py\n+++ b/dev/zsh-completion.py\n@@ -33,13 +33,14 @@\n completion += \":_files\"\n elif opt.name in [\"plugins_url\", \"motd_url\"]:\n completion += \":_urls\"\n- helpstr = opt.helpstr.replace(\"[\", \"\\\\[\").replace(\"]\", \"\\\\]\")\n+ helpstr = opt.helpstr\n selections = pat_select.findall(helpstr)\n if len(selections):\n completion += f\":{selections[0].replace('/', ' ')}\"\n # TODO: use `zstyle ':completion:*' extra-verbose true`\n # to control the display of default value\n helpstr = helpstr + f\" (default: {opt.value})\"\n+ helpstr = helpstr.replace(\"[\", \"\\\\[\").replace(\"]\", \"\\\\]\")\n return f\"{prefix}'[{helpstr}]{completion}'\"\n", "issue": "ZSH completion fails with invalid option definition\nI'm using v2.10.2 installed via the [latest Arch package](https://archlinux.org/packages/community/any/visidata/). Visidata shell completion in my ZSH is currently broken.\r\n\r\n```\r\n$ vd <tab>\r\n_arguments:comparguments:327: invalid option definition: --fmt_expand_list[format str to use for names of columns expanded from list (colname, index) (default: %s[%s])]:str\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\nfrom __future__ import unicode_literals\n\nimport os\nfrom os.path import dirname as dirn\nimport sys\nimport re\n\nsys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))\nfrom visidata import vd\nfrom visidata.main import option_aliases\n\nZSH_COMPLETION_FILE = \"_visidata\"\nZSH_COMPLETION_TEMPLATE = \"dev/zsh-completion.in\"\npat_class = re.compile(\"'(.*)'\")\npat_select = re.compile(\"^\\([^)]*\\)\")\n\n\ndef generate_completion(opt):\n prefix = \"--\" + opt.name\n shortnames = [key for key, value in option_aliases.items() if value[0] == opt.name]\n if len(shortnames):\n if len(shortnames[0]) == 1:\n shortname = \"-\" + shortnames[0]\n else:\n shortname = \"--\" + shortnames[0]\n prefix = \"{\" + f\"{shortname},{prefix}\" + \"}\"\n if isinstance(opt.value, bool):\n completion = \"\"\n else:\n completion = \":\" + pat_class.findall(str(opt.value.__class__))[0]\n if opt.name in [\"play\", \"output\", \"visidata_dir\", \"config\"]:\n completion += \":_files\"\n elif opt.name in [\"plugins_url\", \"motd_url\"]:\n completion += \":_urls\"\n helpstr = opt.helpstr.replace(\"[\", \"\\\\[\").replace(\"]\", \"\\\\]\")\n selections = pat_select.findall(helpstr)\n if len(selections):\n completion += f\":{selections[0].replace('/', ' ')}\"\n # TODO: use `zstyle ':completion:*' extra-verbose true`\n # to control the display of default value\n helpstr = helpstr + f\" (default: {opt.value})\"\n return f\"{prefix}'[{helpstr}]{completion}'\"\n\n\nflags = [generate_completion(vd._options[opt][\"default\"]) for opt in vd._options]\n\nwith open(ZSH_COMPLETION_TEMPLATE) as f:\n template = f.read()\n\ntemplate = template.replace(\"{{flags}}\", \" \\\\\\n \".join(flags))\n\nwith open(ZSH_COMPLETION_FILE, \"w\") as f:\n f.write(template)\n", "path": "dev/zsh-completion.py"}]} | 1,226 | 213 |
gh_patches_debug_8893 | rasdani/github-patches | git_diff | vacanza__python-holidays-806 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Swaziland deprecation warning
Resolves #793.
</issue>
<code>
[start of holidays/__init__.py]
1 # python-holidays
2 # ---------------
3 # A fast, efficient Python library for generating country, province and state
4 # specific sets of holidays on the fly. It aims to make determining whether a
5 # specific date is a holiday as fast and flexible as possible.
6 #
7 # Authors: dr-prodigy <[email protected]> (c) 2017-2022
8 # ryanss <[email protected]> (c) 2014-2017
9 # Website: https://github.com/dr-prodigy/python-holidays
10 # License: MIT (see LICENSE file)
11 from holidays.constants import (
12 MON,
13 TUE,
14 WED,
15 THU,
16 FRI,
17 SAT,
18 SUN,
19 WEEKEND,
20 JAN,
21 FEB,
22 MAR,
23 APR,
24 MAY,
25 JUN,
26 JUL,
27 AUG,
28 SEP,
29 OCT,
30 NOV,
31 DEC,
32 )
33 from holidays.countries import *
34 from holidays.financial import *
35 from holidays.holiday_base import * # * import required for IDE docstrings
36 from holidays.utils import (
37 CountryHoliday,
38 country_holidays,
39 financial_holidays,
40 list_supported_countries,
41 list_supported_financial,
42 )
43
44 __version__ = "0.17"
45
[end of holidays/__init__.py]
[start of holidays/countries/eswatini.py]
1 # python-holidays
2 # ---------------
3 # A fast, efficient Python library for generating country, province and state
4 # specific sets of holidays on the fly. It aims to make determining whether a
5 # specific date is a holiday as fast and flexible as possible.
6 #
7 # Authors: dr-prodigy <[email protected]> (c) 2017-2022
8 # ryanss <[email protected]> (c) 2014-2017
9 # Website: https://github.com/dr-prodigy/python-holidays
10 # License: MIT (see LICENSE file)
11
12 import warnings
13 from datetime import date
14
15 from dateutil.easter import easter
16 from dateutil.relativedelta import relativedelta as rd
17
18 from holidays.constants import SUN, JAN, APR, MAY, JUL, SEP, DEC
19 from holidays.holiday_base import HolidayBase
20
21
22 class Eswatini(HolidayBase):
23 """
24 https://swazilii.org/sz/legislation/act/1938/71
25 https://www.officeholidays.com/countries/swaziland
26 """
27
28 country = "SZ"
29
30 def _populate(self, year):
31 super()._populate(year)
32
33 # Observed since 1938
34 if year > 1938:
35 self[date(year, JAN, 1)] = "New Year's Day"
36
37 e = easter(year)
38 good_friday = e - rd(days=2)
39 easter_monday = e + rd(days=1)
40 ascension_day = e + rd(days=39)
41 self[good_friday] = "Good Friday"
42 self[easter_monday] = "Easter Monday"
43 self[ascension_day] = "Ascension Day"
44
45 if year > 1968:
46 self[date(year, APR, 25)] = "National Flag Day"
47
48 if year > 1982:
49 # https://www.officeholidays.com/holidays/swaziland/birthday-of-late-king-sobhuza
50 self[date(year, JUL, 22)] = "Birthday of Late King Sobhuza"
51
52 if year > 1986:
53 # https://www.officeholidays.com/holidays/swaziland/birthday-of-king-mswati-iii
54 self[date(year, APR, 19)] = "King's Birthday"
55
56 self[date(year, MAY, 1)] = "Worker's Day"
57 self[date(year, SEP, 6)] = "Independence Day"
58 self[date(year, DEC, 25)] = "Christmas Day"
59 self[date(year, DEC, 26)] = "Boxing Day"
60
61 # Once-off public holidays
62 y2k = "Y2K changeover"
63
64 if year == 1999:
65 # https://mg.co.za/article/1999-12-09-swaziland-declares-bank-holidays/
66 self[date(1999, DEC, 31)] = y2k
67 if year == 2000:
68 self[date(2000, JAN, 3)] = y2k
69
70 # As of 2021/1/1, whenever a public holiday falls on a
71 # Sunday
72 # it rolls over to the following Monday
73 for k, v in list(self.items()):
74
75 if self.observed and k.weekday() == SUN and k.year == year:
76 add_days = 1
77 while self.get(k + rd(days=add_days)) is not None:
78 add_days += 1
79 self[k + rd(days=add_days)] = v + " (Day Off)"
80
81
82 class Swaziland(Eswatini):
83 warnings.warn(
84 "Swaziland is deprecated, use Eswatini instead.",
85 DeprecationWarning,
86 )
87 pass
88
89
90 class SZ(Eswatini):
91 pass
92
93
94 class SZW(Eswatini):
95 pass
96
[end of holidays/countries/eswatini.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/holidays/__init__.py b/holidays/__init__.py
--- a/holidays/__init__.py
+++ b/holidays/__init__.py
@@ -41,4 +41,4 @@
list_supported_financial,
)
-__version__ = "0.17"
+__version__ = "0.17.1"
diff --git a/holidays/countries/eswatini.py b/holidays/countries/eswatini.py
--- a/holidays/countries/eswatini.py
+++ b/holidays/countries/eswatini.py
@@ -80,11 +80,13 @@
class Swaziland(Eswatini):
- warnings.warn(
- "Swaziland is deprecated, use Eswatini instead.",
- DeprecationWarning,
- )
- pass
+ def __init__(self, *args, **kwargs) -> None:
+ warnings.warn(
+ "Swaziland is deprecated, use Eswatini instead.",
+ DeprecationWarning,
+ )
+
+ super().__init__(*args, **kwargs)
class SZ(Eswatini):
| {"golden_diff": "diff --git a/holidays/__init__.py b/holidays/__init__.py\n--- a/holidays/__init__.py\n+++ b/holidays/__init__.py\n@@ -41,4 +41,4 @@\n list_supported_financial,\n )\n \n-__version__ = \"0.17\"\n+__version__ = \"0.17.1\"\ndiff --git a/holidays/countries/eswatini.py b/holidays/countries/eswatini.py\n--- a/holidays/countries/eswatini.py\n+++ b/holidays/countries/eswatini.py\n@@ -80,11 +80,13 @@\n \n \n class Swaziland(Eswatini):\n- warnings.warn(\n- \"Swaziland is deprecated, use Eswatini instead.\",\n- DeprecationWarning,\n- )\n- pass\n+ def __init__(self, *args, **kwargs) -> None:\n+ warnings.warn(\n+ \"Swaziland is deprecated, use Eswatini instead.\",\n+ DeprecationWarning,\n+ )\n+\n+ super().__init__(*args, **kwargs)\n \n \n class SZ(Eswatini):\n", "issue": "Swaziland deprecation warning\nResolves #793.\n", "before_files": [{"content": "# python-holidays\n# ---------------\n# A fast, efficient Python library for generating country, province and state\n# specific sets of holidays on the fly. It aims to make determining whether a\n# specific date is a holiday as fast and flexible as possible.\n#\n# Authors: dr-prodigy <[email protected]> (c) 2017-2022\n# ryanss <[email protected]> (c) 2014-2017\n# Website: https://github.com/dr-prodigy/python-holidays\n# License: MIT (see LICENSE file)\nfrom holidays.constants import (\n MON,\n TUE,\n WED,\n THU,\n FRI,\n SAT,\n SUN,\n WEEKEND,\n JAN,\n FEB,\n MAR,\n APR,\n MAY,\n JUN,\n JUL,\n AUG,\n SEP,\n OCT,\n NOV,\n DEC,\n)\nfrom holidays.countries import *\nfrom holidays.financial import *\nfrom holidays.holiday_base import * # * import required for IDE docstrings\nfrom holidays.utils import (\n CountryHoliday,\n country_holidays,\n financial_holidays,\n list_supported_countries,\n list_supported_financial,\n)\n\n__version__ = \"0.17\"\n", "path": "holidays/__init__.py"}, {"content": "# python-holidays\n# ---------------\n# A fast, efficient Python library for generating country, province and state\n# specific sets of holidays on the fly. 
It aims to make determining whether a\n# specific date is a holiday as fast and flexible as possible.\n#\n# Authors: dr-prodigy <[email protected]> (c) 2017-2022\n# ryanss <[email protected]> (c) 2014-2017\n# Website: https://github.com/dr-prodigy/python-holidays\n# License: MIT (see LICENSE file)\n\nimport warnings\nfrom datetime import date\n\nfrom dateutil.easter import easter\nfrom dateutil.relativedelta import relativedelta as rd\n\nfrom holidays.constants import SUN, JAN, APR, MAY, JUL, SEP, DEC\nfrom holidays.holiday_base import HolidayBase\n\n\nclass Eswatini(HolidayBase):\n \"\"\"\n https://swazilii.org/sz/legislation/act/1938/71\n https://www.officeholidays.com/countries/swaziland\n \"\"\"\n\n country = \"SZ\"\n\n def _populate(self, year):\n super()._populate(year)\n\n # Observed since 1938\n if year > 1938:\n self[date(year, JAN, 1)] = \"New Year's Day\"\n\n e = easter(year)\n good_friday = e - rd(days=2)\n easter_monday = e + rd(days=1)\n ascension_day = e + rd(days=39)\n self[good_friday] = \"Good Friday\"\n self[easter_monday] = \"Easter Monday\"\n self[ascension_day] = \"Ascension Day\"\n\n if year > 1968:\n self[date(year, APR, 25)] = \"National Flag Day\"\n\n if year > 1982:\n # https://www.officeholidays.com/holidays/swaziland/birthday-of-late-king-sobhuza\n self[date(year, JUL, 22)] = \"Birthday of Late King Sobhuza\"\n\n if year > 1986:\n # https://www.officeholidays.com/holidays/swaziland/birthday-of-king-mswati-iii\n self[date(year, APR, 19)] = \"King's Birthday\"\n\n self[date(year, MAY, 1)] = \"Worker's Day\"\n self[date(year, SEP, 6)] = \"Independence Day\"\n self[date(year, DEC, 25)] = \"Christmas Day\"\n self[date(year, DEC, 26)] = \"Boxing Day\"\n\n # Once-off public holidays\n y2k = \"Y2K changeover\"\n\n if year == 1999:\n # https://mg.co.za/article/1999-12-09-swaziland-declares-bank-holidays/\n self[date(1999, DEC, 31)] = y2k\n if year == 2000:\n self[date(2000, JAN, 3)] = y2k\n\n # As of 2021/1/1, whenever a public holiday falls on a\n # Sunday\n # it rolls over to the following Monday\n for k, v in list(self.items()):\n\n if self.observed and k.weekday() == SUN and k.year == year:\n add_days = 1\n while self.get(k + rd(days=add_days)) is not None:\n add_days += 1\n self[k + rd(days=add_days)] = v + \" (Day Off)\"\n\n\nclass Swaziland(Eswatini):\n warnings.warn(\n \"Swaziland is deprecated, use Eswatini instead.\",\n DeprecationWarning,\n )\n pass\n\n\nclass SZ(Eswatini):\n pass\n\n\nclass SZW(Eswatini):\n pass\n", "path": "holidays/countries/eswatini.py"}]} | 2,024 | 252 |
gh_patches_debug_27065 | rasdani/github-patches | git_diff | mdn__kuma-7869 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Refactorings for welcome HTML email
Based on https://github.com/mdn/kuma/pull/7866
we'll drop the `utm` query string things and we'll get rid of the plain text email template.
</issue>
<code>
[start of kuma/users/tasks.py]
1 import logging
2
3 from celery import task
4 from django.conf import settings
5 from django.contrib.auth import get_user_model
6 from django.utils import translation
7 from django.utils.translation import gettext_lazy as _
8
9 from kuma.core.decorators import skip_in_maintenance_mode
10 from kuma.core.email_utils import render_email
11 from kuma.core.utils import (
12 EmailMultiAlternativesRetrying,
13 send_mail_retrying,
14 strings_are_translated,
15 )
16
17 log = logging.getLogger("kuma.users.tasks")
18
19
20 WELCOME_EMAIL_STRINGS = [
21 "Like words?",
22 "Don't be shy, if you have any doubt, problems, questions: contact us! We are here to help.",
23 ]
24
25
26 @task
27 @skip_in_maintenance_mode
28 def send_recovery_email(user_pk, email, locale=None):
29 user = get_user_model().objects.get(pk=user_pk)
30 locale = locale or settings.WIKI_DEFAULT_LANGUAGE
31 url = settings.SITE_URL + user.get_recovery_url()
32 context = {"recovery_url": url, "username": user.username}
33 with translation.override(locale):
34 subject = render_email("users/email/recovery/subject.ltxt", context)
35 # Email subject *must not* contain newlines
36 subject = "".join(subject.splitlines())
37 plain = render_email("users/email/recovery/plain.ltxt", context)
38 send_mail_retrying(subject, plain, settings.DEFAULT_FROM_EMAIL, [email])
39
40
41 @task
42 @skip_in_maintenance_mode
43 def send_welcome_email(user_pk, locale):
44 user = get_user_model().objects.get(pk=user_pk)
45 if locale == settings.WIKI_DEFAULT_LANGUAGE or strings_are_translated(
46 WELCOME_EMAIL_STRINGS, locale
47 ):
48 context = {"username": user.username}
49 log.debug("Using the locale %s to send the welcome email", locale)
50 with translation.override(locale):
51 content_plain = render_email("users/email/welcome/plain.ltxt", context)
52 content_html = render_email("users/email/welcome/html.ltxt", context)
53
54 email = EmailMultiAlternativesRetrying(
55 _("Getting started with your new MDN account"),
56 content_plain,
57 settings.WELCOME_EMAIL_FROM,
58 [user.email],
59 )
60 email.attach_alternative(content_html, "text/html")
61 email.send()
62
[end of kuma/users/tasks.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kuma/users/tasks.py b/kuma/users/tasks.py
--- a/kuma/users/tasks.py
+++ b/kuma/users/tasks.py
@@ -5,6 +5,7 @@
from django.contrib.auth import get_user_model
from django.utils import translation
from django.utils.translation import gettext_lazy as _
+from pyquery import PyQuery as pq
from kuma.core.decorators import skip_in_maintenance_mode
from kuma.core.email_utils import render_email
@@ -48,8 +49,18 @@
context = {"username": user.username}
log.debug("Using the locale %s to send the welcome email", locale)
with translation.override(locale):
- content_plain = render_email("users/email/welcome/plain.ltxt", context)
content_html = render_email("users/email/welcome/html.ltxt", context)
+ doc = pq(content_html)
+ urls = []
+ for i, link in enumerate(doc("body a[href]").items()):
+ link.text(f"{link.text()}[{i + 1}]")
+ urls.append((i + 1, link.attr("href")))
+
+ content_plain = doc("body").text().replace("\n", "\n\n")
+ if urls:
+ content_plain += "\n\n"
+ for i, url in urls:
+ content_plain += f"[{i}] {url}\n"
email = EmailMultiAlternativesRetrying(
_("Getting started with your new MDN account"),
| {"golden_diff": "diff --git a/kuma/users/tasks.py b/kuma/users/tasks.py\n--- a/kuma/users/tasks.py\n+++ b/kuma/users/tasks.py\n@@ -5,6 +5,7 @@\n from django.contrib.auth import get_user_model\n from django.utils import translation\n from django.utils.translation import gettext_lazy as _\n+from pyquery import PyQuery as pq\n \n from kuma.core.decorators import skip_in_maintenance_mode\n from kuma.core.email_utils import render_email\n@@ -48,8 +49,18 @@\n context = {\"username\": user.username}\n log.debug(\"Using the locale %s to send the welcome email\", locale)\n with translation.override(locale):\n- content_plain = render_email(\"users/email/welcome/plain.ltxt\", context)\n content_html = render_email(\"users/email/welcome/html.ltxt\", context)\n+ doc = pq(content_html)\n+ urls = []\n+ for i, link in enumerate(doc(\"body a[href]\").items()):\n+ link.text(f\"{link.text()}[{i + 1}]\")\n+ urls.append((i + 1, link.attr(\"href\")))\n+\n+ content_plain = doc(\"body\").text().replace(\"\\n\", \"\\n\\n\")\n+ if urls:\n+ content_plain += \"\\n\\n\"\n+ for i, url in urls:\n+ content_plain += f\"[{i}] {url}\\n\"\n \n email = EmailMultiAlternativesRetrying(\n _(\"Getting started with your new MDN account\"),\n", "issue": "Refactorings for welcome HTML email \nBased on https://github.com/mdn/kuma/pull/7866 \r\nwe'll drop the `utm` query string things and we'll get rid of the plain text email template. \n", "before_files": [{"content": "import logging\n\nfrom celery import task\nfrom django.conf import settings\nfrom django.contrib.auth import get_user_model\nfrom django.utils import translation\nfrom django.utils.translation import gettext_lazy as _\n\nfrom kuma.core.decorators import skip_in_maintenance_mode\nfrom kuma.core.email_utils import render_email\nfrom kuma.core.utils import (\n EmailMultiAlternativesRetrying,\n send_mail_retrying,\n strings_are_translated,\n)\n\nlog = logging.getLogger(\"kuma.users.tasks\")\n\n\nWELCOME_EMAIL_STRINGS = [\n \"Like words?\",\n \"Don't be shy, if you have any doubt, problems, questions: contact us! 
We are here to help.\",\n]\n\n\n@task\n@skip_in_maintenance_mode\ndef send_recovery_email(user_pk, email, locale=None):\n user = get_user_model().objects.get(pk=user_pk)\n locale = locale or settings.WIKI_DEFAULT_LANGUAGE\n url = settings.SITE_URL + user.get_recovery_url()\n context = {\"recovery_url\": url, \"username\": user.username}\n with translation.override(locale):\n subject = render_email(\"users/email/recovery/subject.ltxt\", context)\n # Email subject *must not* contain newlines\n subject = \"\".join(subject.splitlines())\n plain = render_email(\"users/email/recovery/plain.ltxt\", context)\n send_mail_retrying(subject, plain, settings.DEFAULT_FROM_EMAIL, [email])\n\n\n@task\n@skip_in_maintenance_mode\ndef send_welcome_email(user_pk, locale):\n user = get_user_model().objects.get(pk=user_pk)\n if locale == settings.WIKI_DEFAULT_LANGUAGE or strings_are_translated(\n WELCOME_EMAIL_STRINGS, locale\n ):\n context = {\"username\": user.username}\n log.debug(\"Using the locale %s to send the welcome email\", locale)\n with translation.override(locale):\n content_plain = render_email(\"users/email/welcome/plain.ltxt\", context)\n content_html = render_email(\"users/email/welcome/html.ltxt\", context)\n\n email = EmailMultiAlternativesRetrying(\n _(\"Getting started with your new MDN account\"),\n content_plain,\n settings.WELCOME_EMAIL_FROM,\n [user.email],\n )\n email.attach_alternative(content_html, \"text/html\")\n email.send()\n", "path": "kuma/users/tasks.py"}]} | 1,179 | 317 |
gh_patches_debug_38488 | rasdani/github-patches | git_diff | larq__larq-356 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Make the HyperparameterScheduler compatible with the CaseOptimizer
### Feature motivation
The HyperparameterScheduler is not compatible with the CaseOptimizer since the hyperparameters are attributes of the optimizers inside the CaseOptimizer.
### Feature description
I propose one of the two possible solutions:
Either we could give HyperparameterScheduler the optimizer as an argument. It could be called via ``` HyperparameterScheduler(schedule, hyperparameter, optimizer, verbose=0) ``` and the right optimizer inside the CaseOptimizer can be addressed. (@koenhelwegen)
My second proposal would be to search the CaseOptimizer for optimizers that have the hyperparameter as attribute. Then the schedule can be applied to this optimizer only. The downside of this would be that in case there are two optimizers inside the CaseOptimizer that have a hyperparameter with the same name the schedule would be applied to both of them. I do not think this would happen very often but it could definitively be an issue. See code below for my second proposal.
### Feature implementation
``` python
class HyperparameterScheduler(tf.keras.callbacks.Callback):
"""Generic hyperparameter scheduler.
# Arguments
schedule: a function that takes an epoch index as input
(integer, indexed from 0) and returns a new hyperparameter as output.
hyperparameter: str. the name of the hyperparameter to be scheduled.
verbose: int. 0: quiet, 1: update messages.
"""
def __init__(self, schedule, hyperparameter, verbose=0):
super(HyperparameterScheduler, self).__init__()
self.schedule = schedule
self.hyperparameter = hyperparameter
self.verbose = verbose
def on_epoch_begin(self, epoch, logs=None):
for op in self.model.optimizer.optimizers:
if hasattr(op, self.hyperparameter):
hp = getattr(op, self.hyperparameter)
try: # new API
hyperparameter_val = tf.keras.backend.get_value(hp)
hyperparameter_val = self.schedule(epoch, hyperparameter_val)
except TypeError: # Support for old API for backward compatibility
hyperparameter_val = self.schedule(epoch)
tf.keras.backend.set_value(hp, hyperparameter_val)
if self.verbose > 0:
print(
f"Epoch {epoch + 1}: {self.hyperparameter} changning to {tf.keras.backend.get_value(hp)}."
)
def on_epoch_end(self, epoch, logs=None):
logs = logs or {}
for op in self.model.optimizer.optimizers:
if hasattr(op, self.hyperparameter):
hp = getattr(op, self.hyperparameter)
logs[self.hyperparameter] = tf.keras.backend.get_value(hp)
```
</issue>
<code>
[start of larq/callbacks.py]
1 import tensorflow as tf
2
3
4 class HyperparameterScheduler(tf.keras.callbacks.Callback):
5 """Generic hyperparameter scheduler.
6
7 # Arguments
8 schedule: a function that takes an epoch index as input
9 (integer, indexed from 0) and returns a new hyperparameter as output.
10 hyperparameter: str. the name of the hyperparameter to be scheduled.
11 verbose: int. 0: quiet, 1: update messages.
12 """
13
14 def __init__(self, schedule, hyperparameter, verbose=0):
15 super(HyperparameterScheduler, self).__init__()
16 self.schedule = schedule
17 self.hyperparameter = hyperparameter
18 self.verbose = verbose
19
20 def on_epoch_begin(self, epoch, logs=None):
21 if not hasattr(self.model.optimizer, self.hyperparameter):
22 raise ValueError(
23 f'Optimizer must have a "{self.hyperparameter}" attribute.'
24 )
25
26 hp = getattr(self.model.optimizer, self.hyperparameter)
27 try: # new API
28 hyperparameter_val = tf.keras.backend.get_value(hp)
29 hyperparameter_val = self.schedule(epoch, hyperparameter_val)
30 except TypeError: # Support for old API for backward compatibility
31 hyperparameter_val = self.schedule(epoch)
32
33 tf.keras.backend.set_value(hp, hyperparameter_val)
34
35 if self.verbose > 0:
36 print(
37 f"Epoch {epoch + 1}: {self.hyperparameter} changning to {tf.keras.backend.get_value(hp)}."
38 )
39
40 def on_epoch_end(self, epoch, logs=None):
41 logs = logs or {}
42 hp = getattr(self.model.optimizer, self.hyperparameter)
43 logs[self.hyperparameter] = tf.keras.backend.get_value(hp)
44
[end of larq/callbacks.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/larq/callbacks.py b/larq/callbacks.py
--- a/larq/callbacks.py
+++ b/larq/callbacks.py
@@ -4,26 +4,40 @@
class HyperparameterScheduler(tf.keras.callbacks.Callback):
"""Generic hyperparameter scheduler.
+ !!! example
+ ```python
+ bop = lq.optimizers.Bop(threshold=1e-6, gamma=1e-3)
+ adam = tf.keras.optimizers.Adam(0.01)
+ optimizer = lq.optimizers.CaseOptimizer(
+ (lq.optimizers.Bop.is_binary_variable, bop), default_optimizer=adam,
+ )
+ callbacks = [
+ HyperparameterScheduler(lambda x: 0.001 * (0.1 ** (x // 30)), "gamma", bop)
+ ]
+ ```
# Arguments
+ optimizer: the optimizer that contains the hyperparameter that will be scheduled.
+ Defaults to `self.model.optimizer` if `optimizer == None`.
schedule: a function that takes an epoch index as input
(integer, indexed from 0) and returns a new hyperparameter as output.
hyperparameter: str. the name of the hyperparameter to be scheduled.
verbose: int. 0: quiet, 1: update messages.
"""
- def __init__(self, schedule, hyperparameter, verbose=0):
+ def __init__(self, schedule, hyperparameter, optimizer=None, verbose=0):
super(HyperparameterScheduler, self).__init__()
+ self.optimizer = optimizer if optimizer else self.model.optimizer
self.schedule = schedule
self.hyperparameter = hyperparameter
self.verbose = verbose
def on_epoch_begin(self, epoch, logs=None):
- if not hasattr(self.model.optimizer, self.hyperparameter):
+ if not hasattr(self.optimizer, self.hyperparameter):
raise ValueError(
f'Optimizer must have a "{self.hyperparameter}" attribute.'
)
- hp = getattr(self.model.optimizer, self.hyperparameter)
+ hp = getattr(self.optimizer, self.hyperparameter)
try: # new API
hyperparameter_val = tf.keras.backend.get_value(hp)
hyperparameter_val = self.schedule(epoch, hyperparameter_val)
@@ -34,10 +48,10 @@
if self.verbose > 0:
print(
- f"Epoch {epoch + 1}: {self.hyperparameter} changning to {tf.keras.backend.get_value(hp)}."
+ f"Epoch {epoch + 1}: {self.hyperparameter} changing to {tf.keras.backend.get_value(hp)}."
)
def on_epoch_end(self, epoch, logs=None):
logs = logs or {}
- hp = getattr(self.model.optimizer, self.hyperparameter)
+ hp = getattr(self.optimizer, self.hyperparameter)
logs[self.hyperparameter] = tf.keras.backend.get_value(hp)
| {"golden_diff": "diff --git a/larq/callbacks.py b/larq/callbacks.py\n--- a/larq/callbacks.py\n+++ b/larq/callbacks.py\n@@ -4,26 +4,40 @@\n class HyperparameterScheduler(tf.keras.callbacks.Callback):\n \"\"\"Generic hyperparameter scheduler.\n \n+ !!! example\n+ ```python\n+ bop = lq.optimizers.Bop(threshold=1e-6, gamma=1e-3)\n+ adam = tf.keras.optimizers.Adam(0.01)\n+ optimizer = lq.optimizers.CaseOptimizer(\n+ (lq.optimizers.Bop.is_binary_variable, bop), default_optimizer=adam,\n+ )\n+ callbacks = [\n+ HyperparameterScheduler(lambda x: 0.001 * (0.1 ** (x // 30)), \"gamma\", bop)\n+ ]\n+ ```\n # Arguments\n+ optimizer: the optimizer that contains the hyperparameter that will be scheduled.\n+ Defaults to `self.model.optimizer` if `optimizer == None`.\n schedule: a function that takes an epoch index as input\n (integer, indexed from 0) and returns a new hyperparameter as output.\n hyperparameter: str. the name of the hyperparameter to be scheduled.\n verbose: int. 0: quiet, 1: update messages.\n \"\"\"\n \n- def __init__(self, schedule, hyperparameter, verbose=0):\n+ def __init__(self, schedule, hyperparameter, optimizer=None, verbose=0):\n super(HyperparameterScheduler, self).__init__()\n+ self.optimizer = optimizer if optimizer else self.model.optimizer\n self.schedule = schedule\n self.hyperparameter = hyperparameter\n self.verbose = verbose\n \n def on_epoch_begin(self, epoch, logs=None):\n- if not hasattr(self.model.optimizer, self.hyperparameter):\n+ if not hasattr(self.optimizer, self.hyperparameter):\n raise ValueError(\n f'Optimizer must have a \"{self.hyperparameter}\" attribute.'\n )\n \n- hp = getattr(self.model.optimizer, self.hyperparameter)\n+ hp = getattr(self.optimizer, self.hyperparameter)\n try: # new API\n hyperparameter_val = tf.keras.backend.get_value(hp)\n hyperparameter_val = self.schedule(epoch, hyperparameter_val)\n@@ -34,10 +48,10 @@\n \n if self.verbose > 0:\n print(\n- f\"Epoch {epoch + 1}: {self.hyperparameter} changning to {tf.keras.backend.get_value(hp)}.\"\n+ f\"Epoch {epoch + 1}: {self.hyperparameter} changing to {tf.keras.backend.get_value(hp)}.\"\n )\n \n def on_epoch_end(self, epoch, logs=None):\n logs = logs or {}\n- hp = getattr(self.model.optimizer, self.hyperparameter)\n+ hp = getattr(self.optimizer, self.hyperparameter)\n logs[self.hyperparameter] = tf.keras.backend.get_value(hp)\n", "issue": "Make the HyperparameterScheduler compatible with the CaseOptimizer\n### Feature motivation\r\nThe HyperparameterScheduler is not compatible with the CaseOptimizer since the hyperparameters are attributes of the optimizers inside the CaseOptimizer. \r\n\r\n### Feature description\r\nI propose one of the two possible solutions: \r\nEither we could give HyperparameterScheduler the optimizer as an argument. It could be called via ``` HyperparameterScheduler(schedule, hyperparameter, optimizer, verbose=0) ``` and the right optimizer inside the CaseOptimizer can be addressed. (@koenhelwegen) \r\n\r\nMy second proposal would be to search the CaseOptimizer for optimizers that have the hyperparameter as attribute. Then the schedule can be applied to this optimizer only. The downside of this would be that in case there are two optimizers inside the CaseOptimizer that have a hyperparameter with the same name the schedule would be applied to both of them. I do not think this would happen very often but it could definitively be an issue. See code below for my second proposal. 
\r\n### Feature implementation\r\n``` python \r\nclass HyperparameterScheduler(tf.keras.callbacks.Callback):\r\n \"\"\"Generic hyperparameter scheduler.\r\n # Arguments\r\n schedule: a function that takes an epoch index as input\r\n (integer, indexed from 0) and returns a new hyperparameter as output.\r\n hyperparameter: str. the name of the hyperparameter to be scheduled.\r\n verbose: int. 0: quiet, 1: update messages.\r\n \"\"\"\r\n\r\n def __init__(self, schedule, hyperparameter, verbose=0):\r\n super(HyperparameterScheduler, self).__init__()\r\n self.schedule = schedule\r\n self.hyperparameter = hyperparameter\r\n self.verbose = verbose\r\n\r\n def on_epoch_begin(self, epoch, logs=None):\r\n for op in self.model.optimizer.optimizers:\r\n if hasattr(op, self.hyperparameter):\r\n\r\n hp = getattr(op, self.hyperparameter)\r\n try: # new API\r\n hyperparameter_val = tf.keras.backend.get_value(hp)\r\n hyperparameter_val = self.schedule(epoch, hyperparameter_val)\r\n except TypeError: # Support for old API for backward compatibility\r\n hyperparameter_val = self.schedule(epoch)\r\n\r\n tf.keras.backend.set_value(hp, hyperparameter_val)\r\n\r\n if self.verbose > 0:\r\n print(\r\n f\"Epoch {epoch + 1}: {self.hyperparameter} changning to {tf.keras.backend.get_value(hp)}.\"\r\n )\r\n\r\n def on_epoch_end(self, epoch, logs=None):\r\n logs = logs or {}\r\n for op in self.model.optimizer.optimizers:\r\n if hasattr(op, self.hyperparameter):\r\n hp = getattr(op, self.hyperparameter)\r\n logs[self.hyperparameter] = tf.keras.backend.get_value(hp)\r\n```\n", "before_files": [{"content": "import tensorflow as tf\n\n\nclass HyperparameterScheduler(tf.keras.callbacks.Callback):\n \"\"\"Generic hyperparameter scheduler.\n\n # Arguments\n schedule: a function that takes an epoch index as input\n (integer, indexed from 0) and returns a new hyperparameter as output.\n hyperparameter: str. the name of the hyperparameter to be scheduled.\n verbose: int. 0: quiet, 1: update messages.\n \"\"\"\n\n def __init__(self, schedule, hyperparameter, verbose=0):\n super(HyperparameterScheduler, self).__init__()\n self.schedule = schedule\n self.hyperparameter = hyperparameter\n self.verbose = verbose\n\n def on_epoch_begin(self, epoch, logs=None):\n if not hasattr(self.model.optimizer, self.hyperparameter):\n raise ValueError(\n f'Optimizer must have a \"{self.hyperparameter}\" attribute.'\n )\n\n hp = getattr(self.model.optimizer, self.hyperparameter)\n try: # new API\n hyperparameter_val = tf.keras.backend.get_value(hp)\n hyperparameter_val = self.schedule(epoch, hyperparameter_val)\n except TypeError: # Support for old API for backward compatibility\n hyperparameter_val = self.schedule(epoch)\n\n tf.keras.backend.set_value(hp, hyperparameter_val)\n\n if self.verbose > 0:\n print(\n f\"Epoch {epoch + 1}: {self.hyperparameter} changning to {tf.keras.backend.get_value(hp)}.\"\n )\n\n def on_epoch_end(self, epoch, logs=None):\n logs = logs or {}\n hp = getattr(self.model.optimizer, self.hyperparameter)\n logs[self.hyperparameter] = tf.keras.backend.get_value(hp)\n", "path": "larq/callbacks.py"}]} | 1,550 | 651 |
gh_patches_debug_118 | rasdani/github-patches | git_diff | librosa__librosa-1738 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Release new version to fix scipy tests
https://github.com/librosa/librosa/commit/12dee8eabed7df14c5622b52c05393ddfeb11f4b fixed compatibility with scipy in tests but it's not included in any release.
We rely as downstream packagers on tests to ensure all python dependencies play well together.
</issue>
<code>
[start of librosa/version.py]
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 """Version info"""
4
5 import sys
6 import importlib
7
8 short_version = "0.10"
9 version = "0.10.1dev"
10
11
12 def __get_mod_version(modname):
13 try:
14 if modname in sys.modules:
15 mod = sys.modules[modname]
16 else:
17 mod = importlib.import_module(modname)
18 try:
19 return mod.__version__
20 except AttributeError:
21 return "installed, no version number available"
22
23 except ImportError:
24 return None
25
26
27 def show_versions() -> None:
28 """Return the version information for all librosa dependencies."""
29 core_deps = [
30 "audioread",
31 "numpy",
32 "scipy",
33 "sklearn",
34 "joblib",
35 "decorator",
36 "numba",
37 "soundfile",
38 "pooch",
39 "soxr",
40 "typing_extensions",
41 "lazy_loader",
42 "msgpack",
43 ]
44
45 extra_deps = [
46 "numpydoc",
47 "sphinx",
48 "sphinx_rtd_theme",
49 "matplotlib",
50 "sphinx_multiversion",
51 "sphinx_gallery",
52 "mir_eval",
53 "ipython",
54 "sphinxcontrib.rsvgconverter",
55 "pytest",
56 "pytest_mpl",
57 "pytest_cov",
58 "samplerate",
59 "resampy",
60 "presets",
61 "packaging",
62 ]
63
64 print("INSTALLED VERSIONS")
65 print("------------------")
66 print(f"python: {sys.version}\n")
67 print(f"librosa: {version}\n")
68 for dep in core_deps:
69 print("{}: {}".format(dep, __get_mod_version(dep)))
70 print("")
71 for dep in extra_deps:
72 print("{}: {}".format(dep, __get_mod_version(dep)))
73
[end of librosa/version.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/librosa/version.py b/librosa/version.py
--- a/librosa/version.py
+++ b/librosa/version.py
@@ -6,7 +6,7 @@
import importlib
short_version = "0.10"
-version = "0.10.1dev"
+version = "0.10.1"
def __get_mod_version(modname):
| {"golden_diff": "diff --git a/librosa/version.py b/librosa/version.py\n--- a/librosa/version.py\n+++ b/librosa/version.py\n@@ -6,7 +6,7 @@\n import importlib\n \n short_version = \"0.10\"\n-version = \"0.10.1dev\"\n+version = \"0.10.1\"\n \n \n def __get_mod_version(modname):\n", "issue": "Release new version to fix scipy tests\nhttps://github.com/librosa/librosa/commit/12dee8eabed7df14c5622b52c05393ddfeb11f4b fixed compatibility with scipy in tests but it's not included in any release.\r\nWe rely as downstream packagers on tests to ensure all python dependencies play well together.\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"Version info\"\"\"\n\nimport sys\nimport importlib\n\nshort_version = \"0.10\"\nversion = \"0.10.1dev\"\n\n\ndef __get_mod_version(modname):\n try:\n if modname in sys.modules:\n mod = sys.modules[modname]\n else:\n mod = importlib.import_module(modname)\n try:\n return mod.__version__\n except AttributeError:\n return \"installed, no version number available\"\n\n except ImportError:\n return None\n\n\ndef show_versions() -> None:\n \"\"\"Return the version information for all librosa dependencies.\"\"\"\n core_deps = [\n \"audioread\",\n \"numpy\",\n \"scipy\",\n \"sklearn\",\n \"joblib\",\n \"decorator\",\n \"numba\",\n \"soundfile\",\n \"pooch\",\n \"soxr\",\n \"typing_extensions\",\n \"lazy_loader\",\n \"msgpack\",\n ]\n\n extra_deps = [\n \"numpydoc\",\n \"sphinx\",\n \"sphinx_rtd_theme\",\n \"matplotlib\",\n \"sphinx_multiversion\",\n \"sphinx_gallery\",\n \"mir_eval\",\n \"ipython\",\n \"sphinxcontrib.rsvgconverter\",\n \"pytest\",\n \"pytest_mpl\",\n \"pytest_cov\",\n \"samplerate\",\n \"resampy\",\n \"presets\",\n \"packaging\",\n ]\n\n print(\"INSTALLED VERSIONS\")\n print(\"------------------\")\n print(f\"python: {sys.version}\\n\")\n print(f\"librosa: {version}\\n\")\n for dep in core_deps:\n print(\"{}: {}\".format(dep, __get_mod_version(dep)))\n print(\"\")\n for dep in extra_deps:\n print(\"{}: {}\".format(dep, __get_mod_version(dep)))\n", "path": "librosa/version.py"}]} | 1,157 | 87 |
gh_patches_debug_11399 | rasdani/github-patches | git_diff | ethereum__web3.py-407 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Remove shh from default list of modules
The whisper protocol is not standardized enough to be in the default list.
Also, note in the docs the current fractured nature of whisper.
See #384
</issue>
<code>
[start of web3/main.py]
1 from __future__ import absolute_import
2
3 from eth_utils import (
4 apply_to_return_value,
5 add_0x_prefix,
6 from_wei,
7 is_address,
8 is_checksum_address,
9 keccak,
10 remove_0x_prefix,
11 to_checksum_address,
12 to_wei,
13 )
14
15 from web3.admin import Admin
16 from web3.eth import Eth
17 from web3.iban import Iban
18 from web3.miner import Miner
19 from web3.net import Net
20 from web3.personal import Personal
21 from web3.shh import Shh
22 from web3.testing import Testing
23 from web3.txpool import TxPool
24 from web3.version import Version
25
26 from web3.providers.ipc import (
27 IPCProvider,
28 )
29 from web3.providers.rpc import (
30 HTTPProvider,
31 )
32 from web3.providers.tester import (
33 TestRPCProvider,
34 EthereumTesterProvider,
35 )
36
37 from web3.manager import (
38 RequestManager,
39 )
40
41 from web3.utils.datastructures import (
42 HexBytes,
43 )
44 from web3.utils.encoding import (
45 hex_encode_abi_type,
46 to_bytes,
47 to_int,
48 to_hex,
49 to_text,
50 )
51
52
53 def get_default_modules():
54 return {
55 "eth": Eth,
56 "shh": Shh,
57 "net": Net,
58 "personal": Personal,
59 "version": Version,
60 "txpool": TxPool,
61 "miner": Miner,
62 "admin": Admin,
63 "testing": Testing,
64 }
65
66
67 class Web3(object):
68 # Providers
69 HTTPProvider = HTTPProvider
70 IPCProvider = IPCProvider
71 TestRPCProvider = TestRPCProvider
72 EthereumTesterProvider = EthereumTesterProvider
73
74 # Managers
75 RequestManager = RequestManager
76
77 # Iban
78 Iban = Iban
79
80 # Encoding and Decoding
81 toBytes = staticmethod(to_bytes)
82 toInt = staticmethod(to_int)
83 toHex = staticmethod(to_hex)
84 toText = staticmethod(to_text)
85
86 # Currency Utility
87 toWei = staticmethod(to_wei)
88 fromWei = staticmethod(from_wei)
89
90 # Address Utility
91 isAddress = staticmethod(is_address)
92 isChecksumAddress = staticmethod(is_checksum_address)
93 toChecksumAddress = staticmethod(to_checksum_address)
94
95 def __init__(self, providers, middlewares=None, modules=None):
96 self.manager = RequestManager(self, providers, middlewares)
97
98 if modules is None:
99 modules = get_default_modules()
100
101 for module_name, module_class in modules.items():
102 module_class.attach(self, module_name)
103
104 @property
105 def middleware_stack(self):
106 return self.manager.middleware_stack
107
108 @property
109 def providers(self):
110 return self.manager.providers
111
112 def setProviders(self, providers):
113 self.manager.setProvider(providers)
114
115 @staticmethod
116 @apply_to_return_value(HexBytes)
117 def sha3(primitive=None, text=None, hexstr=None):
118 if isinstance(primitive, (bytes, int, type(None))):
119 input_bytes = to_bytes(primitive, hexstr=hexstr, text=text)
120 return keccak(input_bytes)
121
122 raise TypeError(
123 "You called sha3 with first arg %r and keywords %r. You must call it with one of "
124 "these approaches: sha3(text='txt'), sha3(hexstr='0x747874'), "
125 "sha3(b'\\x74\\x78\\x74'), or sha3(0x747874)." % (
126 primitive,
127 {'text': text, 'hexstr': hexstr}
128 )
129 )
130
131 @classmethod
132 def soliditySha3(cls, abi_types, values):
133 """
134 Executes sha3 (keccak256) exactly as Solidity does.
135 Takes list of abi_types as inputs -- `[uint24, int8[], bool]`
136 and list of corresponding values -- `[20, [-1, 5, 0], True]`
137 """
138 if len(abi_types) != len(values):
139 raise ValueError(
140 "Length mismatch between provided abi types and values. Got "
141 "{0} types and {1} values.".format(len(abi_types), len(values))
142 )
143
144 hex_string = add_0x_prefix(''.join(
145 remove_0x_prefix(hex_encode_abi_type(abi_type, value))
146 for abi_type, value
147 in zip(abi_types, values)
148 ))
149 return cls.sha3(hexstr=hex_string)
150
151 def isConnected(self):
152 for provider in self.providers:
153 if provider.isConnected():
154 return True
155 else:
156 return False
157
[end of web3/main.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/web3/main.py b/web3/main.py
--- a/web3/main.py
+++ b/web3/main.py
@@ -18,7 +18,6 @@
from web3.miner import Miner
from web3.net import Net
from web3.personal import Personal
-from web3.shh import Shh
from web3.testing import Testing
from web3.txpool import TxPool
from web3.version import Version
@@ -53,7 +52,6 @@
def get_default_modules():
return {
"eth": Eth,
- "shh": Shh,
"net": Net,
"personal": Personal,
"version": Version,
| {"golden_diff": "diff --git a/web3/main.py b/web3/main.py\n--- a/web3/main.py\n+++ b/web3/main.py\n@@ -18,7 +18,6 @@\n from web3.miner import Miner\n from web3.net import Net\n from web3.personal import Personal\n-from web3.shh import Shh\n from web3.testing import Testing\n from web3.txpool import TxPool\n from web3.version import Version\n@@ -53,7 +52,6 @@\n def get_default_modules():\n return {\n \"eth\": Eth,\n- \"shh\": Shh,\n \"net\": Net,\n \"personal\": Personal,\n \"version\": Version,\n", "issue": "Remove shh from default list of modules\nThe whisper protocol is not standardized enough to be in the default list.\r\n\r\nAlso, note in the docs the current fractured nature of whisper.\r\n\r\nSee #384 \n", "before_files": [{"content": "from __future__ import absolute_import\n\nfrom eth_utils import (\n apply_to_return_value,\n add_0x_prefix,\n from_wei,\n is_address,\n is_checksum_address,\n keccak,\n remove_0x_prefix,\n to_checksum_address,\n to_wei,\n)\n\nfrom web3.admin import Admin\nfrom web3.eth import Eth\nfrom web3.iban import Iban\nfrom web3.miner import Miner\nfrom web3.net import Net\nfrom web3.personal import Personal\nfrom web3.shh import Shh\nfrom web3.testing import Testing\nfrom web3.txpool import TxPool\nfrom web3.version import Version\n\nfrom web3.providers.ipc import (\n IPCProvider,\n)\nfrom web3.providers.rpc import (\n HTTPProvider,\n)\nfrom web3.providers.tester import (\n TestRPCProvider,\n EthereumTesterProvider,\n)\n\nfrom web3.manager import (\n RequestManager,\n)\n\nfrom web3.utils.datastructures import (\n HexBytes,\n)\nfrom web3.utils.encoding import (\n hex_encode_abi_type,\n to_bytes,\n to_int,\n to_hex,\n to_text,\n)\n\n\ndef get_default_modules():\n return {\n \"eth\": Eth,\n \"shh\": Shh,\n \"net\": Net,\n \"personal\": Personal,\n \"version\": Version,\n \"txpool\": TxPool,\n \"miner\": Miner,\n \"admin\": Admin,\n \"testing\": Testing,\n }\n\n\nclass Web3(object):\n # Providers\n HTTPProvider = HTTPProvider\n IPCProvider = IPCProvider\n TestRPCProvider = TestRPCProvider\n EthereumTesterProvider = EthereumTesterProvider\n\n # Managers\n RequestManager = RequestManager\n\n # Iban\n Iban = Iban\n\n # Encoding and Decoding\n toBytes = staticmethod(to_bytes)\n toInt = staticmethod(to_int)\n toHex = staticmethod(to_hex)\n toText = staticmethod(to_text)\n\n # Currency Utility\n toWei = staticmethod(to_wei)\n fromWei = staticmethod(from_wei)\n\n # Address Utility\n isAddress = staticmethod(is_address)\n isChecksumAddress = staticmethod(is_checksum_address)\n toChecksumAddress = staticmethod(to_checksum_address)\n\n def __init__(self, providers, middlewares=None, modules=None):\n self.manager = RequestManager(self, providers, middlewares)\n\n if modules is None:\n modules = get_default_modules()\n\n for module_name, module_class in modules.items():\n module_class.attach(self, module_name)\n\n @property\n def middleware_stack(self):\n return self.manager.middleware_stack\n\n @property\n def providers(self):\n return self.manager.providers\n\n def setProviders(self, providers):\n self.manager.setProvider(providers)\n\n @staticmethod\n @apply_to_return_value(HexBytes)\n def sha3(primitive=None, text=None, hexstr=None):\n if isinstance(primitive, (bytes, int, type(None))):\n input_bytes = to_bytes(primitive, hexstr=hexstr, text=text)\n return keccak(input_bytes)\n\n raise TypeError(\n \"You called sha3 with first arg %r and keywords %r. 
You must call it with one of \"\n \"these approaches: sha3(text='txt'), sha3(hexstr='0x747874'), \"\n \"sha3(b'\\\\x74\\\\x78\\\\x74'), or sha3(0x747874).\" % (\n primitive,\n {'text': text, 'hexstr': hexstr}\n )\n )\n\n @classmethod\n def soliditySha3(cls, abi_types, values):\n \"\"\"\n Executes sha3 (keccak256) exactly as Solidity does.\n Takes list of abi_types as inputs -- `[uint24, int8[], bool]`\n and list of corresponding values -- `[20, [-1, 5, 0], True]`\n \"\"\"\n if len(abi_types) != len(values):\n raise ValueError(\n \"Length mismatch between provided abi types and values. Got \"\n \"{0} types and {1} values.\".format(len(abi_types), len(values))\n )\n\n hex_string = add_0x_prefix(''.join(\n remove_0x_prefix(hex_encode_abi_type(abi_type, value))\n for abi_type, value\n in zip(abi_types, values)\n ))\n return cls.sha3(hexstr=hex_string)\n\n def isConnected(self):\n for provider in self.providers:\n if provider.isConnected():\n return True\n else:\n return False\n", "path": "web3/main.py"}]} | 1,944 | 149 |
gh_patches_debug_2327 | rasdani/github-patches | git_diff | encode__httpx-194 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Missing py.typed declaration?
`mypy` is complaining about not being able to find type annotations for `httpx`:
`error: Cannot find module named 'httpx'`
I'm somewhat new to using type annotations/static type checking in Python, but from the mypy documentation [here](https://mypy.readthedocs.io/en/latest/installed_packages.html#making-pep-561-compatible-packages) it looks like there may be a missing declaration in `setup.py`?
</issue>
<code>
[start of setup.py]
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3
4 import os
5 import re
6
7 from setuptools import setup
8
9
10 def get_version(package):
11 """
12 Return package version as listed in `__version__` in `init.py`.
13 """
14 with open(os.path.join(package, "__version__.py")) as f:
15 return re.search("__version__ = ['\"]([^'\"]+)['\"]", f.read()).group(1)
16
17
18 def get_long_description():
19 """
20 Return the README.
21 """
22 with open("README.md", encoding="utf8") as f:
23 return f.read()
24
25
26 def get_packages(package):
27 """
28 Return root package and all sub-packages.
29 """
30 return [
31 dirpath
32 for dirpath, dirnames, filenames in os.walk(package)
33 if os.path.exists(os.path.join(dirpath, "__init__.py"))
34 ]
35
36
37 setup(
38 name="httpx",
39 python_requires=">=3.6",
40 version=get_version("httpx"),
41 url="https://github.com/encode/httpx",
42 license="BSD",
43 description="The next generation HTTP client.",
44 long_description=get_long_description(),
45 long_description_content_type="text/markdown",
46 author="Tom Christie",
47 author_email="[email protected]",
48 packages=get_packages("httpx"),
49 install_requires=[
50 "certifi",
51 "chardet==3.*",
52 "h11==0.8.*",
53 "h2==3.*",
54 "hstspreload",
55 "idna==2.*",
56 "rfc3986==1.*",
57 ],
58 classifiers=[
59 "Development Status :: 3 - Alpha",
60 "Environment :: Web Environment",
61 "Intended Audience :: Developers",
62 "License :: OSI Approved :: BSD License",
63 "Operating System :: OS Independent",
64 "Topic :: Internet :: WWW/HTTP",
65 "Programming Language :: Python :: 3",
66 "Programming Language :: Python :: 3.6",
67 "Programming Language :: Python :: 3.7",
68 "Programming Language :: Python :: 3.8",
69 ],
70 )
71
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -45,6 +45,7 @@
long_description_content_type="text/markdown",
author="Tom Christie",
author_email="[email protected]",
+ package_data={"httpx": ["py.typed"]},
packages=get_packages("httpx"),
install_requires=[
"certifi",
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -45,6 +45,7 @@\n long_description_content_type=\"text/markdown\",\n author=\"Tom Christie\",\n author_email=\"[email protected]\",\n+ package_data={\"httpx\": [\"py.typed\"]},\n packages=get_packages(\"httpx\"),\n install_requires=[\n \"certifi\",\n", "issue": "Missing py.typed declaration?\n`mypy` is complaining about not being able to find type annotations for `httpx`: \r\n\r\n`error: Cannot find module named 'httpx'`\r\n\r\nI'm somewhat new to using type annotations/static type checking in Python, but from the mypy documentation [here](https://mypy.readthedocs.io/en/latest/installed_packages.html#making-pep-561-compatible-packages) it looks like there may be a missing declaration in `setup.py`?\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport os\nimport re\n\nfrom setuptools import setup\n\n\ndef get_version(package):\n \"\"\"\n Return package version as listed in `__version__` in `init.py`.\n \"\"\"\n with open(os.path.join(package, \"__version__.py\")) as f:\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", f.read()).group(1)\n\n\ndef get_long_description():\n \"\"\"\n Return the README.\n \"\"\"\n with open(\"README.md\", encoding=\"utf8\") as f:\n return f.read()\n\n\ndef get_packages(package):\n \"\"\"\n Return root package and all sub-packages.\n \"\"\"\n return [\n dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, \"__init__.py\"))\n ]\n\n\nsetup(\n name=\"httpx\",\n python_requires=\">=3.6\",\n version=get_version(\"httpx\"),\n url=\"https://github.com/encode/httpx\",\n license=\"BSD\",\n description=\"The next generation HTTP client.\",\n long_description=get_long_description(),\n long_description_content_type=\"text/markdown\",\n author=\"Tom Christie\",\n author_email=\"[email protected]\",\n packages=get_packages(\"httpx\"),\n install_requires=[\n \"certifi\",\n \"chardet==3.*\",\n \"h11==0.8.*\",\n \"h2==3.*\",\n \"hstspreload\",\n \"idna==2.*\",\n \"rfc3986==1.*\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Environment :: Web Environment\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n ],\n)\n", "path": "setup.py"}]} | 1,221 | 91 |
gh_patches_debug_10799 | rasdani/github-patches | git_diff | optuna__optuna-1680 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Use function annotation syntax for Type Hints.
After dropping Python 2.7 support at #710, we can define type hints with function annotation syntax.
~~Do you have a plan to update the coding style guideline?~~
https://github.com/optuna/optuna/wiki/Coding-Style-Conventions
## Progress
- [x] `optuna/integration/sklearn.py` (#1735)
- [x] `optuna/study.py` - assigned to harpy
## Note to the questioner
We still cannot use variable annotation syntax introduced by [PEP 526](https://www.python.org/dev/peps/pep-0526/) because we supports Python 3.5.
</issue>
<code>
[start of optuna/pruners/_nop.py]
1 from optuna.pruners import BasePruner
2 from optuna import type_checking
3
4 if type_checking.TYPE_CHECKING:
5 from optuna.study import Study # NOQA
6 from optuna.trial import FrozenTrial # NOQA
7
8
9 class NopPruner(BasePruner):
10 """Pruner which never prunes trials.
11
12 Example:
13
14 .. testcode::
15
16 import numpy as np
17 from sklearn.datasets import load_iris
18 from sklearn.linear_model import SGDClassifier
19 from sklearn.model_selection import train_test_split
20
21 import optuna
22
23 X, y = load_iris(return_X_y=True)
24 X_train, X_valid, y_train, y_valid = train_test_split(X, y)
25 classes = np.unique(y)
26
27 def objective(trial):
28 alpha = trial.suggest_uniform('alpha', 0.0, 1.0)
29 clf = SGDClassifier(alpha=alpha)
30 n_train_iter = 100
31
32 for step in range(n_train_iter):
33 clf.partial_fit(X_train, y_train, classes=classes)
34
35 intermediate_value = clf.score(X_valid, y_valid)
36 trial.report(intermediate_value, step)
37
38 if trial.should_prune():
39 assert False, "should_prune() should always return False with this pruner."
40 raise optuna.TrialPruned()
41
42 return clf.score(X_valid, y_valid)
43
44 study = optuna.create_study(direction='maximize',
45 pruner=optuna.pruners.NopPruner())
46 study.optimize(objective, n_trials=20)
47 """
48
49 def prune(self, study, trial):
50 # type: (Study, FrozenTrial) -> bool
51
52 return False
53
[end of optuna/pruners/_nop.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/optuna/pruners/_nop.py b/optuna/pruners/_nop.py
--- a/optuna/pruners/_nop.py
+++ b/optuna/pruners/_nop.py
@@ -1,9 +1,5 @@
+import optuna
from optuna.pruners import BasePruner
-from optuna import type_checking
-
-if type_checking.TYPE_CHECKING:
- from optuna.study import Study # NOQA
- from optuna.trial import FrozenTrial # NOQA
class NopPruner(BasePruner):
@@ -46,7 +42,6 @@
study.optimize(objective, n_trials=20)
"""
- def prune(self, study, trial):
- # type: (Study, FrozenTrial) -> bool
+ def prune(self, study: "optuna.study.Study", trial: "optuna.trial.FrozenTrial") -> bool:
return False
| {"golden_diff": "diff --git a/optuna/pruners/_nop.py b/optuna/pruners/_nop.py\n--- a/optuna/pruners/_nop.py\n+++ b/optuna/pruners/_nop.py\n@@ -1,9 +1,5 @@\n+import optuna\n from optuna.pruners import BasePruner\n-from optuna import type_checking\n-\n-if type_checking.TYPE_CHECKING:\n- from optuna.study import Study # NOQA\n- from optuna.trial import FrozenTrial # NOQA\n \n \n class NopPruner(BasePruner):\n@@ -46,7 +42,6 @@\n study.optimize(objective, n_trials=20)\n \"\"\"\n \n- def prune(self, study, trial):\n- # type: (Study, FrozenTrial) -> bool\n+ def prune(self, study: \"optuna.study.Study\", trial: \"optuna.trial.FrozenTrial\") -> bool:\n \n return False\n", "issue": "Use function annotation syntax for Type Hints.\nAfter dropping Python 2.7 support at #710, we can define type hints with function annotation syntax. \r\n~~Do you have a plan to update the coding style guideline?~~\r\nhttps://github.com/optuna/optuna/wiki/Coding-Style-Conventions\r\n\r\n## Progress\r\n\r\n- [x] `optuna/integration/sklearn.py` (#1735)\r\n- [x] `optuna/study.py` - assigned to harpy\r\n\r\n## Note to the questioner\r\n\r\nWe still cannot use variable annotation syntax introduced by [PEP 526](https://www.python.org/dev/peps/pep-0526/) because we supports Python 3.5.\n", "before_files": [{"content": "from optuna.pruners import BasePruner\nfrom optuna import type_checking\n\nif type_checking.TYPE_CHECKING:\n from optuna.study import Study # NOQA\n from optuna.trial import FrozenTrial # NOQA\n\n\nclass NopPruner(BasePruner):\n \"\"\"Pruner which never prunes trials.\n\n Example:\n\n .. testcode::\n\n import numpy as np\n from sklearn.datasets import load_iris\n from sklearn.linear_model import SGDClassifier\n from sklearn.model_selection import train_test_split\n\n import optuna\n\n X, y = load_iris(return_X_y=True)\n X_train, X_valid, y_train, y_valid = train_test_split(X, y)\n classes = np.unique(y)\n\n def objective(trial):\n alpha = trial.suggest_uniform('alpha', 0.0, 1.0)\n clf = SGDClassifier(alpha=alpha)\n n_train_iter = 100\n\n for step in range(n_train_iter):\n clf.partial_fit(X_train, y_train, classes=classes)\n\n intermediate_value = clf.score(X_valid, y_valid)\n trial.report(intermediate_value, step)\n\n if trial.should_prune():\n assert False, \"should_prune() should always return False with this pruner.\"\n raise optuna.TrialPruned()\n\n return clf.score(X_valid, y_valid)\n\n study = optuna.create_study(direction='maximize',\n pruner=optuna.pruners.NopPruner())\n study.optimize(objective, n_trials=20)\n \"\"\"\n\n def prune(self, study, trial):\n # type: (Study, FrozenTrial) -> bool\n\n return False\n", "path": "optuna/pruners/_nop.py"}]} | 1,169 | 211 |
gh_patches_debug_14763 | rasdani/github-patches | git_diff | pantsbuild__pants-20300 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
`terraform_deployment` cannot load vars files if the root `terraform_module` is not in the same dir
**Describe the bug**
root/BUILD:
```
terraform_deployment(root_module="//mod0:mod0", var_files=["a.tfvars"])
```
root/a.tfvars:
```
var0 = "hihello"
```
mod/BUILD:
```
terraform_module()
```
mod/main.tf:
```
resource "null_resource" "dep" {}
```
running `pants experimental-deploy //root:root` yields:
```
Engine traceback:
in select
..
in pants.core.goals.deploy.run_deploy
`experimental-deploy` goal
Traceback (most recent call last):
File "/home/lilatomic/vnd/pants/src/python/pants/core/goals/deploy.py", line 176, in run_deploy
deploy_processes = await MultiGet(
File "/home/lilatomic/vnd/pants/src/python/pants/engine/internals/selectors.py", line 374, in MultiGet
return await _MultiGet(tuple(__arg0))
File "/home/lilatomic/vnd/pants/src/python/pants/engine/internals/selectors.py", line 172, in __await__
result = yield self.gets
ValueError: 'root/a.tfvars' is not in the subpath of 'mod0' OR one path is relative and the other is absolute.
```
**Pants version**
2.18+
</issue>
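
The traceback above comes from `PurePath.relative_to`, which refuses to produce a path that leaves the base directory. Below is a standalone illustration of the difference using the paths from this report (`mod0` as the module directory, `root/a.tfvars` as the var file); it is illustration only, not Pants code.

```python
import os.path
from pathlib import PurePath

chdir = "mod0"            # root module directory from the report
target = "root/a.tfvars"  # var file living outside that directory

# PurePath.relative_to() only accepts targets *inside* chdir, which is
# exactly the ValueError shown in the traceback.
try:
    print(PurePath(target).relative_to(chdir).as_posix())
except ValueError as exc:
    print("relative_to failed:", exc)

# os.path.relpath() is happy to walk upwards with "..", so files outside
# the module directory still get a usable relative path.
print(os.path.relpath(target, start=chdir))  # ../root/a.tfvars on POSIX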
<code>
[start of src/python/pants/backend/terraform/utils.py]
1 # Copyright 2023 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3 import shlex
4 from pathlib import PurePath
5
6
7 def terraform_arg(name: str, value: str) -> str:
8 """Format a Terraform arg."""
9 return f"{name}={shlex.quote(value)}"
10
11
12 def terraform_relpath(chdir: str, target: str) -> str:
13 """Compute the relative path of a target file to the Terraform deployment root."""
14 return PurePath(target).relative_to(chdir).as_posix()
15
[end of src/python/pants/backend/terraform/utils.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/python/pants/backend/terraform/utils.py b/src/python/pants/backend/terraform/utils.py
--- a/src/python/pants/backend/terraform/utils.py
+++ b/src/python/pants/backend/terraform/utils.py
@@ -1,7 +1,7 @@
# Copyright 2023 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
+import os.path
import shlex
-from pathlib import PurePath
def terraform_arg(name: str, value: str) -> str:
@@ -11,4 +11,4 @@
def terraform_relpath(chdir: str, target: str) -> str:
"""Compute the relative path of a target file to the Terraform deployment root."""
- return PurePath(target).relative_to(chdir).as_posix()
+ return os.path.relpath(target, start=chdir)
| {"golden_diff": "diff --git a/src/python/pants/backend/terraform/utils.py b/src/python/pants/backend/terraform/utils.py\n--- a/src/python/pants/backend/terraform/utils.py\n+++ b/src/python/pants/backend/terraform/utils.py\n@@ -1,7 +1,7 @@\n # Copyright 2023 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n+import os.path\n import shlex\n-from pathlib import PurePath\n \n \n def terraform_arg(name: str, value: str) -> str:\n@@ -11,4 +11,4 @@\n \n def terraform_relpath(chdir: str, target: str) -> str:\n \"\"\"Compute the relative path of a target file to the Terraform deployment root.\"\"\"\n- return PurePath(target).relative_to(chdir).as_posix()\n+ return os.path.relpath(target, start=chdir)\n", "issue": "`terraform_deployment` cannot load vars files if the root `terraform_module` is not in the same dir\n**Describe the bug**\r\n\r\nroot/BUILD:\r\n```\r\nterraform_deployment(root_module=\"//mod0:mod0\", var_files=[\"a.tfvars\"])\r\n```\r\nroot/a.tfvars:\r\n```\r\nvar0 = \"hihello\"\r\n```\r\nmod/BUILD:\r\n```\r\nterraform_module()\r\n```\r\nmod/main.tf:\r\n```\r\nresource \"null_resource\" \"dep\" {}\r\n```\r\n\r\nrunning `pants experimental-deploy //root:root` yields:\r\n```\r\nEngine traceback:\r\n in select\r\n ..\r\n in pants.core.goals.deploy.run_deploy\r\n `experimental-deploy` goal\r\n\r\nTraceback (most recent call last):\r\n File \"/home/lilatomic/vnd/pants/src/python/pants/core/goals/deploy.py\", line 176, in run_deploy\r\n deploy_processes = await MultiGet(\r\n File \"/home/lilatomic/vnd/pants/src/python/pants/engine/internals/selectors.py\", line 374, in MultiGet\r\n return await _MultiGet(tuple(__arg0))\r\n File \"/home/lilatomic/vnd/pants/src/python/pants/engine/internals/selectors.py\", line 172, in __await__\r\n result = yield self.gets\r\nValueError: 'root/a.tfvars' is not in the subpath of 'mod0' OR one path is relative and the other is absolute.\r\n```\r\n\r\n**Pants version**\r\n2.18+\r\n\n", "before_files": [{"content": "# Copyright 2023 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\nimport shlex\nfrom pathlib import PurePath\n\n\ndef terraform_arg(name: str, value: str) -> str:\n \"\"\"Format a Terraform arg.\"\"\"\n return f\"{name}={shlex.quote(value)}\"\n\n\ndef terraform_relpath(chdir: str, target: str) -> str:\n \"\"\"Compute the relative path of a target file to the Terraform deployment root.\"\"\"\n return PurePath(target).relative_to(chdir).as_posix()\n", "path": "src/python/pants/backend/terraform/utils.py"}]} | 1,010 | 198 |
gh_patches_debug_30438 | rasdani/github-patches | git_diff | bridgecrewio__checkov-5301 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
TypeError in SecretManagerSecret90days
**Describe the issue**
While running a scan on TF code, I'm getting a TypeError
**Examples**
The relevant TF code is:
```
resource "aws_secretsmanager_secret_rotation" "rds_password_rotation" {
secret_id = aws_secretsmanager_secret.credentials.id
rotation_lambda_arn = "arn:..."
rotation_rules {
automatically_after_days = var.db_password_rotation_days
}
}
variable "db_password_rotation_days" {
description = "Number of days in which the RDS password will be rotated"
type = number
}
```
**Exception Trace**
```
Failed to run check CKV_AWS_304 on rds.tf:aws_secretsmanager_secret_rotation.rds_password_rotation
Traceback (most recent call last):
File "\venv\Lib\site-packages\checkov\common\checks\base_check.py", line 73, in run
check_result["result"] = self.scan_entity_conf(entity_configuration, entity_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "\venv\Lib\site-packages\checkov\terraform\checks\resource\base_resource_check.py", line 43, in scan_entity_conf
return self.scan_resource_conf(conf)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "\venv\Lib\site-packages\checkov\terraform\checks\resource\aws\SecretManagerSecret90days.py", line 20, in scan_resource_conf
if days < 90:
^^^^^^^^^
TypeError: '<' not supported between instances of 'str' and 'int'
```
**Desktop (please complete the following information):**
- OS: Windows 10 for Workstation
- Checkov Version 2.3.301
**Additional context**
I inspected the value of `days` at the line causing the error and it is the string `var.db_password_rotation_days`. 
</issue>
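
The comparison fails because `days` holds the unresolved variable reference as a string. The snippet below is a self-contained illustration of the failure mode and one defensive coercion; it is a simplified stand-in, not the actual Checkov helper.

```python
def rotation_within_90_days(days_value):
    """Return True/False when the interval is a known number, None otherwise.

    days_value may be an int, a numeric string, or an unresolved Terraform
    reference such as "var.db_password_rotation_days"; coercing first avoids
    the "'<' not supported between instances of 'str' and 'int'" TypeError.
    """
    try:
        days = int(days_value)
    except (TypeError, ValueError):
        return None  # unknown value -> treat the check as inconclusive
    return days < 90


print(rotation_within_90_days(30))                               # True
print(rotation_within_90_days("45"))                             # True
print(rotation_within_90_days("var.db_password_rotation_days"))  # None
```
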
<code>
[start of checkov/terraform/checks/resource/aws/SecretManagerSecret90days.py]
1
2 from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
3 from checkov.common.models.enums import CheckCategories, CheckResult
4
5
6 class SecretManagerSecret90days(BaseResourceCheck):
7
8 def __init__(self):
9 name = "Ensure Secrets Manager secrets should be rotated within 90 days"
10 id = "CKV_AWS_304"
11 supported_resources = ["aws_secretsmanager_secret_rotation"]
12 categories = [CheckCategories.GENERAL_SECURITY]
13 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
14
15 def scan_resource_conf(self, conf) -> CheckResult:
16 if conf.get("rotation_rules") and isinstance(conf.get("rotation_rules"), list):
17 rule = conf.get("rotation_rules")[0]
18 if rule.get('automatically_after_days') and isinstance(rule.get('automatically_after_days'), list):
19 days = rule.get('automatically_after_days')[0]
20 if days < 90:
21 return CheckResult.PASSED
22 return CheckResult.FAILED
23
24
25 check = SecretManagerSecret90days()
26
[end of checkov/terraform/checks/resource/aws/SecretManagerSecret90days.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/checkov/terraform/checks/resource/aws/SecretManagerSecret90days.py b/checkov/terraform/checks/resource/aws/SecretManagerSecret90days.py
--- a/checkov/terraform/checks/resource/aws/SecretManagerSecret90days.py
+++ b/checkov/terraform/checks/resource/aws/SecretManagerSecret90days.py
@@ -1,23 +1,27 @@
+from __future__ import annotations
+from typing import Any
+
+from checkov.common.util.type_forcers import force_int
from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
from checkov.common.models.enums import CheckCategories, CheckResult
class SecretManagerSecret90days(BaseResourceCheck):
-
- def __init__(self):
+ def __init__(self) -> None:
name = "Ensure Secrets Manager secrets should be rotated within 90 days"
id = "CKV_AWS_304"
- supported_resources = ["aws_secretsmanager_secret_rotation"]
- categories = [CheckCategories.GENERAL_SECURITY]
+ supported_resources = ("aws_secretsmanager_secret_rotation",)
+ categories = (CheckCategories.GENERAL_SECURITY,)
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
- def scan_resource_conf(self, conf) -> CheckResult:
- if conf.get("rotation_rules") and isinstance(conf.get("rotation_rules"), list):
- rule = conf.get("rotation_rules")[0]
- if rule.get('automatically_after_days') and isinstance(rule.get('automatically_after_days'), list):
- days = rule.get('automatically_after_days')[0]
- if days < 90:
+ def scan_resource_conf(self, conf: dict[str, list[Any]]) -> CheckResult:
+ rules = conf.get("rotation_rules")
+ if rules and isinstance(rules, list):
+ days = rules[0].get('automatically_after_days')
+ if days and isinstance(days, list):
+ days = force_int(days[0])
+ if days is not None and days < 90:
return CheckResult.PASSED
return CheckResult.FAILED
| {"golden_diff": "diff --git a/checkov/terraform/checks/resource/aws/SecretManagerSecret90days.py b/checkov/terraform/checks/resource/aws/SecretManagerSecret90days.py\n--- a/checkov/terraform/checks/resource/aws/SecretManagerSecret90days.py\n+++ b/checkov/terraform/checks/resource/aws/SecretManagerSecret90days.py\n@@ -1,23 +1,27 @@\n+from __future__ import annotations\n \n+from typing import Any\n+\n+from checkov.common.util.type_forcers import force_int\n from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\n from checkov.common.models.enums import CheckCategories, CheckResult\n \n \n class SecretManagerSecret90days(BaseResourceCheck):\n-\n- def __init__(self):\n+ def __init__(self) -> None:\n name = \"Ensure Secrets Manager secrets should be rotated within 90 days\"\n id = \"CKV_AWS_304\"\n- supported_resources = [\"aws_secretsmanager_secret_rotation\"]\n- categories = [CheckCategories.GENERAL_SECURITY]\n+ supported_resources = (\"aws_secretsmanager_secret_rotation\",)\n+ categories = (CheckCategories.GENERAL_SECURITY,)\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n \n- def scan_resource_conf(self, conf) -> CheckResult:\n- if conf.get(\"rotation_rules\") and isinstance(conf.get(\"rotation_rules\"), list):\n- rule = conf.get(\"rotation_rules\")[0]\n- if rule.get('automatically_after_days') and isinstance(rule.get('automatically_after_days'), list):\n- days = rule.get('automatically_after_days')[0]\n- if days < 90:\n+ def scan_resource_conf(self, conf: dict[str, list[Any]]) -> CheckResult:\n+ rules = conf.get(\"rotation_rules\")\n+ if rules and isinstance(rules, list):\n+ days = rules[0].get('automatically_after_days')\n+ if days and isinstance(days, list):\n+ days = force_int(days[0])\n+ if days is not None and days < 90:\n return CheckResult.PASSED\n return CheckResult.FAILED\n", "issue": "TypeError in SecretManagerSecret90days\n**Describe the issue**\r\nWhile running a scan on TF code, I'm getting a TypeError \r\n\r\n\r\n**Examples**\r\nThe relevant TF code is:\r\n```\r\nresource \"aws_secretsmanager_secret_rotation\" \"rds_password_rotation\" {\r\n secret_id = aws_secretsmanager_secret.credentials.id\r\n rotation_lambda_arn = \"arn:...\"\r\n\r\n rotation_rules {\r\n automatically_after_days = var.db_password_rotation_days\r\n }\r\n\r\n}\r\n\r\nvariable \"db_password_rotation_days\" {\r\n description = \"Number of days in which the RDS password will be rotated\"\r\n type = number\r\n}\r\n\r\n```\r\n**Exception Trace**\r\n```\r\nFailed to run check CKV_AWS_304 on rds.tf:aws_secretsmanager_secret_rotation.rds_password_rotation\r\nTraceback (most recent call last):\r\n File \"\\venv\\Lib\\site-packages\\checkov\\common\\checks\\base_check.py\", line 73, in run\r\n check_result[\"result\"] = self.scan_entity_conf(entity_configuration, entity_type)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File \"\\venv\\Lib\\site-packages\\checkov\\terraform\\checks\\resource\\base_resource_check.py\", line 43, in scan_entity_conf\r\n return self.scan_resource_conf(conf)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File \"\\venv\\Lib\\site-packages\\checkov\\terraform\\checks\\resource\\aws\\SecretManagerSecret90days.py\", line 20, in scan_resource_conf\r\n if days < 90:\r\n ^^^^^^^^^\r\nTypeError: '<' not supported between instances of 'str' and 'int' \r\n```\r\n\r\n**Desktop (please complete the following information):**\r\n - OS: Windows 10 for Workstation\r\n - Checkov Version 
2.3.301\r\n\r\n**Additional context**\r\nI inspected the value of date at the line causing the error and it is the string `var.db_password_rotation_days`. \n", "before_files": [{"content": "\nfrom checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\nfrom checkov.common.models.enums import CheckCategories, CheckResult\n\n\nclass SecretManagerSecret90days(BaseResourceCheck):\n\n def __init__(self):\n name = \"Ensure Secrets Manager secrets should be rotated within 90 days\"\n id = \"CKV_AWS_304\"\n supported_resources = [\"aws_secretsmanager_secret_rotation\"]\n categories = [CheckCategories.GENERAL_SECURITY]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf) -> CheckResult:\n if conf.get(\"rotation_rules\") and isinstance(conf.get(\"rotation_rules\"), list):\n rule = conf.get(\"rotation_rules\")[0]\n if rule.get('automatically_after_days') and isinstance(rule.get('automatically_after_days'), list):\n days = rule.get('automatically_after_days')[0]\n if days < 90:\n return CheckResult.PASSED\n return CheckResult.FAILED\n\n\ncheck = SecretManagerSecret90days()\n", "path": "checkov/terraform/checks/resource/aws/SecretManagerSecret90days.py"}]} | 1,259 | 477 |
gh_patches_debug_19980 | rasdani/github-patches | git_diff | cfpb__consumerfinance.gov-229 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Centering on mobile
`the-bureau` page contains media blocks whose content (image & body) becomes centered at mobile sizes via a `media__centered` class. The `office` index page, however, introduces a new pattern of media blocks whose image centers on mobile while the body remains left-aligned.
It seems like it would be more useful to add a general-purpose `.centered-on-mobile` class (or two classes, one for inline & the other for block elements) that could be applied to the appropriate parts of the media object, rather than handling this behavior through `.media` modifiers. 
Thoughts? Preferences?
</issue>
<code>
[start of _lib/wordpress_office_processor.py]
1 import sys
2 import json
3 import os.path
4 import requests
5
6 def posts_at_url(url):
7
8 current_page = 1
9 max_page = sys.maxint
10
11 while current_page <= max_page:
12
13 url = os.path.expandvars(url)
14 resp = requests.get(url, params={'page':current_page, 'count': '-1'})
15 results = json.loads(resp.content)
16 current_page += 1
17 max_page = results['pages']
18 for p in results['posts']:
19 yield p
20
21 def documents(name, url, **kwargs):
22
23 for post in posts_at_url(url):
24 yield process_office(post)
25
26
27 def process_office(item):
28
29 item['_id'] = item['slug']
30 custom_fields = item['custom_fields']
31
32 # get intro text & subscribe form data from custom fields
33 for attr in ['intro_text', 'intro_subscribe_form', 'related_contact']:
34 if attr in custom_fields:
35 item[attr] = custom_fields[attr][0]
36
37 # build top story dict
38 top_story = {}
39 for attr in ['top_story_head', 'top_story_desc']:
40 if attr in custom_fields:
41 top_story[attr] = custom_fields[attr][0]
42
43 # convert top story links into a proper list
44 top_story_links = []
45 for x in xrange(0,5):
46 key = 'top_story_links_%s' % x
47 if key in custom_fields:
48 top_story_links.append(custom_fields[key])
49
50 if top_story_links:
51 top_story['top_story_links'] = top_story_links
52
53 if top_story:
54 item['top_story'] = top_story
55
56 # create list of office resource dicts
57 item['resources'] = []
58 for x in xrange(1,4):
59 resource = {}
60 fields = ['head', 'desc', 'icon', 'link_0']
61 for field in fields:
62 field_name = 'resource%s_%s' % (str(x), field)
63 if field_name in custom_fields and custom_fields[field_name][0] != '':
64 if field == 'link_0':
65 resource['link'] = custom_fields[field_name]
66 else:
67 resource[field] = custom_fields[field_name][0]
68
69 if resource:
70 item['resources'].append(resource)
71
72 return item
73
74
[end of _lib/wordpress_office_processor.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/_lib/wordpress_office_processor.py b/_lib/wordpress_office_processor.py
--- a/_lib/wordpress_office_processor.py
+++ b/_lib/wordpress_office_processor.py
@@ -55,17 +55,17 @@
# create list of office resource dicts
item['resources'] = []
- for x in xrange(1,4):
+ for x in xrange(0,4):
resource = {}
- fields = ['head', 'desc', 'icon', 'link_0']
+ fields = ['head', 'desc', 'icon', 'link']
for field in fields:
- field_name = 'resource%s_%s' % (str(x), field)
+ field_name = 'resource_%s_%s' % (str(x), field)
if field_name in custom_fields and custom_fields[field_name][0] != '':
- if field == 'link_0':
- resource['link'] = custom_fields[field_name]
+ if field == 'link':
+ resource[field] = custom_fields[field_name]
else:
resource[field] = custom_fields[field_name][0]
-
+
if resource:
item['resources'].append(resource)
| {"golden_diff": "diff --git a/_lib/wordpress_office_processor.py b/_lib/wordpress_office_processor.py\n--- a/_lib/wordpress_office_processor.py\n+++ b/_lib/wordpress_office_processor.py\n@@ -55,17 +55,17 @@\n \n # create list of office resource dicts\n item['resources'] = []\n- for x in xrange(1,4):\n+ for x in xrange(0,4):\n resource = {}\n- fields = ['head', 'desc', 'icon', 'link_0']\n+ fields = ['head', 'desc', 'icon', 'link']\n for field in fields:\n- field_name = 'resource%s_%s' % (str(x), field)\n+ field_name = 'resource_%s_%s' % (str(x), field)\n if field_name in custom_fields and custom_fields[field_name][0] != '':\n- if field == 'link_0':\n- resource['link'] = custom_fields[field_name]\n+ if field == 'link':\n+ resource[field] = custom_fields[field_name]\n else:\n resource[field] = custom_fields[field_name][0]\n- \n+ \n if resource:\n item['resources'].append(resource)\n", "issue": "Centering on mobile\n`the-bureau` page contains media blocks whose content (image & body) becomes centered at mobile sizes via a `media__centered` class. The `office` index page, however, introduces a new pattern of media blocks whose image centers on mobile while the body remains left-aligned. \n\nIt seems like it would be more useful to add a general-purpose `.centered-on-mobile` class (or two classes, one for inline & the other for block elements) that could be applied to the appropriate parts of the media object rather than handle this behavior through .`media` modifiers. \n\nThoughts? Preferences?\n\n", "before_files": [{"content": "import sys\nimport json\nimport os.path\nimport requests\n\ndef posts_at_url(url):\n \n current_page = 1\n max_page = sys.maxint\n\n while current_page <= max_page:\n\n url = os.path.expandvars(url)\n resp = requests.get(url, params={'page':current_page, 'count': '-1'})\n results = json.loads(resp.content) \n current_page += 1\n max_page = results['pages']\n for p in results['posts']:\n yield p\n \ndef documents(name, url, **kwargs):\n \n for post in posts_at_url(url):\n yield process_office(post)\n\n\ndef process_office(item):\n \n item['_id'] = item['slug']\n custom_fields = item['custom_fields']\n \n # get intro text & subscribe form data from custom fields\n for attr in ['intro_text', 'intro_subscribe_form', 'related_contact']:\n if attr in custom_fields:\n item[attr] = custom_fields[attr][0]\n \n # build top story dict\n top_story = {}\n for attr in ['top_story_head', 'top_story_desc']:\n if attr in custom_fields:\n top_story[attr] = custom_fields[attr][0]\n \n # convert top story links into a proper list\n top_story_links = []\n for x in xrange(0,5):\n key = 'top_story_links_%s' % x\n if key in custom_fields:\n top_story_links.append(custom_fields[key])\n \n if top_story_links: \n top_story['top_story_links'] = top_story_links\n \n if top_story:\n item['top_story'] = top_story\n \n # create list of office resource dicts\n item['resources'] = []\n for x in xrange(1,4):\n resource = {}\n fields = ['head', 'desc', 'icon', 'link_0']\n for field in fields:\n field_name = 'resource%s_%s' % (str(x), field)\n if field_name in custom_fields and custom_fields[field_name][0] != '':\n if field == 'link_0':\n resource['link'] = custom_fields[field_name]\n else:\n resource[field] = custom_fields[field_name][0]\n \n if resource:\n item['resources'].append(resource)\n\n return item\n\n", "path": "_lib/wordpress_office_processor.py"}]} | 1,304 | 264 |
gh_patches_debug_27775 | rasdani/github-patches | git_diff | bridgecrewio__checkov-4917 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Checkov Python error - kubernetes_pod_v1
I get the following error when parsing a **kubernetes_pod_v1** resource:
https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/pod_v1
```
Error: -18 02:46:45,476 [MainThread ] [ERROR] Failed to run check CKV_K8S_[27](https://github.com/technology-services-and-platforms-accnz/dotc-aks/actions/runs/4728024195/jobs/8389176473#step:21:28) on /tfplan.json:kubernetes_pod_v1.test
Traceback (most recent call last):
File "/usr/local/lib/python3.10/site-packages/checkov/common/checks/base_check.py", line 73, in run
check_result["result"] = self.scan_entity_conf(entity_configuration, entity_type)
File "/usr/local/lib/python3.10/site-packages/checkov/terraform/checks/resource/base_resource_check.py", line 43, in scan_entity_conf
return self.scan_resource_conf(conf)
File "/usr/local/lib/python3.10/site-packages/checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py", line 36, in scan_resource_conf
if v.get("host_path"):
File "/usr/local/lib/python3.10/site-packages/checkov/common/parsers/node.py", line 189, in __getattr__
raise TemplateAttributeError(f'***name*** is invalid')
checkov.common.parsers.node.TemplateAttributeError: get is invalid
[...]
```
For all the checks that fail.
Checkov Version: 2.3.165
</issue>
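
The `get is invalid` error means one of the volume entries is not a dict, so `.get()` falls through to the parser's `__getattr__` and raises. Below is a standalone illustration of the guard that avoids this; the plain string merely stands in for the non-dict node the plan produces.

```python
volumes = [
    {"host_path": [{"path": ["/var/run/docker.sock"]}]},  # ordinary dict entry
    "kubernetes_pod_v1.test.spec[0].volume",               # stand-in for a non-dict node
]

for v in volumes:
    # isinstance() keeps .get() from being called on the non-dict entry,
    # which is what triggers the TemplateAttributeError in the report.
    if isinstance(v, dict) and v.get("host_path"):
        print("docker socket mounted at:", v["host_path"][0]["path"])
    else:
        print("skipping entry that is not a dict volume:", v)
```
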
<code>
[start of checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py]
1 from __future__ import annotations
2
3 from typing import Any
4
5 from checkov.common.models.enums import CheckCategories, CheckResult
6 from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
7
8
9 class DockerSocketVolume(BaseResourceCheck):
10 def __init__(self) -> None:
11 # Exposing the socket gives container information and increases risk of exploit
12 # read-only is not a solution but only makes it harder to exploit.
13 # Location: Pod.spec.volumes[].hostPath.path
14 # Location: CronJob.spec.jobTemplate.spec.template.spec.volumes[].hostPath.path
15 # Location: *.spec.template.spec.volumes[].hostPath.path
16 id = "CKV_K8S_27"
17 name = "Do not expose the docker daemon socket to containers"
18 supported_resources = ("kubernetes_pod", "kubernetes_pod_v1",
19 "kubernetes_deployment", "kubernetes_deployment_v1",
20 "kubernetes_daemonset", "kubernetes_daemon_set_v1")
21 categories = (CheckCategories.NETWORKING,)
22 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
23
24 def scan_resource_conf(self, conf: dict[str, list[Any]]):
25 if "spec" not in conf:
26 self.evaluated_keys = [""]
27 return CheckResult.FAILED
28
29 spec = conf['spec'][0]
30 if not spec:
31 return CheckResult.UNKNOWN
32
33 if "volume" in spec and spec.get("volume"):
34 volumes = spec.get("volume")
35 for idx, v in enumerate(volumes):
36 if v.get("host_path"):
37 if "path" in v["host_path"][0]:
38 if v["host_path"][0]["path"] == ["/var/run/docker.sock"]:
39 self.evaluated_keys = [f"spec/volume/{idx}/host_path/[0]/path"]
40 return CheckResult.FAILED
41 if "template" in spec and spec.get("template"):
42 template = spec.get("template")[0]
43 if "spec" in template:
44 temp_spec = template.get("spec")[0]
45 if "volume" in temp_spec and temp_spec.get("volume"):
46 volumes = temp_spec.get("volume")
47 for idx, v in enumerate(volumes):
48 if isinstance(v, dict) and v.get("host_path"):
49 if "path" in v["host_path"][0]:
50 path = v["host_path"][0]["path"]
51 if path == ["/var/run/docker.sock"]:
52 self.evaluated_keys = [f"spec/template/spec/volume/{idx}/host_path/[0]/path"]
53 return CheckResult.FAILED
54
55 return CheckResult.PASSED
56
57
58 check = DockerSocketVolume()
59
[end of checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py b/checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py
--- a/checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py
+++ b/checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py
@@ -33,7 +33,7 @@
if "volume" in spec and spec.get("volume"):
volumes = spec.get("volume")
for idx, v in enumerate(volumes):
- if v.get("host_path"):
+ if isinstance(v, dict) and v.get("host_path"):
if "path" in v["host_path"][0]:
if v["host_path"][0]["path"] == ["/var/run/docker.sock"]:
self.evaluated_keys = [f"spec/volume/{idx}/host_path/[0]/path"]
@@ -47,8 +47,7 @@
for idx, v in enumerate(volumes):
if isinstance(v, dict) and v.get("host_path"):
if "path" in v["host_path"][0]:
- path = v["host_path"][0]["path"]
- if path == ["/var/run/docker.sock"]:
+ if v["host_path"][0]["path"] == ["/var/run/docker.sock"]:
self.evaluated_keys = [f"spec/template/spec/volume/{idx}/host_path/[0]/path"]
return CheckResult.FAILED
| {"golden_diff": "diff --git a/checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py b/checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py\n--- a/checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py\n+++ b/checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py\n@@ -33,7 +33,7 @@\n if \"volume\" in spec and spec.get(\"volume\"):\n volumes = spec.get(\"volume\")\n for idx, v in enumerate(volumes):\n- if v.get(\"host_path\"):\n+ if isinstance(v, dict) and v.get(\"host_path\"):\n if \"path\" in v[\"host_path\"][0]:\n if v[\"host_path\"][0][\"path\"] == [\"/var/run/docker.sock\"]:\n self.evaluated_keys = [f\"spec/volume/{idx}/host_path/[0]/path\"]\n@@ -47,8 +47,7 @@\n for idx, v in enumerate(volumes):\n if isinstance(v, dict) and v.get(\"host_path\"):\n if \"path\" in v[\"host_path\"][0]:\n- path = v[\"host_path\"][0][\"path\"]\n- if path == [\"/var/run/docker.sock\"]:\n+ if v[\"host_path\"][0][\"path\"] == [\"/var/run/docker.sock\"]:\n self.evaluated_keys = [f\"spec/template/spec/volume/{idx}/host_path/[0]/path\"]\n return CheckResult.FAILED\n", "issue": "Checkov Python error - kubernetes_pod_v1\nI get the following error when parsing a **kubernetes_pod_v1** resource:\r\nhttps://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/pod_v1\r\n\r\n```\r\nError: -18 02:46:45,476 [MainThread ] [ERROR] Failed to run check CKV_K8S_[27](https://github.com/technology-services-and-platforms-accnz/dotc-aks/actions/runs/4728024195/jobs/8389176473#step:21:28) on /tfplan.json:kubernetes_pod_v1.test\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.10/site-packages/checkov/common/checks/base_check.py\", line 73, in run\r\n check_result[\"result\"] = self.scan_entity_conf(entity_configuration, entity_type)\r\n File \"/usr/local/lib/python3.10/site-packages/checkov/terraform/checks/resource/base_resource_check.py\", line 43, in scan_entity_conf\r\n return self.scan_resource_conf(conf)\r\n File \"/usr/local/lib/python3.10/site-packages/checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py\", line 36, in scan_resource_conf\r\n if v.get(\"host_path\"):\r\n File \"/usr/local/lib/python3.10/site-packages/checkov/common/parsers/node.py\", line 189, in __getattr__\r\n raise TemplateAttributeError(f'***name*** is invalid')\r\ncheckov.common.parsers.node.TemplateAttributeError: get is invalid\r\n[...]\r\n```\r\n\r\nFor all the checks that fail.\r\n\r\nCheckov Version: :2.3.165\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom typing import Any\n\nfrom checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\n\n\nclass DockerSocketVolume(BaseResourceCheck):\n def __init__(self) -> None:\n # Exposing the socket gives container information and increases risk of exploit\n # read-only is not a solution but only makes it harder to exploit.\n # Location: Pod.spec.volumes[].hostPath.path\n # Location: CronJob.spec.jobTemplate.spec.template.spec.volumes[].hostPath.path\n # Location: *.spec.template.spec.volumes[].hostPath.path\n id = \"CKV_K8S_27\"\n name = \"Do not expose the docker daemon socket to containers\"\n supported_resources = (\"kubernetes_pod\", \"kubernetes_pod_v1\",\n \"kubernetes_deployment\", \"kubernetes_deployment_v1\",\n \"kubernetes_daemonset\", \"kubernetes_daemon_set_v1\")\n categories = (CheckCategories.NETWORKING,)\n super().__init__(name=name, id=id, categories=categories, 
supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf: dict[str, list[Any]]):\n if \"spec\" not in conf:\n self.evaluated_keys = [\"\"]\n return CheckResult.FAILED\n\n spec = conf['spec'][0]\n if not spec:\n return CheckResult.UNKNOWN\n\n if \"volume\" in spec and spec.get(\"volume\"):\n volumes = spec.get(\"volume\")\n for idx, v in enumerate(volumes):\n if v.get(\"host_path\"):\n if \"path\" in v[\"host_path\"][0]:\n if v[\"host_path\"][0][\"path\"] == [\"/var/run/docker.sock\"]:\n self.evaluated_keys = [f\"spec/volume/{idx}/host_path/[0]/path\"]\n return CheckResult.FAILED\n if \"template\" in spec and spec.get(\"template\"):\n template = spec.get(\"template\")[0]\n if \"spec\" in template:\n temp_spec = template.get(\"spec\")[0]\n if \"volume\" in temp_spec and temp_spec.get(\"volume\"):\n volumes = temp_spec.get(\"volume\")\n for idx, v in enumerate(volumes):\n if isinstance(v, dict) and v.get(\"host_path\"):\n if \"path\" in v[\"host_path\"][0]:\n path = v[\"host_path\"][0][\"path\"]\n if path == [\"/var/run/docker.sock\"]:\n self.evaluated_keys = [f\"spec/template/spec/volume/{idx}/host_path/[0]/path\"]\n return CheckResult.FAILED\n\n return CheckResult.PASSED\n\n\ncheck = DockerSocketVolume()\n", "path": "checkov/terraform/checks/resource/kubernetes/DockerSocketVolume.py"}]} | 1,628 | 312 |
gh_patches_debug_14901 | rasdani/github-patches | git_diff | streamlink__streamlink-2102 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ok.ru VODs
<!--
Thanks for reporting a plugin issue!
USE THE TEMPLATE. Otherwise your plugin issue may be rejected.
First, see the contribution guidelines:
https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink
Also check the list of open and closed plugin issues:
https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22
Please see the text preview to avoid unnecessary formatting errors.
-->
## Plugin Issue
<!-- Replace [ ] with [x] in order to check the box -->
- [x ] This is a plugin issue and I have read the contribution guidelines.
### Description
I tried the link given in #1884, but "https://raw.githubusercontent.com/back-to/plugins/master/plugins/ok_live.py" returns 404: Not Found. Thanks
<!-- Explain the plugin issue as thoroughly as you can. -->
### Reproduction steps / Explicit stream URLs to test
<!-- How can we reproduce this? Please note the exact steps below using the list format supplied. If you need more steps please add them. -->
1. D:\my\Streamlinkl\bin>streamlink -l debug "https://ok.ru/video/266205792931" best
### Log output
<!--
TEXT LOG OUTPUT IS REQUIRED for a plugin issue!
Use the `--loglevel debug` parameter and avoid using parameters which suppress log output.
https://streamlink.github.io/cli.html#cmdoption-l
Make sure to **remove usernames and passwords**
You can copy the output to https://gist.github.com/ or paste it below.
-->
```
[cli][debug] OS: Windows 8.1
[cli][debug] Python: 3.5.2
[cli][debug] Streamlink: 0.14.2
[cli][debug] Requests(2.19.1), Socks(1.6.7), Websocket(0.48.0)
error: No plugin can handle URL: https://ok.ru/video/266205792931
```
### Additional comments, screenshots, etc.
[Love Streamlink? Please consider supporting our collective. Thanks!](https://opencollective.com/streamlink/donate)
</issue>
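
The plugin's URL pattern only admits `/live/` pages, so the `/video/` link in this report never reaches the plugin at all. Here is a quick standalone check of the two patterns; the widened one mirrors what a fix would need and is shown for illustration, not pasted from the project.

```python
import re

live_only = re.compile(r"https?://(www\.)?ok\.ru/live/\d+")
live_or_vod = re.compile(r"https?://(www\.)?ok\.ru/(live|video)/\d+")

url = "https://ok.ru/video/266205792931"  # the VOD URL from the report

print(bool(live_only.match(url)))    # False -> "No plugin can handle URL"
print(bool(live_or_vod.match(url)))  # True  -> VOD pages would be accepted
```
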
<code>
[start of src/streamlink/plugins/ok_live.py]
1 import re
2
3 from streamlink.plugin import Plugin
4 from streamlink.plugin.api import validate
5 from streamlink.plugin.api import useragents
6 from streamlink.stream import HLSStream
7
8 _url_re = re.compile(r"https?://(www\.)?ok\.ru/live/\d+")
9 _vod_re = re.compile(r";(?P<hlsurl>[^;]+video\.m3u8.+?)\\"")
10
11 _schema = validate.Schema(
12 validate.transform(_vod_re.search),
13 validate.any(
14 None,
15 validate.all(
16 validate.get("hlsurl"),
17 validate.url()
18 )
19 )
20 )
21
22 class OK_live(Plugin):
23 """
24 Support for ok.ru live stream: http://www.ok.ru/live/
25 """
26 @classmethod
27 def can_handle_url(cls, url):
28 return _url_re.match(url) is not None
29
30 def _get_streams(self):
31 headers = {
32 'User-Agent': useragents.CHROME,
33 'Referer': self.url
34 }
35
36 hls = self.session.http.get(self.url, headers=headers, schema=_schema)
37 if hls:
38 hls = hls.replace(u'\\\\u0026', u'&')
39 return HLSStream.parse_variant_playlist(self.session, hls, headers=headers)
40
41
42 __plugin__ = OK_live
[end of src/streamlink/plugins/ok_live.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/streamlink/plugins/ok_live.py b/src/streamlink/plugins/ok_live.py
--- a/src/streamlink/plugins/ok_live.py
+++ b/src/streamlink/plugins/ok_live.py
@@ -5,7 +5,7 @@
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
-_url_re = re.compile(r"https?://(www\.)?ok\.ru/live/\d+")
+_url_re = re.compile(r"https?://(www\.)?ok\.ru/(live|video)/\d+")
_vod_re = re.compile(r";(?P<hlsurl>[^;]+video\.m3u8.+?)\\"")
_schema = validate.Schema(
@@ -21,7 +21,7 @@
class OK_live(Plugin):
"""
- Support for ok.ru live stream: http://www.ok.ru/live/
+ Support for ok.ru live stream: http://www.ok.ru/live/ and for ok.ru VoDs: http://www.ok.ru/video/
"""
@classmethod
def can_handle_url(cls, url):
| {"golden_diff": "diff --git a/src/streamlink/plugins/ok_live.py b/src/streamlink/plugins/ok_live.py\n--- a/src/streamlink/plugins/ok_live.py\n+++ b/src/streamlink/plugins/ok_live.py\n@@ -5,7 +5,7 @@\n from streamlink.plugin.api import useragents\n from streamlink.stream import HLSStream\n \n-_url_re = re.compile(r\"https?://(www\\.)?ok\\.ru/live/\\d+\")\n+_url_re = re.compile(r\"https?://(www\\.)?ok\\.ru/(live|video)/\\d+\")\n _vod_re = re.compile(r\";(?P<hlsurl>[^;]+video\\.m3u8.+?)\\\\"\")\n \n _schema = validate.Schema(\n@@ -21,7 +21,7 @@\n \n class OK_live(Plugin):\n \"\"\"\n- Support for ok.ru live stream: http://www.ok.ru/live/\n+ Support for ok.ru live stream: http://www.ok.ru/live/ and for ok.ru VoDs: http://www.ok.ru/video/\n \"\"\"\n @classmethod\n def can_handle_url(cls, url):\n", "issue": "ok.ru VODs\n<!--\r\nThanks for reporting a plugin issue!\r\nUSE THE TEMPLATE. Otherwise your plugin issue may be rejected.\r\n\r\nFirst, see the contribution guidelines:\r\nhttps://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink\r\n\r\nAlso check the list of open and closed plugin issues:\r\nhttps://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22\r\n\r\nPlease see the text preview to avoid unnecessary formatting errors.\r\n-->\r\n\r\n\r\n## Plugin Issue\r\n\r\n<!-- Replace [ ] with [x] in order to check the box -->\r\n- [x ] This is a plugin issue and I have read the contribution guidelines.\r\n\r\n\r\n### Description\r\ni enter link in #1884 but \"https://raw.githubusercontent.com/back-to/plugins/master/plugins/ok_live.py\" 404: Not Found. Thanks\r\n<!-- Explain the plugin issue as thoroughly as you can. -->\r\n\r\n\r\n### Reproduction steps / Explicit stream URLs to test\r\n\r\n<!-- How can we reproduce this? Please note the exact steps below using the list format supplied. If you need more steps please add them. -->\r\n\r\n1. D:\\my\\Streamlinkl\\bin>streamlink -l debug \"https://ok.ru/video/266205792931\" best\r\n\r\n\r\n\r\n### Log output\r\n\r\n<!--\r\nTEXT LOG OUTPUT IS REQUIRED for a plugin issue!\r\nUse the `--loglevel debug` parameter and avoid using parameters which suppress log output.\r\nhttps://streamlink.github.io/cli.html#cmdoption-l\r\n\r\nMake sure to **remove usernames and passwords**\r\nYou can copy the output to https://gist.github.com/ or paste it below.\r\n-->\r\n\r\n```\r\n[cli][debug] OS: Windows 8.1\r\n[cli][debug] Python: 3.5.2\r\n[cli][debug] Streamlink: 0.14.2\r\n[cli][debug] Requests(2.19.1), Socks(1.6.7), Websocket(0.48.0)\r\nerror: No plugin can handle URL: https://ok.ru/video/266205792931\r\n\r\n```\r\n\r\n\r\n### Additional comments, screenshots, etc.\r\n\r\n\r\n\r\n[Love Streamlink? Please consider supporting our collective. 
Thanks!](https://opencollective.com/streamlink/donate)\r\n\n", "before_files": [{"content": "import re\n\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import validate\nfrom streamlink.plugin.api import useragents\nfrom streamlink.stream import HLSStream\n\n_url_re = re.compile(r\"https?://(www\\.)?ok\\.ru/live/\\d+\")\n_vod_re = re.compile(r\";(?P<hlsurl>[^;]+video\\.m3u8.+?)\\\\"\")\n\n_schema = validate.Schema(\n validate.transform(_vod_re.search),\n validate.any(\n None,\n validate.all(\n validate.get(\"hlsurl\"),\n validate.url()\n )\n )\n)\n\nclass OK_live(Plugin):\n \"\"\"\n Support for ok.ru live stream: http://www.ok.ru/live/\n \"\"\"\n @classmethod\n def can_handle_url(cls, url):\n return _url_re.match(url) is not None\n\n def _get_streams(self):\n headers = {\n 'User-Agent': useragents.CHROME,\n 'Referer': self.url\n }\n\n hls = self.session.http.get(self.url, headers=headers, schema=_schema)\n if hls:\n hls = hls.replace(u'\\\\\\\\u0026', u'&')\n return HLSStream.parse_variant_playlist(self.session, hls, headers=headers)\n\n\n__plugin__ = OK_live", "path": "src/streamlink/plugins/ok_live.py"}]} | 1,402 | 240 |
gh_patches_debug_11245 | rasdani/github-patches | git_diff | sunpy__sunpy-4596 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Rethinking and rewriting sunpy.self_test
We are currently using astropy's test runner for `sunpy.self_test`; it was really designed for `setup.py` and is therefore full of features which are probably not needed for `self_test`.
Before we (I) go deleting swathes of code, as I love to do: what do we want to achieve with `self_test`? Is a very slim wrapper around `pytest --pyargs sunpy` all we need?
</issue>
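
For reference, the "very slim wrapper" floated above could be as small as the sketch below. The function name and defaults are assumptions, not SunPy's eventual API, and it presumes pytest and sunpy are installed.

```python
import subprocess
import sys


def self_test(*extra_args):
    """Run the installed sunpy test suite via ``pytest --pyargs sunpy``."""
    cmd = [sys.executable, "-m", "pytest", "--pyargs", "sunpy", *extra_args]
    print("Running:", " ".join(cmd))
    return subprocess.call(cmd)


if __name__ == "__main__":
    raise SystemExit(self_test("-q"))
```
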
<code>
[start of sunpy/__init__.py]
1 """
2 SunPy
3 =====
4
5 An open-source Python library for Solar Physics data analysis.
6
7 * Homepage: https://sunpy.org
8 * Documentation: https://docs.sunpy.org/en/stable/
9 """
10 import os
11 import sys
12 import logging
13
14 from sunpy.tests.runner import SunPyTestRunner
15 from sunpy.util import system_info
16 from sunpy.util.config import load_config, print_config
17 from sunpy.util.logger import _init_log
18 from .version import version as __version__
19
20 # Enforce Python version check during package import.
21 __minimum_python_version__ = "3.7"
22
23
24 class UnsupportedPythonError(Exception):
25 """Running on an unsupported version of Python."""
26
27
28 if sys.version_info < tuple(int(val) for val in __minimum_python_version__.split('.')):
29 # This has to be .format to keep backwards compatibly.
30 raise UnsupportedPythonError(
31 "sunpy does not support Python < {}".format(__minimum_python_version__))
32
33
34 def _get_bibtex():
35 import textwrap
36
37 # Set the bibtex entry to the article referenced in CITATION.rst
38 citation_file = os.path.join(os.path.dirname(__file__), 'CITATION.rst')
39
40 # Explicitly specify UTF-8 encoding in case the system's default encoding is problematic
41 with open(citation_file, 'r', encoding='utf-8') as citation:
42 # Extract the first bibtex block:
43 ref = citation.read().partition(".. code:: bibtex\n\n")[2]
44 lines = ref.split("\n")
45 # Only read the lines which are indented
46 lines = lines[:[l.startswith(" ") for l in lines].index(False)]
47 ref = textwrap.dedent('\n'.join(lines))
48 return ref
49
50
51 __citation__ = __bibtex__ = _get_bibtex()
52
53 self_test = SunPyTestRunner.make_test_runner_in(os.path.dirname(__file__))
54
55 # Load user configuration
56 config = load_config()
57
58 log = _init_log(config=config)
59
60 __all__ = ['config', 'self_test', 'system_info', 'print_config']
61
[end of sunpy/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/sunpy/__init__.py b/sunpy/__init__.py
--- a/sunpy/__init__.py
+++ b/sunpy/__init__.py
@@ -11,7 +11,7 @@
import sys
import logging
-from sunpy.tests.runner import SunPyTestRunner
+from sunpy.tests.self_test import self_test
from sunpy.util import system_info
from sunpy.util.config import load_config, print_config
from sunpy.util.logger import _init_log
@@ -50,8 +50,6 @@
__citation__ = __bibtex__ = _get_bibtex()
-self_test = SunPyTestRunner.make_test_runner_in(os.path.dirname(__file__))
-
# Load user configuration
config = load_config()
| {"golden_diff": "diff --git a/sunpy/__init__.py b/sunpy/__init__.py\n--- a/sunpy/__init__.py\n+++ b/sunpy/__init__.py\n@@ -11,7 +11,7 @@\n import sys\n import logging\n \n-from sunpy.tests.runner import SunPyTestRunner\n+from sunpy.tests.self_test import self_test\n from sunpy.util import system_info\n from sunpy.util.config import load_config, print_config\n from sunpy.util.logger import _init_log\n@@ -50,8 +50,6 @@\n \n __citation__ = __bibtex__ = _get_bibtex()\n \n-self_test = SunPyTestRunner.make_test_runner_in(os.path.dirname(__file__))\n-\n # Load user configuration\n config = load_config()\n", "issue": "Rethinking and rewriting sunpy.self_test\nWe are currently using astropy's test runner for `sunpy.self_test` this was really designed for setup.py and is therefore very full of features which are probably not needed for self_test.\n\nBefore we (I) go deleting swathes of code as I love to do. What do we want to achieve with self test? Is a very slim wrapper around `pytest --pyargs sunpy` all we need?\n", "before_files": [{"content": "\"\"\"\nSunPy\n=====\n\nAn open-source Python library for Solar Physics data analysis.\n\n* Homepage: https://sunpy.org\n* Documentation: https://docs.sunpy.org/en/stable/\n\"\"\"\nimport os\nimport sys\nimport logging\n\nfrom sunpy.tests.runner import SunPyTestRunner\nfrom sunpy.util import system_info\nfrom sunpy.util.config import load_config, print_config\nfrom sunpy.util.logger import _init_log\nfrom .version import version as __version__\n\n# Enforce Python version check during package import.\n__minimum_python_version__ = \"3.7\"\n\n\nclass UnsupportedPythonError(Exception):\n \"\"\"Running on an unsupported version of Python.\"\"\"\n\n\nif sys.version_info < tuple(int(val) for val in __minimum_python_version__.split('.')):\n # This has to be .format to keep backwards compatibly.\n raise UnsupportedPythonError(\n \"sunpy does not support Python < {}\".format(__minimum_python_version__))\n\n\ndef _get_bibtex():\n import textwrap\n\n # Set the bibtex entry to the article referenced in CITATION.rst\n citation_file = os.path.join(os.path.dirname(__file__), 'CITATION.rst')\n\n # Explicitly specify UTF-8 encoding in case the system's default encoding is problematic\n with open(citation_file, 'r', encoding='utf-8') as citation:\n # Extract the first bibtex block:\n ref = citation.read().partition(\".. code:: bibtex\\n\\n\")[2]\n lines = ref.split(\"\\n\")\n # Only read the lines which are indented\n lines = lines[:[l.startswith(\" \") for l in lines].index(False)]\n ref = textwrap.dedent('\\n'.join(lines))\n return ref\n\n\n__citation__ = __bibtex__ = _get_bibtex()\n\nself_test = SunPyTestRunner.make_test_runner_in(os.path.dirname(__file__))\n\n# Load user configuration\nconfig = load_config()\n\nlog = _init_log(config=config)\n\n__all__ = ['config', 'self_test', 'system_info', 'print_config']\n", "path": "sunpy/__init__.py"}]} | 1,197 | 169 |
gh_patches_debug_30401 | rasdani/github-patches | git_diff | castorini__pyserini-630 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add ability to select random question for interactive demo
hey @saileshnankani - how about we add a `/random` command to ask a random question from the dev set?
</issue>
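*Editorial aside — not part of the original issue text.* A minimal standalone sketch of what such a `/random` command could draw on, assuming pyserini's `get_topics` helper exposes the MS MARCO dev-subset queries (which is what the repository's actual patch, shown further below in this entry, ends up using):

```python
import random

from pyserini.search import get_topics

# Illustrative only: load the dev-subset topics once and pick one at random.
dev_topics = list(get_topics('msmarco-passage-dev-subset').values())


def random_question() -> str:
    """Return the text of a randomly chosen dev-subset query."""
    return random.choice(dev_topics)['title']


if __name__ == '__main__':
    print(random_question())
```

In the demo itself the chosen question would then simply be passed to the existing search handler, which is exactly the shape the accepted patch takes.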
<code>
[start of pyserini/demo/msmarco.py]
1 #
2 # Pyserini: Reproducible IR research with sparse and dense representations
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15 #
16
17 import cmd
18 import json
19
20 from pyserini.search import SimpleSearcher
21 from pyserini.dsearch import SimpleDenseSearcher, TctColBertQueryEncoder, AnceQueryEncoder
22 from pyserini.hsearch import HybridSearcher
23
24
25 class MsMarcoDemo(cmd.Cmd):
26 ssearcher = SimpleSearcher.from_prebuilt_index('msmarco-passage')
27 dsearcher = None
28 hsearcher = None
29 searcher = ssearcher
30
31 k = 10
32 prompt = '>>> '
33
34 # https://stackoverflow.com/questions/35213134/command-prefixes-in-python-cli-using-cmd-in-pythons-standard-library
35 def precmd(self, line):
36 if line[0] == '/':
37 line = line[1:]
38 return line
39
40 def do_help(self, arg):
41 print(f'/help : returns this message')
42 print(f'/k [NUM] : sets k (number of hits to return) to [NUM]')
43 print(f'/model [MODEL] : sets encoder to use the model [MODEL] (one of tct, ance)')
44 print(f'/mode [MODE] : sets retriver type to [MODE] (one of sparse, dense, hybrid)')
45
46 def do_k(self, arg):
47 print(f'setting k = {int(arg)}')
48 self.k = int(arg)
49
50 def do_mode(self, arg):
51 if arg == "sparse":
52 self.searcher = self.ssearcher
53 elif arg == "dense":
54 if self.dsearcher is None:
55 print(f'Specify model through /model before using dense retrieval.')
56 return
57 self.searcher = self.dsearcher
58 elif arg == "hybrid":
59 if self.hsearcher is None:
60 print(f'Specify model through /model before using hybrid retrieval.')
61 return
62 self.searcher = self.hsearcher
63 else:
64 print(
65 f'Mode "{arg}" is invalid. Mode should be one of [sparse, dense, hybrid].')
66 return
67 print(f'setting retriver = {arg}')
68
69 def do_model(self, arg):
70 if arg == "tct":
71 encoder = TctColBertQueryEncoder("castorini/tct_colbert-msmarco")
72 index = "msmarco-passage-tct_colbert-hnsw"
73 elif arg == "ance":
74 encoder = AnceQueryEncoder("castorini/ance-msmarco-passage")
75 index = "msmarco-passage-ance-bf"
76 else:
77 print(
78 f'Model "{arg}" is invalid. Model should be one of [tct, ance].')
79 return
80
81 self.dsearcher = SimpleDenseSearcher.from_prebuilt_index(
82 index,
83 encoder
84 )
85 self.hsearcher = HybridSearcher(self.dsearcher, self.ssearcher)
86 print(f'setting model = {arg}')
87
88 def do_EOF(self, line):
89 return True
90
91 def default(self, q):
92 hits = self.searcher.search(q, self.k)
93
94 for i in range(0, len(hits)):
95 raw_doc = None
96 if isinstance(self.searcher, SimpleSearcher):
97 raw_doc = hits[i].raw
98 else:
99 doc = self.ssearcher.doc(hits[i].docid)
100 if doc:
101 raw_doc = doc.raw()
102 jsondoc = json.loads(raw_doc)
103 print(f'{i + 1:2} {hits[i].score:.5f} {jsondoc["contents"]}')
104
105
106 if __name__ == '__main__':
107 MsMarcoDemo().cmdloop()
108
[end of pyserini/demo/msmarco.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pyserini/demo/msmarco.py b/pyserini/demo/msmarco.py
--- a/pyserini/demo/msmarco.py
+++ b/pyserini/demo/msmarco.py
@@ -16,13 +16,18 @@
import cmd
import json
+import os
+import random
from pyserini.search import SimpleSearcher
from pyserini.dsearch import SimpleDenseSearcher, TctColBertQueryEncoder, AnceQueryEncoder
from pyserini.hsearch import HybridSearcher
+from pyserini import search
class MsMarcoDemo(cmd.Cmd):
+ dev_topics = list(search.get_topics('msmarco-passage-dev-subset').values())
+
ssearcher = SimpleSearcher.from_prebuilt_index('msmarco-passage')
dsearcher = None
hsearcher = None
@@ -42,6 +47,7 @@
print(f'/k [NUM] : sets k (number of hits to return) to [NUM]')
print(f'/model [MODEL] : sets encoder to use the model [MODEL] (one of tct, ance)')
print(f'/mode [MODE] : sets retriver type to [MODE] (one of sparse, dense, hybrid)')
+ print(f'/random : returns results for a random question from dev subset')
def do_k(self, arg):
print(f'setting k = {int(arg)}')
@@ -85,6 +91,11 @@
self.hsearcher = HybridSearcher(self.dsearcher, self.ssearcher)
print(f'setting model = {arg}')
+ def do_random(self, arg):
+ q = random.choice(self.dev_topics)['title']
+ print(f'question: {q}')
+ self.default(q)
+
def do_EOF(self, line):
return True
| {"golden_diff": "diff --git a/pyserini/demo/msmarco.py b/pyserini/demo/msmarco.py\n--- a/pyserini/demo/msmarco.py\n+++ b/pyserini/demo/msmarco.py\n@@ -16,13 +16,18 @@\n \n import cmd\n import json\n+import os\n+import random\n \n from pyserini.search import SimpleSearcher\n from pyserini.dsearch import SimpleDenseSearcher, TctColBertQueryEncoder, AnceQueryEncoder\n from pyserini.hsearch import HybridSearcher\n+from pyserini import search\n \n \n class MsMarcoDemo(cmd.Cmd):\n+ dev_topics = list(search.get_topics('msmarco-passage-dev-subset').values())\n+\n ssearcher = SimpleSearcher.from_prebuilt_index('msmarco-passage')\n dsearcher = None\n hsearcher = None\n@@ -42,6 +47,7 @@\n print(f'/k [NUM] : sets k (number of hits to return) to [NUM]')\n print(f'/model [MODEL] : sets encoder to use the model [MODEL] (one of tct, ance)')\n print(f'/mode [MODE] : sets retriver type to [MODE] (one of sparse, dense, hybrid)')\n+ print(f'/random : returns results for a random question from dev subset')\n \n def do_k(self, arg):\n print(f'setting k = {int(arg)}')\n@@ -85,6 +91,11 @@\n self.hsearcher = HybridSearcher(self.dsearcher, self.ssearcher)\n print(f'setting model = {arg}')\n \n+ def do_random(self, arg):\n+ q = random.choice(self.dev_topics)['title']\n+ print(f'question: {q}')\n+ self.default(q)\n+\n def do_EOF(self, line):\n return True\n", "issue": "Add ability to select random question for interactive demo\nhey @saileshnankani - how about we add a `/random` command to ask a random question from the dev set?\n", "before_files": [{"content": "#\n# Pyserini: Reproducible IR research with sparse and dense representations\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport cmd\nimport json\n\nfrom pyserini.search import SimpleSearcher\nfrom pyserini.dsearch import SimpleDenseSearcher, TctColBertQueryEncoder, AnceQueryEncoder\nfrom pyserini.hsearch import HybridSearcher\n\n\nclass MsMarcoDemo(cmd.Cmd):\n ssearcher = SimpleSearcher.from_prebuilt_index('msmarco-passage')\n dsearcher = None\n hsearcher = None\n searcher = ssearcher\n\n k = 10\n prompt = '>>> '\n\n # https://stackoverflow.com/questions/35213134/command-prefixes-in-python-cli-using-cmd-in-pythons-standard-library\n def precmd(self, line):\n if line[0] == '/':\n line = line[1:]\n return line\n\n def do_help(self, arg):\n print(f'/help : returns this message')\n print(f'/k [NUM] : sets k (number of hits to return) to [NUM]')\n print(f'/model [MODEL] : sets encoder to use the model [MODEL] (one of tct, ance)')\n print(f'/mode [MODE] : sets retriver type to [MODE] (one of sparse, dense, hybrid)')\n\n def do_k(self, arg):\n print(f'setting k = {int(arg)}')\n self.k = int(arg)\n\n def do_mode(self, arg):\n if arg == \"sparse\":\n self.searcher = self.ssearcher\n elif arg == \"dense\":\n if self.dsearcher is None:\n print(f'Specify model through /model before using dense retrieval.')\n return\n self.searcher = self.dsearcher\n elif arg == \"hybrid\":\n if self.hsearcher is None:\n print(f'Specify model through /model before using hybrid 
retrieval.')\n return\n self.searcher = self.hsearcher\n else:\n print(\n f'Mode \"{arg}\" is invalid. Mode should be one of [sparse, dense, hybrid].')\n return\n print(f'setting retriver = {arg}')\n\n def do_model(self, arg):\n if arg == \"tct\":\n encoder = TctColBertQueryEncoder(\"castorini/tct_colbert-msmarco\")\n index = \"msmarco-passage-tct_colbert-hnsw\"\n elif arg == \"ance\":\n encoder = AnceQueryEncoder(\"castorini/ance-msmarco-passage\")\n index = \"msmarco-passage-ance-bf\"\n else:\n print(\n f'Model \"{arg}\" is invalid. Model should be one of [tct, ance].')\n return\n\n self.dsearcher = SimpleDenseSearcher.from_prebuilt_index(\n index,\n encoder\n )\n self.hsearcher = HybridSearcher(self.dsearcher, self.ssearcher)\n print(f'setting model = {arg}')\n\n def do_EOF(self, line):\n return True\n\n def default(self, q):\n hits = self.searcher.search(q, self.k)\n\n for i in range(0, len(hits)):\n raw_doc = None\n if isinstance(self.searcher, SimpleSearcher):\n raw_doc = hits[i].raw\n else:\n doc = self.ssearcher.doc(hits[i].docid)\n if doc:\n raw_doc = doc.raw()\n jsondoc = json.loads(raw_doc)\n print(f'{i + 1:2} {hits[i].score:.5f} {jsondoc[\"contents\"]}')\n\n\nif __name__ == '__main__':\n MsMarcoDemo().cmdloop()\n", "path": "pyserini/demo/msmarco.py"}]} | 1,729 | 409 |
gh_patches_debug_8880 | rasdani/github-patches | git_diff | liqd__a4-product-606 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
can't see full time when creating an event on small screen

</issue>
<code>
[start of liqd_product/apps/actions/apps.py]
1 from django.apps import AppConfig
2
3
4 class Config(AppConfig):
5 name = 'liqd_product.apps.actions'
6 label = 'liqd_product_actions'
7
8 def ready(self):
9 from adhocracy4.actions.models import configure_icon
10 from adhocracy4.actions.models import configure_type
11 from adhocracy4.actions.verbs import Verbs
12 configure_type(
13 'project',
14 ('a4projects', 'project')
15 )
16 configure_type(
17 'phase',
18 ('a4phases', 'phase')
19 )
20 configure_type(
21 'comment',
22 ('a4comments', 'comment')
23 )
24 configure_type(
25 'rating',
26 ('a4ratings', 'rating')
27 )
28 configure_type(
29 'item',
30 ('liqd_product_budgeting', 'proposal'),
31 ('liqd_product_ideas', 'idea'),
32 ('liqd_product_mapideas', 'mapidea')
33 )
34
35 configure_icon('far fa-comment', type='comment')
36 configure_icon('far fa-lightbulb', type='item')
37 configure_icon('fas fa-plus', verb=Verbs.ADD)
38 configure_icon('fas fa-pencil-alt', verb=Verbs.UPDATE)
39 configure_icon('fas fa-flag', verb=Verbs.START)
40 configure_icon('far fa-clock', verb=Verbs.SCHEDULE)
41
[end of liqd_product/apps/actions/apps.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/liqd_product/apps/actions/apps.py b/liqd_product/apps/actions/apps.py
--- a/liqd_product/apps/actions/apps.py
+++ b/liqd_product/apps/actions/apps.py
@@ -35,6 +35,6 @@
configure_icon('far fa-comment', type='comment')
configure_icon('far fa-lightbulb', type='item')
configure_icon('fas fa-plus', verb=Verbs.ADD)
- configure_icon('fas fa-pencil-alt', verb=Verbs.UPDATE)
+ configure_icon('fas fa-pencil', verb=Verbs.UPDATE)
configure_icon('fas fa-flag', verb=Verbs.START)
- configure_icon('far fa-clock', verb=Verbs.SCHEDULE)
+ configure_icon('far fa-clock-o', verb=Verbs.SCHEDULE)
| {"golden_diff": "diff --git a/liqd_product/apps/actions/apps.py b/liqd_product/apps/actions/apps.py\n--- a/liqd_product/apps/actions/apps.py\n+++ b/liqd_product/apps/actions/apps.py\n@@ -35,6 +35,6 @@\n configure_icon('far fa-comment', type='comment')\n configure_icon('far fa-lightbulb', type='item')\n configure_icon('fas fa-plus', verb=Verbs.ADD)\n- configure_icon('fas fa-pencil-alt', verb=Verbs.UPDATE)\n+ configure_icon('fas fa-pencil', verb=Verbs.UPDATE)\n configure_icon('fas fa-flag', verb=Verbs.START)\n- configure_icon('far fa-clock', verb=Verbs.SCHEDULE)\n+ configure_icon('far fa-clock-o', verb=Verbs.SCHEDULE)\n", "issue": "can't see full time when creating an event on small screen\n\r\n\n", "before_files": [{"content": "from django.apps import AppConfig\n\n\nclass Config(AppConfig):\n name = 'liqd_product.apps.actions'\n label = 'liqd_product_actions'\n\n def ready(self):\n from adhocracy4.actions.models import configure_icon\n from adhocracy4.actions.models import configure_type\n from adhocracy4.actions.verbs import Verbs\n configure_type(\n 'project',\n ('a4projects', 'project')\n )\n configure_type(\n 'phase',\n ('a4phases', 'phase')\n )\n configure_type(\n 'comment',\n ('a4comments', 'comment')\n )\n configure_type(\n 'rating',\n ('a4ratings', 'rating')\n )\n configure_type(\n 'item',\n ('liqd_product_budgeting', 'proposal'),\n ('liqd_product_ideas', 'idea'),\n ('liqd_product_mapideas', 'mapidea')\n )\n\n configure_icon('far fa-comment', type='comment')\n configure_icon('far fa-lightbulb', type='item')\n configure_icon('fas fa-plus', verb=Verbs.ADD)\n configure_icon('fas fa-pencil-alt', verb=Verbs.UPDATE)\n configure_icon('fas fa-flag', verb=Verbs.START)\n configure_icon('far fa-clock', verb=Verbs.SCHEDULE)\n", "path": "liqd_product/apps/actions/apps.py"}]} | 996 | 171 |
gh_patches_debug_35338 | rasdani/github-patches | git_diff | joke2k__faker-270 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
some generated UK postcodes are invalid
UK postcodes follow strict rules and there are a limited set of codes for each part of the postcode. Faker does not know about these rules and generates postcodes such as: `XC9E 1FL` and `U93 2ZU` which are invalid. See e.g. https://github.com/hamstah/ukpostcodeparser for more info.
</issue>
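*Editorial aside — not part of the original issue text.* The fix adopted below restricts every position of the generated postcode to a character set that is actually valid for that position. A self-contained sketch of that idea, using a deliberately abbreviated (hypothetical) subset of the real postal zones:

```python
import random

# Abbreviated, illustrative subsets -- the real provider in the patch below
# enumerates every valid UK postal zone.
POSTAL_ZONES_ONE_CHAR = ['B', 'E', 'G', 'L', 'M', 'N', 'S', 'W']
POSTAL_ZONES_TWO_CHARS = ['AB', 'AL', 'BA', 'CF', 'EH', 'LS', 'OX', 'YO']

_postcode_sets = {
    ' ': [' '],
    'N': [str(i) for i in range(10)],
    'A': POSTAL_ZONES_ONE_CHAR,
    'P': POSTAL_ZONES_TWO_CHARS,
    'E': list('ABDEFGHJLNPQRSTUWXYZ'),
}


def generate_postcode(pattern: str = 'PN NEE') -> str:
    """Build a postcode by drawing each placeholder from its allowed set."""
    return ''.join(random.choice(_postcode_sets[ch]) for ch in pattern)


print(generate_postcode())  # e.g. an 'OX3 4DN'-style value
```

The point of the design is that invalid area codes such as `XC` or `U9` simply cannot be produced, because each placeholder only ever samples from its whitelist.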
<code>
[start of faker/providers/address/en_GB/__init__.py]
1 from __future__ import unicode_literals
2 from ..en import Provider as AddressProvider
3
4
5 class Provider(AddressProvider):
6 city_prefixes = ('North', 'East', 'West', 'South', 'New', 'Lake', 'Port')
7 city_suffixes = (
8 'town', 'ton', 'land', 'ville', 'berg', 'burgh', 'borough', 'bury', 'view', 'port', 'mouth', 'stad', 'furt',
9 'chester', 'mouth', 'fort', 'haven', 'side', 'shire')
10 building_number_formats = ('#', '##', '###')
11 street_suffixes = (
12 'alley', 'avenue', 'branch', 'bridge', 'brook', 'brooks', 'burg', 'burgs', 'bypass', 'camp', 'canyon', 'cape',
13 'causeway', 'center', 'centers', 'circle', 'circles', 'cliff', 'cliffs', 'club', 'common', 'corner', 'corners',
14 'course', 'court', 'courts', 'cove', 'coves', 'creek', 'crescent', 'crest', 'crossing', 'crossroad', 'curve',
15 'dale', 'dam', 'divide', 'drive', 'drive', 'drives', 'estate', 'estates', 'expressway', 'extension',
16 'extensions',
17 'fall', 'falls', 'ferry', 'field', 'fields', 'flat', 'flats', 'ford', 'fords', 'forest', 'forge', 'forges',
18 'fork',
19 'forks', 'fort', 'freeway', 'garden', 'gardens', 'gateway', 'glen', 'glens', 'green', 'greens', 'grove',
20 'groves',
21 'harbor', 'harbors', 'haven', 'heights', 'highway', 'hill', 'hills', 'hollow', 'inlet', 'inlet', 'island',
22 'island',
23 'islands', 'islands', 'isle', 'isle', 'junction', 'junctions', 'key', 'keys', 'knoll', 'knolls', 'lake',
24 'lakes',
25 'land', 'landing', 'lane', 'light', 'lights', 'loaf', 'lock', 'locks', 'locks', 'lodge', 'lodge', 'loop',
26 'mall',
27 'manor', 'manors', 'meadow', 'meadows', 'mews', 'mill', 'mills', 'mission', 'mission', 'motorway', 'mount',
28 'mountain', 'mountain', 'mountains', 'mountains', 'neck', 'orchard', 'oval', 'overpass', 'park', 'parks',
29 'parkway',
30 'parkways', 'pass', 'passage', 'path', 'pike', 'pine', 'pines', 'place', 'plain', 'plains', 'plains', 'plaza',
31 'plaza', 'point', 'points', 'port', 'port', 'ports', 'ports', 'prairie', 'prairie', 'radial', 'ramp', 'ranch',
32 'rapid', 'rapids', 'rest', 'ridge', 'ridges', 'river', 'road', 'road', 'roads', 'roads', 'route', 'row', 'rue',
33 'run', 'shoal', 'shoals', 'shore', 'shores', 'skyway', 'spring', 'springs', 'springs', 'spur', 'spurs',
34 'square',
35 'square', 'squares', 'squares', 'station', 'station', 'stravenue', 'stravenue', 'stream', 'stream', 'street',
36 'street', 'streets', 'summit', 'summit', 'terrace', 'throughway', 'trace', 'track', 'trafficway', 'trail',
37 'trail',
38 'tunnel', 'tunnel', 'turnpike', 'turnpike', 'underpass', 'union', 'unions', 'valley', 'valleys', 'via',
39 'viaduct',
40 'view', 'views', 'village', 'village', 'villages', 'ville', 'vista', 'vista', 'walk', 'walks', 'wall', 'way',
41 'ways', 'well', 'wells')
42
43 postcode_formats = ('??#? #??', '?#? #??', '?# #??', '?## #??', '??# #??', '??## #??',)
44
45 city_formats = (
46 '{{city_prefix}} {{first_name}}{{city_suffix}}',
47 '{{city_prefix}} {{first_name}}',
48 '{{first_name}}{{city_suffix}}',
49 '{{last_name}}{{city_suffix}}',
50 )
51 street_name_formats = (
52 '{{first_name}} {{street_suffix}}',
53 '{{last_name}} {{street_suffix}}'
54 )
55 street_address_formats = (
56 '{{building_number}} {{street_name}}',
57 '{{secondary_address}}\n{{street_name}}',
58 )
59 address_formats = (
60 "{{street_address}}\n{{city}}\n{{postcode}}",
61 )
62 secondary_address_formats = ('Flat #', 'Flat ##', 'Flat ##?', 'Studio #', 'Studio ##', 'Studio ##?')
63
64 @classmethod
65 def city_prefix(cls):
66 return cls.random_element(cls.city_prefixes)
67
68 @classmethod
69 def secondary_address(cls):
70 return cls.bothify(cls.random_element(cls.secondary_address_formats))
71
[end of faker/providers/address/en_GB/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/faker/providers/address/en_GB/__init__.py b/faker/providers/address/en_GB/__init__.py
--- a/faker/providers/address/en_GB/__init__.py
+++ b/faker/providers/address/en_GB/__init__.py
@@ -40,7 +40,44 @@
'view', 'views', 'village', 'village', 'villages', 'ville', 'vista', 'vista', 'walk', 'walks', 'wall', 'way',
'ways', 'well', 'wells')
- postcode_formats = ('??#? #??', '?#? #??', '?# #??', '?## #??', '??# #??', '??## #??',)
+ POSTAL_ZONES = (
+ 'AB', 'AL', 'B' , 'BA', 'BB', 'BD', 'BH', 'BL', 'BN', 'BR',
+ 'BS', 'BT', 'CA', 'CB', 'CF', 'CH', 'CM', 'CO', 'CR', 'CT',
+ 'CV', 'CW', 'DA', 'DD', 'DE', 'DG', 'DH', 'DL', 'DN', 'DT',
+ 'DY', 'E' , 'EC', 'EH', 'EN', 'EX', 'FK', 'FY', 'G' , 'GL',
+ 'GY', 'GU', 'HA', 'HD', 'HG', 'HP', 'HR', 'HS', 'HU', 'HX',
+ 'IG', 'IM', 'IP', 'IV', 'JE', 'KA', 'KT', 'KW', 'KY', 'L' ,
+ 'LA', 'LD', 'LE', 'LL', 'LN', 'LS', 'LU', 'M' , 'ME', 'MK',
+ 'ML', 'N' , 'NE', 'NG', 'NN', 'NP', 'NR', 'NW', 'OL', 'OX',
+ 'PA', 'PE', 'PH', 'PL', 'PO', 'PR', 'RG', 'RH', 'RM', 'S' ,
+ 'SA', 'SE', 'SG', 'SK', 'SL', 'SM', 'SN', 'SO', 'SP', 'SR',
+ 'SS', 'ST', 'SW', 'SY', 'TA', 'TD', 'TF', 'TN', 'TQ', 'TR',
+ 'TS', 'TW', 'UB', 'W' , 'WA', 'WC', 'WD', 'WF', 'WN', 'WR',
+ 'WS', 'WV', 'YO', 'ZE'
+ )
+
+ POSTAL_ZONES_ONE_CHAR = [zone for zone in POSTAL_ZONES if len(zone) == 1]
+ POSTAL_ZONES_TWO_CHARS = [zone for zone in POSTAL_ZONES if len(zone) == 2]
+
+ postcode_formats = (
+ 'AN NEE',
+ 'ANN NEE',
+ 'PN NEE',
+ 'PNN NEE',
+ 'ANC NEE',
+ 'PND NEE',
+ )
+
+ _postcode_sets = {
+ ' ': ' ',
+ 'N': [str(i) for i in range(0, 10)],
+ 'A': POSTAL_ZONES_ONE_CHAR,
+ 'B': 'ABCDEFGHKLMNOPQRSTUVWXY',
+ 'C': 'ABCDEFGHJKSTUW',
+ 'D': 'ABEHMNPRVWXY',
+ 'E': 'ABDEFGHJLNPQRSTUWXYZ',
+ 'P': POSTAL_ZONES_TWO_CHARS,
+ }
city_formats = (
'{{city_prefix}} {{first_name}}{{city_suffix}}',
@@ -61,6 +98,17 @@
)
secondary_address_formats = ('Flat #', 'Flat ##', 'Flat ##?', 'Studio #', 'Studio ##', 'Studio ##?')
+ @classmethod
+ def postcode(cls):
+ """
+ See http://web.archive.org/web/20090930140939/http://www.govtalk.gov.uk/gdsc/html/noframes/PostCode-2-1-Release.htm
+ """
+ postcode = ''
+ pattern = cls.random_element(cls.postcode_formats)
+ for placeholder in pattern:
+ postcode += cls.random_element(cls._postcode_sets[placeholder])
+ return postcode
+
@classmethod
def city_prefix(cls):
return cls.random_element(cls.city_prefixes)
| {"golden_diff": "diff --git a/faker/providers/address/en_GB/__init__.py b/faker/providers/address/en_GB/__init__.py\n--- a/faker/providers/address/en_GB/__init__.py\n+++ b/faker/providers/address/en_GB/__init__.py\n@@ -40,7 +40,44 @@\n 'view', 'views', 'village', 'village', 'villages', 'ville', 'vista', 'vista', 'walk', 'walks', 'wall', 'way',\n 'ways', 'well', 'wells')\n \n- postcode_formats = ('??#? #??', '?#? #??', '?# #??', '?## #??', '??# #??', '??## #??',)\n+ POSTAL_ZONES = (\n+ 'AB', 'AL', 'B' , 'BA', 'BB', 'BD', 'BH', 'BL', 'BN', 'BR',\n+ 'BS', 'BT', 'CA', 'CB', 'CF', 'CH', 'CM', 'CO', 'CR', 'CT',\n+ 'CV', 'CW', 'DA', 'DD', 'DE', 'DG', 'DH', 'DL', 'DN', 'DT',\n+ 'DY', 'E' , 'EC', 'EH', 'EN', 'EX', 'FK', 'FY', 'G' , 'GL',\n+ 'GY', 'GU', 'HA', 'HD', 'HG', 'HP', 'HR', 'HS', 'HU', 'HX',\n+ 'IG', 'IM', 'IP', 'IV', 'JE', 'KA', 'KT', 'KW', 'KY', 'L' ,\n+ 'LA', 'LD', 'LE', 'LL', 'LN', 'LS', 'LU', 'M' , 'ME', 'MK',\n+ 'ML', 'N' , 'NE', 'NG', 'NN', 'NP', 'NR', 'NW', 'OL', 'OX',\n+ 'PA', 'PE', 'PH', 'PL', 'PO', 'PR', 'RG', 'RH', 'RM', 'S' ,\n+ 'SA', 'SE', 'SG', 'SK', 'SL', 'SM', 'SN', 'SO', 'SP', 'SR',\n+ 'SS', 'ST', 'SW', 'SY', 'TA', 'TD', 'TF', 'TN', 'TQ', 'TR',\n+ 'TS', 'TW', 'UB', 'W' , 'WA', 'WC', 'WD', 'WF', 'WN', 'WR',\n+ 'WS', 'WV', 'YO', 'ZE'\n+ )\n+\n+ POSTAL_ZONES_ONE_CHAR = [zone for zone in POSTAL_ZONES if len(zone) == 1]\n+ POSTAL_ZONES_TWO_CHARS = [zone for zone in POSTAL_ZONES if len(zone) == 2]\n+\n+ postcode_formats = (\n+ 'AN NEE',\n+ 'ANN NEE',\n+ 'PN NEE',\n+ 'PNN NEE',\n+ 'ANC NEE',\n+ 'PND NEE',\n+ )\n+\n+ _postcode_sets = {\n+ ' ': ' ',\n+ 'N': [str(i) for i in range(0, 10)],\n+ 'A': POSTAL_ZONES_ONE_CHAR,\n+ 'B': 'ABCDEFGHKLMNOPQRSTUVWXY',\n+ 'C': 'ABCDEFGHJKSTUW',\n+ 'D': 'ABEHMNPRVWXY',\n+ 'E': 'ABDEFGHJLNPQRSTUWXYZ',\n+ 'P': POSTAL_ZONES_TWO_CHARS,\n+ }\n \n city_formats = (\n '{{city_prefix}} {{first_name}}{{city_suffix}}',\n@@ -61,6 +98,17 @@\n )\n secondary_address_formats = ('Flat #', 'Flat ##', 'Flat ##?', 'Studio #', 'Studio ##', 'Studio ##?')\n \n+ @classmethod\n+ def postcode(cls):\n+ \"\"\"\n+ See http://web.archive.org/web/20090930140939/http://www.govtalk.gov.uk/gdsc/html/noframes/PostCode-2-1-Release.htm\n+ \"\"\"\n+ postcode = ''\n+ pattern = cls.random_element(cls.postcode_formats)\n+ for placeholder in pattern:\n+ postcode += cls.random_element(cls._postcode_sets[placeholder])\n+ return postcode\n+\n @classmethod\n def city_prefix(cls):\n return cls.random_element(cls.city_prefixes)\n", "issue": "some generated UK postcodes are invalid\nUK postcodes follow strict rules and there are a limited set of codes for each part of the postcode. Faker does not know about these rules and generates postcodes such as: `XC9E 1FL` and `U93 2ZU` which are invalid. See e.g. 
https://github.com/hamstah/ukpostcodeparser for more info.\n\n", "before_files": [{"content": "from __future__ import unicode_literals \nfrom ..en import Provider as AddressProvider\n\n\nclass Provider(AddressProvider):\n city_prefixes = ('North', 'East', 'West', 'South', 'New', 'Lake', 'Port')\n city_suffixes = (\n 'town', 'ton', 'land', 'ville', 'berg', 'burgh', 'borough', 'bury', 'view', 'port', 'mouth', 'stad', 'furt',\n 'chester', 'mouth', 'fort', 'haven', 'side', 'shire')\n building_number_formats = ('#', '##', '###')\n street_suffixes = (\n 'alley', 'avenue', 'branch', 'bridge', 'brook', 'brooks', 'burg', 'burgs', 'bypass', 'camp', 'canyon', 'cape',\n 'causeway', 'center', 'centers', 'circle', 'circles', 'cliff', 'cliffs', 'club', 'common', 'corner', 'corners',\n 'course', 'court', 'courts', 'cove', 'coves', 'creek', 'crescent', 'crest', 'crossing', 'crossroad', 'curve',\n 'dale', 'dam', 'divide', 'drive', 'drive', 'drives', 'estate', 'estates', 'expressway', 'extension',\n 'extensions',\n 'fall', 'falls', 'ferry', 'field', 'fields', 'flat', 'flats', 'ford', 'fords', 'forest', 'forge', 'forges',\n 'fork',\n 'forks', 'fort', 'freeway', 'garden', 'gardens', 'gateway', 'glen', 'glens', 'green', 'greens', 'grove',\n 'groves',\n 'harbor', 'harbors', 'haven', 'heights', 'highway', 'hill', 'hills', 'hollow', 'inlet', 'inlet', 'island',\n 'island',\n 'islands', 'islands', 'isle', 'isle', 'junction', 'junctions', 'key', 'keys', 'knoll', 'knolls', 'lake',\n 'lakes',\n 'land', 'landing', 'lane', 'light', 'lights', 'loaf', 'lock', 'locks', 'locks', 'lodge', 'lodge', 'loop',\n 'mall',\n 'manor', 'manors', 'meadow', 'meadows', 'mews', 'mill', 'mills', 'mission', 'mission', 'motorway', 'mount',\n 'mountain', 'mountain', 'mountains', 'mountains', 'neck', 'orchard', 'oval', 'overpass', 'park', 'parks',\n 'parkway',\n 'parkways', 'pass', 'passage', 'path', 'pike', 'pine', 'pines', 'place', 'plain', 'plains', 'plains', 'plaza',\n 'plaza', 'point', 'points', 'port', 'port', 'ports', 'ports', 'prairie', 'prairie', 'radial', 'ramp', 'ranch',\n 'rapid', 'rapids', 'rest', 'ridge', 'ridges', 'river', 'road', 'road', 'roads', 'roads', 'route', 'row', 'rue',\n 'run', 'shoal', 'shoals', 'shore', 'shores', 'skyway', 'spring', 'springs', 'springs', 'spur', 'spurs',\n 'square',\n 'square', 'squares', 'squares', 'station', 'station', 'stravenue', 'stravenue', 'stream', 'stream', 'street',\n 'street', 'streets', 'summit', 'summit', 'terrace', 'throughway', 'trace', 'track', 'trafficway', 'trail',\n 'trail',\n 'tunnel', 'tunnel', 'turnpike', 'turnpike', 'underpass', 'union', 'unions', 'valley', 'valleys', 'via',\n 'viaduct',\n 'view', 'views', 'village', 'village', 'villages', 'ville', 'vista', 'vista', 'walk', 'walks', 'wall', 'way',\n 'ways', 'well', 'wells')\n\n postcode_formats = ('??#? #??', '?#? 
#??', '?# #??', '?## #??', '??# #??', '??## #??',)\n\n city_formats = (\n '{{city_prefix}} {{first_name}}{{city_suffix}}',\n '{{city_prefix}} {{first_name}}',\n '{{first_name}}{{city_suffix}}',\n '{{last_name}}{{city_suffix}}',\n )\n street_name_formats = (\n '{{first_name}} {{street_suffix}}',\n '{{last_name}} {{street_suffix}}'\n )\n street_address_formats = (\n '{{building_number}} {{street_name}}',\n '{{secondary_address}}\\n{{street_name}}',\n )\n address_formats = (\n \"{{street_address}}\\n{{city}}\\n{{postcode}}\",\n )\n secondary_address_formats = ('Flat #', 'Flat ##', 'Flat ##?', 'Studio #', 'Studio ##', 'Studio ##?')\n\n @classmethod\n def city_prefix(cls):\n return cls.random_element(cls.city_prefixes)\n\n @classmethod\n def secondary_address(cls):\n return cls.bothify(cls.random_element(cls.secondary_address_formats))\n", "path": "faker/providers/address/en_GB/__init__.py"}]} | 1,972 | 1,002 |
gh_patches_debug_64108 | rasdani/github-patches | git_diff | facebookresearch__hydra-2242 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[Bug] Colorlog plugin generates `.log` file in cwd instead of output dir
# 🐛 Bug
I'm using hydra v1.2 with `chdir` set to false.
When I don't use colorlog plugin, the `.log` file with python logs gets generated in my output directory (as expected).
But when I attach colorlog plugin with:
```yaml
defaults:
- override hydra/hydra_logging: colorlog
- override hydra/job_logging: colorlog
```
The `.log` file gets generated in current working directory
## Checklist
- [x] I checked on the latest version of Hydra
- [ ] I created a minimal repro (See [this](https://stackoverflow.com/help/minimal-reproducible-example) for tips).
## Expected Behavior
I would expect the `.log` file to be always saved in output directory by default.
## System information
- **Hydra Version** : 1.2
- **Python version** : 3.10
- **Virtual environment type and version** :
- **Operating system** : linux
</issue>
<code>
[start of plugins/hydra_colorlog/hydra_plugins/hydra_colorlog/__init__.py]
1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2
3 __version__ = "1.2.0"
4
[end of plugins/hydra_colorlog/hydra_plugins/hydra_colorlog/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/plugins/hydra_colorlog/hydra_plugins/hydra_colorlog/__init__.py b/plugins/hydra_colorlog/hydra_plugins/hydra_colorlog/__init__.py
--- a/plugins/hydra_colorlog/hydra_plugins/hydra_colorlog/__init__.py
+++ b/plugins/hydra_colorlog/hydra_plugins/hydra_colorlog/__init__.py
@@ -1,3 +1,3 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
-__version__ = "1.2.0"
+__version__ = "1.2.1"
| {"golden_diff": "diff --git a/plugins/hydra_colorlog/hydra_plugins/hydra_colorlog/__init__.py b/plugins/hydra_colorlog/hydra_plugins/hydra_colorlog/__init__.py\n--- a/plugins/hydra_colorlog/hydra_plugins/hydra_colorlog/__init__.py\n+++ b/plugins/hydra_colorlog/hydra_plugins/hydra_colorlog/__init__.py\n@@ -1,3 +1,3 @@\n # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n \n-__version__ = \"1.2.0\"\n+__version__ = \"1.2.1\"\n", "issue": "[Bug] Colorlog plugin generates `.log` file in cwd instead of output dir\n# \ud83d\udc1b Bug\r\nI'm using hydra v1.2 with `chdir` set to false.\r\n\r\nWhen I don't use colorlog plugin, the `.log` file with python logs gets generated in my output directory (as expected).\r\n\r\nBut when I attach colorlog plugin with:\r\n```yaml\r\ndefaults:\r\n - override hydra/hydra_logging: colorlog\r\n - override hydra/job_logging: colorlog\r\n```\r\nThe `.log` file gets generated in current working directory\r\n\r\n## Checklist\r\n- [x] I checked on the latest version of Hydra\r\n- [ ] I created a minimal repro (See [this](https://stackoverflow.com/help/minimal-reproducible-example) for tips).\r\n\r\n## Expected Behavior\r\nI would expect the `.log` file to be always saved in output directory by default.\r\n\r\n## System information\r\n- **Hydra Version** : 1.2\r\n- **Python version** : 3.10\r\n- **Virtual environment type and version** : \r\n- **Operating system** : linux\r\n\r\n\n", "before_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n__version__ = \"1.2.0\"\n", "path": "plugins/hydra_colorlog/hydra_plugins/hydra_colorlog/__init__.py"}]} | 835 | 140 |
gh_patches_debug_40326 | rasdani/github-patches | git_diff | nextcloud__appstore-201 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Delete user account
A user should be able to delete his account by hitting and confirming it on the download page. The confirmation should not be able to trigger by accident, Github's delete repo ui is a good example.
Before deleting his account, a user will be warned that all his comments and apps will be deleted.
</issue>
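*Editorial aside — not part of the original issue text.* The repository's eventual fix (shown further below in this entry) gates deletion behind a form that makes the user re-type their own e-mail address, which is the "hard to trigger by accident" confirmation the issue asks for. A minimal Django-style sketch of that check:

```python
from django import forms
from django.utils.translation import ugettext_lazy as _


class DeleteAccountForm(forms.Form):
    """Ask the user to confirm deletion by typing their own e-mail address."""
    email = forms.EmailField(required=True, label=_('Your e-mail address'))

    def __init__(self, *args, **kwargs):
        # The view is expected to pass the logged-in user in as `user`.
        self.user = kwargs.pop('user', None)
        super().__init__(*args, **kwargs)

    def clean_email(self):
        email = self.cleaned_data.get('email')
        if self.user and self.user.email == email:
            return email
        raise forms.ValidationError(
            _('The given e-mail address does not match your e-mail address'))
```

Only when the form validates does the view delete `request.user`, so a stray click can never remove the account, comments, or apps by itself.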
<code>
[start of nextcloudappstore/urls.py]
1 from allauth.account.views import signup
2 from allauth.socialaccount.views import signup as social_signup
3 from csp.decorators import csp_exempt
4 from django.conf.urls import url, include
5 from django.contrib import admin
6 from nextcloudappstore.core.user.views import PasswordView, AccountView, \
7 APITokenView
8 from nextcloudappstore.core.views import CategoryAppListView, AppDetailView, \
9 app_description, AppReleasesView, AppUploadView, LegalNoticeView
10
11 urlpatterns = [
12 url(r'^$', CategoryAppListView.as_view(), {'id': None}, name='home'),
13 url(r"^signup/$", csp_exempt(signup), name="account_signup"),
14 url(r"^social/signup/$", csp_exempt(social_signup),
15 name="socialaccount_signup"),
16 url(r'^', include('allauth.urls')),
17 url(r'^account/?$', AccountView.as_view(), name='account'),
18 url(r'^account/password/?$', PasswordView.as_view(),
19 name='account-password'),
20 url(r'^account/token/?$', APITokenView.as_view(),
21 name='account-api-token'),
22 url(r'^legal/?$', LegalNoticeView.as_view(), name='legal-notice'),
23 url(r'^categories/(?P<id>[\w]*)/?$', CategoryAppListView.as_view(),
24 name='category-app-list'),
25 url(r'^app/upload/?$', AppUploadView.as_view(), name='app-upload'),
26 url(r'^app/(?P<id>[\w_]+)/?$', AppDetailView.as_view(), name='app-detail'),
27 url(r'^app/(?P<id>[\w_]+)/releases/?$', AppReleasesView.as_view(),
28 name='app-releases'),
29 url(r'^app/(?P<id>[\w_]+)/description/?$', app_description,
30 name='app-description'),
31 url(r'^api/', include('nextcloudappstore.core.api.urls',
32 namespace='api')),
33 url(r'^admin/', admin.site.urls),
34 ]
35
[end of nextcloudappstore/urls.py]
[start of nextcloudappstore/core/user/views.py]
1 from allauth.account.views import PasswordChangeView
2 from django.contrib import messages
3 from django.contrib.auth.mixins import LoginRequiredMixin
4 from django.contrib.auth.models import User
5 from django.core.urlresolvers import reverse_lazy
6 from django.views.generic import TemplateView
7 from django.views.generic import UpdateView
8
9
10 class AccountView(LoginRequiredMixin, UpdateView):
11 """Display and allow changing of the user's name."""
12
13 template_name = 'user/account.html'
14 template_name_suffix = ''
15 model = User
16 fields = ['first_name', 'last_name']
17 success_url = reverse_lazy('account')
18
19 def get_context_data(self, **kwargs):
20 context = super().get_context_data(**kwargs)
21 context['acc_page'] = 'account'
22 return context
23
24 def form_valid(self, form):
25 messages.success(self.request, 'Name saved.')
26 return super().form_valid(form)
27
28 def get_object(self, queryset=None):
29 return self.request.user
30
31
32 class PasswordView(LoginRequiredMixin, PasswordChangeView):
33 """Allow the user to change their password."""
34
35 template_name = 'user/password.html'
36 success_url = reverse_lazy('account-password')
37
38 def get_context_data(self, **kwargs):
39 context = super().get_context_data(**kwargs)
40 context['acc_page'] = 'password'
41 return context
42
43
44 class APITokenView(LoginRequiredMixin, TemplateView):
45 """Display the user's API token, and allow it to be regenerated."""
46
47 template_name = 'user/api-token.html'
48
49 def get_context_data(self, **kwargs):
50 context = super().get_context_data(**kwargs)
51 context['acc_page'] = 'api-token'
52 return context
53
[end of nextcloudappstore/core/user/views.py]
[start of nextcloudappstore/core/user/forms.py]
1 from django import forms
2 from captcha.fields import ReCaptchaField
3
4
5 class SignupFormRecaptcha(forms.Form):
6 """integrate a recaptcha field."""
7 recaptcha = ReCaptchaField()
8
9 def signup(self, request, user):
10 pass
11
[end of nextcloudappstore/core/user/forms.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/nextcloudappstore/core/user/forms.py b/nextcloudappstore/core/user/forms.py
--- a/nextcloudappstore/core/user/forms.py
+++ b/nextcloudappstore/core/user/forms.py
@@ -1,5 +1,7 @@
from django import forms
from captcha.fields import ReCaptchaField
+from django.forms import EmailField
+from django.utils.translation import ugettext_lazy as _
class SignupFormRecaptcha(forms.Form):
@@ -8,3 +10,19 @@
def signup(self, request, user):
pass
+
+
+class DeleteAccountForm(forms.Form):
+ email = EmailField(required=True, label=_('Your e-mail address'))
+
+ def __init__(self, *args, **kwargs):
+ self.user = kwargs.pop('user', None)
+ super().__init__(*args, **kwargs)
+
+ def clean_email(self):
+ email = self.cleaned_data.get('email')
+ if self.user and self.user.email == email:
+ return email
+ else:
+ raise forms.ValidationError(_(
+ 'The given e-mail address does not match your e-mail address'))
diff --git a/nextcloudappstore/core/user/views.py b/nextcloudappstore/core/user/views.py
--- a/nextcloudappstore/core/user/views.py
+++ b/nextcloudappstore/core/user/views.py
@@ -3,9 +3,30 @@
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse_lazy
+from django.shortcuts import redirect, render
from django.views.generic import TemplateView
from django.views.generic import UpdateView
+from nextcloudappstore.core.user.forms import DeleteAccountForm
+
+
+class DeleteAccountView(LoginRequiredMixin, TemplateView):
+ template_name = 'user/delete-account.html'
+
+ def get_context_data(self, **kwargs):
+ context = super().get_context_data(**kwargs)
+ context['form'] = DeleteAccountForm()
+ context['acc_page'] = 'delete-account'
+ return context
+
+ def post(self, request, *args, **kwargs):
+ form = DeleteAccountForm(request.POST, user=request.user)
+ if form.is_valid():
+ request.user.delete()
+ return redirect(reverse_lazy('home'))
+ else:
+ return render(request, self.template_name, {'form': form})
+
class AccountView(LoginRequiredMixin, UpdateView):
"""Display and allow changing of the user's name."""
diff --git a/nextcloudappstore/urls.py b/nextcloudappstore/urls.py
--- a/nextcloudappstore/urls.py
+++ b/nextcloudappstore/urls.py
@@ -4,7 +4,7 @@
from django.conf.urls import url, include
from django.contrib import admin
from nextcloudappstore.core.user.views import PasswordView, AccountView, \
- APITokenView
+ APITokenView, DeleteAccountView
from nextcloudappstore.core.views import CategoryAppListView, AppDetailView, \
app_description, AppReleasesView, AppUploadView, LegalNoticeView
@@ -19,6 +19,8 @@
name='account-password'),
url(r'^account/token/?$', APITokenView.as_view(),
name='account-api-token'),
+ url(r'^account/delete/?$', DeleteAccountView.as_view(),
+ name='account-deletion'),
url(r'^legal/?$', LegalNoticeView.as_view(), name='legal-notice'),
url(r'^categories/(?P<id>[\w]*)/?$', CategoryAppListView.as_view(),
name='category-app-list'),
| {"golden_diff": "diff --git a/nextcloudappstore/core/user/forms.py b/nextcloudappstore/core/user/forms.py\n--- a/nextcloudappstore/core/user/forms.py\n+++ b/nextcloudappstore/core/user/forms.py\n@@ -1,5 +1,7 @@\n from django import forms\n from captcha.fields import ReCaptchaField\n+from django.forms import EmailField\n+from django.utils.translation import ugettext_lazy as _\n \n \n class SignupFormRecaptcha(forms.Form):\n@@ -8,3 +10,19 @@\n \n def signup(self, request, user):\n pass\n+\n+\n+class DeleteAccountForm(forms.Form):\n+ email = EmailField(required=True, label=_('Your e-mail address'))\n+\n+ def __init__(self, *args, **kwargs):\n+ self.user = kwargs.pop('user', None)\n+ super().__init__(*args, **kwargs)\n+\n+ def clean_email(self):\n+ email = self.cleaned_data.get('email')\n+ if self.user and self.user.email == email:\n+ return email\n+ else:\n+ raise forms.ValidationError(_(\n+ 'The given e-mail address does not match your e-mail address'))\ndiff --git a/nextcloudappstore/core/user/views.py b/nextcloudappstore/core/user/views.py\n--- a/nextcloudappstore/core/user/views.py\n+++ b/nextcloudappstore/core/user/views.py\n@@ -3,9 +3,30 @@\n from django.contrib.auth.mixins import LoginRequiredMixin\n from django.contrib.auth.models import User\n from django.core.urlresolvers import reverse_lazy\n+from django.shortcuts import redirect, render\n from django.views.generic import TemplateView\n from django.views.generic import UpdateView\n \n+from nextcloudappstore.core.user.forms import DeleteAccountForm\n+\n+\n+class DeleteAccountView(LoginRequiredMixin, TemplateView):\n+ template_name = 'user/delete-account.html'\n+\n+ def get_context_data(self, **kwargs):\n+ context = super().get_context_data(**kwargs)\n+ context['form'] = DeleteAccountForm()\n+ context['acc_page'] = 'delete-account'\n+ return context\n+\n+ def post(self, request, *args, **kwargs):\n+ form = DeleteAccountForm(request.POST, user=request.user)\n+ if form.is_valid():\n+ request.user.delete()\n+ return redirect(reverse_lazy('home'))\n+ else:\n+ return render(request, self.template_name, {'form': form})\n+\n \n class AccountView(LoginRequiredMixin, UpdateView):\n \"\"\"Display and allow changing of the user's name.\"\"\"\ndiff --git a/nextcloudappstore/urls.py b/nextcloudappstore/urls.py\n--- a/nextcloudappstore/urls.py\n+++ b/nextcloudappstore/urls.py\n@@ -4,7 +4,7 @@\n from django.conf.urls import url, include\n from django.contrib import admin\n from nextcloudappstore.core.user.views import PasswordView, AccountView, \\\n- APITokenView\n+ APITokenView, DeleteAccountView\n from nextcloudappstore.core.views import CategoryAppListView, AppDetailView, \\\n app_description, AppReleasesView, AppUploadView, LegalNoticeView\n \n@@ -19,6 +19,8 @@\n name='account-password'),\n url(r'^account/token/?$', APITokenView.as_view(),\n name='account-api-token'),\n+ url(r'^account/delete/?$', DeleteAccountView.as_view(),\n+ name='account-deletion'),\n url(r'^legal/?$', LegalNoticeView.as_view(), name='legal-notice'),\n url(r'^categories/(?P<id>[\\w]*)/?$', CategoryAppListView.as_view(),\n name='category-app-list'),\n", "issue": "Delete user account\nA user should be able to delete his account by hitting and confirming it on the download page. 
The confirmation should not be able to trigger by accident, Github's delete repo ui is a good example.\n\nBefore deleting his account, a user will be warned that all his comments and apps will be deleted.\n\n", "before_files": [{"content": "from allauth.account.views import signup\nfrom allauth.socialaccount.views import signup as social_signup\nfrom csp.decorators import csp_exempt\nfrom django.conf.urls import url, include\nfrom django.contrib import admin\nfrom nextcloudappstore.core.user.views import PasswordView, AccountView, \\\n APITokenView\nfrom nextcloudappstore.core.views import CategoryAppListView, AppDetailView, \\\n app_description, AppReleasesView, AppUploadView, LegalNoticeView\n\nurlpatterns = [\n url(r'^$', CategoryAppListView.as_view(), {'id': None}, name='home'),\n url(r\"^signup/$\", csp_exempt(signup), name=\"account_signup\"),\n url(r\"^social/signup/$\", csp_exempt(social_signup),\n name=\"socialaccount_signup\"),\n url(r'^', include('allauth.urls')),\n url(r'^account/?$', AccountView.as_view(), name='account'),\n url(r'^account/password/?$', PasswordView.as_view(),\n name='account-password'),\n url(r'^account/token/?$', APITokenView.as_view(),\n name='account-api-token'),\n url(r'^legal/?$', LegalNoticeView.as_view(), name='legal-notice'),\n url(r'^categories/(?P<id>[\\w]*)/?$', CategoryAppListView.as_view(),\n name='category-app-list'),\n url(r'^app/upload/?$', AppUploadView.as_view(), name='app-upload'),\n url(r'^app/(?P<id>[\\w_]+)/?$', AppDetailView.as_view(), name='app-detail'),\n url(r'^app/(?P<id>[\\w_]+)/releases/?$', AppReleasesView.as_view(),\n name='app-releases'),\n url(r'^app/(?P<id>[\\w_]+)/description/?$', app_description,\n name='app-description'),\n url(r'^api/', include('nextcloudappstore.core.api.urls',\n namespace='api')),\n url(r'^admin/', admin.site.urls),\n]\n", "path": "nextcloudappstore/urls.py"}, {"content": "from allauth.account.views import PasswordChangeView\nfrom django.contrib import messages\nfrom django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.contrib.auth.models import User\nfrom django.core.urlresolvers import reverse_lazy\nfrom django.views.generic import TemplateView\nfrom django.views.generic import UpdateView\n\n\nclass AccountView(LoginRequiredMixin, UpdateView):\n \"\"\"Display and allow changing of the user's name.\"\"\"\n\n template_name = 'user/account.html'\n template_name_suffix = ''\n model = User\n fields = ['first_name', 'last_name']\n success_url = reverse_lazy('account')\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['acc_page'] = 'account'\n return context\n\n def form_valid(self, form):\n messages.success(self.request, 'Name saved.')\n return super().form_valid(form)\n\n def get_object(self, queryset=None):\n return self.request.user\n\n\nclass PasswordView(LoginRequiredMixin, PasswordChangeView):\n \"\"\"Allow the user to change their password.\"\"\"\n\n template_name = 'user/password.html'\n success_url = reverse_lazy('account-password')\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['acc_page'] = 'password'\n return context\n\n\nclass APITokenView(LoginRequiredMixin, TemplateView):\n \"\"\"Display the user's API token, and allow it to be regenerated.\"\"\"\n\n template_name = 'user/api-token.html'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['acc_page'] = 'api-token'\n return context\n", "path": "nextcloudappstore/core/user/views.py"}, 
{"content": "from django import forms\nfrom captcha.fields import ReCaptchaField\n\n\nclass SignupFormRecaptcha(forms.Form):\n \"\"\"integrate a recaptcha field.\"\"\"\n recaptcha = ReCaptchaField()\n\n def signup(self, request, user):\n pass\n", "path": "nextcloudappstore/core/user/forms.py"}]} | 1,646 | 794 |
gh_patches_debug_27720 | rasdani/github-patches | git_diff | scikit-hep__pyhf-383 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Use Binder Build API for Builds in PRs
# Description
After a discussion with @minrk and @betatim on the jupyterhub/binder Gitter, it was made clear that the use of Selenium in [`binder/trigger_binder.py`](https://github.com/diana-hep/pyhf/blob/c81f6007309f4c13241f9efac187594337d0bd08/binder/trigger_binder.py) (and the script itself) is unnecessary. Instead a simple API call can be made just using Python's `webbrowser` with an [endpoint of the form `https://mybinder.org/build/gh/owner/repo/ref`](https://gitter.im/jupyterhub/binder?at=5c2f87038dafa715c73ff54f) as can be [seen in the Binder Hub demo](https://github.com/jupyterhub/binderhub/blob/9ca8fa68bb8b69c6a2736f2275279583073f314f/examples/binder-api.py#L28) (thanks Tim for the link).
So, for example
```
python -m webbrowser "https://mybinder.org/build/gh/diana-hep/pyhf/master"
```
So asking [WWKHTD](https://github.com/kelseyhightower/nocode), this means that `binder/trigger_binder.py` is unnecessary and should be removed and `.travis.yml` should be updated to use the API calls.
</issue>
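*Editorial aside — not part of the original issue text.* The issue's own suggestion really does reduce to a couple of lines of standard-library Python; a sketch using the repository/ref named in the issue:

```python
import webbrowser

# Equivalent to: python -m webbrowser "https://mybinder.org/build/gh/diana-hep/pyhf/master"
binder_build_url = "https://mybinder.org/build/gh/{owner}/{repo}/{ref}".format(
    owner="diana-hep", repo="pyhf", ref="master"
)
webbrowser.open(binder_build_url)

# In a headless CI job, an HTTP client call such as
# urllib.request.urlopen(binder_build_url) would trigger the same build
# without opening a browser window.
```

Which is why the accepted change below simply deletes `binder/trigger_binder.py` rather than patching it.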
<code>
[start of binder/trigger_binder.py]
1 #!/usr/bin/env python
2
3 import argparse
4 from contextlib import contextmanager
5 from selenium import webdriver
6 from selenium.webdriver.chrome.options import Options
7 from selenium.webdriver.support.ui import WebDriverWait
8 from selenium.webdriver.support.expected_conditions import staleness_of
9
10
11 class SeleniumSession:
12 def __init__(self, args):
13 self.options = Options()
14 self.options.set_headless()
15 self.options.add_argument('--no-sandbox')
16 if args.chromedriver_path is not None:
17 self.browser = webdriver.Chrome(
18 args.chromedriver_path, chrome_options=self.options
19 )
20 else:
21 self.browser = webdriver.Chrome(chrome_options=self.options)
22
23 @contextmanager
24 def wait_for_page_load(self, timeout=20):
25 old_page = self.browser.find_element_by_tag_name('html')
26 yield
27 WebDriverWait(self.browser, timeout).until(staleness_of(old_page))
28
29 def trigger_binder(self, url):
30 with self.wait_for_page_load():
31 self.browser.get(url)
32
33
34 def main(args):
35 driver = SeleniumSession(args)
36 if args.is_verbose:
37 print('Chrome Headless Browser Invoked')
38 driver.trigger_binder(args.url)
39
40
41 if __name__ == '__main__':
42 parser = argparse.ArgumentParser()
43 parser.add_argument(
44 '-v',
45 '--verbose',
46 dest='is_verbose',
47 action='store_true',
48 help='Print out more information',
49 )
50 parser.add_argument(
51 '--chromedriver-path',
52 dest='chromedriver_path',
53 type=str,
54 default=None,
55 help='System path to ChromeDriver',
56 )
57 parser.add_argument(
58 '--url', dest='url', type=str, default=None, help='URL for Selinium to open'
59 )
60 args = parser.parse_args()
61
62 main(args)
63
[end of binder/trigger_binder.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/binder/trigger_binder.py b/binder/trigger_binder.py
deleted file mode 100644
--- a/binder/trigger_binder.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python
-
-import argparse
-from contextlib import contextmanager
-from selenium import webdriver
-from selenium.webdriver.chrome.options import Options
-from selenium.webdriver.support.ui import WebDriverWait
-from selenium.webdriver.support.expected_conditions import staleness_of
-
-
-class SeleniumSession:
- def __init__(self, args):
- self.options = Options()
- self.options.set_headless()
- self.options.add_argument('--no-sandbox')
- if args.chromedriver_path is not None:
- self.browser = webdriver.Chrome(
- args.chromedriver_path, chrome_options=self.options
- )
- else:
- self.browser = webdriver.Chrome(chrome_options=self.options)
-
- @contextmanager
- def wait_for_page_load(self, timeout=20):
- old_page = self.browser.find_element_by_tag_name('html')
- yield
- WebDriverWait(self.browser, timeout).until(staleness_of(old_page))
-
- def trigger_binder(self, url):
- with self.wait_for_page_load():
- self.browser.get(url)
-
-
-def main(args):
- driver = SeleniumSession(args)
- if args.is_verbose:
- print('Chrome Headless Browser Invoked')
- driver.trigger_binder(args.url)
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
- parser.add_argument(
- '-v',
- '--verbose',
- dest='is_verbose',
- action='store_true',
- help='Print out more information',
- )
- parser.add_argument(
- '--chromedriver-path',
- dest='chromedriver_path',
- type=str,
- default=None,
- help='System path to ChromeDriver',
- )
- parser.add_argument(
- '--url', dest='url', type=str, default=None, help='URL for Selinium to open'
- )
- args = parser.parse_args()
-
- main(args)
| {"golden_diff": "diff --git a/binder/trigger_binder.py b/binder/trigger_binder.py\ndeleted file mode 100644\n--- a/binder/trigger_binder.py\n+++ /dev/null\n@@ -1,62 +0,0 @@\n-#!/usr/bin/env python\n-\n-import argparse\n-from contextlib import contextmanager\n-from selenium import webdriver\n-from selenium.webdriver.chrome.options import Options\n-from selenium.webdriver.support.ui import WebDriverWait\n-from selenium.webdriver.support.expected_conditions import staleness_of\n-\n-\n-class SeleniumSession:\n- def __init__(self, args):\n- self.options = Options()\n- self.options.set_headless()\n- self.options.add_argument('--no-sandbox')\n- if args.chromedriver_path is not None:\n- self.browser = webdriver.Chrome(\n- args.chromedriver_path, chrome_options=self.options\n- )\n- else:\n- self.browser = webdriver.Chrome(chrome_options=self.options)\n-\n- @contextmanager\n- def wait_for_page_load(self, timeout=20):\n- old_page = self.browser.find_element_by_tag_name('html')\n- yield\n- WebDriverWait(self.browser, timeout).until(staleness_of(old_page))\n-\n- def trigger_binder(self, url):\n- with self.wait_for_page_load():\n- self.browser.get(url)\n-\n-\n-def main(args):\n- driver = SeleniumSession(args)\n- if args.is_verbose:\n- print('Chrome Headless Browser Invoked')\n- driver.trigger_binder(args.url)\n-\n-\n-if __name__ == '__main__':\n- parser = argparse.ArgumentParser()\n- parser.add_argument(\n- '-v',\n- '--verbose',\n- dest='is_verbose',\n- action='store_true',\n- help='Print out more information',\n- )\n- parser.add_argument(\n- '--chromedriver-path',\n- dest='chromedriver_path',\n- type=str,\n- default=None,\n- help='System path to ChromeDriver',\n- )\n- parser.add_argument(\n- '--url', dest='url', type=str, default=None, help='URL for Selinium to open'\n- )\n- args = parser.parse_args()\n-\n- main(args)\n", "issue": "Use Binder Build API for Builds in PRs\n# Description\r\n\r\nAfter a discussion with @minrk and @betatim on the jupyterhub/binder Gitter, it was made clear that the use of Selenium in [`binder/trigger_binder.py`](https://github.com/diana-hep/pyhf/blob/c81f6007309f4c13241f9efac187594337d0bd08/binder/trigger_binder.py) (and the script itself) is unnecessary. 
Instead a simple API call can be made just using Python's `webbrowser` with an [endpoint of the form `https://mybinder.org/build/gh/owner/repo/ref`](https://gitter.im/jupyterhub/binder?at=5c2f87038dafa715c73ff54f) as can be [seen in the Binder Hub demo](https://github.com/jupyterhub/binderhub/blob/9ca8fa68bb8b69c6a2736f2275279583073f314f/examples/binder-api.py#L28) (thanks Tim for the link).\r\n\r\nSo, for example\r\n\r\n```\r\npython -m webbrowser \"https://mybinder.org/build/gh/diana-hep/pyhf/master\"\r\n```\r\n\r\nSo asking [WWKHTD](https://github.com/kelseyhightower/nocode), this means that `binder/trigger_binder.py` is unnecessary and should be removed and `.travis.yml` should be updated to use the API calls.\n", "before_files": [{"content": "#!/usr/bin/env python\n\nimport argparse\nfrom contextlib import contextmanager\nfrom selenium import webdriver\nfrom selenium.webdriver.chrome.options import Options\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom selenium.webdriver.support.expected_conditions import staleness_of\n\n\nclass SeleniumSession:\n def __init__(self, args):\n self.options = Options()\n self.options.set_headless()\n self.options.add_argument('--no-sandbox')\n if args.chromedriver_path is not None:\n self.browser = webdriver.Chrome(\n args.chromedriver_path, chrome_options=self.options\n )\n else:\n self.browser = webdriver.Chrome(chrome_options=self.options)\n\n @contextmanager\n def wait_for_page_load(self, timeout=20):\n old_page = self.browser.find_element_by_tag_name('html')\n yield\n WebDriverWait(self.browser, timeout).until(staleness_of(old_page))\n\n def trigger_binder(self, url):\n with self.wait_for_page_load():\n self.browser.get(url)\n\n\ndef main(args):\n driver = SeleniumSession(args)\n if args.is_verbose:\n print('Chrome Headless Browser Invoked')\n driver.trigger_binder(args.url)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument(\n '-v',\n '--verbose',\n dest='is_verbose',\n action='store_true',\n help='Print out more information',\n )\n parser.add_argument(\n '--chromedriver-path',\n dest='chromedriver_path',\n type=str,\n default=None,\n help='System path to ChromeDriver',\n )\n parser.add_argument(\n '--url', dest='url', type=str, default=None, help='URL for Selinium to open'\n )\n args = parser.parse_args()\n\n main(args)\n", "path": "binder/trigger_binder.py"}]} | 1,388 | 477 |
gh_patches_debug_14777 | rasdani/github-patches | git_diff | Mailu__Mailu-1941 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Letsencrypt Force Renewal
Is there a limit on the Subject Alt Name entries?
I have updated my /mailu/mailu.env "HOSTNAMES" variable, but when I restart Mailu it doesn't update the Subject Alt Names on the mailu cert.
Previously it worked, so I am guessing that I need to force Letsencrypt to refresh, as it isn't within the renewal window. But there is no guidance for the new letsencrypt certbot.
I am using the latest Mailu version (1.7) and this is the command I am using to restart mailu '/mailu/docker-compose -p mailu up -d'
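
For illustration, a minimal sketch of how the certbot invocation that Mailu builds in `core/nginx/letsencrypt.py` (the `command` list in the module below) could be asked to reissue the certificate when the HOSTNAMES list changes. The flag comes from certbot's own CLI; whether it is the right fit for Mailu's wrapper is an assumption, not something confirmed here.

```python
# Sketch, not a confirmed fix: reissue the "mailu" certificate whenever the
# requested domain list no longer matches the existing certificate.
# "--force-renewal" would be the blunter one-off alternative.
command.insert(command.index("--config-dir"), "--renew-with-new-domains")
```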
</issue>
<code>
[start of core/nginx/letsencrypt.py]
1 #!/usr/bin/python3
2
3 import os
4 import time
5 import subprocess
6
7 command = [
8 "certbot",
9 "-n", "--agree-tos", # non-interactive
10 "-d", os.environ["HOSTNAMES"],
11 "-m", "{}@{}".format(os.environ["POSTMASTER"], os.environ["DOMAIN"]),
12 "certonly", "--standalone",
13 "--cert-name", "mailu",
14 "--preferred-challenges", "http", "--http-01-port", "8008",
15 "--keep-until-expiring",
16 "--config-dir", "/certs/letsencrypt",
17 "--post-hook", "/config.py"
18 ]
19 command2 = [
20 "certbot",
21 "-n", "--agree-tos", # non-interactive
22 "-d", os.environ["HOSTNAMES"],
23 "-m", "{}@{}".format(os.environ["POSTMASTER"], os.environ["DOMAIN"]),
24 "certonly", "--standalone",
25 "--cert-name", "mailu-ecdsa",
26 "--preferred-challenges", "http", "--http-01-port", "8008",
27 "--keep-until-expiring",
28 "--key-type", "ecdsa",
29 "--config-dir", "/certs/letsencrypt",
30 "--post-hook", "/config.py"
31 ]
32
33 def format_for_nginx(fullchain, output):
34 """ We may want to strip ISRG Root X1 out
35 """
36 certs = []
37 with open(fullchain, 'r') as pem:
38 cert = ''
39 for line in pem:
40 cert += line
41 if '-----END CERTIFICATE-----' in line:
42 certs += [cert]
43 cert = ''
44 with open(output, 'w') as pem:
45 for cert in certs[:-1] if len(certs)>2 and os.getenv('LETSENCRYPT_SHORTCHAIN', default="False") else certs:
46 pem.write(cert)
47
48 # Wait for nginx to start
49 time.sleep(5)
50
51 # Run certbot every day
52 while True:
53 subprocess.call(command)
54 format_for_nginx('/certs/letsencrypt/live/mailu/fullchain.pem', '/certs/letsencrypt/live/mailu/nginx-chain.pem')
55 subprocess.call(command2)
56 format_for_nginx('/certs/letsencrypt/live/mailu-ecdsa/fullchain.pem', '/certs/letsencrypt/live/mailu-ecdsa/nginx-chain.pem')
57 time.sleep(86400)
58
[end of core/nginx/letsencrypt.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/core/nginx/letsencrypt.py b/core/nginx/letsencrypt.py
--- a/core/nginx/letsencrypt.py
+++ b/core/nginx/letsencrypt.py
@@ -13,6 +13,7 @@
"--cert-name", "mailu",
"--preferred-challenges", "http", "--http-01-port", "8008",
"--keep-until-expiring",
+ "--renew-with-new-domains",
"--config-dir", "/certs/letsencrypt",
"--post-hook", "/config.py"
]
@@ -26,6 +27,7 @@
"--preferred-challenges", "http", "--http-01-port", "8008",
"--keep-until-expiring",
"--key-type", "ecdsa",
+ "--renew-with-new-domains",
"--config-dir", "/certs/letsencrypt",
"--post-hook", "/config.py"
]
| {"golden_diff": "diff --git a/core/nginx/letsencrypt.py b/core/nginx/letsencrypt.py\n--- a/core/nginx/letsencrypt.py\n+++ b/core/nginx/letsencrypt.py\n@@ -13,6 +13,7 @@\n \"--cert-name\", \"mailu\",\n \"--preferred-challenges\", \"http\", \"--http-01-port\", \"8008\",\n \"--keep-until-expiring\",\n+ \"--renew-with-new-domains\",\n \"--config-dir\", \"/certs/letsencrypt\",\n \"--post-hook\", \"/config.py\"\n ]\n@@ -26,6 +27,7 @@\n \"--preferred-challenges\", \"http\", \"--http-01-port\", \"8008\",\n \"--keep-until-expiring\",\n \"--key-type\", \"ecdsa\",\n+ \"--renew-with-new-domains\",\n \"--config-dir\", \"/certs/letsencrypt\",\n \"--post-hook\", \"/config.py\"\n ]\n", "issue": "Letsencrypt Force Renewal\nIs there a limit on the Subject Alt Name entries?\r\n\r\nI have updated my /mailu/mailu.env \"HOSTNAMES\" variable, but when I restart Mailu it doesn't update the Subject Alt Names on the mailu cert.\r\n\r\nPreviously it has worked, so I am guessing that I need to force Letsencrypt to refresh as it isnt within the renewal window. But there is no guidance for the new letsencrypt certbot.\r\n\r\nI am using the latest Mailu version (1.7) and this is the command I am using to restart mailu '/mailu/docker-compose -p mailu up -d'\nLetsencrypt Force Renewal\nIs there a limit on the Subject Alt Name entries?\r\n\r\nI have updated my /mailu/mailu.env \"HOSTNAMES\" variable, but when I restart Mailu it doesn't update the Subject Alt Names on the mailu cert.\r\n\r\nPreviously it has worked, so I am guessing that I need to force Letsencrypt to refresh as it isnt within the renewal window. But there is no guidance for the new letsencrypt certbot.\r\n\r\nI am using the latest Mailu version (1.7) and this is the command I am using to restart mailu '/mailu/docker-compose -p mailu up -d'\n", "before_files": [{"content": "#!/usr/bin/python3\n\nimport os\nimport time\nimport subprocess\n\ncommand = [\n \"certbot\",\n \"-n\", \"--agree-tos\", # non-interactive\n \"-d\", os.environ[\"HOSTNAMES\"],\n \"-m\", \"{}@{}\".format(os.environ[\"POSTMASTER\"], os.environ[\"DOMAIN\"]),\n \"certonly\", \"--standalone\",\n \"--cert-name\", \"mailu\",\n \"--preferred-challenges\", \"http\", \"--http-01-port\", \"8008\",\n \"--keep-until-expiring\",\n \"--config-dir\", \"/certs/letsencrypt\",\n \"--post-hook\", \"/config.py\"\n]\ncommand2 = [\n \"certbot\",\n \"-n\", \"--agree-tos\", # non-interactive\n \"-d\", os.environ[\"HOSTNAMES\"],\n \"-m\", \"{}@{}\".format(os.environ[\"POSTMASTER\"], os.environ[\"DOMAIN\"]),\n \"certonly\", \"--standalone\",\n \"--cert-name\", \"mailu-ecdsa\",\n \"--preferred-challenges\", \"http\", \"--http-01-port\", \"8008\",\n \"--keep-until-expiring\",\n \"--key-type\", \"ecdsa\",\n \"--config-dir\", \"/certs/letsencrypt\",\n \"--post-hook\", \"/config.py\"\n]\n\ndef format_for_nginx(fullchain, output):\n \"\"\" We may want to strip ISRG Root X1 out\n \"\"\"\n certs = []\n with open(fullchain, 'r') as pem:\n cert = ''\n for line in pem:\n cert += line\n if '-----END CERTIFICATE-----' in line:\n certs += [cert]\n cert = ''\n with open(output, 'w') as pem:\n for cert in certs[:-1] if len(certs)>2 and os.getenv('LETSENCRYPT_SHORTCHAIN', default=\"False\") else certs:\n pem.write(cert)\n\n# Wait for nginx to start\ntime.sleep(5)\n\n# Run certbot every day\nwhile True:\n subprocess.call(command)\n format_for_nginx('/certs/letsencrypt/live/mailu/fullchain.pem', '/certs/letsencrypt/live/mailu/nginx-chain.pem')\n subprocess.call(command2)\n 
format_for_nginx('/certs/letsencrypt/live/mailu-ecdsa/fullchain.pem', '/certs/letsencrypt/live/mailu-ecdsa/nginx-chain.pem')\n time.sleep(86400)\n", "path": "core/nginx/letsencrypt.py"}]} | 1,424 | 203 |
gh_patches_debug_13333 | rasdani/github-patches | git_diff | DDMAL__CantusDB-156 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
need to have fields of chant and sequence models synced
Otherwise it leads to errors, such as not being able to retrieve chants from /chant-search.
Please fix.
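
To make the requirement concrete, here is a hedged sketch of a parity check between the two models; it assumes both `Chant` and `Sequence` are importable from `main_app.models`, which this snippet does not verify.

```python
# Sketch: fail loudly when the Chant and Sequence models drift apart.
from main_app.models import Chant, Sequence  # assumed import path

chant_fields = {f.name for f in Chant._meta.fields}        # local concrete fields
sequence_fields = {f.name for f in Sequence._meta.fields}
# The symmetric difference names exactly the fields missing on either side.
assert chant_fields == sequence_fields, chant_fields ^ sequence_fields
```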
</issue>
<code>
[start of django/cantusdb_project/main_app/models/sequence.py]
1 from django.contrib.postgres.search import SearchVectorField
2 from django.db import models
3 from main_app.models import BaseModel
4 from users.models import User
5
6
7 class Sequence(BaseModel):
8 visible_status = models.CharField(max_length=1, blank=True, null=True)
9 title = models.CharField(blank=True, null=True, max_length=255)
10 incipit = models.CharField(blank=True, null=True, max_length=255)
11 siglum = models.CharField(blank=True, null=True, max_length=255)
12 folio = models.CharField(blank=True, null=True, max_length=255)
13 sequence = models.CharField(blank=True, null=True, max_length=255)
14 genre = models.ForeignKey("Genre", blank=True, null=True, on_delete=models.PROTECT)
15 rubrics = models.CharField(blank=True, null=True, max_length=255)
16 analecta_hymnica = models.CharField(blank=True, null=True, max_length=255)
17 indexing_notes = models.TextField(blank=True, null=True)
18 date = models.CharField(blank=True, null=True, max_length=255)
19 col1 = models.CharField(blank=True, null=True, max_length=255)
20 col2 = models.CharField(blank=True, null=True, max_length=255)
21 col3 = models.CharField(blank=True, null=True, max_length=255)
22 ah_volume = models.CharField(blank=True, null=True, max_length=255)
23 source = models.ForeignKey(
24 "Source", on_delete=models.PROTECT, blank=True, null=True
25 )
26 cantus_id = models.CharField(blank=True, null=True, max_length=255)
27 image_link = models.URLField(blank=True, null=True)
28 json_info = models.JSONField(null=True, blank=True)
29
30 # The following fields (dummy fields) are just for harmonizing the chant and sequence models to have the same fields
31 # They should never be populated or displayed
32 # The order of the fields must be exactly the same between the seq and chant models
33 marginalia = models.CharField(max_length=63, null=True, blank=True)
34 sequence_number = models.PositiveIntegerField(
35 help_text='Each folio starts with "1"', null=True, blank=True
36 )
37 office = models.ForeignKey(
38 "Office", on_delete=models.PROTECT, null=True, blank=True
39 )
40 position = models.CharField(max_length=63, null=True, blank=True)
41 feast = models.ForeignKey("Feast", on_delete=models.PROTECT, null=True, blank=True)
42 mode = models.CharField(max_length=63, null=True, blank=True)
43 differentia = models.CharField(blank=True, null=True, max_length=63)
44 finalis = models.CharField(blank=True, null=True, max_length=63)
45 extra = models.CharField(blank=True, null=True, max_length=63)
46 chant_range = models.CharField(
47 blank=True,
48 null=True,
49 help_text='Example: "1-c-k-4". Optional field',
50 max_length=255,
51 )
52 addendum = models.CharField(blank=True, null=True, max_length=255)
53 manuscript_full_text_std_spelling = models.TextField(
54 help_text="Manuscript full text with standardized spelling. Enter the words "
55 "according to the manuscript but normalize their spellings following "
56 "Classical Latin forms. Use upper-case letters for proper nouns, "
57 'the first word of each chant, and the first word after "Alleluia" for '
58 "Mass Alleluias. Punctuation is omitted.",
59 null=True,
60 blank=True,
61 )
62 manuscript_full_text_std_proofread = models.BooleanField(blank=True, null=True)
63 manuscript_full_text = models.TextField(
64 help_text="Enter the wording, word order and spellings as found in the manuscript"
65 ", with abbreviations resolved to standard words. Use upper-case letters as found"
66 " in the source. Retain “Xpistum” (Christum), “Ihc” (Jesus) and other instances of "
67 "Greek characters with their closest approximations of Latin letters. Some punctuation"
68 " signs and vertical dividing lines | are employed in this field. Repetenda and psalm "
69 "cues can also be recorded here. For more information, contact Cantus Database staff.",
70 null=True,
71 blank=True,
72 )
73 manuscript_full_text_proofread = models.BooleanField(blank=True, null=True)
74 manuscript_syllabized_full_text = models.TextField(null=True, blank=True)
75 volpiano = models.TextField(null=True, blank=True)
76 volpiano_proofread = models.BooleanField(blank=True, null=True)
77 volpiano_notes = models.TextField(null=True, blank=True)
78 volpiano_intervals = models.TextField(null=True, blank=True)
79 # volpiano_intervals = ArrayField(base_field=models.IntegerField(), null=True, blank=True)
80 cao_concordances = models.CharField(blank=True, null=True, max_length=63)
81 proofread_by = models.ForeignKey(
82 User, on_delete=models.PROTECT, null=True, blank=True
83 )
84 melody_id = models.CharField(blank=True, null=True, max_length=63)
85 search_vector = SearchVectorField(null=True, editable=False)
86 content_structure = models.CharField(
87 blank=True,
88 null=True,
89 max_length=64,
90 help_text="Additional folio number field, if folio numbers appear on the leaves but are not in the 'binding order'.",
91 )
92
[end of django/cantusdb_project/main_app/models/sequence.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/django/cantusdb_project/main_app/models/sequence.py b/django/cantusdb_project/main_app/models/sequence.py
--- a/django/cantusdb_project/main_app/models/sequence.py
+++ b/django/cantusdb_project/main_app/models/sequence.py
@@ -41,6 +41,7 @@
feast = models.ForeignKey("Feast", on_delete=models.PROTECT, null=True, blank=True)
mode = models.CharField(max_length=63, null=True, blank=True)
differentia = models.CharField(blank=True, null=True, max_length=63)
+ differentia_id = models.CharField(blank=True, null=True, max_length=12)
finalis = models.CharField(blank=True, null=True, max_length=63)
extra = models.CharField(blank=True, null=True, max_length=63)
chant_range = models.CharField(
| {"golden_diff": "diff --git a/django/cantusdb_project/main_app/models/sequence.py b/django/cantusdb_project/main_app/models/sequence.py\n--- a/django/cantusdb_project/main_app/models/sequence.py\n+++ b/django/cantusdb_project/main_app/models/sequence.py\n@@ -41,6 +41,7 @@\n feast = models.ForeignKey(\"Feast\", on_delete=models.PROTECT, null=True, blank=True)\n mode = models.CharField(max_length=63, null=True, blank=True)\n differentia = models.CharField(blank=True, null=True, max_length=63)\n+ differentia_id = models.CharField(blank=True, null=True, max_length=12)\n finalis = models.CharField(blank=True, null=True, max_length=63)\n extra = models.CharField(blank=True, null=True, max_length=63)\n chant_range = models.CharField(\n", "issue": "need to have fields of chant and sequence models synced \nor else, it'll lead to errors, such as not being able to retrieve chants from /chant-search\r\nplease fix\n", "before_files": [{"content": "from django.contrib.postgres.search import SearchVectorField\nfrom django.db import models\nfrom main_app.models import BaseModel\nfrom users.models import User\n\n\nclass Sequence(BaseModel):\n visible_status = models.CharField(max_length=1, blank=True, null=True)\n title = models.CharField(blank=True, null=True, max_length=255)\n incipit = models.CharField(blank=True, null=True, max_length=255)\n siglum = models.CharField(blank=True, null=True, max_length=255)\n folio = models.CharField(blank=True, null=True, max_length=255)\n sequence = models.CharField(blank=True, null=True, max_length=255)\n genre = models.ForeignKey(\"Genre\", blank=True, null=True, on_delete=models.PROTECT)\n rubrics = models.CharField(blank=True, null=True, max_length=255)\n analecta_hymnica = models.CharField(blank=True, null=True, max_length=255)\n indexing_notes = models.TextField(blank=True, null=True)\n date = models.CharField(blank=True, null=True, max_length=255)\n col1 = models.CharField(blank=True, null=True, max_length=255)\n col2 = models.CharField(blank=True, null=True, max_length=255)\n col3 = models.CharField(blank=True, null=True, max_length=255)\n ah_volume = models.CharField(blank=True, null=True, max_length=255)\n source = models.ForeignKey(\n \"Source\", on_delete=models.PROTECT, blank=True, null=True\n )\n cantus_id = models.CharField(blank=True, null=True, max_length=255)\n image_link = models.URLField(blank=True, null=True)\n json_info = models.JSONField(null=True, blank=True)\n\n # The following fields (dummy fields) are just for harmonizing the chant and sequence models to have the same fields\n # They should never be populated or displayed\n # The order of the fields must be exactly the same between the seq and chant models\n marginalia = models.CharField(max_length=63, null=True, blank=True)\n sequence_number = models.PositiveIntegerField(\n help_text='Each folio starts with \"1\"', null=True, blank=True\n )\n office = models.ForeignKey(\n \"Office\", on_delete=models.PROTECT, null=True, blank=True\n )\n position = models.CharField(max_length=63, null=True, blank=True)\n feast = models.ForeignKey(\"Feast\", on_delete=models.PROTECT, null=True, blank=True)\n mode = models.CharField(max_length=63, null=True, blank=True)\n differentia = models.CharField(blank=True, null=True, max_length=63)\n finalis = models.CharField(blank=True, null=True, max_length=63)\n extra = models.CharField(blank=True, null=True, max_length=63)\n chant_range = models.CharField(\n blank=True,\n null=True,\n help_text='Example: \"1-c-k-4\". 
Optional field',\n max_length=255,\n )\n addendum = models.CharField(blank=True, null=True, max_length=255)\n manuscript_full_text_std_spelling = models.TextField(\n help_text=\"Manuscript full text with standardized spelling. Enter the words \"\n \"according to the manuscript but normalize their spellings following \"\n \"Classical Latin forms. Use upper-case letters for proper nouns, \"\n 'the first word of each chant, and the first word after \"Alleluia\" for '\n \"Mass Alleluias. Punctuation is omitted.\",\n null=True,\n blank=True,\n )\n manuscript_full_text_std_proofread = models.BooleanField(blank=True, null=True)\n manuscript_full_text = models.TextField(\n help_text=\"Enter the wording, word order and spellings as found in the manuscript\"\n \", with abbreviations resolved to standard words. Use upper-case letters as found\"\n \" in the source. Retain \u201cXpistum\u201d (Christum), \u201cIhc\u201d (Jesus) and other instances of \"\n \"Greek characters with their closest approximations of Latin letters. Some punctuation\"\n \" signs and vertical dividing lines | are employed in this field. Repetenda and psalm \"\n \"cues can also be recorded here. For more information, contact Cantus Database staff.\",\n null=True,\n blank=True,\n )\n manuscript_full_text_proofread = models.BooleanField(blank=True, null=True)\n manuscript_syllabized_full_text = models.TextField(null=True, blank=True)\n volpiano = models.TextField(null=True, blank=True)\n volpiano_proofread = models.BooleanField(blank=True, null=True)\n volpiano_notes = models.TextField(null=True, blank=True)\n volpiano_intervals = models.TextField(null=True, blank=True)\n # volpiano_intervals = ArrayField(base_field=models.IntegerField(), null=True, blank=True)\n cao_concordances = models.CharField(blank=True, null=True, max_length=63)\n proofread_by = models.ForeignKey(\n User, on_delete=models.PROTECT, null=True, blank=True\n )\n melody_id = models.CharField(blank=True, null=True, max_length=63)\n search_vector = SearchVectorField(null=True, editable=False)\n content_structure = models.CharField(\n blank=True,\n null=True,\n max_length=64,\n help_text=\"Additional folio number field, if folio numbers appear on the leaves but are not in the 'binding order'.\",\n )\n", "path": "django/cantusdb_project/main_app/models/sequence.py"}]} | 1,937 | 195 |
gh_patches_debug_28234 | rasdani/github-patches | git_diff | quantumlib__Cirq-3054 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Support multi-qubit measurements in `cirq.plot_state_histogram`
Quote from the docstring: "Currently this function assumes each measurement gate applies to only a single qubit."
Currently, I get an incorrect histogram if I didn't read the docstring and used a multi-qubit measurement (I always make circuits like this):
```python
qubits = cirq.LineQubit.range(3)
c = cirq.Circuit(
(cirq.X**0.4).on_each(*qubits),
cirq.measure(*qubits), # One multi-qubit measurement
)
cirq.plot_state_histogram(cirq.sample(c, repetitions=10000))
# Incorrect output, no warning or error
```

If I use single-qubit measurement gates, I get the expected histogram:
```python
qubits = cirq.LineQubit.range(3)
c = cirq.Circuit(
(cirq.X**0.4).on_each(*qubits),
cirq.measure_each(*qubits), # One measurement per qubit
)
cirq.plot_state_histogram(cirq.sample(c, repetitions=10000))
```

This looks like it could be fixed by adding some logic to `plot_state_histogram` (https://github.com/quantumlib/Cirq/blob/master/cirq/study/visualize.py#L22) that checks for multi-qubit measurements and either correctly interpret them or raise an error.
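
One possible shape for that logic, sketched in plain NumPy rather than taken from Cirq itself, is to stack every measurement key's results column-wise so that a multi-qubit key contributes one column per qubit:

```python
import numpy as np

def histogram_counts(result):
    # Each value in result.measurements has shape (repetitions, qubits_in_key);
    # hstack merges all keys into one (repetitions, total_qubits) boolean array.
    bits = np.hstack(list(result.measurements.values()))
    counts = np.zeros(2 ** bits.shape[1])
    for rep in bits:
        counts[int("".join(str(int(b)) for b in rep), 2)] += 1
    return counts
```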
</issue>
<code>
[start of cirq/study/visualize.py]
1 # Copyright 2018 The Cirq Developers
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Tool to visualize the results of a study."""
16
17 import numpy as np
18
19 from cirq.study import trial_result
20
21
22 def plot_state_histogram(result: trial_result.TrialResult) -> np.ndarray:
23 """Plot the state histogram from a single result with repetitions.
24
25 States is a bitstring representation of all the qubit states in a single
26 result.
27 Currently this function assumes each measurement gate applies to only
28 a single qubit.
29
30 Args:
31 result: The trial results to plot.
32
33 Returns:
34 The histogram. A list of values plotted on the y-axis.
35 """
36
37 # pyplot import is deferred because it requires a system dependency
38 # (python3-tk) that `python -m pip install cirq` can't handle for the user.
39 # This allows cirq to be usable without python3-tk.
40 import matplotlib.pyplot as plt
41
42 num_qubits = len(result.measurements.keys())
43 states = 2**num_qubits
44 values = np.zeros(states)
45
46 # measurements is a dict of {measurement gate key:
47 # array(repetitions, boolean result)}
48 # Convert this to an array of repetitions, each with an array of booleans.
49 # e.g. {q1: array([[True, True]]), q2: array([[False, False]])}
50 # --> array([[True, False], [True, False]])
51 measurement_by_result = np.array([
52 v.transpose()[0] for k, v in result.measurements.items()]).transpose()
53
54 for meas in measurement_by_result:
55 # Convert each array of booleans to a string representation.
56 # e.g. [True, False] -> [1, 0] -> '10' -> 2
57 state_ind = int(''.join([str(x) for x in [int(x) for x in meas]]), 2)
58 values[state_ind] += 1
59
60 plot_labels = [bin(x)[2:].zfill(num_qubits) for x in range(states)]
61 plt.bar(np.arange(states), values, tick_label=plot_labels)
62 plt.xlabel('qubit state')
63 plt.ylabel('result count')
64 plt.show()
65
66 return values
67
[end of cirq/study/visualize.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/cirq/study/visualize.py b/cirq/study/visualize.py
--- a/cirq/study/visualize.py
+++ b/cirq/study/visualize.py
@@ -24,8 +24,6 @@
States is a bitstring representation of all the qubit states in a single
result.
- Currently this function assumes each measurement gate applies to only
- a single qubit.
Args:
result: The trial results to plot.
@@ -39,17 +37,15 @@
# This allows cirq to be usable without python3-tk.
import matplotlib.pyplot as plt
- num_qubits = len(result.measurements.keys())
+ num_qubits = sum([value.shape[1] for value in result.measurements.values()])
states = 2**num_qubits
values = np.zeros(states)
-
# measurements is a dict of {measurement gate key:
# array(repetitions, boolean result)}
# Convert this to an array of repetitions, each with an array of booleans.
# e.g. {q1: array([[True, True]]), q2: array([[False, False]])}
# --> array([[True, False], [True, False]])
- measurement_by_result = np.array([
- v.transpose()[0] for k, v in result.measurements.items()]).transpose()
+ measurement_by_result = np.hstack(list(result.measurements.values()))
for meas in measurement_by_result:
# Convert each array of booleans to a string representation.
| {"golden_diff": "diff --git a/cirq/study/visualize.py b/cirq/study/visualize.py\n--- a/cirq/study/visualize.py\n+++ b/cirq/study/visualize.py\n@@ -24,8 +24,6 @@\n \n States is a bitstring representation of all the qubit states in a single\n result.\n- Currently this function assumes each measurement gate applies to only\n- a single qubit.\n \n Args:\n result: The trial results to plot.\n@@ -39,17 +37,15 @@\n # This allows cirq to be usable without python3-tk.\n import matplotlib.pyplot as plt\n \n- num_qubits = len(result.measurements.keys())\n+ num_qubits = sum([value.shape[1] for value in result.measurements.values()])\n states = 2**num_qubits\n values = np.zeros(states)\n-\n # measurements is a dict of {measurement gate key:\n # array(repetitions, boolean result)}\n # Convert this to an array of repetitions, each with an array of booleans.\n # e.g. {q1: array([[True, True]]), q2: array([[False, False]])}\n # --> array([[True, False], [True, False]])\n- measurement_by_result = np.array([\n- v.transpose()[0] for k, v in result.measurements.items()]).transpose()\n+ measurement_by_result = np.hstack(list(result.measurements.values()))\n \n for meas in measurement_by_result:\n # Convert each array of booleans to a string representation.\n", "issue": "Support multi-qubit measurements in `cirq.plot_state_histogram`\nQuote from the docstring: \"Currently this function assumes each measurement gate applies to only a single qubit.\"\r\n\r\nCurrently, I get an incorrect histogram if I didn't read the docstring and used a multi-qubit measurement (I always make circuits like this):\r\n```python\r\nqubits = cirq.LineQubit.range(3)\r\nc = cirq.Circuit(\r\n (cirq.X**0.4).on_each(*qubits),\r\n cirq.measure(*qubits), # One multi-qubit measurement\r\n)\r\ncirq.plot_state_histogram(cirq.sample(c, repetitions=10000))\r\n# Incorrect output, no warning or error\r\n```\r\n\r\n\r\nIf I use single-qubit measurement gates, I get the expected histogram:\r\n```python\r\nqubits = cirq.LineQubit.range(3)\r\nc = cirq.Circuit(\r\n (cirq.X**0.4).on_each(*qubits),\r\n cirq.measure_each(*qubits), # One measurement per qubit\r\n)\r\ncirq.plot_state_histogram(cirq.sample(c, repetitions=10000))\r\n```\r\n\r\n\r\nThis looks like it could be fixed by adding some logic to `plot_state_histogram` (https://github.com/quantumlib/Cirq/blob/master/cirq/study/visualize.py#L22) that checks for multi-qubit measurements and either correctly interpret them or raise an error.\n", "before_files": [{"content": "# Copyright 2018 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Tool to visualize the results of a study.\"\"\"\n\nimport numpy as np\n\nfrom cirq.study import trial_result\n\n\ndef plot_state_histogram(result: trial_result.TrialResult) -> np.ndarray:\n \"\"\"Plot the state histogram from a single result with repetitions.\n\n States is a bitstring representation of all the qubit states in a single\n result.\n Currently this function assumes each measurement gate applies to only\n a single qubit.\n\n 
Args:\n result: The trial results to plot.\n\n Returns:\n The histogram. A list of values plotted on the y-axis.\n \"\"\"\n\n # pyplot import is deferred because it requires a system dependency\n # (python3-tk) that `python -m pip install cirq` can't handle for the user.\n # This allows cirq to be usable without python3-tk.\n import matplotlib.pyplot as plt\n\n num_qubits = len(result.measurements.keys())\n states = 2**num_qubits\n values = np.zeros(states)\n\n # measurements is a dict of {measurement gate key:\n # array(repetitions, boolean result)}\n # Convert this to an array of repetitions, each with an array of booleans.\n # e.g. {q1: array([[True, True]]), q2: array([[False, False]])}\n # --> array([[True, False], [True, False]])\n measurement_by_result = np.array([\n v.transpose()[0] for k, v in result.measurements.items()]).transpose()\n\n for meas in measurement_by_result:\n # Convert each array of booleans to a string representation.\n # e.g. [True, False] -> [1, 0] -> '10' -> 2\n state_ind = int(''.join([str(x) for x in [int(x) for x in meas]]), 2)\n values[state_ind] += 1\n\n plot_labels = [bin(x)[2:].zfill(num_qubits) for x in range(states)]\n plt.bar(np.arange(states), values, tick_label=plot_labels)\n plt.xlabel('qubit state')\n plt.ylabel('result count')\n plt.show()\n\n return values\n", "path": "cirq/study/visualize.py"}]} | 1,696 | 345 |
gh_patches_debug_26493 | rasdani/github-patches | git_diff | Pylons__pyramid-2567 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
exception views squash the original exception if an exception view predicate is tested and fails
If you register an exception view with a predicate such as `config.add_view(view, context=Exception, request_method='GET')`, this will cause any errors from a POST request (without a matching exception view) to be squashed into a `PredicateMismatch` exception which will be propagated up the wsgi stack instead of the original exception.
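
A minimal reproduction, sketched from the description above rather than taken from Pyramid's test suite:

```python
from pyramid.config import Configurator

def boom(request):
    raise ValueError("original error")  # this is what a POST should surface

def handle_get_errors(context, request):
    request.response.status_int = 500
    return request.response

config = Configurator()
config.add_route("boom", "/boom")
config.add_view(boom, route_name="boom")
# The predicate only matches GET; a POST to /boom trips PredicateMismatch inside
# the excview tween and the ValueError above is lost.
config.add_view(handle_get_errors, context=Exception, request_method="GET")
app = config.make_wsgi_app()
```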
</issue>
<code>
[start of pyramid/tweens.py]
1 import sys
2
3 from pyramid.interfaces import (
4 IExceptionViewClassifier,
5 IRequest,
6 )
7
8 from zope.interface import providedBy
9 from pyramid.view import _call_view
10
11 def excview_tween_factory(handler, registry):
12 """ A :term:`tween` factory which produces a tween that catches an
13 exception raised by downstream tweens (or the main Pyramid request
14 handler) and, if possible, converts it into a Response using an
15 :term:`exception view`."""
16
17 def excview_tween(request):
18 attrs = request.__dict__
19 try:
20 response = handler(request)
21 except Exception as exc:
22 # WARNING: do not assign the result of sys.exc_info() to a local
23 # var here, doing so will cause a leak. We used to actually
24 # explicitly delete both "exception" and "exc_info" from ``attrs``
25 # in a ``finally:`` clause below, but now we do not because these
26 # attributes are useful to upstream tweens. This actually still
27 # apparently causes a reference cycle, but it is broken
28 # successfully by the garbage collector (see
29 # https://github.com/Pylons/pyramid/issues/1223).
30 attrs['exc_info'] = sys.exc_info()
31 attrs['exception'] = exc
32 # clear old generated request.response, if any; it may
33 # have been mutated by the view, and its state is not
34 # sane (e.g. caching headers)
35 if 'response' in attrs:
36 del attrs['response']
37 # we use .get instead of .__getitem__ below due to
38 # https://github.com/Pylons/pyramid/issues/700
39 request_iface = attrs.get('request_iface', IRequest)
40 provides = providedBy(exc)
41 response = _call_view(
42 registry,
43 request,
44 exc,
45 provides,
46 '',
47 view_classifier=IExceptionViewClassifier,
48 request_iface=request_iface.combined
49 )
50 if response is None:
51 raise
52
53 return response
54
55 return excview_tween
56
57 MAIN = 'MAIN'
58 INGRESS = 'INGRESS'
59 EXCVIEW = 'pyramid.tweens.excview_tween_factory'
60
[end of pyramid/tweens.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pyramid/tweens.py b/pyramid/tweens.py
--- a/pyramid/tweens.py
+++ b/pyramid/tweens.py
@@ -1,5 +1,7 @@
import sys
+from pyramid.compat import reraise
+from pyramid.exceptions import PredicateMismatch
from pyramid.interfaces import (
IExceptionViewClassifier,
IRequest,
@@ -38,17 +40,26 @@
# https://github.com/Pylons/pyramid/issues/700
request_iface = attrs.get('request_iface', IRequest)
provides = providedBy(exc)
- response = _call_view(
- registry,
- request,
- exc,
- provides,
- '',
- view_classifier=IExceptionViewClassifier,
- request_iface=request_iface.combined
- )
+ try:
+ response = _call_view(
+ registry,
+ request,
+ exc,
+ provides,
+ '',
+ view_classifier=IExceptionViewClassifier,
+ request_iface=request_iface.combined
+ )
+
+ # if views matched but did not pass predicates, squash the error
+ # and re-raise the original exception
+ except PredicateMismatch:
+ response = None
+
+ # re-raise the original exception as no exception views were
+ # able to handle the error
if response is None:
- raise
+ reraise(*attrs['exc_info'])
return response
| {"golden_diff": "diff --git a/pyramid/tweens.py b/pyramid/tweens.py\n--- a/pyramid/tweens.py\n+++ b/pyramid/tweens.py\n@@ -1,5 +1,7 @@\n import sys\n \n+from pyramid.compat import reraise\n+from pyramid.exceptions import PredicateMismatch\n from pyramid.interfaces import (\n IExceptionViewClassifier,\n IRequest,\n@@ -38,17 +40,26 @@\n # https://github.com/Pylons/pyramid/issues/700\n request_iface = attrs.get('request_iface', IRequest)\n provides = providedBy(exc)\n- response = _call_view(\n- registry,\n- request,\n- exc,\n- provides,\n- '',\n- view_classifier=IExceptionViewClassifier,\n- request_iface=request_iface.combined\n- )\n+ try:\n+ response = _call_view(\n+ registry,\n+ request,\n+ exc,\n+ provides,\n+ '',\n+ view_classifier=IExceptionViewClassifier,\n+ request_iface=request_iface.combined\n+ )\n+\n+ # if views matched but did not pass predicates, squash the error\n+ # and re-raise the original exception\n+ except PredicateMismatch:\n+ response = None\n+\n+ # re-raise the original exception as no exception views were\n+ # able to handle the error\n if response is None:\n- raise\n+ reraise(*attrs['exc_info'])\n \n return response\n", "issue": "exception views squash the original exception if an exception view predicate is tested and fails\nIf you register an exception view with a predicate such as `config.add_view(view, context=Exception, request_method='GET')`, this will cause any errors from a POST request (without a matching exception view) to be squashed into a `PredicateMismatch` exception which will be propagated up the wsgi stack instead of the original exception.\n\n", "before_files": [{"content": "import sys\n\nfrom pyramid.interfaces import (\n IExceptionViewClassifier,\n IRequest,\n )\n\nfrom zope.interface import providedBy\nfrom pyramid.view import _call_view\n\ndef excview_tween_factory(handler, registry):\n \"\"\" A :term:`tween` factory which produces a tween that catches an\n exception raised by downstream tweens (or the main Pyramid request\n handler) and, if possible, converts it into a Response using an\n :term:`exception view`.\"\"\"\n\n def excview_tween(request):\n attrs = request.__dict__\n try:\n response = handler(request)\n except Exception as exc:\n # WARNING: do not assign the result of sys.exc_info() to a local\n # var here, doing so will cause a leak. We used to actually\n # explicitly delete both \"exception\" and \"exc_info\" from ``attrs``\n # in a ``finally:`` clause below, but now we do not because these\n # attributes are useful to upstream tweens. This actually still\n # apparently causes a reference cycle, but it is broken\n # successfully by the garbage collector (see\n # https://github.com/Pylons/pyramid/issues/1223).\n attrs['exc_info'] = sys.exc_info()\n attrs['exception'] = exc\n # clear old generated request.response, if any; it may\n # have been mutated by the view, and its state is not\n # sane (e.g. caching headers)\n if 'response' in attrs:\n del attrs['response']\n # we use .get instead of .__getitem__ below due to\n # https://github.com/Pylons/pyramid/issues/700\n request_iface = attrs.get('request_iface', IRequest)\n provides = providedBy(exc)\n response = _call_view(\n registry,\n request,\n exc,\n provides,\n '',\n view_classifier=IExceptionViewClassifier,\n request_iface=request_iface.combined\n )\n if response is None:\n raise\n\n return response\n\n return excview_tween\n\nMAIN = 'MAIN'\nINGRESS = 'INGRESS'\nEXCVIEW = 'pyramid.tweens.excview_tween_factory'\n", "path": "pyramid/tweens.py"}]} | 1,211 | 326 |
gh_patches_debug_8056 | rasdani/github-patches | git_diff | googleapis__python-bigquery-80 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
chore: replace Artman with bazel for synthesizing code
The synthtool should start using bazel instead of Artman.
</issue>
<code>
[start of synth.py]
1 # Copyright 2018 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """This script is used to synthesize generated parts of this library."""
16
17 import synthtool as s
18 from synthtool import gcp
19
20 gapic = gcp.GAPICGenerator()
21 common = gcp.CommonTemplates()
22 version = 'v2'
23
24 library = gapic.py_library(
25 'bigquery',
26 version,
27 config_path='/google/cloud/bigquery/'
28 'artman_bigquery_v2.yaml',
29 artman_output_name='bigquery-v2',
30 include_protos=True,
31 )
32
33 s.move(
34 [
35 library / "google/cloud/bigquery_v2/gapic/enums.py",
36 library / "google/cloud/bigquery_v2/types.py",
37 library / "google/cloud/bigquery_v2/proto/location*",
38 library / "google/cloud/bigquery_v2/proto/encryption_config*",
39 library / "google/cloud/bigquery_v2/proto/model*",
40 library / "google/cloud/bigquery_v2/proto/standard_sql*",
41 ],
42 )
43
44 # Fix up proto docs that are missing summary line.
45 s.replace(
46 "google/cloud/bigquery_v2/proto/model_pb2.py",
47 '"""Attributes:',
48 '"""Protocol buffer.\n\n Attributes:',
49 )
50 s.replace(
51 "google/cloud/bigquery_v2/proto/encryption_config_pb2.py",
52 '"""Attributes:',
53 '"""Encryption configuration.\n\n Attributes:',
54 )
55
56 # Remove non-ascii characters from docstrings for Python 2.7.
57 # Format quoted strings as plain text.
58 s.replace("google/cloud/bigquery_v2/proto/*.py", "[“”]", '``')
59
60 # ----------------------------------------------------------------------------
61 # Add templated files
62 # ----------------------------------------------------------------------------
63 templated_files = common.py_library(cov_level=100)
64 s.move(templated_files, excludes=["noxfile.py"])
65
66 s.shell.run(["nox", "-s", "blacken"], hide_output=False)
67
[end of synth.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/synth.py b/synth.py
--- a/synth.py
+++ b/synth.py
@@ -17,16 +17,14 @@
import synthtool as s
from synthtool import gcp
-gapic = gcp.GAPICGenerator()
+gapic = gcp.GAPICBazel()
common = gcp.CommonTemplates()
version = 'v2'
library = gapic.py_library(
- 'bigquery',
- version,
- config_path='/google/cloud/bigquery/'
- 'artman_bigquery_v2.yaml',
- artman_output_name='bigquery-v2',
+ service='bigquery',
+ version=version,
+ bazel_target=f"//google/cloud/bigquery/{version}:bigquery-{version}-py",
include_protos=True,
)
| {"golden_diff": "diff --git a/synth.py b/synth.py\n--- a/synth.py\n+++ b/synth.py\n@@ -17,16 +17,14 @@\n import synthtool as s\n from synthtool import gcp\n \n-gapic = gcp.GAPICGenerator()\n+gapic = gcp.GAPICBazel()\n common = gcp.CommonTemplates()\n version = 'v2'\n \n library = gapic.py_library(\n- 'bigquery',\n- version,\n- config_path='/google/cloud/bigquery/'\n- 'artman_bigquery_v2.yaml',\n- artman_output_name='bigquery-v2',\n+ service='bigquery',\n+ version=version,\n+ bazel_target=f\"//google/cloud/bigquery/{version}:bigquery-{version}-py\",\n include_protos=True,\n )\n", "issue": "chore: replace Artman with bazel for synthesizing code\nThe synthtool should start using bazel instead of Artman.\n", "before_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"This script is used to synthesize generated parts of this library.\"\"\"\n\nimport synthtool as s\nfrom synthtool import gcp\n\ngapic = gcp.GAPICGenerator()\ncommon = gcp.CommonTemplates()\nversion = 'v2'\n\nlibrary = gapic.py_library(\n 'bigquery',\n version,\n config_path='/google/cloud/bigquery/'\n 'artman_bigquery_v2.yaml',\n artman_output_name='bigquery-v2',\n include_protos=True,\n)\n\ns.move(\n [\n library / \"google/cloud/bigquery_v2/gapic/enums.py\",\n library / \"google/cloud/bigquery_v2/types.py\",\n library / \"google/cloud/bigquery_v2/proto/location*\",\n library / \"google/cloud/bigquery_v2/proto/encryption_config*\",\n library / \"google/cloud/bigquery_v2/proto/model*\",\n library / \"google/cloud/bigquery_v2/proto/standard_sql*\",\n ],\n)\n\n# Fix up proto docs that are missing summary line.\ns.replace(\n \"google/cloud/bigquery_v2/proto/model_pb2.py\",\n '\"\"\"Attributes:',\n '\"\"\"Protocol buffer.\\n\\n Attributes:',\n)\ns.replace(\n \"google/cloud/bigquery_v2/proto/encryption_config_pb2.py\",\n '\"\"\"Attributes:',\n '\"\"\"Encryption configuration.\\n\\n Attributes:',\n)\n\n# Remove non-ascii characters from docstrings for Python 2.7.\n# Format quoted strings as plain text.\ns.replace(\"google/cloud/bigquery_v2/proto/*.py\", \"[\u201c\u201d]\", '``')\n\n# ----------------------------------------------------------------------------\n# Add templated files\n# ----------------------------------------------------------------------------\ntemplated_files = common.py_library(cov_level=100)\ns.move(templated_files, excludes=[\"noxfile.py\"])\n\ns.shell.run([\"nox\", \"-s\", \"blacken\"], hide_output=False)\n", "path": "synth.py"}]} | 1,201 | 180 |
gh_patches_debug_3011 | rasdani/github-patches | git_diff | readthedocs__readthedocs.org-10572 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Most recent available `mambaforge=4.10` is simply too old
Hello, I just wanted to ask whether a more modern version of `mambaforge` could be made available. The best and latest version that can be selected on RTD via the configuration file is 4.10, which is simply too old (at most conda 4.10 and mamba 0.19). Updating to a modern mamba doesn't work, as you can see from me changing the conf file in https://github.com/ESMValGroup/ESMValTool/pull/3310/files with the output in https://readthedocs.org/projects/esmvaltool/builds/21390633/: mamba is stuck at 0.19.0, which in turn slows the environment creation process down to around 10 minutes. (For more recent condas, updating mamba to something like >=1.4.8 works very well and updates conda to 23.3 or 23.4 too, but in this case the base versions are too old.) If you need any help whatsoever, I offer to help, and once more, many thanks for your great work on RTD :beer:
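
For context, a sketch of what registering a newer Mambaforge would look like in the mapping shown below; the exact asdf version string is an assumption, and per the comments in `constants_docker.py` the new tool version also has to be compiled and cached before it can be offered.

```python
# Sketch only: the version string must exist in `asdf list all python`.
RTD_DOCKER_BUILD_SETTINGS["tools"]["python"].update(
    {"mambaforge-22.9": "mambaforge-22.9.0-3"}
)
```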
</issue>
<code>
[start of readthedocs/builds/constants_docker.py]
1 """
2 Define constants here to allow import them without any external dependency.
3
4 There are situations where we want to have access to these values without Django installed
5 (e.g. common/dockerfiles/tasks.py)
6
7 Note these constants where previously defined as Django settings in ``readthedocs/settings/base.py``.
8 """
9
10 DOCKER_DEFAULT_IMAGE = "readthedocs/build"
11
12 # Adding a new tool/version to this setting requires:
13 #
14 # - a mapping between the expected version in the config file, to the full
15 # version installed via asdf (found via ``asdf list all <tool>``)
16 #
17 # - running the script ``./scripts/compile_version_upload.sh`` in
18 # development and production environments to compile and cache the new
19 # tool/version
20 #
21 # Note that when updating this options, you should also update the file:
22 # readthedocs/rtd_tests/fixtures/spec/v2/schema.json
23 RTD_DOCKER_BUILD_SETTINGS = {
24 # Mapping of build.os options to docker image.
25 "os": {
26 "ubuntu-20.04": f"{DOCKER_DEFAULT_IMAGE}:ubuntu-20.04",
27 "ubuntu-22.04": f"{DOCKER_DEFAULT_IMAGE}:ubuntu-22.04",
28 },
29 # Mapping of build.tools options to specific versions.
30 "tools": {
31 "python": {
32 "2.7": "2.7.18",
33 "3.6": "3.6.15",
34 "3.7": "3.7.17",
35 "3.8": "3.8.17",
36 "3.9": "3.9.17",
37 "3.10": "3.10.12",
38 "3.11": "3.11.4",
39 # Always point to the latest stable release.
40 "3": "3.11.4",
41 "miniconda3-4.7": "miniconda3-4.7.12",
42 "mambaforge-4.10": "mambaforge-4.10.3-10",
43 },
44 "nodejs": {
45 "14": "14.20.1",
46 "16": "16.18.1",
47 "18": "18.16.1", # LTS
48 "19": "19.0.1",
49 "20": "20.3.1",
50 },
51 "rust": {
52 "1.55": "1.55.0",
53 "1.61": "1.61.0",
54 "1.64": "1.64.0",
55 "1.70": "1.70.0",
56 },
57 "golang": {
58 "1.17": "1.17.13",
59 "1.18": "1.18.10",
60 "1.19": "1.19.10",
61 "1.20": "1.20.5",
62 },
63 },
64 }
65
[end of readthedocs/builds/constants_docker.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/readthedocs/builds/constants_docker.py b/readthedocs/builds/constants_docker.py
--- a/readthedocs/builds/constants_docker.py
+++ b/readthedocs/builds/constants_docker.py
@@ -40,6 +40,7 @@
"3": "3.11.4",
"miniconda3-4.7": "miniconda3-4.7.12",
"mambaforge-4.10": "mambaforge-4.10.3-10",
+ "mambaforge-22.9": "mambaforge-22.9.0-3",
},
"nodejs": {
"14": "14.20.1",
| {"golden_diff": "diff --git a/readthedocs/builds/constants_docker.py b/readthedocs/builds/constants_docker.py\n--- a/readthedocs/builds/constants_docker.py\n+++ b/readthedocs/builds/constants_docker.py\n@@ -40,6 +40,7 @@\n \"3\": \"3.11.4\",\n \"miniconda3-4.7\": \"miniconda3-4.7.12\",\n \"mambaforge-4.10\": \"mambaforge-4.10.3-10\",\n+ \"mambaforge-22.9\": \"mambaforge-22.9.0-3\",\n },\n \"nodejs\": {\n \"14\": \"14.20.1\",\n", "issue": "Most recent available `mambaforge=4.10` is simply too old\nHello guys, just wanted to ask you if it's possible to have a more modern version available for `mambaforge` - the best and latest available to be sourced on RTD via the configuration file is 4.10 which is simply too old (maximum conda 4.10 and mamba 0.19) - updating to a modern mamba doesn't work, as you can see from me changing the conf file in https://github.com/ESMValGroup/ESMValTool/pull/3310/files with output in https://readthedocs.org/projects/esmvaltool/builds/21390633/ - mamba is stuck at 0.19.0, which, in turn, slows down the environment creation process to around 10 minutes (for more recent conda's, updating mamba to something like >=1.4.8 works very well, and updates conda to 23.3 or 23.4 too, but in this case the base versions are too old). If you need any help whatsoever, I offer to help, and once more, many thanks for your great work on RTD :beer: \n", "before_files": [{"content": "\"\"\"\nDefine constants here to allow import them without any external dependency.\n\nThere are situations where we want to have access to these values without Django installed\n(e.g. common/dockerfiles/tasks.py)\n\nNote these constants where previously defined as Django settings in ``readthedocs/settings/base.py``.\n\"\"\"\n\nDOCKER_DEFAULT_IMAGE = \"readthedocs/build\"\n\n# Adding a new tool/version to this setting requires:\n#\n# - a mapping between the expected version in the config file, to the full\n# version installed via asdf (found via ``asdf list all <tool>``)\n#\n# - running the script ``./scripts/compile_version_upload.sh`` in\n# development and production environments to compile and cache the new\n# tool/version\n#\n# Note that when updating this options, you should also update the file:\n# readthedocs/rtd_tests/fixtures/spec/v2/schema.json\nRTD_DOCKER_BUILD_SETTINGS = {\n # Mapping of build.os options to docker image.\n \"os\": {\n \"ubuntu-20.04\": f\"{DOCKER_DEFAULT_IMAGE}:ubuntu-20.04\",\n \"ubuntu-22.04\": f\"{DOCKER_DEFAULT_IMAGE}:ubuntu-22.04\",\n },\n # Mapping of build.tools options to specific versions.\n \"tools\": {\n \"python\": {\n \"2.7\": \"2.7.18\",\n \"3.6\": \"3.6.15\",\n \"3.7\": \"3.7.17\",\n \"3.8\": \"3.8.17\",\n \"3.9\": \"3.9.17\",\n \"3.10\": \"3.10.12\",\n \"3.11\": \"3.11.4\",\n # Always point to the latest stable release.\n \"3\": \"3.11.4\",\n \"miniconda3-4.7\": \"miniconda3-4.7.12\",\n \"mambaforge-4.10\": \"mambaforge-4.10.3-10\",\n },\n \"nodejs\": {\n \"14\": \"14.20.1\",\n \"16\": \"16.18.1\",\n \"18\": \"18.16.1\", # LTS\n \"19\": \"19.0.1\",\n \"20\": \"20.3.1\",\n },\n \"rust\": {\n \"1.55\": \"1.55.0\",\n \"1.61\": \"1.61.0\",\n \"1.64\": \"1.64.0\",\n \"1.70\": \"1.70.0\",\n },\n \"golang\": {\n \"1.17\": \"1.17.13\",\n \"1.18\": \"1.18.10\",\n \"1.19\": \"1.19.10\",\n \"1.20\": \"1.20.5\",\n },\n },\n}\n", "path": "readthedocs/builds/constants_docker.py"}]} | 1,612 | 171 |
gh_patches_debug_18135 | rasdani/github-patches | git_diff | streamlink__streamlink-3484 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Turkuvaz Plugin missing Streams
Hi,
First of all, to be sure, I installed Streamlink 2.0.0 via Python 3 again.
After that I tested all streams one by one with turkuvaz.py.
Most of them are working; only 2 of the 9 channels are missing, both failing with "error: No plugin can handle URL".
A2 and A Haber TV:
https://www.atv.com.tr/a2tv/canli-yayin
https://www.ahaber.com.tr/video/canli-yayin
</issue>
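For a quick sanity check of the report, the two URLs can be tested against the plugin's URL pattern. The snippet below uses a copied-out version of the `_url_re` regex from the plugin code that follows; the constant name and the test loop are only illustrative:

```python
import re

# Copy of the pattern currently used by the Turkuvaz plugin (see code below).
URL_RE = re.compile(r"""(?x)https?://(?:www\.)?
    (?:
        (?:
            (atvavrupa)\.tv
            |
            (atv|a2tv|ahaber|aspor|minikago|minikacocuk|anews)\.com\.tr
        )/webtv/(?:live-broadcast|canli-yayin)
        |
        sabah\.com\.tr/(apara)/canli-yayin
    )""")

for url in ("https://www.atv.com.tr/a2tv/canli-yayin",
            "https://www.ahaber.com.tr/video/canli-yayin"):
    # Both print None: neither path goes through /webtv/..., which is why
    # Streamlink answers "No plugin can handle URL" for these two channels.
    print(url, URL_RE.match(url))
```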
<code>
[start of src/streamlink/plugins/turkuvaz.py]
1 import logging
2 import re
3
4 from streamlink.plugin import Plugin
5 from streamlink.plugin.api import useragents, validate
6 from streamlink.stream import HLSStream
7
8 log = logging.getLogger(__name__)
9
10
11 class Turkuvaz(Plugin):
12 """
13 Plugin to support ATV/A2TV Live streams from www.atv.com.tr and www.a2tv.com.tr
14 """
15
16 _url_re = re.compile(r"""(?x)https?://(?:www\.)?
17 (?:
18 (?:
19 (atvavrupa)\.tv
20 |
21 (atv|a2tv|ahaber|aspor|minikago|minikacocuk|anews)\.com\.tr
22 )/webtv/(?:live-broadcast|canli-yayin)
23 |
24 sabah\.com\.tr/(apara)/canli-yayin
25 )""")
26 _hls_url = "https://trkvz-live.ercdn.net/{channel}/{channel}.m3u8"
27 _token_url = "https://securevideotoken.tmgrup.com.tr/webtv/secure"
28 _token_schema = validate.Schema(validate.all(
29 {
30 "Success": True,
31 "Url": validate.url(),
32 },
33 validate.get("Url"))
34 )
35
36 @classmethod
37 def can_handle_url(cls, url):
38 return cls._url_re.match(url) is not None
39
40 def _get_streams(self):
41 url_m = self._url_re.match(self.url)
42 domain = url_m.group(1) or url_m.group(2) or url_m.group(3)
43 # remap the domain to channel
44 channel = {"atv": "atvhd",
45 "ahaber": "ahaberhd",
46 "apara": "aparahd",
47 "aspor": "asporhd",
48 "anews": "anewshd",
49 "minikacocuk": "minikagococuk"}.get(domain, domain)
50 hls_url = self._hls_url.format(channel=channel)
51 # get the secure HLS URL
52 res = self.session.http.get(self._token_url,
53 params="url={0}".format(hls_url),
54 headers={"Referer": self.url,
55 "User-Agent": useragents.CHROME})
56
57 secure_hls_url = self.session.http.json(res, schema=self._token_schema)
58
59 log.debug("Found HLS URL: {0}".format(secure_hls_url))
60 return HLSStream.parse_variant_playlist(self.session, secure_hls_url)
61
62
63 __plugin__ = Turkuvaz
64
[end of src/streamlink/plugins/turkuvaz.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/streamlink/plugins/turkuvaz.py b/src/streamlink/plugins/turkuvaz.py
--- a/src/streamlink/plugins/turkuvaz.py
+++ b/src/streamlink/plugins/turkuvaz.py
@@ -20,6 +20,10 @@
|
(atv|a2tv|ahaber|aspor|minikago|minikacocuk|anews)\.com\.tr
)/webtv/(?:live-broadcast|canli-yayin)
+ |
+ (ahaber)\.com\.tr/video/canli-yayin
+ |
+ atv\.com\.tr/(a2tv)/canli-yayin
|
sabah\.com\.tr/(apara)/canli-yayin
)""")
@@ -39,7 +43,7 @@
def _get_streams(self):
url_m = self._url_re.match(self.url)
- domain = url_m.group(1) or url_m.group(2) or url_m.group(3)
+ domain = url_m.group(1) or url_m.group(2) or url_m.group(3) or url_m.group(4) or url_m.group(5)
# remap the domain to channel
channel = {"atv": "atvhd",
"ahaber": "ahaberhd",
| {"golden_diff": "diff --git a/src/streamlink/plugins/turkuvaz.py b/src/streamlink/plugins/turkuvaz.py\n--- a/src/streamlink/plugins/turkuvaz.py\n+++ b/src/streamlink/plugins/turkuvaz.py\n@@ -20,6 +20,10 @@\n |\n (atv|a2tv|ahaber|aspor|minikago|minikacocuk|anews)\\.com\\.tr\n )/webtv/(?:live-broadcast|canli-yayin)\n+ |\n+ (ahaber)\\.com\\.tr/video/canli-yayin\n+ |\n+ atv\\.com\\.tr/(a2tv)/canli-yayin\n |\n sabah\\.com\\.tr/(apara)/canli-yayin\n )\"\"\")\n@@ -39,7 +43,7 @@\n \n def _get_streams(self):\n url_m = self._url_re.match(self.url)\n- domain = url_m.group(1) or url_m.group(2) or url_m.group(3)\n+ domain = url_m.group(1) or url_m.group(2) or url_m.group(3) or url_m.group(4) or url_m.group(5)\n # remap the domain to channel\n channel = {\"atv\": \"atvhd\",\n \"ahaber\": \"ahaberhd\",\n", "issue": "Turkuvaz Plugin missing Streams\nHi,\r\n\r\nfirst of all to be sure installed Streamlink 2.0.0 via Python3 again.\r\n\r\nAfter that tested all streams one by one with the turkuvaz.py\r\n\r\nMost of them are working, only 2 of 9 channels missing, \"error: No plugin can handle URL\"\r\n\r\nA2 and A Haber TV:\r\n\r\nhttps://www.atv.com.tr/a2tv/canli-yayin\r\nhttps://www.ahaber.com.tr/video/canli-yayin\r\n\r\n\n", "before_files": [{"content": "import logging\nimport re\n\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import useragents, validate\nfrom streamlink.stream import HLSStream\n\nlog = logging.getLogger(__name__)\n\n\nclass Turkuvaz(Plugin):\n \"\"\"\n Plugin to support ATV/A2TV Live streams from www.atv.com.tr and www.a2tv.com.tr\n \"\"\"\n\n _url_re = re.compile(r\"\"\"(?x)https?://(?:www\\.)?\n (?:\n (?:\n (atvavrupa)\\.tv\n |\n (atv|a2tv|ahaber|aspor|minikago|minikacocuk|anews)\\.com\\.tr\n )/webtv/(?:live-broadcast|canli-yayin)\n |\n sabah\\.com\\.tr/(apara)/canli-yayin\n )\"\"\")\n _hls_url = \"https://trkvz-live.ercdn.net/{channel}/{channel}.m3u8\"\n _token_url = \"https://securevideotoken.tmgrup.com.tr/webtv/secure\"\n _token_schema = validate.Schema(validate.all(\n {\n \"Success\": True,\n \"Url\": validate.url(),\n },\n validate.get(\"Url\"))\n )\n\n @classmethod\n def can_handle_url(cls, url):\n return cls._url_re.match(url) is not None\n\n def _get_streams(self):\n url_m = self._url_re.match(self.url)\n domain = url_m.group(1) or url_m.group(2) or url_m.group(3)\n # remap the domain to channel\n channel = {\"atv\": \"atvhd\",\n \"ahaber\": \"ahaberhd\",\n \"apara\": \"aparahd\",\n \"aspor\": \"asporhd\",\n \"anews\": \"anewshd\",\n \"minikacocuk\": \"minikagococuk\"}.get(domain, domain)\n hls_url = self._hls_url.format(channel=channel)\n # get the secure HLS URL\n res = self.session.http.get(self._token_url,\n params=\"url={0}\".format(hls_url),\n headers={\"Referer\": self.url,\n \"User-Agent\": useragents.CHROME})\n\n secure_hls_url = self.session.http.json(res, schema=self._token_schema)\n\n log.debug(\"Found HLS URL: {0}\".format(secure_hls_url))\n return HLSStream.parse_variant_playlist(self.session, secure_hls_url)\n\n\n__plugin__ = Turkuvaz\n", "path": "src/streamlink/plugins/turkuvaz.py"}]} | 1,333 | 306 |
gh_patches_debug_30776 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-223 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Marshall's
</issue>
<code>
[start of locations/spiders/marshalls.py]
1 import json
2 import re
3 import scrapy
4 from locations.items import GeojsonPointItem
5
6 STATES = ["AL", "AK", "AZ", "AR", "CA", "CO", "CT", "DC", "DE", "FL", "GA",
7 "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD",
8 "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ",
9 "NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "RI", "SC",
10 "SD", "TN", "TX", "UT", "VT", "VA", "WA", "WV", "WI", "WY"]
11
12 DAYS = {'Mon': 'Mo', 'Tue': 'Tu',
13 'Wed': 'We', 'Thu': 'Th',
14 'Fri': 'Fr', 'Sat': 'Sa',
15 'Sun': 'Su'}
16
17 URL = 'https://mktsvc.tjx.com/storelocator/GetSearchResultsByState'
18
19
20 def normalize_time(hours):
21
22 if not hours:
23 return ''
24
25 day_times = hours.split(',')
26 normalize_day_times = []
27
28 for day_time in day_times:
29 day, hours = [x.strip() for x in day_time.split(': ')]
30 normalize_hours = []
31
32 if re.search('-', day):
33 days = [x.strip() for x in day.split('-')]
34 norm_days = '-'.join([DAYS.get(x, '') for x in days])
35 else:
36 norm_days = DAYS.get(day, '')
37
38 if re.search('CLOSED', hours):
39 norm_hours = ' off'
40 normalize_hours.append(norm_hours)
41 else:
42 if re.search('-', hours):
43 hours = [x.strip() for x in hours.split('-')]
44
45 for hour in hours:
46
47 if hour[-1] == 'p':
48 if re.search(':', hour[:-1]):
49 hora, minute = [x.strip() for x in hour[:-1].split(':')]
50 if int(hora) < 12:
51 norm_hours = str(int(hora) + 12) + ':' + minute
52 else:
53 if int(hour[:-1]) < 12:
54 norm_hours = str(int(hour[:-1]) + 12) + ":00"
55
56 elif hour[-1] == 'a':
57 if re.search(':', hour[:-1]):
58 hora, minute = [x.strip() for x in hour[:-1].split(':')]
59 norm_hours = hora + ':' + minute
60 else:
61 norm_hours = hour[:-1] + ":00"
62
63 normalize_hours.append(norm_hours)
64
65 normalize_day_times.append(' '.join([norm_days, '-'.join(normalize_hours)]))
66 return '; '.join(normalize_day_times)
67
68
69 class MarshallsSpider(scrapy.Spider):
70
71 name = "marshalls"
72 allowed_domains = ["mktsvc.tjx.com", 'www.marshallsonline.com']
73
74 def start_requests(self):
75 url = URL
76
77 headers = {
78 'Accept-Language': 'en-US,en;q=0.8,ru;q=0.6',
79 'Origin': 'https://www.marshallsonline.com',
80 'Accept-Encoding': 'gzip, deflate, br',
81 'Accept': 'application/json, text/plain, */*',
82 'Referer': 'https://www.marshallsonline.com/store-finder/by-state',
83 'Connection': 'keep-alive',
84 'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
85 }
86
87 for state in STATES:
88 form_data = {'chain': '10', 'lang': 'en', 'state': state}
89
90 yield scrapy.http.FormRequest(url=url, method='POST', formdata=form_data,
91 headers=headers, callback=self.parse)
92
93 def parse(self, response):
94
95 data = json.loads(response.body_as_unicode())
96 stores = data.get('Stores', None)
97
98 for store in stores:
99 lon_lat = [store.pop('Longitude', None), store.pop('Latitude', None)]
100 store['ref'] = URL + str(store.get('StoreID', None))
101
102 opening_hours = normalize_time(store.get('Hours', ''))
103
104 if opening_hours:
105 store['opening_hours'] = opening_hours
106 store.pop('Hours', None)
107
108 yield GeojsonPointItem(
109 properties=store,
110 lon_lat=lon_lat
111 )
112
[end of locations/spiders/marshalls.py]
</code>
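For orientation, a small usage sketch of the `normalize_time` helper defined above. The input string is made up for illustration (the issue gives no sample data), and the import assumes the repository root is on the Python path:

```python
from locations.spiders.marshalls import normalize_time

# Hypothetical opening-hours string in the format the spider receives.
sample_hours = "Mon-Sat: 9:30a-9:30p, Sun: 11a-8p"

# Day ranges are mapped through DAYS and 12-hour times become 24-hour times;
# the expected output is "Mo-Sa 9:30-21:30; Su 11:00-20:00".
print(normalize_time(sample_hours))
```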
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/locations/spiders/marshalls.py b/locations/spiders/marshalls.py
--- a/locations/spiders/marshalls.py
+++ b/locations/spiders/marshalls.py
@@ -17,10 +17,20 @@
URL = 'https://mktsvc.tjx.com/storelocator/GetSearchResultsByState'
+NORMALIZE_KEYS = (
+ ('addr:full', ['Address', 'Address2']),
+ ('addr:city', ['City']),
+ ('addr:state', ['State']),
+ ('addr:postcode', ['Zip']),
+ ('addr:country', ['Country']),
+ ('phone', ['Phone']),
+ )
+
+
def normalize_time(hours):
if not hours:
- return ''
+ return ''
day_times = hours.split(',')
normalize_day_times = []
@@ -94,18 +104,23 @@
data = json.loads(response.body_as_unicode())
stores = data.get('Stores', None)
+ props = {}
for store in stores:
- lon_lat = [store.pop('Longitude', None), store.pop('Latitude', None)]
- store['ref'] = URL + str(store.get('StoreID', None))
+ lon_lat = [store.pop('Longitude', ''), store.pop('Latitude', None)]
+ props['ref'] = store.pop('StoreID', None)
+ props['website'] = URL
+
+ for new_key, old_keys in NORMALIZE_KEYS:
+ props[new_key] = ", ".join([store.pop(key, '').strip() for key in old_keys if store[key]])
- opening_hours = normalize_time(store.get('Hours', ''))
+ opening_hours = normalize_time(store.pop('Hours', ''))
if opening_hours:
- store['opening_hours'] = opening_hours
- store.pop('Hours', None)
+ props['opening_hours'] = opening_hours
+ props.pop('Hours', None)
yield GeojsonPointItem(
- properties=store,
+ properties=props,
lon_lat=lon_lat
)
| {"golden_diff": "diff --git a/locations/spiders/marshalls.py b/locations/spiders/marshalls.py\n--- a/locations/spiders/marshalls.py\n+++ b/locations/spiders/marshalls.py\n@@ -17,10 +17,20 @@\n URL = 'https://mktsvc.tjx.com/storelocator/GetSearchResultsByState'\n \n \n+NORMALIZE_KEYS = (\n+ ('addr:full', ['Address', 'Address2']),\n+ ('addr:city', ['City']),\n+ ('addr:state', ['State']),\n+ ('addr:postcode', ['Zip']),\n+ ('addr:country', ['Country']),\n+ ('phone', ['Phone']),\n+ )\n+\n+\n def normalize_time(hours):\n \n if not hours:\n- return ''\n+ return ''\n \n day_times = hours.split(',')\n normalize_day_times = []\n@@ -94,18 +104,23 @@\n \n data = json.loads(response.body_as_unicode())\n stores = data.get('Stores', None)\n+ props = {}\n \n for store in stores:\n- lon_lat = [store.pop('Longitude', None), store.pop('Latitude', None)]\n- store['ref'] = URL + str(store.get('StoreID', None))\n+ lon_lat = [store.pop('Longitude', ''), store.pop('Latitude', None)]\n+ props['ref'] = store.pop('StoreID', None)\n+ props['website'] = URL\n+\n+ for new_key, old_keys in NORMALIZE_KEYS:\n+ props[new_key] = \", \".join([store.pop(key, '').strip() for key in old_keys if store[key]])\n \n- opening_hours = normalize_time(store.get('Hours', ''))\n+ opening_hours = normalize_time(store.pop('Hours', ''))\n \n if opening_hours:\n- store['opening_hours'] = opening_hours\n- store.pop('Hours', None)\n+ props['opening_hours'] = opening_hours\n+ props.pop('Hours', None)\n \n yield GeojsonPointItem(\n- properties=store,\n+ properties=props,\n lon_lat=lon_lat\n )\n", "issue": "Marshall's\n\n", "before_files": [{"content": "import json\nimport re\nimport scrapy\nfrom locations.items import GeojsonPointItem\n\nSTATES = [\"AL\", \"AK\", \"AZ\", \"AR\", \"CA\", \"CO\", \"CT\", \"DC\", \"DE\", \"FL\", \"GA\",\n \"HI\", \"ID\", \"IL\", \"IN\", \"IA\", \"KS\", \"KY\", \"LA\", \"ME\", \"MD\",\n \"MA\", \"MI\", \"MN\", \"MS\", \"MO\", \"MT\", \"NE\", \"NV\", \"NH\", \"NJ\",\n \"NM\", \"NY\", \"NC\", \"ND\", \"OH\", \"OK\", \"OR\", \"PA\", \"RI\", \"SC\",\n \"SD\", \"TN\", \"TX\", \"UT\", \"VT\", \"VA\", \"WA\", \"WV\", \"WI\", \"WY\"]\n\nDAYS = {'Mon': 'Mo', 'Tue': 'Tu',\n 'Wed': 'We', 'Thu': 'Th',\n 'Fri': 'Fr', 'Sat': 'Sa',\n 'Sun': 'Su'}\n\nURL = 'https://mktsvc.tjx.com/storelocator/GetSearchResultsByState'\n\n\ndef normalize_time(hours):\n\n if not hours:\n return ''\n\n day_times = hours.split(',')\n normalize_day_times = []\n\n for day_time in day_times:\n day, hours = [x.strip() for x in day_time.split(': ')]\n normalize_hours = []\n\n if re.search('-', day):\n days = [x.strip() for x in day.split('-')]\n norm_days = '-'.join([DAYS.get(x, '') for x in days])\n else:\n norm_days = DAYS.get(day, '')\n\n if re.search('CLOSED', hours):\n norm_hours = ' off'\n normalize_hours.append(norm_hours)\n else:\n if re.search('-', hours):\n hours = [x.strip() for x in hours.split('-')]\n\n for hour in hours:\n\n if hour[-1] == 'p':\n if re.search(':', hour[:-1]):\n hora, minute = [x.strip() for x in hour[:-1].split(':')]\n if int(hora) < 12:\n norm_hours = str(int(hora) + 12) + ':' + minute\n else:\n if int(hour[:-1]) < 12:\n norm_hours = str(int(hour[:-1]) + 12) + \":00\"\n\n elif hour[-1] == 'a':\n if re.search(':', hour[:-1]):\n hora, minute = [x.strip() for x in hour[:-1].split(':')]\n norm_hours = hora + ':' + minute\n else:\n norm_hours = hour[:-1] + \":00\"\n\n normalize_hours.append(norm_hours)\n\n normalize_day_times.append(' '.join([norm_days, '-'.join(normalize_hours)]))\n return '; 
'.join(normalize_day_times)\n\n\nclass MarshallsSpider(scrapy.Spider):\n\n name = \"marshalls\"\n allowed_domains = [\"mktsvc.tjx.com\", 'www.marshallsonline.com']\n\n def start_requests(self):\n url = URL\n\n headers = {\n 'Accept-Language': 'en-US,en;q=0.8,ru;q=0.6',\n 'Origin': 'https://www.marshallsonline.com',\n 'Accept-Encoding': 'gzip, deflate, br',\n 'Accept': 'application/json, text/plain, */*',\n 'Referer': 'https://www.marshallsonline.com/store-finder/by-state',\n 'Connection': 'keep-alive',\n 'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',\n }\n\n for state in STATES:\n form_data = {'chain': '10', 'lang': 'en', 'state': state}\n\n yield scrapy.http.FormRequest(url=url, method='POST', formdata=form_data,\n headers=headers, callback=self.parse)\n\n def parse(self, response):\n\n data = json.loads(response.body_as_unicode())\n stores = data.get('Stores', None)\n\n for store in stores:\n lon_lat = [store.pop('Longitude', None), store.pop('Latitude', None)]\n store['ref'] = URL + str(store.get('StoreID', None))\n\n opening_hours = normalize_time(store.get('Hours', ''))\n\n if opening_hours:\n store['opening_hours'] = opening_hours\n store.pop('Hours', None)\n\n yield GeojsonPointItem(\n properties=store,\n lon_lat=lon_lat\n )\n", "path": "locations/spiders/marshalls.py"}]} | 1,758 | 465 |
gh_patches_debug_11984 | rasdani/github-patches | git_diff | dotkom__onlineweb4-420 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Mark rules error for anonymous users on events
'AnonymousUser' object has no attribute 'mark_rules'
</issue>
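For context, the traceback means the form reads `user.mark_rules` while `request.user` is Django's `AnonymousUser`, which has no such attribute. A minimal sketch of the failing check and a guarded variant; the stub class only stands in for the real user objects, and the method-style `is_authenticated()` matches the Django version this codebase targets:

```python
class AnonymousUserStub:
    """Illustrative stand-in for django.contrib.auth.models.AnonymousUser."""
    def is_authenticated(self):
        return False

user = AnonymousUserStub()

# The current check crashes, because AnonymousUser has no mark_rules attribute:
#     if user and user.mark_rules: ...
# Checking authentication first short-circuits before mark_rules is touched:
if user and user.is_authenticated() and user.mark_rules:
    print("user already accepted the mark rules")
else:
    print("keep showing the mark_rules checkbox")
```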
<code>
[start of apps/events/forms.py]
1 from django import forms
2 from captcha.fields import CaptchaField
3
4 class CaptchaForm(forms.Form):
5 def __init__(self, *args, **kwargs):
6 user = kwargs.pop('user', None)
7 super(CaptchaForm, self).__init__(*args, **kwargs)
8 # Removing mark rules field if user has already accepted the rules
9 if user and user.mark_rules:
10 del self.fields['mark_rules']
11 mark_rules = forms.BooleanField(label=u'Jeg godtar <a href="/profile/#marks" target="_blank">prikkreglene</a>')
12 captcha = CaptchaField()
13
[end of apps/events/forms.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/apps/events/forms.py b/apps/events/forms.py
--- a/apps/events/forms.py
+++ b/apps/events/forms.py
@@ -6,7 +6,7 @@
user = kwargs.pop('user', None)
super(CaptchaForm, self).__init__(*args, **kwargs)
# Removing mark rules field if user has already accepted the rules
- if user and user.mark_rules:
+ if user and user.is_authenticated() and user.mark_rules:
del self.fields['mark_rules']
mark_rules = forms.BooleanField(label=u'Jeg godtar <a href="/profile/#marks" target="_blank">prikkreglene</a>')
captcha = CaptchaField()
| {"golden_diff": "diff --git a/apps/events/forms.py b/apps/events/forms.py\n--- a/apps/events/forms.py\n+++ b/apps/events/forms.py\n@@ -6,7 +6,7 @@\n user = kwargs.pop('user', None)\n super(CaptchaForm, self).__init__(*args, **kwargs)\n # Removing mark rules field if user has already accepted the rules\n- if user and user.mark_rules:\n+ if user and user.is_authenticated() and user.mark_rules:\n del self.fields['mark_rules']\n mark_rules = forms.BooleanField(label=u'Jeg godtar <a href=\"/profile/#marks\" target=\"_blank\">prikkreglene</a>')\n captcha = CaptchaField()\n", "issue": "Mark rules error for anonymous users on events\n'AnonymousUser' object has no attribute 'mark_rules'\n\n", "before_files": [{"content": "from django import forms\nfrom captcha.fields import CaptchaField\n\nclass CaptchaForm(forms.Form):\n def __init__(self, *args, **kwargs):\n user = kwargs.pop('user', None)\n super(CaptchaForm, self).__init__(*args, **kwargs)\n # Removing mark rules field if user has already accepted the rules\n if user and user.mark_rules:\n del self.fields['mark_rules']\n mark_rules = forms.BooleanField(label=u'Jeg godtar <a href=\"/profile/#marks\" target=\"_blank\">prikkreglene</a>')\n captcha = CaptchaField()\n", "path": "apps/events/forms.py"}]} | 701 | 149 |
gh_patches_debug_34686 | rasdani/github-patches | git_diff | wagtail__wagtail-1225 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Memory leak in RoutablePage
When resolving/reversing URLs, `RoutablePage` calls Django's `django.core.urlresolvers.get_resolver` function to build a resolver object.

This function is wrapped in an unlimited lru cache. Because each call is usually made with a different page instance, this lru cache would grow forever.

I've not seen any issues caused by this in the wild, but it is worth fixing.
</issue>
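To make the growth concrete, here is a minimal, self-contained sketch of how an unbounded `lru_cache` behaves when it keeps being called with new arguments; the decorated function is only a stand-in for Django's `get_resolver`:

```python
from functools import lru_cache

@lru_cache(maxsize=None)          # roughly how Django wraps get_resolver
def get_resolver(urlconf):
    return object()               # stand-in for building a resolver object

# Every distinct argument adds a cache entry that is never evicted.
for i in range(5):
    get_resolver(("subpage-url-%d" % i,))

print(get_resolver.cache_info().currsize)   # 5 here, and it only grows
```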
<code>
[start of wagtail/contrib/wagtailroutablepage/models.py]
1 from __future__ import unicode_literals
2
3 from six import string_types
4
5 from django.http import Http404
6 from django.core.urlresolvers import get_resolver
7 from django.core.exceptions import ImproperlyConfigured
8
9 from wagtail.wagtailcore.models import Page
10 from wagtail.wagtailcore.url_routing import RouteResult
11
12
13 class RoutablePageMixin(object):
14 """
15 This class can be mixed in to a Page subclass to allow urlconfs to be
16 embedded inside pages.
17 """
18 #: Set this to a tuple of ``django.conf.urls.url`` objects.
19 subpage_urls = None
20
21 def reverse_subpage(self, name, args=None, kwargs=None):
22 """
23 This method does the same job as Djangos' built in "urlresolvers.reverse()" function for subpage urlconfs.
24 """
25 args = args or []
26 kwargs = kwargs or {}
27
28 if self.subpage_urls is None:
29 raise ImproperlyConfigured("You must set 'subpage_urls' on " + type(self).__name__)
30
31 resolver = get_resolver(self.subpage_urls)
32 return resolver.reverse(name, *args, **kwargs)
33
34 def resolve_subpage(self, path):
35 """
36 This finds a view method/function from a URL path.
37 """
38 if self.subpage_urls is None:
39 raise ImproperlyConfigured("You must set 'subpage_urls' on " + type(self).__name__)
40
41 resolver = get_resolver(self.subpage_urls)
42 view, args, kwargs = resolver.resolve(path)
43
44 # If view is a string, find it as an attribute of self
45 if isinstance(view, string_types):
46 view = getattr(self, view)
47
48 return view, args, kwargs
49
50 def route(self, request, path_components):
51 """
52 This hooks the subpage urls into Wagtails routing.
53 """
54 if self.live:
55 try:
56 path = '/'
57 if path_components:
58 path += '/'.join(path_components) + '/'
59
60 view, args, kwargs = self.resolve_subpage(path)
61 return RouteResult(self, args=(view, args, kwargs))
62 except Http404:
63 pass
64
65 return super(RoutablePageMixin, self).route(request, path_components)
66
67 def serve(self, request, view, args, kwargs):
68 return view(request, *args, **kwargs)
69
70 def serve_preview(self, request, mode_name):
71 view, args, kwargs = self.resolve_subpage('/')
72 return view(request, *args, **kwargs)
73
74
75 class RoutablePage(RoutablePageMixin, Page):
76 """
77 This class extends Page by adding methods to allow urlconfs
78 to be embedded inside pages
79 """
80
81 is_abstract = True
82
83 class Meta:
84 abstract = True
85
[end of wagtail/contrib/wagtailroutablepage/models.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/wagtail/contrib/wagtailroutablepage/models.py b/wagtail/contrib/wagtailroutablepage/models.py
--- a/wagtail/contrib/wagtailroutablepage/models.py
+++ b/wagtail/contrib/wagtailroutablepage/models.py
@@ -3,8 +3,7 @@
from six import string_types
from django.http import Http404
-from django.core.urlresolvers import get_resolver
-from django.core.exceptions import ImproperlyConfigured
+from django.core.urlresolvers import RegexURLResolver
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.url_routing import RouteResult
@@ -18,28 +17,36 @@
#: Set this to a tuple of ``django.conf.urls.url`` objects.
subpage_urls = None
+ @classmethod
+ def get_subpage_urls(cls):
+ if cls.subpage_urls:
+ return cls.subpage_urls
+
+ return ()
+
+ @classmethod
+ def get_resolver(cls):
+ if '_routablepage_urlresolver' not in cls.__dict__:
+ subpage_urls = cls.get_subpage_urls()
+ cls._routablepage_urlresolver = RegexURLResolver(r'^/', subpage_urls)
+
+ return cls._routablepage_urlresolver
+
def reverse_subpage(self, name, args=None, kwargs=None):
"""
- This method does the same job as Djangos' built in "urlresolvers.reverse()" function for subpage urlconfs.
+ This method does the same job as Djangos' built in
+ "urlresolvers.reverse()" function for subpage urlconfs.
"""
args = args or []
kwargs = kwargs or {}
- if self.subpage_urls is None:
- raise ImproperlyConfigured("You must set 'subpage_urls' on " + type(self).__name__)
-
- resolver = get_resolver(self.subpage_urls)
- return resolver.reverse(name, *args, **kwargs)
+ return self.get_resolver().reverse(name, *args, **kwargs)
def resolve_subpage(self, path):
"""
This finds a view method/function from a URL path.
"""
- if self.subpage_urls is None:
- raise ImproperlyConfigured("You must set 'subpage_urls' on " + type(self).__name__)
-
- resolver = get_resolver(self.subpage_urls)
- view, args, kwargs = resolver.resolve(path)
+ view, args, kwargs = self.get_resolver().resolve(path)
# If view is a string, find it as an attribute of self
if isinstance(view, string_types):
| {"golden_diff": "diff --git a/wagtail/contrib/wagtailroutablepage/models.py b/wagtail/contrib/wagtailroutablepage/models.py\n--- a/wagtail/contrib/wagtailroutablepage/models.py\n+++ b/wagtail/contrib/wagtailroutablepage/models.py\n@@ -3,8 +3,7 @@\n from six import string_types\n \n from django.http import Http404\n-from django.core.urlresolvers import get_resolver\n-from django.core.exceptions import ImproperlyConfigured\n+from django.core.urlresolvers import RegexURLResolver\n \n from wagtail.wagtailcore.models import Page\n from wagtail.wagtailcore.url_routing import RouteResult\n@@ -18,28 +17,36 @@\n #: Set this to a tuple of ``django.conf.urls.url`` objects.\n subpage_urls = None\n \n+ @classmethod\n+ def get_subpage_urls(cls):\n+ if cls.subpage_urls:\n+ return cls.subpage_urls\n+\n+ return ()\n+\n+ @classmethod\n+ def get_resolver(cls):\n+ if '_routablepage_urlresolver' not in cls.__dict__:\n+ subpage_urls = cls.get_subpage_urls()\n+ cls._routablepage_urlresolver = RegexURLResolver(r'^/', subpage_urls)\n+\n+ return cls._routablepage_urlresolver\n+\n def reverse_subpage(self, name, args=None, kwargs=None):\n \"\"\"\n- This method does the same job as Djangos' built in \"urlresolvers.reverse()\" function for subpage urlconfs.\n+ This method does the same job as Djangos' built in\n+ \"urlresolvers.reverse()\" function for subpage urlconfs.\n \"\"\"\n args = args or []\n kwargs = kwargs or {}\n \n- if self.subpage_urls is None:\n- raise ImproperlyConfigured(\"You must set 'subpage_urls' on \" + type(self).__name__)\n-\n- resolver = get_resolver(self.subpage_urls)\n- return resolver.reverse(name, *args, **kwargs)\n+ return self.get_resolver().reverse(name, *args, **kwargs)\n \n def resolve_subpage(self, path):\n \"\"\"\n This finds a view method/function from a URL path.\n \"\"\"\n- if self.subpage_urls is None:\n- raise ImproperlyConfigured(\"You must set 'subpage_urls' on \" + type(self).__name__)\n-\n- resolver = get_resolver(self.subpage_urls)\n- view, args, kwargs = resolver.resolve(path)\n+ view, args, kwargs = self.get_resolver().resolve(path)\n \n # If view is a string, find it as an attribute of self\n if isinstance(view, string_types):\n", "issue": "Memory leak in RoutablePage\nWhen resolving/reversing URLs, `RoutablePage` calls Djangos `django.core.urlresolvers.get_resolver` function to build a resolver object.\n\nThis function is wrapped in an unlimited lru cache. 
As each time we call it is usually with a different page instance, this lru cache would grow forever.\n\nI've not seen any issues caused by this in the wild, but worth fixing.\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nfrom six import string_types\n\nfrom django.http import Http404\nfrom django.core.urlresolvers import get_resolver\nfrom django.core.exceptions import ImproperlyConfigured\n\nfrom wagtail.wagtailcore.models import Page\nfrom wagtail.wagtailcore.url_routing import RouteResult\n\n\nclass RoutablePageMixin(object):\n \"\"\"\n This class can be mixed in to a Page subclass to allow urlconfs to be\n embedded inside pages.\n \"\"\"\n #: Set this to a tuple of ``django.conf.urls.url`` objects.\n subpage_urls = None\n\n def reverse_subpage(self, name, args=None, kwargs=None):\n \"\"\"\n This method does the same job as Djangos' built in \"urlresolvers.reverse()\" function for subpage urlconfs.\n \"\"\"\n args = args or []\n kwargs = kwargs or {}\n\n if self.subpage_urls is None:\n raise ImproperlyConfigured(\"You must set 'subpage_urls' on \" + type(self).__name__)\n\n resolver = get_resolver(self.subpage_urls)\n return resolver.reverse(name, *args, **kwargs)\n\n def resolve_subpage(self, path):\n \"\"\"\n This finds a view method/function from a URL path.\n \"\"\"\n if self.subpage_urls is None:\n raise ImproperlyConfigured(\"You must set 'subpage_urls' on \" + type(self).__name__)\n\n resolver = get_resolver(self.subpage_urls)\n view, args, kwargs = resolver.resolve(path)\n\n # If view is a string, find it as an attribute of self\n if isinstance(view, string_types):\n view = getattr(self, view)\n\n return view, args, kwargs\n\n def route(self, request, path_components):\n \"\"\"\n This hooks the subpage urls into Wagtails routing.\n \"\"\"\n if self.live:\n try:\n path = '/'\n if path_components:\n path += '/'.join(path_components) + '/'\n\n view, args, kwargs = self.resolve_subpage(path)\n return RouteResult(self, args=(view, args, kwargs))\n except Http404:\n pass\n\n return super(RoutablePageMixin, self).route(request, path_components)\n\n def serve(self, request, view, args, kwargs):\n return view(request, *args, **kwargs)\n\n def serve_preview(self, request, mode_name):\n view, args, kwargs = self.resolve_subpage('/')\n return view(request, *args, **kwargs)\n\n\nclass RoutablePage(RoutablePageMixin, Page):\n \"\"\"\n This class extends Page by adding methods to allow urlconfs\n to be embedded inside pages\n \"\"\"\n\n is_abstract = True\n\n class Meta:\n abstract = True\n", "path": "wagtail/contrib/wagtailroutablepage/models.py"}]} | 1,401 | 595 |
gh_patches_debug_25558 | rasdani/github-patches | git_diff | interlegis__sapl-2525 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
No anonymous access to Accessory Documents (Documento Acessório) of Public Hearings (Audiência Pública)
<!--- Provide a general summary of the issue in the title above -->
## Expected Behavior
Anonymous users should be able to access the accessory documents of Public Hearings.
## Current Behavior
An anonymous user cannot reach the "Documento Acessório" (Accessory Document) part of a Public Hearing: with the administrative-document application set to "Restritiva" (restrictive visibility), a login is requested. I believe Public Hearings should not fall under that rule.
## Steps to Reproduce (for bugs)
<!--- Provide a link to a live example, or an unambiguous set of steps -->
<!--- to reproduce this bug. Include code to reproduce, if relevant. -->
1. With the Administrative Documents visibility option set to "Restritiva" - while logged out - Institucional - Audiências Públicas (Public Hearings) - open a registered hearing - click on Documento Acessório
<!-- ## Screenshots -->
## Your Environment
<!--- Include relevant details about the environment in which you observed/experienced the bug. -->
* Version used (_Release_): 3.1.143
* Browser name and version: Chrome
* Operating system name and version (desktop or mobile): Windows 10
</issue>
<code>
[start of sapl/audiencia/views.py]
1 import sapl
2
3 from django.http import HttpResponse
4 from django.core.urlresolvers import reverse
5 from django.views.decorators.clickjacking import xframe_options_exempt
6 from django.views.generic import UpdateView
7 from sapl.crud.base import RP_DETAIL, RP_LIST, Crud, MasterDetailCrud
8
9 from .forms import AudienciaForm, AnexoAudienciaPublicaForm
10 from .models import AudienciaPublica, AnexoAudienciaPublica
11
12
13 def index(request):
14 return HttpResponse("Audiência Pública")
15
16
17 class AudienciaCrud(Crud):
18 model = AudienciaPublica
19 public = [RP_LIST, RP_DETAIL, ]
20
21 class BaseMixin(Crud.BaseMixin):
22 list_field_names = ['numero', 'nome', 'tipo', 'materia',
23 'data']
24 ordering = 'nome', 'numero', 'tipo', 'data'
25
26 class ListView(Crud.ListView):
27 paginate_by = 10
28
29 def get_context_data(self, **kwargs):
30 context = super().get_context_data(**kwargs)
31
32 audiencia_materia = {}
33 for o in context['object_list']:
34 # indexado pelo numero da audiencia
35 audiencia_materia[str(o.numero)] = o.materia
36
37 for row in context['rows']:
38 coluna_materia = row[3] # se mudar a ordem de listagem mudar aqui
39 if coluna_materia[0]:
40 materia = audiencia_materia[row[0][0]]
41 url_materia = reverse('sapl.materia:materialegislativa_detail',
42 kwargs={'pk': materia.id})
43 row[3] = (coluna_materia[0], url_materia)
44 return context
45
46 class CreateView(Crud.CreateView):
47 form_class = AudienciaForm
48
49 def form_valid(self, form):
50 return super(Crud.CreateView, self).form_valid(form)
51
52 class UpdateView(Crud.UpdateView):
53 form_class = AudienciaForm
54
55 def get_initial(self):
56 initial = super(UpdateView, self).get_initial()
57 if self.object.materia:
58 initial['tipo_materia'] = self.object.materia.tipo.id
59 initial['numero_materia'] = self.object.materia.numero
60 initial['ano_materia'] = self.object.materia.ano
61 return initial
62
63 class DeleteView(Crud.DeleteView):
64 pass
65
66 class DetailView(Crud.DetailView):
67
68 layout_key = 'AudienciaPublicaDetail'
69
70 @xframe_options_exempt
71 def get(self, request, *args, **kwargs):
72 return super().get(request, *args, **kwargs)
73
74
75 class AudienciaPublicaMixin:
76
77 def has_permission(self):
78 app_config = sapl.base.models.AppConfig.objects.last()
79 if app_config and app_config.documentos_administrativos == 'O':
80 return True
81
82 return super().has_permission()
83
84
85 class AnexoAudienciaPublicaCrud(MasterDetailCrud):
86 model = AnexoAudienciaPublica
87 parent_field = 'audiencia'
88 help_topic = 'numeracao_docsacess'
89
90 class BaseMixin(MasterDetailCrud.BaseMixin):
91 list_field_names = ['assunto']
92
93 class CreateView(MasterDetailCrud.CreateView):
94 form_class = AnexoAudienciaPublicaForm
95 layout_key = None
96
97 class UpdateView(MasterDetailCrud.UpdateView):
98 form_class = AnexoAudienciaPublicaForm
99
100 class ListView(AudienciaPublicaMixin, MasterDetailCrud.ListView):
101
102 def get_queryset(self):
103 qs = super(MasterDetailCrud.ListView, self).get_queryset()
104 kwargs = {self.crud.parent_field: self.kwargs['pk']}
105 return qs.filter(**kwargs).order_by('-data', '-id')
106
107 class DetailView(AudienciaPublicaMixin,
108 MasterDetailCrud.DetailView):
109 pass
110
[end of sapl/audiencia/views.py]
[start of sapl/audiencia/urls.py]
1 from django.conf.urls import include, url
2 from sapl.audiencia.views import (index, AudienciaCrud,AnexoAudienciaPublicaCrud)
3
4 from .apps import AppConfig
5
6 app_name = AppConfig.name
7
8 urlpatterns = [
9 url(r'^audiencia/', include(AudienciaCrud.get_urls() +
10 AnexoAudienciaPublicaCrud.get_urls())),
11 ]
[end of sapl/audiencia/urls.py]
</code>
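For context, the `public` attribute on a Crud subclass appears to be what exposes list/detail views to anonymous visitors (compare `AudienciaCrud.public` above with `AnexoAudienciaPublicaCrud`, which defines none, so the restrictive document policy applies). A self-contained sketch of that idea; the stub classes and the `allows_anonymous` helper are illustrative, not SAPL's real API:

```python
RP_LIST, RP_DETAIL = "list", "detail"      # stand-ins for sapl.crud.base names

class CrudStub:
    public = ()                             # nothing exposed anonymously

    @classmethod
    def allows_anonymous(cls, action):
        return action in cls.public

class AudienciaCrudStub(CrudStub):
    public = [RP_LIST, RP_DETAIL]           # hearings themselves are public

class AnexoAudienciaPublicaCrudStub(CrudStub):
    pass                                    # accessory documents: login required

print(AudienciaCrudStub.allows_anonymous(RP_DETAIL))               # True
print(AnexoAudienciaPublicaCrudStub.allows_anonymous(RP_DETAIL))   # False
```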
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/sapl/audiencia/urls.py b/sapl/audiencia/urls.py
--- a/sapl/audiencia/urls.py
+++ b/sapl/audiencia/urls.py
@@ -1,11 +1,10 @@
from django.conf.urls import include, url
-from sapl.audiencia.views import (index, AudienciaCrud,AnexoAudienciaPublicaCrud)
+from sapl.audiencia.views import (index, AudienciaCrud, AnexoAudienciaPublicaCrud)
from .apps import AppConfig
app_name = AppConfig.name
urlpatterns = [
- url(r'^audiencia/', include(AudienciaCrud.get_urls() +
- AnexoAudienciaPublicaCrud.get_urls())),
+ url(r'^audiencia/', include(AudienciaCrud.get_urls() + AnexoAudienciaPublicaCrud.get_urls())),
]
\ No newline at end of file
diff --git a/sapl/audiencia/views.py b/sapl/audiencia/views.py
--- a/sapl/audiencia/views.py
+++ b/sapl/audiencia/views.py
@@ -86,6 +86,7 @@
model = AnexoAudienciaPublica
parent_field = 'audiencia'
help_topic = 'numeracao_docsacess'
+ public = [RP_LIST, RP_DETAIL, ]
class BaseMixin(MasterDetailCrud.BaseMixin):
list_field_names = ['assunto']
@@ -104,7 +105,5 @@
kwargs = {self.crud.parent_field: self.kwargs['pk']}
return qs.filter(**kwargs).order_by('-data', '-id')
- class DetailView(AudienciaPublicaMixin,
- MasterDetailCrud.DetailView):
+ class DetailView(AudienciaPublicaMixin, MasterDetailCrud.DetailView):
pass
-
\ No newline at end of file
| {"golden_diff": "diff --git a/sapl/audiencia/urls.py b/sapl/audiencia/urls.py\n--- a/sapl/audiencia/urls.py\n+++ b/sapl/audiencia/urls.py\n@@ -1,11 +1,10 @@\n from django.conf.urls import include, url\n-from sapl.audiencia.views import (index, AudienciaCrud,AnexoAudienciaPublicaCrud)\n+from sapl.audiencia.views import (index, AudienciaCrud, AnexoAudienciaPublicaCrud)\n \n from .apps import AppConfig\n \n app_name = AppConfig.name\n \n urlpatterns = [\n- url(r'^audiencia/', include(AudienciaCrud.get_urls() +\n- \t\t\t\t\t\t\tAnexoAudienciaPublicaCrud.get_urls())),\n+ url(r'^audiencia/', include(AudienciaCrud.get_urls() + AnexoAudienciaPublicaCrud.get_urls())),\n ]\n\\ No newline at end of file\ndiff --git a/sapl/audiencia/views.py b/sapl/audiencia/views.py\n--- a/sapl/audiencia/views.py\n+++ b/sapl/audiencia/views.py\n@@ -86,6 +86,7 @@\n model = AnexoAudienciaPublica\n parent_field = 'audiencia'\n help_topic = 'numeracao_docsacess'\n+ public = [RP_LIST, RP_DETAIL, ]\n \n class BaseMixin(MasterDetailCrud.BaseMixin):\n list_field_names = ['assunto']\n@@ -104,7 +105,5 @@\n kwargs = {self.crud.parent_field: self.kwargs['pk']}\n return qs.filter(**kwargs).order_by('-data', '-id')\n \n- class DetailView(AudienciaPublicaMixin,\n- MasterDetailCrud.DetailView):\n+ class DetailView(AudienciaPublicaMixin, MasterDetailCrud.DetailView):\n pass\n- \n\\ No newline at end of file\n", "issue": "Sem acesso an\u00f4nimo de Documento Acess\u00f3rio de Audiencia P\u00fablica \n<!--- Forne\u00e7a um resumo geral da _issue_ no t\u00edtulo acima -->\r\n\r\n## Comportamento Esperado\r\nUsu\u00e1rios an\u00f4nimos poderem acessar documento acess\u00f3rio das Audi\u00eancias P\u00fablicas \r\n\r\n## Comportamento Atual\r\nUsu\u00e1rio an\u00f4nimo n\u00e3o acessa a parte de \"Documento Acess\u00f3rio\" da Audi\u00eancia P\u00fablica pedindo um login com a aplica\u00e7\u00e3o de documento administrativo \"Restritiva\", acredito que Audi\u00eancia P\u00fablica n\u00e3o deveria se enquadrar nessa regra.\r\n\r\n## Passos para Reproduzir (para bugs)\r\n<!--- Forne\u00e7a um link para um exemplo, ou um conjunto de passos inequ\u00edvocos -->\r\n<!--- para reproduzir esse bug. Inclua c\u00f3digo para reproduzir, se relevante. -->\r\n1. Com op\u00e7\u00e3o de Visibilidade de Documentos Administrativos \"Restritiva\" -Est\u00e1 deslogado - Institucional - Audi\u00eancias P\u00fablicas - Acessar uma audi\u00eancia cadastrada - clicar em Documento Acess\u00f3rio\r\n\r\n<!-- ## Imagens do Ocorrido -->\r\n\r\n## Seu Ambiente\r\n<!--- Inclua detalhes relevantes sobre o ambiente em que voc\u00ea presenciou/experienciou o bug. 
-->\r\n* Vers\u00e3o usada (_Release_): 3.1.143\r\n* Nome e vers\u00e3o do navegador: Chrome\r\n* Nome e vers\u00e3o do Sistema Operacional (desktop ou mobile): Windows 10\r\n\n", "before_files": [{"content": "import sapl\n\nfrom django.http import HttpResponse\nfrom django.core.urlresolvers import reverse\nfrom django.views.decorators.clickjacking import xframe_options_exempt\nfrom django.views.generic import UpdateView\nfrom sapl.crud.base import RP_DETAIL, RP_LIST, Crud, MasterDetailCrud\n\nfrom .forms import AudienciaForm, AnexoAudienciaPublicaForm\nfrom .models import AudienciaPublica, AnexoAudienciaPublica\n\n\ndef index(request):\n return HttpResponse(\"Audi\u00eancia P\u00fablica\")\n\n\nclass AudienciaCrud(Crud):\n model = AudienciaPublica\n public = [RP_LIST, RP_DETAIL, ]\n\n class BaseMixin(Crud.BaseMixin):\n list_field_names = ['numero', 'nome', 'tipo', 'materia',\n 'data'] \n ordering = 'nome', 'numero', 'tipo', 'data'\n\n class ListView(Crud.ListView):\n paginate_by = 10\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n audiencia_materia = {}\n for o in context['object_list']:\n # indexado pelo numero da audiencia\n audiencia_materia[str(o.numero)] = o.materia\n\n for row in context['rows']:\n coluna_materia = row[3] # se mudar a ordem de listagem mudar aqui\n if coluna_materia[0]:\n materia = audiencia_materia[row[0][0]]\n url_materia = reverse('sapl.materia:materialegislativa_detail',\n kwargs={'pk': materia.id})\n row[3] = (coluna_materia[0], url_materia)\n return context\n\n class CreateView(Crud.CreateView):\n form_class = AudienciaForm\n\n def form_valid(self, form):\n return super(Crud.CreateView, self).form_valid(form)\n\n class UpdateView(Crud.UpdateView):\n form_class = AudienciaForm\n\n def get_initial(self):\n initial = super(UpdateView, self).get_initial()\n if self.object.materia:\n initial['tipo_materia'] = self.object.materia.tipo.id\n initial['numero_materia'] = self.object.materia.numero\n initial['ano_materia'] = self.object.materia.ano\n return initial\n \n class DeleteView(Crud.DeleteView):\n pass\n\n class DetailView(Crud.DetailView):\n\n layout_key = 'AudienciaPublicaDetail'\n\n @xframe_options_exempt\n def get(self, request, *args, **kwargs):\n return super().get(request, *args, **kwargs)\n\n\nclass AudienciaPublicaMixin:\n\n def has_permission(self):\n app_config = sapl.base.models.AppConfig.objects.last()\n if app_config and app_config.documentos_administrativos == 'O':\n return True\n\n return super().has_permission()\n\n\nclass AnexoAudienciaPublicaCrud(MasterDetailCrud):\n model = AnexoAudienciaPublica\n parent_field = 'audiencia'\n help_topic = 'numeracao_docsacess'\n\n class BaseMixin(MasterDetailCrud.BaseMixin):\n list_field_names = ['assunto']\n\n class CreateView(MasterDetailCrud.CreateView):\n form_class = AnexoAudienciaPublicaForm\n layout_key = None\n\n class UpdateView(MasterDetailCrud.UpdateView):\n form_class = AnexoAudienciaPublicaForm\n\n class ListView(AudienciaPublicaMixin, MasterDetailCrud.ListView):\n\n def get_queryset(self):\n qs = super(MasterDetailCrud.ListView, self).get_queryset()\n kwargs = {self.crud.parent_field: self.kwargs['pk']}\n return qs.filter(**kwargs).order_by('-data', '-id')\n\n class DetailView(AudienciaPublicaMixin,\n MasterDetailCrud.DetailView):\n pass\n ", "path": "sapl/audiencia/views.py"}, {"content": "from django.conf.urls import include, url\nfrom sapl.audiencia.views import (index, AudienciaCrud,AnexoAudienciaPublicaCrud)\n\nfrom .apps import 
AppConfig\n\napp_name = AppConfig.name\n\nurlpatterns = [\n url(r'^audiencia/', include(AudienciaCrud.get_urls() +\n \t\t\t\t\t\t\tAnexoAudienciaPublicaCrud.get_urls())),\n]", "path": "sapl/audiencia/urls.py"}]} | 2,044 | 411 |
gh_patches_debug_23485 | rasdani/github-patches | git_diff | mindsdb__mindsdb-1954 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
selecting from datasource error
Reported by David F.
```
use Postgres_Sample;
SELECT * FROM data.insurance LIMIT 200;
```
error:
```
SQL Error [1149] [42000]: 'str' object has no attribute '__name__'
```
</issue>
<code>
[start of mindsdb/api/mysql/mysql_proxy/datahub/datanodes/integration_datanode.py]
1 import pandas as pd
2 from mindsdb_sql.render.sqlalchemy_render import SqlalchemyRender
3
4 from mindsdb.api.mysql.mysql_proxy.datahub.datanodes.datanode import DataNode
5 from mindsdb.utilities.log import log
6
7
8 class IntegrationDataNode(DataNode):
9 type = 'integration'
10
11 def __init__(self, integration_name, data_store, ds_type):
12 self.integration_name = integration_name
13 self.data_store = data_store
14 self.ds_type = ds_type
15
16 def get_type(self):
17 return self.type
18
19 def get_tables(self):
20 return []
21
22 def has_table(self, tableName):
23 return True
24
25 def get_table_columns(self, tableName):
26 return []
27
28 def select(self, query):
29 if self.ds_type in ('postgres', 'snowflake'):
30 dialect = 'postgres'
31 else:
32 dialect = 'mysql'
33 render = SqlalchemyRender(dialect)
34 try:
35 query_str = render.get_string(query, with_failback=False)
36 except Exception as e:
37 log.error(f"Exception during query casting to '{dialect}' dialect. Query: {query}. Error: {e}")
38 query_str = render.get_string(query, with_failback=True)
39
40 dso, _creation_info = self.data_store.create_datasource(self.integration_name, {'query': query_str})
41 data = dso.df.to_dict(orient='records')
42 column_names = list(dso.df.columns)
43
44 for column_name in column_names:
45 if pd.core.dtypes.common.is_datetime_or_timedelta_dtype(dso.df[column_name]):
46 pass_data = dso.df[column_name].dt.to_pydatetime()
47 for i, rec in enumerate(data):
48 rec[column_name] = pass_data[i].timestamp()
49
50 if len(column_names) == 0:
51 column_names = ['dataframe_is_empty']
52
53 return data, column_names
54
[end of mindsdb/api/mysql/mysql_proxy/datahub/datanodes/integration_datanode.py]
</code>
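A likely reading of the error (hedged, since the issue only shows the SQL client message): `select()` assumes `query` is a parsed AST object, so when a plain SQL string reaches it, `SqlalchemyRender.get_string` fails while inspecting the object. A minimal sketch of a guard that passes raw strings through; the helper name and the stand-in renderer are illustrative:

```python
def build_query_string(query, render_fn):
    """Pass raw SQL strings through untouched; only render AST objects."""
    if isinstance(query, str):
        return query
    # render_fn stands in for SqlalchemyRender(...).get_string
    return render_fn(query)

print(build_query_string("SELECT * FROM data.insurance LIMIT 200",
                         render_fn=lambda q: "<rendered AST>"))
```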
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mindsdb/api/mysql/mysql_proxy/datahub/datanodes/integration_datanode.py b/mindsdb/api/mysql/mysql_proxy/datahub/datanodes/integration_datanode.py
--- a/mindsdb/api/mysql/mysql_proxy/datahub/datanodes/integration_datanode.py
+++ b/mindsdb/api/mysql/mysql_proxy/datahub/datanodes/integration_datanode.py
@@ -26,16 +26,19 @@
return []
def select(self, query):
- if self.ds_type in ('postgres', 'snowflake'):
- dialect = 'postgres'
+ if isinstance(query, str):
+ query_str = query
else:
- dialect = 'mysql'
- render = SqlalchemyRender(dialect)
- try:
- query_str = render.get_string(query, with_failback=False)
- except Exception as e:
- log.error(f"Exception during query casting to '{dialect}' dialect. Query: {query}. Error: {e}")
- query_str = render.get_string(query, with_failback=True)
+ if self.ds_type in ('postgres', 'snowflake'):
+ dialect = 'postgres'
+ else:
+ dialect = 'mysql'
+ render = SqlalchemyRender(dialect)
+ try:
+ query_str = render.get_string(query, with_failback=False)
+ except Exception as e:
+ log.error(f"Exception during query casting to '{dialect}' dialect. Query: {query}. Error: {e}")
+ query_str = render.get_string(query, with_failback=True)
dso, _creation_info = self.data_store.create_datasource(self.integration_name, {'query': query_str})
data = dso.df.to_dict(orient='records')
| {"golden_diff": "diff --git a/mindsdb/api/mysql/mysql_proxy/datahub/datanodes/integration_datanode.py b/mindsdb/api/mysql/mysql_proxy/datahub/datanodes/integration_datanode.py\n--- a/mindsdb/api/mysql/mysql_proxy/datahub/datanodes/integration_datanode.py\n+++ b/mindsdb/api/mysql/mysql_proxy/datahub/datanodes/integration_datanode.py\n@@ -26,16 +26,19 @@\n return []\n \n def select(self, query):\n- if self.ds_type in ('postgres', 'snowflake'):\n- dialect = 'postgres'\n+ if isinstance(query, str):\n+ query_str = query\n else:\n- dialect = 'mysql'\n- render = SqlalchemyRender(dialect)\n- try:\n- query_str = render.get_string(query, with_failback=False)\n- except Exception as e:\n- log.error(f\"Exception during query casting to '{dialect}' dialect. Query: {query}. Error: {e}\")\n- query_str = render.get_string(query, with_failback=True)\n+ if self.ds_type in ('postgres', 'snowflake'):\n+ dialect = 'postgres'\n+ else:\n+ dialect = 'mysql'\n+ render = SqlalchemyRender(dialect)\n+ try:\n+ query_str = render.get_string(query, with_failback=False)\n+ except Exception as e:\n+ log.error(f\"Exception during query casting to '{dialect}' dialect. Query: {query}. Error: {e}\")\n+ query_str = render.get_string(query, with_failback=True)\n \n dso, _creation_info = self.data_store.create_datasource(self.integration_name, {'query': query_str})\n data = dso.df.to_dict(orient='records')\n", "issue": "selecting from datasource error\nRepoted by David F.\r\n```\r\nuse Postgres_Sample;\r\n\r\nSELECT * FROM data.insurance LIMIT 200;\r\n```\r\nerror:\r\n```\r\nSQL Error [1149] [42000]: 'str' object has no attribute '__name__'\r\n```\n", "before_files": [{"content": "import pandas as pd\nfrom mindsdb_sql.render.sqlalchemy_render import SqlalchemyRender\n\nfrom mindsdb.api.mysql.mysql_proxy.datahub.datanodes.datanode import DataNode\nfrom mindsdb.utilities.log import log\n\n\nclass IntegrationDataNode(DataNode):\n type = 'integration'\n\n def __init__(self, integration_name, data_store, ds_type):\n self.integration_name = integration_name\n self.data_store = data_store\n self.ds_type = ds_type\n\n def get_type(self):\n return self.type\n\n def get_tables(self):\n return []\n\n def has_table(self, tableName):\n return True\n\n def get_table_columns(self, tableName):\n return []\n\n def select(self, query):\n if self.ds_type in ('postgres', 'snowflake'):\n dialect = 'postgres'\n else:\n dialect = 'mysql'\n render = SqlalchemyRender(dialect)\n try:\n query_str = render.get_string(query, with_failback=False)\n except Exception as e:\n log.error(f\"Exception during query casting to '{dialect}' dialect. Query: {query}. Error: {e}\")\n query_str = render.get_string(query, with_failback=True)\n\n dso, _creation_info = self.data_store.create_datasource(self.integration_name, {'query': query_str})\n data = dso.df.to_dict(orient='records')\n column_names = list(dso.df.columns)\n\n for column_name in column_names:\n if pd.core.dtypes.common.is_datetime_or_timedelta_dtype(dso.df[column_name]):\n pass_data = dso.df[column_name].dt.to_pydatetime()\n for i, rec in enumerate(data):\n rec[column_name] = pass_data[i].timestamp()\n\n if len(column_names) == 0:\n column_names = ['dataframe_is_empty']\n\n return data, column_names\n", "path": "mindsdb/api/mysql/mysql_proxy/datahub/datanodes/integration_datanode.py"}]} | 1,124 | 382 |
gh_patches_debug_40307 | rasdani/github-patches | git_diff | Project-MONAI__MONAI-1946 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add `strict_shape` option to CheckpointLoader
**Is your feature request related to a problem? Please describe.**
Currently, we don't support the transfer-learning case that load a checkpoint with same layer names but different shape.
We can refer to below code:
```py
model_3 = get_model_with_3_classes()
state_dict_model_4 = torch.load("best_model_4.pt")
@trainer.on(Events.STARTED, model_3, state_dict_model_4)
def permissive_model_loader(model, state_dict):
this_state_dict = model.state_dict()
matched_state_dict = {
k: v for k, v in state_dict.items()
if k in this_state_dict and v.shape == this_state_dict[k].shape
}
model.load_state_dict(matched_state_dict, strict=False)
trainer.run(...)
```
Thanks.
</issue>
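Worth noting for context: PyTorch's `load_state_dict(strict=False)` only tolerates missing or unexpected keys; a same-named tensor with a different shape still raises a size-mismatch `RuntimeError`, which is why the snippet above filters the state dict by shape before loading. A minimal check:

```python
import torch.nn as nn

src = nn.Linear(4, 3)   # e.g. a 3-class head
dst = nn.Linear(4, 5)   # same layer names, different output size

try:
    # strict=False does not help here: the weight shapes still differ.
    dst.load_state_dict(src.state_dict(), strict=False)
except RuntimeError as err:
    print(err)
```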
<code>
[start of monai/handlers/checkpoint_loader.py]
1 # Copyright 2020 - 2021 MONAI Consortium
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at
5 # http://www.apache.org/licenses/LICENSE-2.0
6 # Unless required by applicable law or agreed to in writing, software
7 # distributed under the License is distributed on an "AS IS" BASIS,
8 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
9 # See the License for the specific language governing permissions and
10 # limitations under the License.
11
12 import logging
13 from typing import TYPE_CHECKING, Dict, Optional
14
15 import torch
16
17 from monai.utils import exact_version, optional_import
18
19 Events, _ = optional_import("ignite.engine", "0.4.4", exact_version, "Events")
20 Checkpoint, _ = optional_import("ignite.handlers", "0.4.4", exact_version, "Checkpoint")
21 if TYPE_CHECKING:
22 from ignite.engine import Engine
23 else:
24 Engine, _ = optional_import("ignite.engine", "0.4.4", exact_version, "Engine")
25
26
27 class CheckpointLoader:
28 """
29 CheckpointLoader acts as an Ignite handler to load checkpoint data from file.
30 It can load variables for network, optimizer, lr_scheduler, etc.
31 If saving checkpoint after `torch.nn.DataParallel`, need to save `model.module` instead
32 as PyTorch recommended and then use this loader to load the model.
33
34 Args:
35 load_path: the file path of checkpoint, it should be a PyTorch `pth` file.
36 load_dict: target objects that load checkpoint to. examples::
37
38 {'network': net, 'optimizer': optimizer, 'lr_scheduler': lr_scheduler}
39
40 name: identifier of logging.logger to use, if None, defaulting to ``engine.logger``.
41 map_location: when loading the module for distributed training/evaluation,
42 need to provide an appropriate map_location argument to prevent a process
43 to step into others’ devices. If map_location is missing, torch.load will
44 first load the module to CPU and then copy each parameter to where it was
45 saved, which would result in all processes on the same machine using the
46 same set of devices.
47 strict: whether to strictly enforce that the keys in :attr:`state_dict` match the keys
48 returned by this module's :meth:`~torch.nn.Module.state_dict` function. Default: ``True``
49
50 """
51
52 def __init__(
53 self,
54 load_path: str,
55 load_dict: Dict,
56 name: Optional[str] = None,
57 map_location: Optional[Dict] = None,
58 strict: bool = True,
59 ) -> None:
60 if load_path is None:
61 raise AssertionError("must provide clear path to load checkpoint.")
62 self.load_path = load_path
63 if not (load_dict is not None and len(load_dict) > 0):
64 raise AssertionError("must provide target objects to load.")
65 self.logger = logging.getLogger(name)
66 self.load_dict = load_dict
67 self._name = name
68 self.map_location = map_location
69 self.strict = strict
70
71 def attach(self, engine: Engine) -> None:
72 """
73 Args:
74 engine: Ignite Engine, it can be a trainer, validator or evaluator.
75 """
76 if self._name is None:
77 self.logger = engine.logger
78 engine.add_event_handler(Events.STARTED, self)
79
80 def __call__(self, engine: Engine) -> None:
81 """
82 Args:
83 engine: Ignite Engine, it can be a trainer, validator or evaluator.
84 """
85 checkpoint = torch.load(self.load_path, map_location=self.map_location)
86
87 # save current max epochs setting in the engine, don't overwrite it if larger than max_epochs in checkpoint
88 prior_max_epochs = engine.state.max_epochs
89 Checkpoint.load_objects(to_load=self.load_dict, checkpoint=checkpoint, strict=self.strict)
90 if engine.state.epoch > prior_max_epochs:
91 raise ValueError(
92 f"Epoch count ({engine.state.epoch}) in checkpoint is larger than "
93 f"the `engine.state.max_epochs` ({prior_max_epochs}) of engine. To further train from checkpoint, "
94 "construct trainer with `max_epochs` larger than checkpoint's epoch count. "
95 "To use checkpoint for inference, no need to load state_dict for the engine."
96 )
97 engine.state.max_epochs = prior_max_epochs
98
99 self.logger.info(f"Restored all variables from {self.load_path}")
100
[end of monai/handlers/checkpoint_loader.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/monai/handlers/checkpoint_loader.py b/monai/handlers/checkpoint_loader.py
--- a/monai/handlers/checkpoint_loader.py
+++ b/monai/handlers/checkpoint_loader.py
@@ -13,6 +13,7 @@
from typing import TYPE_CHECKING, Dict, Optional
import torch
+import torch.nn as nn
from monai.utils import exact_version, optional_import
@@ -44,8 +45,12 @@
first load the module to CPU and then copy each parameter to where it was
saved, which would result in all processes on the same machine using the
same set of devices.
- strict: whether to strictly enforce that the keys in :attr:`state_dict` match the keys
- returned by this module's :meth:`~torch.nn.Module.state_dict` function. Default: ``True``
+ strict: whether to strictly enforce that the keys in `state_dict` match the keys
+ returned by `torch.nn.Module.state_dict` function. default to `True`.
+ strict_shape: whether to enforce the data shape of the matched layers in the checkpoint,
+ `if `False`, it will skip the layers that have different data shape with checkpoint content.
+ This can be useful advanced feature for transfer learning. users should totally
+ understand which layers will have different shape. default to `True`.
"""
@@ -56,6 +61,7 @@
name: Optional[str] = None,
map_location: Optional[Dict] = None,
strict: bool = True,
+ strict_shape: bool = True,
) -> None:
if load_path is None:
raise AssertionError("must provide clear path to load checkpoint.")
@@ -67,6 +73,7 @@
self._name = name
self.map_location = map_location
self.strict = strict
+ self.strict_shape = strict_shape
def attach(self, engine: Engine) -> None:
"""
@@ -84,6 +91,20 @@
"""
checkpoint = torch.load(self.load_path, map_location=self.map_location)
+ if not self.strict_shape:
+ k, _ = list(self.load_dict.items())[0]
+ # single object and checkpoint is directly a state_dict
+ if len(self.load_dict) == 1 and k not in checkpoint:
+ checkpoint = {k: checkpoint}
+
+ # skip items that don't match data shape
+ for k, obj in self.load_dict.items():
+ if isinstance(obj, (nn.DataParallel, nn.parallel.DistributedDataParallel)):
+ obj = obj.module
+ if isinstance(obj, torch.nn.Module):
+ d = obj.state_dict()
+ checkpoint[k] = {k: v for k, v in checkpoint[k].items() if k in d and v.shape == d[k].shape}
+
# save current max epochs setting in the engine, don't overwrite it if larger than max_epochs in checkpoint
prior_max_epochs = engine.state.max_epochs
Checkpoint.load_objects(to_load=self.load_dict, checkpoint=checkpoint, strict=self.strict)
| {"golden_diff": "diff --git a/monai/handlers/checkpoint_loader.py b/monai/handlers/checkpoint_loader.py\n--- a/monai/handlers/checkpoint_loader.py\n+++ b/monai/handlers/checkpoint_loader.py\n@@ -13,6 +13,7 @@\n from typing import TYPE_CHECKING, Dict, Optional\n \n import torch\n+import torch.nn as nn\n \n from monai.utils import exact_version, optional_import\n \n@@ -44,8 +45,12 @@\n first load the module to CPU and then copy each parameter to where it was\n saved, which would result in all processes on the same machine using the\n same set of devices.\n- strict: whether to strictly enforce that the keys in :attr:`state_dict` match the keys\n- returned by this module's :meth:`~torch.nn.Module.state_dict` function. Default: ``True``\n+ strict: whether to strictly enforce that the keys in `state_dict` match the keys\n+ returned by `torch.nn.Module.state_dict` function. default to `True`.\n+ strict_shape: whether to enforce the data shape of the matched layers in the checkpoint,\n+ `if `False`, it will skip the layers that have different data shape with checkpoint content.\n+ This can be useful advanced feature for transfer learning. users should totally\n+ understand which layers will have different shape. default to `True`.\n \n \"\"\"\n \n@@ -56,6 +61,7 @@\n name: Optional[str] = None,\n map_location: Optional[Dict] = None,\n strict: bool = True,\n+ strict_shape: bool = True,\n ) -> None:\n if load_path is None:\n raise AssertionError(\"must provide clear path to load checkpoint.\")\n@@ -67,6 +73,7 @@\n self._name = name\n self.map_location = map_location\n self.strict = strict\n+ self.strict_shape = strict_shape\n \n def attach(self, engine: Engine) -> None:\n \"\"\"\n@@ -84,6 +91,20 @@\n \"\"\"\n checkpoint = torch.load(self.load_path, map_location=self.map_location)\n \n+ if not self.strict_shape:\n+ k, _ = list(self.load_dict.items())[0]\n+ # single object and checkpoint is directly a state_dict\n+ if len(self.load_dict) == 1 and k not in checkpoint:\n+ checkpoint = {k: checkpoint}\n+\n+ # skip items that don't match data shape\n+ for k, obj in self.load_dict.items():\n+ if isinstance(obj, (nn.DataParallel, nn.parallel.DistributedDataParallel)):\n+ obj = obj.module\n+ if isinstance(obj, torch.nn.Module):\n+ d = obj.state_dict()\n+ checkpoint[k] = {k: v for k, v in checkpoint[k].items() if k in d and v.shape == d[k].shape}\n+\n # save current max epochs setting in the engine, don't overwrite it if larger than max_epochs in checkpoint\n prior_max_epochs = engine.state.max_epochs\n Checkpoint.load_objects(to_load=self.load_dict, checkpoint=checkpoint, strict=self.strict)\n", "issue": "Add `strict_shape` option to CheckpointLoader\n**Is your feature request related to a problem? 
Please describe.**\r\nCurrently, we don't support the transfer-learning case that load a checkpoint with same layer names but different shape.\r\nWe can refer to below code:\r\n```py\r\nmodel_3 = get_model_with_3_classes()\r\nstate_dict_model_4 = torch.load(\"best_model_4.pt\")\r\n\r\[email protected](Events.STARTED, model_3, state_dict_model_4)\r\ndef permissive_model_loader(model, state_dict):\r\n this_state_dict = model.state_dict()\r\n matched_state_dict = {\r\n k: v for k, v in state_dict.items()\r\n if k in this_state_dict and v.shape == this_state_dict[k].shape\r\n }\r\n model.load_state_dict(matched_state_dict, strict=False)\r\n\r\ntrainer.run(...)\r\n```\r\n\r\nThanks.\r\n\n", "before_files": [{"content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\nfrom typing import TYPE_CHECKING, Dict, Optional\n\nimport torch\n\nfrom monai.utils import exact_version, optional_import\n\nEvents, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Events\")\nCheckpoint, _ = optional_import(\"ignite.handlers\", \"0.4.4\", exact_version, \"Checkpoint\")\nif TYPE_CHECKING:\n from ignite.engine import Engine\nelse:\n Engine, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Engine\")\n\n\nclass CheckpointLoader:\n \"\"\"\n CheckpointLoader acts as an Ignite handler to load checkpoint data from file.\n It can load variables for network, optimizer, lr_scheduler, etc.\n If saving checkpoint after `torch.nn.DataParallel`, need to save `model.module` instead\n as PyTorch recommended and then use this loader to load the model.\n\n Args:\n load_path: the file path of checkpoint, it should be a PyTorch `pth` file.\n load_dict: target objects that load checkpoint to. examples::\n\n {'network': net, 'optimizer': optimizer, 'lr_scheduler': lr_scheduler}\n\n name: identifier of logging.logger to use, if None, defaulting to ``engine.logger``.\n map_location: when loading the module for distributed training/evaluation,\n need to provide an appropriate map_location argument to prevent a process\n to step into others\u2019 devices. If map_location is missing, torch.load will\n first load the module to CPU and then copy each parameter to where it was\n saved, which would result in all processes on the same machine using the\n same set of devices.\n strict: whether to strictly enforce that the keys in :attr:`state_dict` match the keys\n returned by this module's :meth:`~torch.nn.Module.state_dict` function. 
Default: ``True``\n\n \"\"\"\n\n def __init__(\n self,\n load_path: str,\n load_dict: Dict,\n name: Optional[str] = None,\n map_location: Optional[Dict] = None,\n strict: bool = True,\n ) -> None:\n if load_path is None:\n raise AssertionError(\"must provide clear path to load checkpoint.\")\n self.load_path = load_path\n if not (load_dict is not None and len(load_dict) > 0):\n raise AssertionError(\"must provide target objects to load.\")\n self.logger = logging.getLogger(name)\n self.load_dict = load_dict\n self._name = name\n self.map_location = map_location\n self.strict = strict\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n if self._name is None:\n self.logger = engine.logger\n engine.add_event_handler(Events.STARTED, self)\n\n def __call__(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n checkpoint = torch.load(self.load_path, map_location=self.map_location)\n\n # save current max epochs setting in the engine, don't overwrite it if larger than max_epochs in checkpoint\n prior_max_epochs = engine.state.max_epochs\n Checkpoint.load_objects(to_load=self.load_dict, checkpoint=checkpoint, strict=self.strict)\n if engine.state.epoch > prior_max_epochs:\n raise ValueError(\n f\"Epoch count ({engine.state.epoch}) in checkpoint is larger than \"\n f\"the `engine.state.max_epochs` ({prior_max_epochs}) of engine. To further train from checkpoint, \"\n \"construct trainer with `max_epochs` larger than checkpoint's epoch count. \"\n \"To use checkpoint for inference, no need to load state_dict for the engine.\"\n )\n engine.state.max_epochs = prior_max_epochs\n\n self.logger.info(f\"Restored all variables from {self.load_path}\")\n", "path": "monai/handlers/checkpoint_loader.py"}]} | 1,893 | 681 |
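For illustration, a minimal sketch of how the `strict_shape=False` option added by the diff above might be exercised for the transfer-learning case in the issue. The two toy networks, the checkpoint file name, and the commented-out trainer are hypothetical; only the `CheckpointLoader` arguments come from the patched handler, and the import path is assumed to be the usual `monai.handlers` entry point.

```python
import torch
import torch.nn as nn
from monai.handlers import CheckpointLoader  # assumes the patched handler shown above

# Hypothetical 4-class model whose weights were saved earlier.
net_4 = nn.Sequential(nn.Linear(8, 16), nn.Linear(16, 4))
torch.save(net_4.state_dict(), "best_model_4.pt")

# Target model shares layer names but ends in a 3-class head.
net_3 = nn.Sequential(nn.Linear(8, 16), nn.Linear(16, 3))

loader = CheckpointLoader(
    load_path="best_model_4.pt",
    load_dict={"network": net_3},
    strict=False,        # tolerate the keys filtered out below
    strict_shape=False,  # skip layers whose shapes differ from the checkpoint
)
# loader.attach(trainer)  # `trainer` would be an ignite Engine; loading runs on STARTED
```

With `strict_shape=False` the handler drops the mismatched final-layer weights before calling `Checkpoint.load_objects`, which is the permissive behaviour the issue's hand-written workaround implements.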
gh_patches_debug_11771 | rasdani/github-patches | git_diff | google__timesketch-268 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Wrong app context for CSV task
We need to run the CSV importer task in the correct context.
</issue>
<code>
[start of timesketch/lib/tasks.py]
1 # Copyright 2015 Google Inc. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 """Celery task for processing Plaso storage files."""
15
16 import os
17 import logging
18 import sys
19
20 from flask import current_app
21 # We currently don't have plaso in our Travis setup. This is a workaround
22 # for that until we fix the Travis environment.
23 # TODO: Add Plaso to our Travis environment we are running our tests in.
24 try:
25 from plaso.frontend import psort
26 except ImportError:
27 pass
28
29 from timesketch import create_celery_app
30 from timesketch.lib.datastores.elastic import ElasticsearchDataStore
31 from timesketch.lib.utils import read_and_validate_csv
32 from timesketch.models import db_session
33 from timesketch.models.sketch import SearchIndex
34
35 celery = create_celery_app()
36
37
38 def get_data_location():
39 """Path to the plaso data directory.
40
41 Returns:
42 The path to where the plaso data directory is or None if not existing.
43 """
44 data_location = current_app.config.get(u'PLASO_DATA_LOCATION', None)
45 if not data_location:
46 data_location = os.path.join(sys.prefix, u'share', u'plaso')
47 if not os.path.exists(data_location):
48 data_location = None
49 return data_location
50
51
52 @celery.task(track_started=True)
53 def run_plaso(source_file_path, timeline_name, index_name, username=None):
54 """Create a Celery task for processing Plaso storage file.
55
56 Args:
57 source_file_path: Path to plaso storage file.
58 timeline_name: Name of the Timesketch timeline.
59 index_name: Name of the datastore index.
60 username: Username of the user who will own the timeline.
61
62 Returns:
63 Dictionary with count of processed events.
64 """
65 plaso_data_location = get_data_location()
66 flush_interval = 1000 # events to queue before bulk index
67 doc_type = u'plaso_event' # Document type for Elasticsearch
68
69 # Use Plaso psort frontend tool.
70 frontend = psort.PsortFrontend()
71 frontend.SetDataLocation(plaso_data_location)
72 storage_reader = frontend.CreateStorageReader(source_file_path)
73
74 # Setup the Timesketch output module.
75 output_module = frontend.CreateOutputModule(u'timesketch')
76 output_module.SetIndexName(index_name)
77 output_module.SetTimelineName(timeline_name)
78 output_module.SetFlushInterval(flush_interval)
79 output_module.SetDocType(doc_type)
80 if username:
81 output_module.SetUserName(username)
82
83 # Start process the Plaso storage file.
84 counter = frontend.ExportEvents(storage_reader, output_module)
85
86 return dict(counter)
87
88
89 @celery.task(track_started=True)
90 def run_csv(source_file_path, timeline_name, index_name, username=None):
91 """Create a Celery task for processing a CSV file.
92
93 Args:
94 source_file_path: Path to CSV file.
95 timeline_name: Name of the Timesketch timeline.
96 index_name: Name of the datastore index.
97
98 Returns:
99 Dictionary with count of processed events.
100 """
101 flush_interval = 1000 # events to queue before bulk index
102 event_type = u'generic_event' # Document type for Elasticsearch
103
104 # Log information to Celery
105 logging.info(u'Index name: %s', index_name)
106 logging.info(u'Timeline name: %s', timeline_name)
107 logging.info(u'Flush interval: %d', flush_interval)
108 logging.info(u'Document type: %s', event_type)
109 logging.info(u'Owner: %s', username)
110
111 es = ElasticsearchDataStore(
112 host=current_app.config[u'ELASTIC_HOST'],
113 port=current_app.config[u'ELASTIC_PORT'])
114
115 es.create_index(index_name=index_name, doc_type=event_type)
116 for event in read_and_validate_csv(source_file_path):
117 es.import_event(
118 flush_interval, index_name, event_type, event)
119
120 # Import the remaining events
121 total_events = es.import_event(flush_interval, index_name, event_type)
122
123 # We are done so let's remove the processing status flag
124 search_index = SearchIndex.query.filter_by(index_name=index_name).first()
125 search_index.status.remove(search_index.status[0])
126 db_session.add(search_index)
127 db_session.commit()
128
129 return {u'Events processed': total_events}
130
[end of timesketch/lib/tasks.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/timesketch/lib/tasks.py b/timesketch/lib/tasks.py
--- a/timesketch/lib/tasks.py
+++ b/timesketch/lib/tasks.py
@@ -121,9 +121,10 @@
total_events = es.import_event(flush_interval, index_name, event_type)
# We are done so let's remove the processing status flag
- search_index = SearchIndex.query.filter_by(index_name=index_name).first()
- search_index.status.remove(search_index.status[0])
- db_session.add(search_index)
- db_session.commit()
+ with celery.app.app_context():
+ search_index = SearchIndex.query.filter_by(index_name=index_name).first()
+ search_index.status.remove(search_index.status[0])
+ db_session.add(search_index)
+ db_session.commit()
return {u'Events processed': total_events}
| {"golden_diff": "diff --git a/timesketch/lib/tasks.py b/timesketch/lib/tasks.py\n--- a/timesketch/lib/tasks.py\n+++ b/timesketch/lib/tasks.py\n@@ -121,9 +121,10 @@\n total_events = es.import_event(flush_interval, index_name, event_type)\n \n # We are done so let's remove the processing status flag\n- search_index = SearchIndex.query.filter_by(index_name=index_name).first()\n- search_index.status.remove(search_index.status[0])\n- db_session.add(search_index)\n- db_session.commit()\n+ with celery.app.app_context():\n+ search_index = SearchIndex.query.filter_by(index_name=index_name).first()\n+ search_index.status.remove(search_index.status[0])\n+ db_session.add(search_index)\n+ db_session.commit()\n \n return {u'Events processed': total_events}\n", "issue": "Wrong app context for CSV task\nWe need to run the CSV importer task in the correct context.\n", "before_files": [{"content": "# Copyright 2015 Google Inc. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Celery task for processing Plaso storage files.\"\"\"\n\nimport os\nimport logging\nimport sys\n\nfrom flask import current_app\n# We currently don't have plaso in our Travis setup. This is a workaround\n# for that until we fix the Travis environment.\n# TODO: Add Plaso to our Travis environment we are running our tests in.\ntry:\n from plaso.frontend import psort\nexcept ImportError:\n pass\n\nfrom timesketch import create_celery_app\nfrom timesketch.lib.datastores.elastic import ElasticsearchDataStore\nfrom timesketch.lib.utils import read_and_validate_csv\nfrom timesketch.models import db_session\nfrom timesketch.models.sketch import SearchIndex\n\ncelery = create_celery_app()\n\n\ndef get_data_location():\n \"\"\"Path to the plaso data directory.\n\n Returns:\n The path to where the plaso data directory is or None if not existing.\n \"\"\"\n data_location = current_app.config.get(u'PLASO_DATA_LOCATION', None)\n if not data_location:\n data_location = os.path.join(sys.prefix, u'share', u'plaso')\n if not os.path.exists(data_location):\n data_location = None\n return data_location\n\n\[email protected](track_started=True)\ndef run_plaso(source_file_path, timeline_name, index_name, username=None):\n \"\"\"Create a Celery task for processing Plaso storage file.\n\n Args:\n source_file_path: Path to plaso storage file.\n timeline_name: Name of the Timesketch timeline.\n index_name: Name of the datastore index.\n username: Username of the user who will own the timeline.\n\n Returns:\n Dictionary with count of processed events.\n \"\"\"\n plaso_data_location = get_data_location()\n flush_interval = 1000 # events to queue before bulk index\n doc_type = u'plaso_event' # Document type for Elasticsearch\n\n # Use Plaso psort frontend tool.\n frontend = psort.PsortFrontend()\n frontend.SetDataLocation(plaso_data_location)\n storage_reader = frontend.CreateStorageReader(source_file_path)\n\n # Setup the Timesketch output module.\n output_module = frontend.CreateOutputModule(u'timesketch')\n 
output_module.SetIndexName(index_name)\n output_module.SetTimelineName(timeline_name)\n output_module.SetFlushInterval(flush_interval)\n output_module.SetDocType(doc_type)\n if username:\n output_module.SetUserName(username)\n\n # Start process the Plaso storage file.\n counter = frontend.ExportEvents(storage_reader, output_module)\n\n return dict(counter)\n\n\[email protected](track_started=True)\ndef run_csv(source_file_path, timeline_name, index_name, username=None):\n \"\"\"Create a Celery task for processing a CSV file.\n\n Args:\n source_file_path: Path to CSV file.\n timeline_name: Name of the Timesketch timeline.\n index_name: Name of the datastore index.\n\n Returns:\n Dictionary with count of processed events.\n \"\"\"\n flush_interval = 1000 # events to queue before bulk index\n event_type = u'generic_event' # Document type for Elasticsearch\n\n # Log information to Celery\n logging.info(u'Index name: %s', index_name)\n logging.info(u'Timeline name: %s', timeline_name)\n logging.info(u'Flush interval: %d', flush_interval)\n logging.info(u'Document type: %s', event_type)\n logging.info(u'Owner: %s', username)\n\n es = ElasticsearchDataStore(\n host=current_app.config[u'ELASTIC_HOST'],\n port=current_app.config[u'ELASTIC_PORT'])\n\n es.create_index(index_name=index_name, doc_type=event_type)\n for event in read_and_validate_csv(source_file_path):\n es.import_event(\n flush_interval, index_name, event_type, event)\n\n # Import the remaining events\n total_events = es.import_event(flush_interval, index_name, event_type)\n\n # We are done so let's remove the processing status flag\n search_index = SearchIndex.query.filter_by(index_name=index_name).first()\n search_index.status.remove(search_index.status[0])\n db_session.add(search_index)\n db_session.commit()\n\n return {u'Events processed': total_events}\n", "path": "timesketch/lib/tasks.py"}]} | 1,899 | 194 |
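The fix above works by wrapping the SQLAlchemy session work in a Flask application context so the Celery worker can reach the database bindings. A generic sketch of that pattern follows; the task body is a placeholder and the wiring is simplified, since Timesketch itself reaches the Flask app through `celery.app`, which its `create_celery_app()` factory is assumed to provide.

```python
from celery import Celery
from flask import Flask

flask_app = Flask(__name__)
celery = Celery(__name__)

@celery.task
def finish_import(index_name):
    # ORM/session access inside a worker needs an application context,
    # otherwise Flask-bound sessions and config are unavailable.
    with flask_app.app_context():
        # e.g. look up the SearchIndex row, remove its processing status,
        # add it to the session and commit, as the golden diff does.
        pass
```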
gh_patches_debug_16378 | rasdani/github-patches | git_diff | freedomofpress__securedrop-379 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Display number of docs and messages per source in source list
> At the moment each source in the list displays: source codename, last updated. It would be helpful to also see: total # of messages/docs.
Extracted from #322
</issue>
<code>
[start of securedrop/db.py]
1 import os
2 import datetime
3
4 from sqlalchemy import create_engine, ForeignKey
5 from sqlalchemy.orm import scoped_session, sessionmaker, relationship, backref
6 from sqlalchemy.ext.declarative import declarative_base
7 from sqlalchemy import Column, Integer, String, Boolean, DateTime
8 from sqlalchemy.orm.exc import NoResultFound
9
10 import config
11 import crypto_util
12 import store
13
14 # http://flask.pocoo.org/docs/patterns/sqlalchemy/
15
16 if config.DATABASE_ENGINE == "sqlite":
17 engine = create_engine(
18 config.DATABASE_ENGINE + ":///" +
19 config.DATABASE_FILE
20 )
21 else:
22 engine = create_engine(
23 config.DATABASE_ENGINE + '://' +
24 config.DATABASE_USERNAME + ':' +
25 config.DATABASE_PASSWORD + '@' +
26 config.DATABASE_HOST + '/' +
27 config.DATABASE_NAME, echo=False
28 )
29
30 db_session = scoped_session(sessionmaker(autocommit=False,
31 autoflush=False,
32 bind=engine))
33 Base = declarative_base()
34 Base.query = db_session.query_property()
35
36
37 class Source(Base):
38 __tablename__ = 'sources'
39 id = Column(Integer, primary_key=True)
40 filesystem_id = Column(String(96), unique=True)
41 journalist_designation = Column(String(255), nullable=False)
42 flagged = Column(Boolean, default=False)
43 last_updated = Column(DateTime, default=datetime.datetime.now)
44
45 # sources are "pending" and don't get displayed to journalists until they submit something
46 pending = Column(Boolean, default=True)
47
48 # keep track of how many interactions have happened, for filenames
49 interaction_count = Column(Integer, default=0, nullable=False)
50
51 def __init__(self, filesystem_id=None, journalist_designation=None):
52 self.filesystem_id = filesystem_id
53 self.journalist_designation = journalist_designation
54
55 def __repr__(self):
56 return '<Source %r>' % (self.journalist_designation)
57
58 def journalist_filename(self):
59 valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'
60 return ''.join([c for c in self.journalist_designation.lower().replace(' ', '_') if c in valid_chars])
61
62 class Submission(Base):
63 __tablename__ = 'submissions'
64 id = Column(Integer, primary_key=True)
65 source_id = Column(Integer, ForeignKey('sources.id'))
66 source = relationship("Source", backref=backref('submissions', order_by=id))
67 filename = Column(String(255), nullable=False)
68 size = Column(Integer, nullable=False)
69
70 def __init__(self, source, filename):
71 self.source_id = source.id
72 self.filename = filename
73 self.size = os.stat(store.path(source.filesystem_id, filename)).st_size
74
75 def __repr__(self):
76 return '<Submission %r>' % (self.filename)
77
78
79 # Declare (or import) models before init_db
80 def init_db():
81 Base.metadata.create_all(bind=engine)
82
83
[end of securedrop/db.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/securedrop/db.py b/securedrop/db.py
--- a/securedrop/db.py
+++ b/securedrop/db.py
@@ -59,6 +59,19 @@
valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'
return ''.join([c for c in self.journalist_designation.lower().replace(' ', '_') if c in valid_chars])
+ def documents_messages_count(self):
+ try:
+ return self.docs_msgs_count
+ except AttributeError:
+ self.docs_msgs_count = {'messages': 0, 'documents': 0}
+ for submission in self.submissions:
+ if submission.filename.endswith('msg.gpg'):
+ self.docs_msgs_count['messages'] += 1
+ elif submission.filename.endswith('doc.zip.gpg'):
+ self.docs_msgs_count['documents'] += 1
+ return self.docs_msgs_count
+
+
class Submission(Base):
__tablename__ = 'submissions'
id = Column(Integer, primary_key=True)
| {"golden_diff": "diff --git a/securedrop/db.py b/securedrop/db.py\n--- a/securedrop/db.py\n+++ b/securedrop/db.py\n@@ -59,6 +59,19 @@\n valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'\n return ''.join([c for c in self.journalist_designation.lower().replace(' ', '_') if c in valid_chars])\n \n+ def documents_messages_count(self):\n+ try:\n+ return self.docs_msgs_count\n+ except AttributeError:\n+ self.docs_msgs_count = {'messages': 0, 'documents': 0}\n+ for submission in self.submissions:\n+ if submission.filename.endswith('msg.gpg'):\n+ self.docs_msgs_count['messages'] += 1\n+ elif submission.filename.endswith('doc.zip.gpg'):\n+ self.docs_msgs_count['documents'] += 1\n+ return self.docs_msgs_count\n+\n+\n class Submission(Base):\n __tablename__ = 'submissions'\n id = Column(Integer, primary_key=True)\n", "issue": "Display number of docs and messages per source in source list\n> At the moment each source in the list displays: source codename, last updated. It would be helpful to also see: total # of messages/docs.\n\nExtracted from #322\n\n", "before_files": [{"content": "import os\nimport datetime\n\nfrom sqlalchemy import create_engine, ForeignKey\nfrom sqlalchemy.orm import scoped_session, sessionmaker, relationship, backref\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy import Column, Integer, String, Boolean, DateTime\nfrom sqlalchemy.orm.exc import NoResultFound\n\nimport config\nimport crypto_util\nimport store\n\n# http://flask.pocoo.org/docs/patterns/sqlalchemy/\n\nif config.DATABASE_ENGINE == \"sqlite\":\n engine = create_engine(\n config.DATABASE_ENGINE + \":///\" +\n config.DATABASE_FILE\n )\nelse:\n engine = create_engine(\n config.DATABASE_ENGINE + '://' +\n config.DATABASE_USERNAME + ':' +\n config.DATABASE_PASSWORD + '@' +\n config.DATABASE_HOST + '/' +\n config.DATABASE_NAME, echo=False\n )\n\ndb_session = scoped_session(sessionmaker(autocommit=False,\n autoflush=False,\n bind=engine))\nBase = declarative_base()\nBase.query = db_session.query_property()\n\n\nclass Source(Base):\n __tablename__ = 'sources'\n id = Column(Integer, primary_key=True)\n filesystem_id = Column(String(96), unique=True)\n journalist_designation = Column(String(255), nullable=False)\n flagged = Column(Boolean, default=False)\n last_updated = Column(DateTime, default=datetime.datetime.now)\n \n # sources are \"pending\" and don't get displayed to journalists until they submit something\n pending = Column(Boolean, default=True)\n\n # keep track of how many interactions have happened, for filenames\n interaction_count = Column(Integer, default=0, nullable=False)\n\n def __init__(self, filesystem_id=None, journalist_designation=None):\n self.filesystem_id = filesystem_id\n self.journalist_designation = journalist_designation\n\n def __repr__(self):\n return '<Source %r>' % (self.journalist_designation)\n\n def journalist_filename(self):\n valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'\n return ''.join([c for c in self.journalist_designation.lower().replace(' ', '_') if c in valid_chars])\n\nclass Submission(Base):\n __tablename__ = 'submissions'\n id = Column(Integer, primary_key=True)\n source_id = Column(Integer, ForeignKey('sources.id'))\n source = relationship(\"Source\", backref=backref('submissions', order_by=id))\n filename = Column(String(255), nullable=False)\n size = Column(Integer, nullable=False)\n\n def __init__(self, source, filename):\n self.source_id = source.id\n self.filename = filename\n self.size = os.stat(store.path(source.filesystem_id, 
filename)).st_size\n\n def __repr__(self):\n return '<Submission %r>' % (self.filename)\n\n\n# Declare (or import) models before init_db\ndef init_db():\n Base.metadata.create_all(bind=engine)\n\n", "path": "securedrop/db.py"}]} | 1,373 | 228 |
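With the `documents_messages_count()` helper added by the diff above, the journalist-facing source list can surface per-source totals. The function below is hypothetical glue code for a view; only the model attributes and the `'messages'` / `'documents'` keys come from the code shown above.

```python
def source_list_entries(sources):
    """Build display rows for the source list, including per-source totals."""
    entries = []
    for source in sources:
        counts = source.documents_messages_count()
        entries.append({
            "codename": source.journalist_designation,
            "last_updated": source.last_updated,
            "messages": counts["messages"],
            "documents": counts["documents"],
        })
    return entries
```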
gh_patches_debug_38619 | rasdani/github-patches | git_diff | sktime__sktime-1600 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Refactor issue #1043
Fixes #1043
Removed methods load_UCR_UEA_dataset & _load_dataset from datasets/base.py and moved them to utils/data_io.py
</issue>
<code>
[start of sktime/transformations/panel/signature_based/_signature_method.py]
1 # -*- coding: utf-8 -*-
2 from sklearn.pipeline import Pipeline
3 from sktime.transformations.base import _PanelToTabularTransformer
4 from sktime.transformations.panel.signature_based._compute import (
5 _WindowSignatureTransform,
6 )
7 from sktime.transformations.panel.signature_based._augmentations import (
8 _make_augmentation_pipeline,
9 )
10 from sktime.transformations.panel.signature_based._checks import (
11 _handle_sktime_signatures,
12 )
13
14
15 class SignatureTransformer(_PanelToTabularTransformer):
16 """Transformation class from the signature method.
17
18 Follows the methodology laid out in the paper:
19 "A Generalised Signature Method for Multivariate Time Series"
20
21 Parameters
22 ----------
23 augmentation_list: tuple of strings, contains the augmentations to be
24 applied before application of the signature transform.
25 window_name: str, The name of the window transform to apply.
26 window_depth: int, The depth of the dyadic window. (Active only if
27 `window_name == 'dyadic'`).
28 window_length: int, The length of the sliding/expanding window. (Active
29 only if `window_name in ['sliding, 'expanding']`.
30 window_step: int, The step of the sliding/expanding window. (Active
31 only if `window_name in ['sliding, 'expanding']`.
32 rescaling: str or None, The method of signature rescaling.
33 sig_tfm: str, String to specify the type of signature transform. One of:
34 ['signature', 'logsignature']).
35 depth: int, Signature truncation depth.
36
37 Attributes
38 ----------
39 signature_method: sklearn.Pipeline, A sklearn pipeline object that contains
40 all the steps to extract the signature features.
41 """
42
43 def __init__(
44 self,
45 augmentation_list=("basepoint", "addtime"),
46 window_name="dyadic",
47 window_depth=3,
48 window_length=None,
49 window_step=None,
50 rescaling=None,
51 sig_tfm="signature",
52 depth=4,
53 ):
54 super(SignatureTransformer, self).__init__()
55 self.augmentation_list = augmentation_list
56 self.window_name = window_name
57 self.window_depth = window_depth
58 self.window_length = window_length
59 self.window_step = window_step
60 self.rescaling = rescaling
61 self.sig_tfm = sig_tfm
62 self.depth = depth
63
64 self.setup_feature_pipeline()
65
66 def _assertions(self):
67 """Some assertions to run on initialisation."""
68 assert not all(
69 [self.sig_tfm == "logsignature", self.rescaling == "post"]
70 ), "Cannot have post rescaling with the logsignature."
71
72 def setup_feature_pipeline(self):
73 """Sets up the signature method as an sklearn pipeline."""
74 augmentation_step = _make_augmentation_pipeline(self.augmentation_list)
75 transform_step = _WindowSignatureTransform(
76 window_name=self.window_name,
77 window_depth=self.window_depth,
78 window_length=self.window_length,
79 window_step=self.window_step,
80 sig_tfm=self.sig_tfm,
81 sig_depth=self.depth,
82 rescaling=self.rescaling,
83 )
84
85 # The so-called 'signature method' as defined in the reference paper
86 self.signature_method = Pipeline(
87 [
88 ("augmentations", augmentation_step),
89 ("window_and_transform", transform_step),
90 ]
91 )
92
93 @_handle_sktime_signatures(check_fitted=False)
94 def fit(self, data, labels=None):
95 self.signature_method.fit(data, labels)
96 self._is_fitted = True
97 return self
98
99 @_handle_sktime_signatures(check_fitted=True)
100 def transform(self, data, labels=None):
101 return self.signature_method.transform(data)
102
[end of sktime/transformations/panel/signature_based/_signature_method.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/sktime/transformations/panel/signature_based/_signature_method.py b/sktime/transformations/panel/signature_based/_signature_method.py
--- a/sktime/transformations/panel/signature_based/_signature_method.py
+++ b/sktime/transformations/panel/signature_based/_signature_method.py
@@ -1,15 +1,16 @@
# -*- coding: utf-8 -*-
from sklearn.pipeline import Pipeline
+
from sktime.transformations.base import _PanelToTabularTransformer
-from sktime.transformations.panel.signature_based._compute import (
- _WindowSignatureTransform,
-)
from sktime.transformations.panel.signature_based._augmentations import (
_make_augmentation_pipeline,
)
from sktime.transformations.panel.signature_based._checks import (
_handle_sktime_signatures,
)
+from sktime.transformations.panel.signature_based._compute import (
+ _WindowSignatureTransform,
+)
class SignatureTransformer(_PanelToTabularTransformer):
@@ -63,14 +64,8 @@
self.setup_feature_pipeline()
- def _assertions(self):
- """Some assertions to run on initialisation."""
- assert not all(
- [self.sig_tfm == "logsignature", self.rescaling == "post"]
- ), "Cannot have post rescaling with the logsignature."
-
def setup_feature_pipeline(self):
- """Sets up the signature method as an sklearn pipeline."""
+ """Set up the signature method as an sklearn pipeline."""
augmentation_step = _make_augmentation_pipeline(self.augmentation_list)
transform_step = _WindowSignatureTransform(
window_name=self.window_name,
@@ -92,10 +87,38 @@
@_handle_sktime_signatures(check_fitted=False)
def fit(self, data, labels=None):
+ """Fit to data, then transform it.
+
+ Parameters
+ ----------
+ data: pd.Dataframe or np.ndarray (3d array)
+ Data to transform.
+ labels: np.ndarray (1d array) or pd.series or list
+ Labels for the data.
+
+ Returns
+ -------
+ pd.Dataframe or np.ndarray or pd.series
+ Transformed data.
+ """
self.signature_method.fit(data, labels)
self._is_fitted = True
return self
@_handle_sktime_signatures(check_fitted=True)
def transform(self, data, labels=None):
+ """Transform the class from the signature method.
+
+ Parameters
+ ----------
+ data: pd.Dataframe or np.ndarray (3d array)
+ Data to transform.
+ labels: np.ndarray (1d array) or pd.series or list
+ Labels for the data.
+
+ Returns
+ -------
+ pd.Dataframe or np.ndarray or pd.series
+ Transformed data.
+ """
return self.signature_method.transform(data)
| {"golden_diff": "diff --git a/sktime/transformations/panel/signature_based/_signature_method.py b/sktime/transformations/panel/signature_based/_signature_method.py\n--- a/sktime/transformations/panel/signature_based/_signature_method.py\n+++ b/sktime/transformations/panel/signature_based/_signature_method.py\n@@ -1,15 +1,16 @@\n # -*- coding: utf-8 -*-\n from sklearn.pipeline import Pipeline\n+\n from sktime.transformations.base import _PanelToTabularTransformer\n-from sktime.transformations.panel.signature_based._compute import (\n- _WindowSignatureTransform,\n-)\n from sktime.transformations.panel.signature_based._augmentations import (\n _make_augmentation_pipeline,\n )\n from sktime.transformations.panel.signature_based._checks import (\n _handle_sktime_signatures,\n )\n+from sktime.transformations.panel.signature_based._compute import (\n+ _WindowSignatureTransform,\n+)\n \n \n class SignatureTransformer(_PanelToTabularTransformer):\n@@ -63,14 +64,8 @@\n \n self.setup_feature_pipeline()\n \n- def _assertions(self):\n- \"\"\"Some assertions to run on initialisation.\"\"\"\n- assert not all(\n- [self.sig_tfm == \"logsignature\", self.rescaling == \"post\"]\n- ), \"Cannot have post rescaling with the logsignature.\"\n-\n def setup_feature_pipeline(self):\n- \"\"\"Sets up the signature method as an sklearn pipeline.\"\"\"\n+ \"\"\"Set up the signature method as an sklearn pipeline.\"\"\"\n augmentation_step = _make_augmentation_pipeline(self.augmentation_list)\n transform_step = _WindowSignatureTransform(\n window_name=self.window_name,\n@@ -92,10 +87,38 @@\n \n @_handle_sktime_signatures(check_fitted=False)\n def fit(self, data, labels=None):\n+ \"\"\"Fit to data, then transform it.\n+\n+ Parameters\n+ ----------\n+ data: pd.Dataframe or np.ndarray (3d array)\n+ Data to transform.\n+ labels: np.ndarray (1d array) or pd.series or list\n+ Labels for the data.\n+\n+ Returns\n+ -------\n+ pd.Dataframe or np.ndarray or pd.series\n+ Transformed data.\n+ \"\"\"\n self.signature_method.fit(data, labels)\n self._is_fitted = True\n return self\n \n @_handle_sktime_signatures(check_fitted=True)\n def transform(self, data, labels=None):\n+ \"\"\"Transform the class from the signature method.\n+\n+ Parameters\n+ ----------\n+ data: pd.Dataframe or np.ndarray (3d array)\n+ Data to transform.\n+ labels: np.ndarray (1d array) or pd.series or list\n+ Labels for the data.\n+\n+ Returns\n+ -------\n+ pd.Dataframe or np.ndarray or pd.series\n+ Transformed data.\n+ \"\"\"\n return self.signature_method.transform(data)\n", "issue": "Refactor issue #1043\nFixes #1043 \r\n\r\nRemoved methods load_UCR_UEA_dataset & _load_dataset from datasets/base.py and moved them to utils/data_io.py\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom sklearn.pipeline import Pipeline\nfrom sktime.transformations.base import _PanelToTabularTransformer\nfrom sktime.transformations.panel.signature_based._compute import (\n _WindowSignatureTransform,\n)\nfrom sktime.transformations.panel.signature_based._augmentations import (\n _make_augmentation_pipeline,\n)\nfrom sktime.transformations.panel.signature_based._checks import (\n _handle_sktime_signatures,\n)\n\n\nclass SignatureTransformer(_PanelToTabularTransformer):\n \"\"\"Transformation class from the signature method.\n\n Follows the methodology laid out in the paper:\n \"A Generalised Signature Method for Multivariate Time Series\"\n\n Parameters\n ----------\n augmentation_list: tuple of strings, contains the augmentations to be\n applied before application of 
the signature transform.\n window_name: str, The name of the window transform to apply.\n window_depth: int, The depth of the dyadic window. (Active only if\n `window_name == 'dyadic'`).\n window_length: int, The length of the sliding/expanding window. (Active\n only if `window_name in ['sliding, 'expanding']`.\n window_step: int, The step of the sliding/expanding window. (Active\n only if `window_name in ['sliding, 'expanding']`.\n rescaling: str or None, The method of signature rescaling.\n sig_tfm: str, String to specify the type of signature transform. One of:\n ['signature', 'logsignature']).\n depth: int, Signature truncation depth.\n\n Attributes\n ----------\n signature_method: sklearn.Pipeline, A sklearn pipeline object that contains\n all the steps to extract the signature features.\n \"\"\"\n\n def __init__(\n self,\n augmentation_list=(\"basepoint\", \"addtime\"),\n window_name=\"dyadic\",\n window_depth=3,\n window_length=None,\n window_step=None,\n rescaling=None,\n sig_tfm=\"signature\",\n depth=4,\n ):\n super(SignatureTransformer, self).__init__()\n self.augmentation_list = augmentation_list\n self.window_name = window_name\n self.window_depth = window_depth\n self.window_length = window_length\n self.window_step = window_step\n self.rescaling = rescaling\n self.sig_tfm = sig_tfm\n self.depth = depth\n\n self.setup_feature_pipeline()\n\n def _assertions(self):\n \"\"\"Some assertions to run on initialisation.\"\"\"\n assert not all(\n [self.sig_tfm == \"logsignature\", self.rescaling == \"post\"]\n ), \"Cannot have post rescaling with the logsignature.\"\n\n def setup_feature_pipeline(self):\n \"\"\"Sets up the signature method as an sklearn pipeline.\"\"\"\n augmentation_step = _make_augmentation_pipeline(self.augmentation_list)\n transform_step = _WindowSignatureTransform(\n window_name=self.window_name,\n window_depth=self.window_depth,\n window_length=self.window_length,\n window_step=self.window_step,\n sig_tfm=self.sig_tfm,\n sig_depth=self.depth,\n rescaling=self.rescaling,\n )\n\n # The so-called 'signature method' as defined in the reference paper\n self.signature_method = Pipeline(\n [\n (\"augmentations\", augmentation_step),\n (\"window_and_transform\", transform_step),\n ]\n )\n\n @_handle_sktime_signatures(check_fitted=False)\n def fit(self, data, labels=None):\n self.signature_method.fit(data, labels)\n self._is_fitted = True\n return self\n\n @_handle_sktime_signatures(check_fitted=True)\n def transform(self, data, labels=None):\n return self.signature_method.transform(data)\n", "path": "sktime/transformations/panel/signature_based/_signature_method.py"}]} | 1,580 | 630 |
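For context, a minimal sketch of driving `SignatureTransformer` end to end. It assumes the optional signature dependencies (for example `esig`) are installed and that this sktime version accepts 3D numpy panels through the decorated `fit` and `transform`; the toy data is random and the import uses the module path shown in the record.

```python
import numpy as np

from sktime.transformations.panel.signature_based._signature_method import (
    SignatureTransformer,
)

rng = np.random.default_rng(0)
X = rng.normal(size=(10, 2, 20))   # 10 series, 2 channels, 20 time points
y = rng.integers(0, 2, size=10)    # toy labels

transformer = SignatureTransformer(depth=3)
features = transformer.fit(X, y).transform(X)
print(features.shape)              # one tabular feature row per input series
```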
gh_patches_debug_56181 | rasdani/github-patches | git_diff | TOMToolkit__tom_base-196 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Missing dataclasses
Following the tom_base install instructions, I pip installed the requirements.txt and then tried
> ./manage.py migrate
which ended with the following error:
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 205, in _call_with_frames_removed
File "/Users/rstreet/software/tom_base/tom_alerts/urls.py", line 3, in <module>
from tom_alerts.views import BrokerQueryCreateView, BrokerQueryListView, BrokerQueryUpdateView, RunQueryView
File "/Users/rstreet/software/tom_base/tom_alerts/views.py", line 3, in <module>
from tom_alerts.alerts import get_service_class, get_service_classes
File "/Users/rstreet/software/tom_base/tom_alerts/alerts.py", line 5, in <module>
from dataclasses import dataclass
ModuleNotFoundError: No module named 'dataclasses'
</issue>
<code>
[start of setup.py]
1 from setuptools import setup, find_packages
2 from os import path
3
4 here = path.abspath(path.dirname(__file__))
5 with open(path.join(here, 'README.md'), encoding='utf-8') as f:
6 long_description = f.read()
7
8 setup(
9 name='tomtoolkit',
10 version='1.1.0',
11 description='The TOM Toolkit and base modules',
12 long_description=long_description,
13 long_description_content_type='text/markdown',
14 url='https://tomtoolkit.github.io',
15 author='TOM Toolkit Project',
16 author_email='[email protected]',
17 classifiers=[
18 'Development Status :: 3 - Alpha',
19 'Intended Audience :: Science/Research',
20 'License :: OSI Approved :: BSD License',
21 'Operating System :: OS Independent',
22 'Programming Language :: Python :: 3',
23 'Programming Language :: Python :: 3.7',
24 'Topic :: Scientific/Engineering :: Astronomy',
25 'Topic :: Scientific/Engineering :: Physics'
26 ],
27 keywords=['tomtoolkit', 'astronomy', 'astrophysics', 'cosmology', 'science', 'fits', 'observatory'],
28 packages=find_packages(),
29 install_requires=[
30 'django',
31 'django-bootstrap4',
32 'django-extensions',
33 'django-filter',
34 'django-contrib-comments',
35 'django-gravatar2',
36 'django-crispy-forms',
37 'django-guardian',
38 'numpy',
39 'python-dateutil',
40 'requests',
41 'astroquery',
42 'astropy',
43 'astroplan',
44 'plotly',
45 'matplotlib',
46 'pillow',
47 'fits2image',
48 'specutils',
49 ],
50 extras_require={
51 'test': ['factory_boy']
52 },
53 include_package_data=True,
54 )
55
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -46,6 +46,7 @@
'pillow',
'fits2image',
'specutils',
+ "dataclasses; python_version < '3.7'",
],
extras_require={
'test': ['factory_boy']
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -46,6 +46,7 @@\n 'pillow',\n 'fits2image',\n 'specutils',\n+ \"dataclasses; python_version < '3.7'\",\n ],\n extras_require={\n 'test': ['factory_boy']\n", "issue": "Missing dataclasses\nFollowing the tom_base install instructions, I pip installed the requirements.txt and then tried \r\n> ./manage.py migrate\r\n\r\nwhich ended with the following error:\r\n File \"<frozen importlib._bootstrap_external>\", line 678, in exec_module\r\n File \"<frozen importlib._bootstrap>\", line 205, in _call_with_frames_removed\r\n File \"/Users/rstreet/software/tom_base/tom_alerts/urls.py\", line 3, in <module>\r\n from tom_alerts.views import BrokerQueryCreateView, BrokerQueryListView, BrokerQueryUpdateView, RunQueryView\r\n File \"/Users/rstreet/software/tom_base/tom_alerts/views.py\", line 3, in <module>\r\n from tom_alerts.alerts import get_service_class, get_service_classes\r\n File \"/Users/rstreet/software/tom_base/tom_alerts/alerts.py\", line 5, in <module>\r\n from dataclasses import dataclass\r\nModuleNotFoundError: No module named 'dataclasses'\r\n\n", "before_files": [{"content": "from setuptools import setup, find_packages\nfrom os import path\n\nhere = path.abspath(path.dirname(__file__))\nwith open(path.join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\n\nsetup(\n name='tomtoolkit',\n version='1.1.0',\n description='The TOM Toolkit and base modules',\n long_description=long_description,\n long_description_content_type='text/markdown',\n url='https://tomtoolkit.github.io',\n author='TOM Toolkit Project',\n author_email='[email protected]',\n classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Scientific/Engineering :: Astronomy',\n 'Topic :: Scientific/Engineering :: Physics'\n ],\n keywords=['tomtoolkit', 'astronomy', 'astrophysics', 'cosmology', 'science', 'fits', 'observatory'],\n packages=find_packages(),\n install_requires=[\n 'django',\n 'django-bootstrap4',\n 'django-extensions',\n 'django-filter',\n 'django-contrib-comments',\n 'django-gravatar2',\n 'django-crispy-forms',\n 'django-guardian',\n 'numpy',\n 'python-dateutil',\n 'requests',\n 'astroquery',\n 'astropy',\n 'astroplan',\n 'plotly',\n 'matplotlib',\n 'pillow',\n 'fits2image',\n 'specutils',\n ],\n extras_require={\n 'test': ['factory_boy']\n },\n include_package_data=True,\n)\n", "path": "setup.py"}]} | 1,225 | 78 |
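The one-line fix relies on a PEP 508 environment marker, so the `dataclasses` backport is installed only on interpreters older than 3.7, where the stdlib module is missing. The same pattern in a stripped-down, hypothetical `setup.py` looks like this:

```python
from setuptools import setup

setup(
    name="example-package",   # hypothetical project name
    version="0.1.0",
    install_requires=[
        "requests",
        # Backport only needed before the module joined the stdlib in 3.7.
        "dataclasses; python_version < '3.7'",
    ],
)
```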
gh_patches_debug_29137 | rasdani/github-patches | git_diff | spack__spack-4584 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
building flex with +lex variant fails
On an older system (SUSE 13 with Python 2.7.6), the symlink code in the package fails entirely.
@mjwoods
</issue>
<code>
[start of var/spack/repos/builtin/packages/flex/package.py]
1 ##############################################################################
2 # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
3 # Produced at the Lawrence Livermore National Laboratory.
4 #
5 # This file is part of Spack.
6 # Created by Todd Gamblin, [email protected], All rights reserved.
7 # LLNL-CODE-647188
8 #
9 # For details, see https://github.com/llnl/spack
10 # Please also see the LICENSE file for our notice and the LGPL.
11 #
12 # This program is free software; you can redistribute it and/or modify
13 # it under the terms of the GNU Lesser General Public License (as
14 # published by the Free Software Foundation) version 2.1, February 1999.
15 #
16 # This program is distributed in the hope that it will be useful, but
17 # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
19 # conditions of the GNU Lesser General Public License for more details.
20 #
21 # You should have received a copy of the GNU Lesser General Public
22 # License along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 ##############################################################################
25 from spack import *
26 import os
27
28
29 class Flex(AutotoolsPackage):
30 """Flex is a tool for generating scanners."""
31
32 homepage = "https://github.com/westes/flex"
33 url = "https://github.com/westes/flex/releases/download/v2.6.1/flex-2.6.1.tar.gz"
34
35 version('2.6.3', 'a5f65570cd9107ec8a8ec88f17b31bb1')
36 # Problematic version:
37 # See issue #2554; https://github.com/westes/flex/issues/113
38 # version('2.6.2', 'cc6d76c333db7653d5caf423a3335239')
39 version('2.6.1', '05bcd8fb629e0ae130311e8a6106fa82')
40 version('2.6.0', '760be2ee9433e822b6eb65318311c19d')
41 version('2.5.39', '5865e76ac69c05699f476515592750d7')
42
43 variant('lex', default=True,
44 description="Provide symlinks for lex and libl")
45
46 depends_on('bison', type='build')
47 depends_on('[email protected]:', type='build')
48 depends_on('help2man', type='build')
49
50 # Older tarballs don't come with a configure script
51 depends_on('m4', type='build')
52 depends_on('autoconf', type='build', when='@:2.6.0')
53 depends_on('automake', type='build', when='@:2.6.0')
54 depends_on('libtool', type='build', when='@:2.6.0')
55
56 def url_for_version(self, version):
57 url = "https://github.com/westes/flex"
58 if version >= Version('2.6.1'):
59 url += "/releases/download/v{0}/flex-{0}.tar.gz".format(version)
60 elif version == Version('2.6.0'):
61 url += "/archive/v{0}.tar.gz".format(version)
62 elif version >= Version('2.5.37'):
63 url += "/archive/flex-{0}.tar.gz".format(version)
64 else:
65 url += "/archive/flex-{0}.tar.gz".format(version.dashed)
66
67 return url
68
69 @run_after('install')
70 def symlink_lex(self):
71 if self.spec.satisfies('+lex'):
72 dso = dso_suffix
73 for dir, flex, lex in \
74 ((self.prefix.bin, 'flex', 'lex'),
75 (self.prefix.lib, 'libfl.a', 'libl.a'),
76 (self.prefix.lib, 'libfl.' + dso, 'libl.' + dso)):
77 with working_dir(dir):
78 if (os.path.isfile(flex) and not
79 os.path.lexists(lex)):
80 symlink(flex, lex)
81
[end of var/spack/repos/builtin/packages/flex/package.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/var/spack/repos/builtin/packages/flex/package.py b/var/spack/repos/builtin/packages/flex/package.py
--- a/var/spack/repos/builtin/packages/flex/package.py
+++ b/var/spack/repos/builtin/packages/flex/package.py
@@ -32,6 +32,7 @@
homepage = "https://github.com/westes/flex"
url = "https://github.com/westes/flex/releases/download/v2.6.1/flex-2.6.1.tar.gz"
+ version('2.6.4', '2882e3179748cc9f9c23ec593d6adc8d')
version('2.6.3', 'a5f65570cd9107ec8a8ec88f17b31bb1')
# Problematic version:
# See issue #2554; https://github.com/westes/flex/issues/113
@@ -68,13 +69,17 @@
@run_after('install')
def symlink_lex(self):
+ """Install symlinks for lex compatibility."""
if self.spec.satisfies('+lex'):
dso = dso_suffix
for dir, flex, lex in \
- ((self.prefix.bin, 'flex', 'lex'),
- (self.prefix.lib, 'libfl.a', 'libl.a'),
- (self.prefix.lib, 'libfl.' + dso, 'libl.' + dso)):
- with working_dir(dir):
- if (os.path.isfile(flex) and not
- os.path.lexists(lex)):
- symlink(flex, lex)
+ ((self.prefix.bin, 'flex', 'lex'),
+ (self.prefix.lib, 'libfl.a', 'libl.a'),
+ (self.prefix.lib, 'libfl.' + dso, 'libl.' + dso),
+ (self.prefix.lib64, 'libfl.a', 'libl.a'),
+ (self.prefix.lib64, 'libfl.' + dso, 'libl.' + dso)):
+
+ if os.path.isdir(dir):
+ with working_dir(dir):
+ if (os.path.isfile(flex) and not os.path.lexists(lex)):
+ symlink(flex, lex)
| {"golden_diff": "diff --git a/var/spack/repos/builtin/packages/flex/package.py b/var/spack/repos/builtin/packages/flex/package.py\n--- a/var/spack/repos/builtin/packages/flex/package.py\n+++ b/var/spack/repos/builtin/packages/flex/package.py\n@@ -32,6 +32,7 @@\n homepage = \"https://github.com/westes/flex\"\n url = \"https://github.com/westes/flex/releases/download/v2.6.1/flex-2.6.1.tar.gz\"\n \n+ version('2.6.4', '2882e3179748cc9f9c23ec593d6adc8d')\n version('2.6.3', 'a5f65570cd9107ec8a8ec88f17b31bb1')\n # Problematic version:\n # See issue #2554; https://github.com/westes/flex/issues/113\n@@ -68,13 +69,17 @@\n \n @run_after('install')\n def symlink_lex(self):\n+ \"\"\"Install symlinks for lex compatibility.\"\"\"\n if self.spec.satisfies('+lex'):\n dso = dso_suffix\n for dir, flex, lex in \\\n- ((self.prefix.bin, 'flex', 'lex'),\n- (self.prefix.lib, 'libfl.a', 'libl.a'),\n- (self.prefix.lib, 'libfl.' + dso, 'libl.' + dso)):\n- with working_dir(dir):\n- if (os.path.isfile(flex) and not\n- os.path.lexists(lex)):\n- symlink(flex, lex)\n+ ((self.prefix.bin, 'flex', 'lex'),\n+ (self.prefix.lib, 'libfl.a', 'libl.a'),\n+ (self.prefix.lib, 'libfl.' + dso, 'libl.' + dso),\n+ (self.prefix.lib64, 'libfl.a', 'libl.a'),\n+ (self.prefix.lib64, 'libfl.' + dso, 'libl.' + dso)):\n+\n+ if os.path.isdir(dir):\n+ with working_dir(dir):\n+ if (os.path.isfile(flex) and not os.path.lexists(lex)):\n+ symlink(flex, lex)\n", "issue": "building flex with +lex variant fails\nUsing an older system (suse 13 with python 2.7.6) and the symlink code in the package fails entirely.\r\n@mjwoods \r\n\n", "before_files": [{"content": "##############################################################################\n# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.\n# Produced at the Lawrence Livermore National Laboratory.\n#\n# This file is part of Spack.\n# Created by Todd Gamblin, [email protected], All rights reserved.\n# LLNL-CODE-647188\n#\n# For details, see https://github.com/llnl/spack\n# Please also see the LICENSE file for our notice and the LGPL.\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License (as\n# published by the Free Software Foundation) version 2.1, February 1999.\n#\n# This program is distributed in the hope that it will be useful, but\n# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and\n# conditions of the GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public\n# License along with this program; if not, write to the Free Software\n# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA\n##############################################################################\nfrom spack import *\nimport os\n\n\nclass Flex(AutotoolsPackage):\n \"\"\"Flex is a tool for generating scanners.\"\"\"\n\n homepage = \"https://github.com/westes/flex\"\n url = \"https://github.com/westes/flex/releases/download/v2.6.1/flex-2.6.1.tar.gz\"\n\n version('2.6.3', 'a5f65570cd9107ec8a8ec88f17b31bb1')\n # Problematic version:\n # See issue #2554; https://github.com/westes/flex/issues/113\n # version('2.6.2', 'cc6d76c333db7653d5caf423a3335239')\n version('2.6.1', '05bcd8fb629e0ae130311e8a6106fa82')\n version('2.6.0', '760be2ee9433e822b6eb65318311c19d')\n version('2.5.39', '5865e76ac69c05699f476515592750d7')\n\n variant('lex', default=True,\n description=\"Provide symlinks for lex and libl\")\n\n depends_on('bison', type='build')\n depends_on('[email protected]:', type='build')\n depends_on('help2man', type='build')\n\n # Older tarballs don't come with a configure script\n depends_on('m4', type='build')\n depends_on('autoconf', type='build', when='@:2.6.0')\n depends_on('automake', type='build', when='@:2.6.0')\n depends_on('libtool', type='build', when='@:2.6.0')\n\n def url_for_version(self, version):\n url = \"https://github.com/westes/flex\"\n if version >= Version('2.6.1'):\n url += \"/releases/download/v{0}/flex-{0}.tar.gz\".format(version)\n elif version == Version('2.6.0'):\n url += \"/archive/v{0}.tar.gz\".format(version)\n elif version >= Version('2.5.37'):\n url += \"/archive/flex-{0}.tar.gz\".format(version)\n else:\n url += \"/archive/flex-{0}.tar.gz\".format(version.dashed)\n\n return url\n\n @run_after('install')\n def symlink_lex(self):\n if self.spec.satisfies('+lex'):\n dso = dso_suffix\n for dir, flex, lex in \\\n ((self.prefix.bin, 'flex', 'lex'),\n (self.prefix.lib, 'libfl.a', 'libl.a'),\n (self.prefix.lib, 'libfl.' + dso, 'libl.' + dso)):\n with working_dir(dir):\n if (os.path.isfile(flex) and not\n os.path.lexists(lex)):\n symlink(flex, lex)\n", "path": "var/spack/repos/builtin/packages/flex/package.py"}]} | 1,751 | 525 |
gh_patches_debug_15053 | rasdani/github-patches | git_diff | deis__deis-4373 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Better error message when registration is disabled
When `/deis/controller/registrationMode` is `disabled`, attempting to register returns
```
Registration failed: {"detail":"Authentication credentials were not provided."}
```
This message is misleading. It should explicitly say that registration is disabled.
</issue>
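For reference, a minimal Django REST Framework sketch of the requested behaviour (the settings lookup is reduced to a module-level constant for brevity; this is not the exact Deis code):

```python
# Minimal DRF sketch: raising PermissionDenied from the permission class turns
# the misleading "Authentication credentials were not provided." response into
# an explicit "Registration is disabled" message.
from rest_framework import exceptions, permissions

REGISTRATION_MODE = "disabled"  # stand-in for settings.REGISTRATION_MODE

class HasRegistrationAuth(permissions.BasePermission):
    def has_permission(self, request, view):
        if REGISTRATION_MODE == "disabled":
            raise exceptions.PermissionDenied("Registration is disabled")
        return True
```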
<code>
[start of controller/api/permissions.py]
1 from rest_framework import permissions
2 from django.conf import settings
3 from django.contrib.auth.models import AnonymousUser
4
5 from api import models
6
7
8 def is_app_user(request, obj):
9 if request.user.is_superuser or \
10 isinstance(obj, models.App) and obj.owner == request.user or \
11 hasattr(obj, 'app') and obj.app.owner == request.user:
12 return True
13 elif request.user.has_perm('use_app', obj) or \
14 hasattr(obj, 'app') and request.user.has_perm('use_app', obj.app):
15 return request.method != 'DELETE'
16 else:
17 return False
18
19
20 class IsAnonymous(permissions.BasePermission):
21 """
22 View permission to allow anonymous users.
23 """
24
25 def has_permission(self, request, view):
26 """
27 Return `True` if permission is granted, `False` otherwise.
28 """
29 return type(request.user) is AnonymousUser
30
31
32 class IsOwner(permissions.BasePermission):
33 """
34 Object-level permission to allow only owners of an object to access it.
35 Assumes the model instance has an `owner` attribute.
36 """
37
38 def has_object_permission(self, request, view, obj):
39 if hasattr(obj, 'owner'):
40 return obj.owner == request.user
41 else:
42 return False
43
44
45 class IsOwnerOrAdmin(permissions.BasePermission):
46 """
47 Object-level permission to allow only owners of an object or administrators to access it.
48 Assumes the model instance has an `owner` attribute.
49 """
50 def has_object_permission(self, request, view, obj):
51 if request.user.is_superuser:
52 return True
53 if hasattr(obj, 'owner'):
54 return obj.owner == request.user
55 else:
56 return False
57
58
59 class IsAppUser(permissions.BasePermission):
60 """
61 Object-level permission to allow owners or collaborators to access
62 an app-related model.
63 """
64 def has_object_permission(self, request, view, obj):
65 return is_app_user(request, obj)
66
67
68 class IsAdmin(permissions.BasePermission):
69 """
70 View permission to allow only admins.
71 """
72
73 def has_permission(self, request, view):
74 """
75 Return `True` if permission is granted, `False` otherwise.
76 """
77 return request.user.is_superuser
78
79
80 class IsAdminOrSafeMethod(permissions.BasePermission):
81 """
82 View permission to allow only admins to use unsafe methods
83 including POST, PUT, DELETE.
84
85 This allows
86 """
87
88 def has_permission(self, request, view):
89 """
90 Return `True` if permission is granted, `False` otherwise.
91 """
92 return request.method in permissions.SAFE_METHODS or request.user.is_superuser
93
94
95 class HasRegistrationAuth(permissions.BasePermission):
96 """
97 Checks to see if registration is enabled
98 """
99 def has_permission(self, request, view):
100 """
101 If settings.REGISTRATION_MODE does not exist, such as during a test, return True
102 Return `True` if permission is granted, `False` otherwise.
103 """
104 try:
105 if settings.REGISTRATION_MODE == 'disabled':
106 return False
107 if settings.REGISTRATION_MODE == 'enabled':
108 return True
109 elif settings.REGISTRATION_MODE == 'admin_only':
110 return request.user.is_superuser
111 else:
112 raise Exception("{} is not a valid registation mode"
113 .format(settings.REGISTRATION_MODE))
114 except AttributeError:
115 return True
116
117
118 class HasBuilderAuth(permissions.BasePermission):
119 """
120 View permission to allow builder to perform actions
121 with a special HTTP header
122 """
123
124 def has_permission(self, request, view):
125 """
126 Return `True` if permission is granted, `False` otherwise.
127 """
128 auth_header = request.environ.get('HTTP_X_DEIS_BUILDER_AUTH')
129 if not auth_header:
130 return False
131 return auth_header == settings.BUILDER_KEY
132
133
134 class CanRegenerateToken(permissions.BasePermission):
135 """
136 Checks if a user can regenerate a token
137 """
138
139 def has_permission(self, request, view):
140 """
141 Return `True` if permission is granted, `False` otherwise.
142 """
143 if 'username' in request.data or 'all' in request.data:
144 return request.user.is_superuser
145 else:
146 return True
147
[end of controller/api/permissions.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/controller/api/permissions.py b/controller/api/permissions.py
--- a/controller/api/permissions.py
+++ b/controller/api/permissions.py
@@ -1,3 +1,5 @@
+
+from rest_framework import exceptions
from rest_framework import permissions
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
@@ -103,7 +105,7 @@
"""
try:
if settings.REGISTRATION_MODE == 'disabled':
- return False
+ raise exceptions.PermissionDenied('Registration is disabled')
if settings.REGISTRATION_MODE == 'enabled':
return True
elif settings.REGISTRATION_MODE == 'admin_only':
| {"golden_diff": "diff --git a/controller/api/permissions.py b/controller/api/permissions.py\n--- a/controller/api/permissions.py\n+++ b/controller/api/permissions.py\n@@ -1,3 +1,5 @@\n+\n+from rest_framework import exceptions\n from rest_framework import permissions\n from django.conf import settings\n from django.contrib.auth.models import AnonymousUser\n@@ -103,7 +105,7 @@\n \"\"\"\n try:\n if settings.REGISTRATION_MODE == 'disabled':\n- return False\n+ raise exceptions.PermissionDenied('Registration is disabled')\n if settings.REGISTRATION_MODE == 'enabled':\n return True\n elif settings.REGISTRATION_MODE == 'admin_only':\n", "issue": "Better error message when registration is disabled\nWhen `/deis/controller/registrationMode` is `disabled`, attempt to register returns\n\n```\nRegistration failed: {\"detail\":\"Authentication credentials were not provided.\"}\n```\n\nThis message is misleading. It should explicitly say that registration is disabled.\n\n", "before_files": [{"content": "from rest_framework import permissions\nfrom django.conf import settings\nfrom django.contrib.auth.models import AnonymousUser\n\nfrom api import models\n\n\ndef is_app_user(request, obj):\n if request.user.is_superuser or \\\n isinstance(obj, models.App) and obj.owner == request.user or \\\n hasattr(obj, 'app') and obj.app.owner == request.user:\n return True\n elif request.user.has_perm('use_app', obj) or \\\n hasattr(obj, 'app') and request.user.has_perm('use_app', obj.app):\n return request.method != 'DELETE'\n else:\n return False\n\n\nclass IsAnonymous(permissions.BasePermission):\n \"\"\"\n View permission to allow anonymous users.\n \"\"\"\n\n def has_permission(self, request, view):\n \"\"\"\n Return `True` if permission is granted, `False` otherwise.\n \"\"\"\n return type(request.user) is AnonymousUser\n\n\nclass IsOwner(permissions.BasePermission):\n \"\"\"\n Object-level permission to allow only owners of an object to access it.\n Assumes the model instance has an `owner` attribute.\n \"\"\"\n\n def has_object_permission(self, request, view, obj):\n if hasattr(obj, 'owner'):\n return obj.owner == request.user\n else:\n return False\n\n\nclass IsOwnerOrAdmin(permissions.BasePermission):\n \"\"\"\n Object-level permission to allow only owners of an object or administrators to access it.\n Assumes the model instance has an `owner` attribute.\n \"\"\"\n def has_object_permission(self, request, view, obj):\n if request.user.is_superuser:\n return True\n if hasattr(obj, 'owner'):\n return obj.owner == request.user\n else:\n return False\n\n\nclass IsAppUser(permissions.BasePermission):\n \"\"\"\n Object-level permission to allow owners or collaborators to access\n an app-related model.\n \"\"\"\n def has_object_permission(self, request, view, obj):\n return is_app_user(request, obj)\n\n\nclass IsAdmin(permissions.BasePermission):\n \"\"\"\n View permission to allow only admins.\n \"\"\"\n\n def has_permission(self, request, view):\n \"\"\"\n Return `True` if permission is granted, `False` otherwise.\n \"\"\"\n return request.user.is_superuser\n\n\nclass IsAdminOrSafeMethod(permissions.BasePermission):\n \"\"\"\n View permission to allow only admins to use unsafe methods\n including POST, PUT, DELETE.\n\n This allows\n \"\"\"\n\n def has_permission(self, request, view):\n \"\"\"\n Return `True` if permission is granted, `False` otherwise.\n \"\"\"\n return request.method in permissions.SAFE_METHODS or request.user.is_superuser\n\n\nclass HasRegistrationAuth(permissions.BasePermission):\n \"\"\"\n 
Checks to see if registration is enabled\n \"\"\"\n def has_permission(self, request, view):\n \"\"\"\n If settings.REGISTRATION_MODE does not exist, such as during a test, return True\n Return `True` if permission is granted, `False` otherwise.\n \"\"\"\n try:\n if settings.REGISTRATION_MODE == 'disabled':\n return False\n if settings.REGISTRATION_MODE == 'enabled':\n return True\n elif settings.REGISTRATION_MODE == 'admin_only':\n return request.user.is_superuser\n else:\n raise Exception(\"{} is not a valid registation mode\"\n .format(settings.REGISTRATION_MODE))\n except AttributeError:\n return True\n\n\nclass HasBuilderAuth(permissions.BasePermission):\n \"\"\"\n View permission to allow builder to perform actions\n with a special HTTP header\n \"\"\"\n\n def has_permission(self, request, view):\n \"\"\"\n Return `True` if permission is granted, `False` otherwise.\n \"\"\"\n auth_header = request.environ.get('HTTP_X_DEIS_BUILDER_AUTH')\n if not auth_header:\n return False\n return auth_header == settings.BUILDER_KEY\n\n\nclass CanRegenerateToken(permissions.BasePermission):\n \"\"\"\n Checks if a user can regenerate a token\n \"\"\"\n\n def has_permission(self, request, view):\n \"\"\"\n Return `True` if permission is granted, `False` otherwise.\n \"\"\"\n if 'username' in request.data or 'all' in request.data:\n return request.user.is_superuser\n else:\n return True\n", "path": "controller/api/permissions.py"}]} | 1,823 | 142 |
gh_patches_debug_37554 | rasdani/github-patches | git_diff | litestar-org__litestar-1695 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks that the given directories exist on the local filesystem.
That assumption is not generally valid, especially for any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
</issue>
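As a hedged illustration of the use case described in the issue (the class and method names below are made up, not Litestar's actual ``FileSystemProtocol``; it relies on ``importlib.resources.files``, available from Python 3.9 or via the ``importlib_resources`` backport):

```python
# Illustrative only: loading files straight from package data. The "directory"
# is a package name, not a path on disk, so a pydantic DirectoryPath existence
# check would reject it even though the files are perfectly readable.
from importlib import resources

class PackageDataFileSystem:
    def __init__(self, package: str) -> None:
        self.package = package

    def read_bytes(self, path: str) -> bytes:
        # May resolve inside a zipped wheel; there is no directory to stat.
        return resources.files(self.package).joinpath(path).read_bytes()
```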
<code>
[start of litestar/contrib/jwt/jwt_token.py]
1 from __future__ import annotations
2
3 from dataclasses import asdict, dataclass, field
4 from datetime import datetime, timezone
5 from typing import cast
6
7 from jose import JWSError, JWTError, jwt
8
9 from litestar.exceptions import ImproperlyConfiguredException, NotAuthorizedException
10
11 __all__ = ("Token",)
12
13
14 def _normalize_datetime(value: datetime) -> datetime:
15 """Convert the given value into UTC and strip microseconds.
16
17 Args:
18 value: A datetime instance
19
20 Returns:
21 A datetime instance
22 """
23 if value.tzinfo is not None:
24 value.astimezone(timezone.utc)
25
26 return value.replace(microsecond=0)
27
28
29 @dataclass
30 class Token:
31 """JWT Token DTO."""
32
33 exp: datetime
34 """Expiration - datetime for token expiration."""
35 sub: str
36 """Subject - usually a unique identifier of the user or equivalent entity."""
37 iat: datetime = field(default_factory=lambda: _normalize_datetime(datetime.now(timezone.utc)))
38 """Issued at - should always be current now."""
39 iss: str | None = field(default=None)
40 """Issuer - optional unique identifier for the issuer."""
41 aud: str | None = field(default=None)
42 """Audience - intended audience."""
43 jti: str | None = field(default=None)
44 """JWT ID - a unique identifier of the JWT between different issuers."""
45
46 def __post_init__(self) -> None:
47 if len(self.sub) < 1:
48 raise ImproperlyConfiguredException("sub must be a string with a length greater than 0")
49
50 if isinstance(self.exp, datetime) and (
51 (exp := _normalize_datetime(self.exp))
52 and exp.timestamp() >= _normalize_datetime(datetime.now(timezone.utc)).timestamp()
53 ):
54 self.exp = exp
55 else:
56 raise ImproperlyConfiguredException("exp value must be a datetime in the future")
57
58 if isinstance(self.iat, datetime) and (
59 (iat := _normalize_datetime(self.iat))
60 and iat.timestamp() <= _normalize_datetime(datetime.now(timezone.utc)).timestamp()
61 ):
62 self.iat = iat
63 else:
64 raise ImproperlyConfiguredException("iat must be a current or past time")
65
66 @staticmethod
67 def decode(encoded_token: str, secret: str | dict[str, str], algorithm: str) -> Token:
68 """Decode a passed in token string and returns a Token instance.
69
70 Args:
71 encoded_token: A base64 string containing an encoded JWT.
72 secret: The secret with which the JWT is encoded. It may optionally be an individual JWK or JWS set dict
73 algorithm: The algorithm used to encode the JWT.
74
75 Returns:
76 A decoded Token instance.
77
78 Raises:
79 NotAuthorizedException: If the token is invalid.
80 """
81 try:
82 payload = jwt.decode(token=encoded_token, key=secret, algorithms=[algorithm], options={"verify_aud": False})
83 exp = datetime.fromtimestamp(payload.pop("exp"), tz=timezone.utc)
84 iat = datetime.fromtimestamp(payload.pop("iat"), tz=timezone.utc)
85 return Token(exp=exp, iat=iat, **payload)
86 except (KeyError, JWTError, ImproperlyConfiguredException) as e:
87 raise NotAuthorizedException("Invalid token") from e
88
89 def encode(self, secret: str, algorithm: str) -> str:
90 """Encode the token instance into a string.
91
92 Args:
93 secret: The secret with which the JWT is encoded.
94 algorithm: The algorithm used to encode the JWT.
95
96 Returns:
97 An encoded token string.
98
99 Raises:
100 ImproperlyConfiguredException: If encoding fails.
101 """
102 try:
103 return cast(
104 "str",
105 jwt.encode(
106 claims={k: v for k, v in asdict(self).items() if v is not None}, key=secret, algorithm=algorithm
107 ),
108 )
109 except (JWTError, JWSError) as e:
110 raise ImproperlyConfiguredException("Failed to encode token") from e
111
[end of litestar/contrib/jwt/jwt_token.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/litestar/contrib/jwt/jwt_token.py b/litestar/contrib/jwt/jwt_token.py
--- a/litestar/contrib/jwt/jwt_token.py
+++ b/litestar/contrib/jwt/jwt_token.py
@@ -1,13 +1,18 @@
from __future__ import annotations
+import dataclasses
from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
-from typing import cast
+from typing import TYPE_CHECKING, Any, cast
from jose import JWSError, JWTError, jwt
from litestar.exceptions import ImproperlyConfiguredException, NotAuthorizedException
+if TYPE_CHECKING:
+ from typing_extensions import Self
+
+
__all__ = ("Token",)
@@ -42,6 +47,8 @@
"""Audience - intended audience."""
jti: str | None = field(default=None)
"""JWT ID - a unique identifier of the JWT between different issuers."""
+ extras: dict[str, Any] = field(default_factory=dict)
+ """Extra fields that were found on the JWT token."""
def __post_init__(self) -> None:
if len(self.sub) < 1:
@@ -63,8 +70,8 @@
else:
raise ImproperlyConfiguredException("iat must be a current or past time")
- @staticmethod
- def decode(encoded_token: str, secret: str | dict[str, str], algorithm: str) -> Token:
+ @classmethod
+ def decode(cls, encoded_token: str, secret: str | dict[str, str], algorithm: str) -> Self:
"""Decode a passed in token string and returns a Token instance.
Args:
@@ -82,7 +89,12 @@
payload = jwt.decode(token=encoded_token, key=secret, algorithms=[algorithm], options={"verify_aud": False})
exp = datetime.fromtimestamp(payload.pop("exp"), tz=timezone.utc)
iat = datetime.fromtimestamp(payload.pop("iat"), tz=timezone.utc)
- return Token(exp=exp, iat=iat, **payload)
+ field_names = {f.name for f in dataclasses.fields(Token)}
+ extra_fields = payload.keys() - field_names
+ extras = payload.pop("extras", {})
+ for key in extra_fields:
+ extras[key] = payload.pop(key)
+ return cls(exp=exp, iat=iat, **payload, extras=extras)
except (KeyError, JWTError, ImproperlyConfiguredException) as e:
raise NotAuthorizedException("Invalid token") from e
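The hunk above routes unrecognised JWT claims into the new ``extras`` field; a rough usage sketch against the patched dataclass (secret and claim values are made up) would be:

```python
# Rough usage sketch: extra claims survive an encode/decode round trip through
# Token.extras instead of breaking the dataclass constructor.
from datetime import datetime, timedelta, timezone

from litestar.contrib.jwt.jwt_token import Token

token = Token(
    exp=datetime.now(timezone.utc) + timedelta(hours=1),
    sub="user-123",
    extras={"role": "admin"},
)
encoded = token.encode(secret="not-a-real-secret", algorithm="HS256")
decoded = Token.decode(encoded_token=encoded, secret="not-a-real-secret", algorithm="HS256")
print(decoded.extras)  # {'role': 'admin'}
```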
| {"golden_diff": "diff --git a/litestar/contrib/jwt/jwt_token.py b/litestar/contrib/jwt/jwt_token.py\n--- a/litestar/contrib/jwt/jwt_token.py\n+++ b/litestar/contrib/jwt/jwt_token.py\n@@ -1,13 +1,18 @@\n from __future__ import annotations\n \n+import dataclasses\n from dataclasses import asdict, dataclass, field\n from datetime import datetime, timezone\n-from typing import cast\n+from typing import TYPE_CHECKING, Any, cast\n \n from jose import JWSError, JWTError, jwt\n \n from litestar.exceptions import ImproperlyConfiguredException, NotAuthorizedException\n \n+if TYPE_CHECKING:\n+ from typing_extensions import Self\n+\n+\n __all__ = (\"Token\",)\n \n \n@@ -42,6 +47,8 @@\n \"\"\"Audience - intended audience.\"\"\"\n jti: str | None = field(default=None)\n \"\"\"JWT ID - a unique identifier of the JWT between different issuers.\"\"\"\n+ extras: dict[str, Any] = field(default_factory=dict)\n+ \"\"\"Extra fields that were found on the JWT token.\"\"\"\n \n def __post_init__(self) -> None:\n if len(self.sub) < 1:\n@@ -63,8 +70,8 @@\n else:\n raise ImproperlyConfiguredException(\"iat must be a current or past time\")\n \n- @staticmethod\n- def decode(encoded_token: str, secret: str | dict[str, str], algorithm: str) -> Token:\n+ @classmethod\n+ def decode(cls, encoded_token: str, secret: str | dict[str, str], algorithm: str) -> Self:\n \"\"\"Decode a passed in token string and returns a Token instance.\n \n Args:\n@@ -82,7 +89,12 @@\n payload = jwt.decode(token=encoded_token, key=secret, algorithms=[algorithm], options={\"verify_aud\": False})\n exp = datetime.fromtimestamp(payload.pop(\"exp\"), tz=timezone.utc)\n iat = datetime.fromtimestamp(payload.pop(\"iat\"), tz=timezone.utc)\n- return Token(exp=exp, iat=iat, **payload)\n+ field_names = {f.name for f in dataclasses.fields(Token)}\n+ extra_fields = payload.keys() - field_names\n+ extras = payload.pop(\"extras\", {})\n+ for key in extra_fields:\n+ extras[key] = payload.pop(key)\n+ return cls(exp=exp, iat=iat, **payload, extras=extras)\n except (KeyError, JWTError, ImproperlyConfiguredException) as e:\n raise NotAuthorizedException(\"Invalid token\") from e\n", "issue": "StaticFilesConfig and virtual directories\nI'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem. \r\n\r\nThis is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). 
I think this condition should be relaxed to support virtual filesystems.\r\n\r\nhttps://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom dataclasses import asdict, dataclass, field\nfrom datetime import datetime, timezone\nfrom typing import cast\n\nfrom jose import JWSError, JWTError, jwt\n\nfrom litestar.exceptions import ImproperlyConfiguredException, NotAuthorizedException\n\n__all__ = (\"Token\",)\n\n\ndef _normalize_datetime(value: datetime) -> datetime:\n \"\"\"Convert the given value into UTC and strip microseconds.\n\n Args:\n value: A datetime instance\n\n Returns:\n A datetime instance\n \"\"\"\n if value.tzinfo is not None:\n value.astimezone(timezone.utc)\n\n return value.replace(microsecond=0)\n\n\n@dataclass\nclass Token:\n \"\"\"JWT Token DTO.\"\"\"\n\n exp: datetime\n \"\"\"Expiration - datetime for token expiration.\"\"\"\n sub: str\n \"\"\"Subject - usually a unique identifier of the user or equivalent entity.\"\"\"\n iat: datetime = field(default_factory=lambda: _normalize_datetime(datetime.now(timezone.utc)))\n \"\"\"Issued at - should always be current now.\"\"\"\n iss: str | None = field(default=None)\n \"\"\"Issuer - optional unique identifier for the issuer.\"\"\"\n aud: str | None = field(default=None)\n \"\"\"Audience - intended audience.\"\"\"\n jti: str | None = field(default=None)\n \"\"\"JWT ID - a unique identifier of the JWT between different issuers.\"\"\"\n\n def __post_init__(self) -> None:\n if len(self.sub) < 1:\n raise ImproperlyConfiguredException(\"sub must be a string with a length greater than 0\")\n\n if isinstance(self.exp, datetime) and (\n (exp := _normalize_datetime(self.exp))\n and exp.timestamp() >= _normalize_datetime(datetime.now(timezone.utc)).timestamp()\n ):\n self.exp = exp\n else:\n raise ImproperlyConfiguredException(\"exp value must be a datetime in the future\")\n\n if isinstance(self.iat, datetime) and (\n (iat := _normalize_datetime(self.iat))\n and iat.timestamp() <= _normalize_datetime(datetime.now(timezone.utc)).timestamp()\n ):\n self.iat = iat\n else:\n raise ImproperlyConfiguredException(\"iat must be a current or past time\")\n\n @staticmethod\n def decode(encoded_token: str, secret: str | dict[str, str], algorithm: str) -> Token:\n \"\"\"Decode a passed in token string and returns a Token instance.\n\n Args:\n encoded_token: A base64 string containing an encoded JWT.\n secret: The secret with which the JWT is encoded. 
It may optionally be an individual JWK or JWS set dict\n algorithm: The algorithm used to encode the JWT.\n\n Returns:\n A decoded Token instance.\n\n Raises:\n NotAuthorizedException: If the token is invalid.\n \"\"\"\n try:\n payload = jwt.decode(token=encoded_token, key=secret, algorithms=[algorithm], options={\"verify_aud\": False})\n exp = datetime.fromtimestamp(payload.pop(\"exp\"), tz=timezone.utc)\n iat = datetime.fromtimestamp(payload.pop(\"iat\"), tz=timezone.utc)\n return Token(exp=exp, iat=iat, **payload)\n except (KeyError, JWTError, ImproperlyConfiguredException) as e:\n raise NotAuthorizedException(\"Invalid token\") from e\n\n def encode(self, secret: str, algorithm: str) -> str:\n \"\"\"Encode the token instance into a string.\n\n Args:\n secret: The secret with which the JWT is encoded.\n algorithm: The algorithm used to encode the JWT.\n\n Returns:\n An encoded token string.\n\n Raises:\n ImproperlyConfiguredException: If encoding fails.\n \"\"\"\n try:\n return cast(\n \"str\",\n jwt.encode(\n claims={k: v for k, v in asdict(self).items() if v is not None}, key=secret, algorithm=algorithm\n ),\n )\n except (JWTError, JWSError) as e:\n raise ImproperlyConfiguredException(\"Failed to encode token\") from e\n", "path": "litestar/contrib/jwt/jwt_token.py"}]} | 1,814 | 576 |