problem_id          stringlengths   18 - 22 chars
source              stringclasses   1 value
task_type           stringclasses   1 value
in_source_id        stringlengths   13 - 58 chars
prompt              stringlengths   1.71k - 9.01k chars
golden_diff         stringlengths   151 - 4.94k chars
verification_info   stringlengths   465 - 11.3k chars
num_tokens_prompt   int64           557 - 2.05k
num_tokens_diff     int64           48 - 1.02k
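The rows reproduced below follow this schema, one field per column in the order listed. For orientation, here is a minimal loading sketch; it assumes the `source` value `rasdani/github-patches` doubles as the Hugging Face dataset ID and that a `train` split exists, neither of which this dump confirms.

```python
# Minimal loading sketch. Assumptions (not confirmed by the dump):
# the dataset is published on the Hugging Face Hub under the same name
# as its `source` field, and the default split is "train".
import json

from datasets import load_dataset

ds = load_dataset("rasdani/github-patches", split="train")
row = ds[0]

print(row["problem_id"], row["in_source_id"])
print(row["prompt"][:300])        # issue statement plus <code> context
print(row["golden_diff"][:300])   # reference fix as a unified diff

# verification_info is a JSON string that bundles the reference diff,
# the issue text, and the pre-patch file contents ("before_files").
info = json.loads(row["verification_info"])
print(sorted(info))
```

Judging from the records below, `verification_info` parses to at least the keys `golden_diff`, `issue`, and `before_files`.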
gh_patches_debug_22780
rasdani/github-patches
git_diff
yt-project__yt-2754
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Matplotlib 3.3.0 Breaks _png <!--To help us understand and resolve your issue, please fill out the form to the best of your ability.--> <!--You can feel free to delete the sections that do not apply.--> ### Bug report **Bug summary** Matplotlib 3.3.0 removed the internal `_png` module, which breaks https://github.com/yt-project/yt/blob/yt-3.6.0/yt/utilities/png_writer.py#L13 See the last mention in https://matplotlib.org/3.3.0/api/api_changes.html#matplotlib-now-uses-pillow-to-save-and-read-pngs **Code for reproduction** Just saw this on our CI: https://travis-ci.com/github/ECP-WarpX/WarpX/jobs/361956903 MPL 3.3.0 was release about 5hrs ago. https://github.com/matplotlib/matplotlib/releases/tag/v3.3.0 **Actual outcome** ``` File "/home/travis/.local/lib/python3.6/site-packages/yt/utilities/png_writer.py", line 13, in <module> import matplotlib._png as _png ModuleNotFoundError: No module named 'matplotlib._png' ``` **Expected outcome** :-) **Version Information** <!--Please specify your platform and versions of the relevant libraries you are using:--> * Operating System: Ubuntu 18.04 * Python Version: 3.6 * yt version: 3.6.0 Installed via `python -m pip install --upgrade cmake matplotlib mpi4py numpy scipy yt`. **Work-Around** Downgrade matplotlib via `python -m pip install --upgrade matplotlib==3.2.2`. Exact details: - https://travis-ci.com/github/ECP-WarpX/WarpX/jobs/361956903 - https://github.com/ECP-WarpX/WarpX/blob/384c6ab9a864d430868a39a065f4a1d4426231af/.travis.yml#L30-L31 </issue> <code> [start of yt/utilities/png_writer.py] 1 from io import BytesIO 2 3 import matplotlib._png as _png 4 5 6 def call_png_write_png(buffer, width, height, fileobj, dpi): 7 _png.write_png(buffer, fileobj, dpi) 8 9 10 def write_png(buffer, filename, dpi=100): 11 width = buffer.shape[1] 12 height = buffer.shape[0] 13 with open(filename, "wb") as fileobj: 14 call_png_write_png(buffer, width, height, fileobj, dpi) 15 16 17 def write_png_to_string(buffer, dpi=100, gray=0): 18 width = buffer.shape[1] 19 height = buffer.shape[0] 20 fileobj = BytesIO() 21 call_png_write_png(buffer, width, height, fileobj, dpi) 22 png_str = fileobj.getvalue() 23 fileobj.close() 24 return png_str 25 [end of yt/utilities/png_writer.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/yt/utilities/png_writer.py b/yt/utilities/png_writer.py --- a/yt/utilities/png_writer.py +++ b/yt/utilities/png_writer.py @@ -1,24 +1,29 @@ from io import BytesIO -import matplotlib._png as _png +try: + # matplotlib switched from an internal submodule _png to using pillow (PIL) + # between v3.1.0 and v3.3.0 + # So PIL should be available on any system where matplotlib._png doesn't exist + import matplotlib._png as _png +except ImportError: + from PIL import Image -def call_png_write_png(buffer, width, height, fileobj, dpi): - _png.write_png(buffer, fileobj, dpi) +def call_png_write_png(buffer, fileobj, dpi): + try: + _png.write_png(buffer, fileobj, dpi) + except NameError: + Image.fromarray(buffer).save(fileobj, dpi=(dpi, dpi)) def write_png(buffer, filename, dpi=100): - width = buffer.shape[1] - height = buffer.shape[0] with open(filename, "wb") as fileobj: - call_png_write_png(buffer, width, height, fileobj, dpi) + call_png_write_png(buffer, fileobj, dpi) -def write_png_to_string(buffer, dpi=100, gray=0): - width = buffer.shape[1] - height = buffer.shape[0] +def write_png_to_string(buffer, dpi=100): fileobj = BytesIO() - call_png_write_png(buffer, width, height, fileobj, dpi) + call_png_write_png(buffer, fileobj, dpi) png_str = fileobj.getvalue() fileobj.close() return png_str
{"golden_diff": "diff --git a/yt/utilities/png_writer.py b/yt/utilities/png_writer.py\n--- a/yt/utilities/png_writer.py\n+++ b/yt/utilities/png_writer.py\n@@ -1,24 +1,29 @@\n from io import BytesIO\n \n-import matplotlib._png as _png\n+try:\n+ # matplotlib switched from an internal submodule _png to using pillow (PIL)\n+ # between v3.1.0 and v3.3.0\n+ # So PIL should be available on any system where matplotlib._png doesn't exist\n+ import matplotlib._png as _png\n+except ImportError:\n+ from PIL import Image\n \n \n-def call_png_write_png(buffer, width, height, fileobj, dpi):\n- _png.write_png(buffer, fileobj, dpi)\n+def call_png_write_png(buffer, fileobj, dpi):\n+ try:\n+ _png.write_png(buffer, fileobj, dpi)\n+ except NameError:\n+ Image.fromarray(buffer).save(fileobj, dpi=(dpi, dpi))\n \n \n def write_png(buffer, filename, dpi=100):\n- width = buffer.shape[1]\n- height = buffer.shape[0]\n with open(filename, \"wb\") as fileobj:\n- call_png_write_png(buffer, width, height, fileobj, dpi)\n+ call_png_write_png(buffer, fileobj, dpi)\n \n \n-def write_png_to_string(buffer, dpi=100, gray=0):\n- width = buffer.shape[1]\n- height = buffer.shape[0]\n+def write_png_to_string(buffer, dpi=100):\n fileobj = BytesIO()\n- call_png_write_png(buffer, width, height, fileobj, dpi)\n+ call_png_write_png(buffer, fileobj, dpi)\n png_str = fileobj.getvalue()\n fileobj.close()\n return png_str\n", "issue": "Matplotlib 3.3.0 Breaks _png\n<!--To help us understand and resolve your issue, please fill out the form to\r\nthe best of your ability.-->\r\n<!--You can feel free to delete the sections that do not apply.-->\r\n\r\n### Bug report\r\n\r\n**Bug summary**\r\n\r\nMatplotlib 3.3.0 removed the internal `_png` module, which breaks\r\nhttps://github.com/yt-project/yt/blob/yt-3.6.0/yt/utilities/png_writer.py#L13\r\n\r\nSee the last mention in https://matplotlib.org/3.3.0/api/api_changes.html#matplotlib-now-uses-pillow-to-save-and-read-pngs\r\n\r\n**Code for reproduction**\r\n\r\nJust saw this on our CI:\r\nhttps://travis-ci.com/github/ECP-WarpX/WarpX/jobs/361956903\r\n\r\nMPL 3.3.0 was release about 5hrs ago.\r\nhttps://github.com/matplotlib/matplotlib/releases/tag/v3.3.0\r\n\r\n**Actual outcome**\r\n\r\n```\r\nFile \"/home/travis/.local/lib/python3.6/site-packages/yt/utilities/png_writer.py\", line 13, in <module>\r\n import matplotlib._png as _png\r\nModuleNotFoundError: No module named 'matplotlib._png'\r\n```\r\n\r\n**Expected outcome**\r\n\r\n:-)\r\n\r\n**Version Information**\r\n<!--Please specify your platform and versions of the relevant libraries you are\r\nusing:-->\r\n * Operating System: Ubuntu 18.04\r\n * Python Version: 3.6\r\n * yt version: 3.6.0\r\n\r\nInstalled via `python -m pip install --upgrade cmake matplotlib mpi4py numpy scipy yt`.\r\n\r\n**Work-Around**\r\n\r\nDowngrade matplotlib via `python -m pip install --upgrade matplotlib==3.2.2`.\r\n\r\nExact details:\r\n- https://travis-ci.com/github/ECP-WarpX/WarpX/jobs/361956903\r\n- https://github.com/ECP-WarpX/WarpX/blob/384c6ab9a864d430868a39a065f4a1d4426231af/.travis.yml#L30-L31\r\n\n", "before_files": [{"content": "from io import BytesIO\n\nimport matplotlib._png as _png\n\n\ndef call_png_write_png(buffer, width, height, fileobj, dpi):\n _png.write_png(buffer, fileobj, dpi)\n\n\ndef write_png(buffer, filename, dpi=100):\n width = buffer.shape[1]\n height = buffer.shape[0]\n with open(filename, \"wb\") as fileobj:\n call_png_write_png(buffer, width, height, fileobj, dpi)\n\n\ndef write_png_to_string(buffer, dpi=100, gray=0):\n width = 
buffer.shape[1]\n height = buffer.shape[0]\n fileobj = BytesIO()\n call_png_write_png(buffer, width, height, fileobj, dpi)\n png_str = fileobj.getvalue()\n fileobj.close()\n return png_str\n", "path": "yt/utilities/png_writer.py"}]}
1,245
408
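Since each record carries both the pre-patch sources and the reference diff, a record like the one above can be replayed end to end: materialize `before_files` in a scratch directory, then check that `golden_diff` applies with `git apply`. The sketch below assumes the `path` and `content` keys visible in the records' `before_files` entries; it is an illustration, not the dataset's official verifier.

```python
# Sketch: replay a record's golden_diff against its before_files.
# Assumes each before_files entry has "path" (repo-relative) and
# "content" keys, as in the records shown in this dump.
import json
import pathlib
import subprocess
import tempfile

def golden_diff_applies(verification_info: str) -> bool:
    info = json.loads(verification_info)
    with tempfile.TemporaryDirectory() as root:
        repo = pathlib.Path(root)
        # Write the pre-patch files under a throwaway root.
        for f in info["before_files"]:
            target = repo / f["path"]
            target.parent.mkdir(parents=True, exist_ok=True)
            target.write_text(f["content"])
        patch = repo / "golden.patch"
        patch.write_text(info["golden_diff"] + "\n")
        # --check only tests whether the patch would apply cleanly;
        # git apply strips the a/ and b/ path prefixes by default.
        result = subprocess.run(
            ["git", "apply", "--check", patch.name],
            cwd=repo,
            capture_output=True,
        )
        return result.returncode == 0
```

Run over the first record above, this should return True if the stored diff and `before_files` are mutually consistent.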
gh_patches_debug_25260
rasdani/github-patches
git_diff
streamlit__streamlit-4525
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> st.json collapse parameter _(Note, you don't have to fill out every section here. They're just here for guidance. That said, nicely detailed feature requests are more likely to get eng attention sooner)_ ### Problem Have a parameter for st.json(body, collapse) where the default is `False` but you can set it to `True`. This would allow developers to choose if the json file is expanded or collapsed when rendered on the Streamlit app. Requested by a community member, link to forum post: https://discuss.streamlit.io/t/json-collapse-option/17159 ### Solution **MVP:** a parameter to set the view of a json file on the first render in Streamlit </issue> <code> [start of e2e/scripts/st_json.py] 1 # Copyright 2018-2022 Streamlit Inc. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 import streamlit as st 16 17 data = {"foo": "bar"} 18 st.json(data) 19 [end of e2e/scripts/st_json.py] [start of lib/streamlit/elements/json.py] 1 # Copyright 2018-2022 Streamlit Inc. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 import json 16 from typing import cast 17 18 import streamlit 19 from streamlit.proto.Json_pb2 import Json as JsonProto 20 from streamlit.state import AutoSessionState 21 22 23 class JsonMixin: 24 def json(self, body): 25 """Display object or string as a pretty-printed JSON string. 26 27 Parameters 28 ---------- 29 body : Object or str 30 The object to print as JSON. All referenced objects should be 31 serializable to JSON as well. If object is a string, we assume it 32 contains serialized JSON. 33 34 Example 35 ------- 36 >>> st.json({ 37 ... 'foo': 'bar', 38 ... 'baz': 'boz', 39 ... 'stuff': [ 40 ... 'stuff 1', 41 ... 'stuff 2', 42 ... 'stuff 3', 43 ... 'stuff 5', 44 ... ], 45 ... }) 46 47 .. output:: 48 https://share.streamlit.io/streamlit/docs/main/python/api-examples-source/data.json.py 49 height: 385px 50 51 """ 52 import streamlit as st 53 54 if isinstance(body, AutoSessionState): 55 body = body.to_dict() 56 57 if not isinstance(body, str): 58 try: 59 body = json.dumps(body, default=repr) 60 except TypeError as err: 61 st.warning( 62 "Warning: this data structure was not fully serializable as " 63 "JSON due to one or more unexpected keys. 
(Error was: %s)" % err 64 ) 65 body = json.dumps(body, skipkeys=True, default=repr) 66 67 json_proto = JsonProto() 68 json_proto.body = body 69 return self.dg._enqueue("json", json_proto) 70 71 @property 72 def dg(self) -> "streamlit.delta_generator.DeltaGenerator": 73 """Get our DeltaGenerator.""" 74 return cast("streamlit.delta_generator.DeltaGenerator", self) 75 [end of lib/streamlit/elements/json.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/e2e/scripts/st_json.py b/e2e/scripts/st_json.py --- a/e2e/scripts/st_json.py +++ b/e2e/scripts/st_json.py @@ -16,3 +16,4 @@ data = {"foo": "bar"} st.json(data) +st.json(data, expanded=False) diff --git a/lib/streamlit/elements/json.py b/lib/streamlit/elements/json.py --- a/lib/streamlit/elements/json.py +++ b/lib/streamlit/elements/json.py @@ -21,7 +21,12 @@ class JsonMixin: - def json(self, body): + def json( + self, + body, + *, # keyword-only arguments: + expanded=True, + ): """Display object or string as a pretty-printed JSON string. Parameters @@ -31,6 +36,11 @@ serializable to JSON as well. If object is a string, we assume it contains serialized JSON. + expanded : bool + An optional boolean that allows the user to set whether the initial + state of this json element should be expanded. Defaults to True. + This argument can only be supplied by keyword. + Example ------- >>> st.json({ @@ -66,6 +76,7 @@ json_proto = JsonProto() json_proto.body = body + json_proto.expanded = expanded return self.dg._enqueue("json", json_proto) @property
{"golden_diff": "diff --git a/e2e/scripts/st_json.py b/e2e/scripts/st_json.py\n--- a/e2e/scripts/st_json.py\n+++ b/e2e/scripts/st_json.py\n@@ -16,3 +16,4 @@\n \n data = {\"foo\": \"bar\"}\n st.json(data)\n+st.json(data, expanded=False)\ndiff --git a/lib/streamlit/elements/json.py b/lib/streamlit/elements/json.py\n--- a/lib/streamlit/elements/json.py\n+++ b/lib/streamlit/elements/json.py\n@@ -21,7 +21,12 @@\n \n \n class JsonMixin:\n- def json(self, body):\n+ def json(\n+ self,\n+ body,\n+ *, # keyword-only arguments:\n+ expanded=True,\n+ ):\n \"\"\"Display object or string as a pretty-printed JSON string.\n \n Parameters\n@@ -31,6 +36,11 @@\n serializable to JSON as well. If object is a string, we assume it\n contains serialized JSON.\n \n+ expanded : bool\n+ An optional boolean that allows the user to set whether the initial\n+ state of this json element should be expanded. Defaults to True.\n+ This argument can only be supplied by keyword.\n+\n Example\n -------\n >>> st.json({\n@@ -66,6 +76,7 @@\n \n json_proto = JsonProto()\n json_proto.body = body\n+ json_proto.expanded = expanded\n return self.dg._enqueue(\"json\", json_proto)\n \n @property\n", "issue": "st.json collapse parameter\n_(Note, you don't have to fill out every section here. They're just here for guidance. That said, nicely detailed feature requests are more likely to get eng attention sooner)_\r\n\r\n### Problem\r\n\r\nHave a parameter for st.json(body, collapse) where the default is `False` but you can set it to `True`. This would allow developers to choose if the json file is expanded or collapsed when rendered on the Streamlit app.\r\n\r\nRequested by a community member, link to forum post: \r\nhttps://discuss.streamlit.io/t/json-collapse-option/17159\r\n\r\n### Solution\r\n\r\n**MVP:** a parameter to set the view of a json file on the first render in Streamlit\r\n\r\n\n", "before_files": [{"content": "# Copyright 2018-2022 Streamlit Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport streamlit as st\n\ndata = {\"foo\": \"bar\"}\nst.json(data)\n", "path": "e2e/scripts/st_json.py"}, {"content": "# Copyright 2018-2022 Streamlit Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport json\nfrom typing import cast\n\nimport streamlit\nfrom streamlit.proto.Json_pb2 import Json as JsonProto\nfrom streamlit.state import AutoSessionState\n\n\nclass JsonMixin:\n def json(self, body):\n \"\"\"Display object or string as a pretty-printed JSON string.\n\n Parameters\n ----------\n body : Object or str\n The 
object to print as JSON. All referenced objects should be\n serializable to JSON as well. If object is a string, we assume it\n contains serialized JSON.\n\n Example\n -------\n >>> st.json({\n ... 'foo': 'bar',\n ... 'baz': 'boz',\n ... 'stuff': [\n ... 'stuff 1',\n ... 'stuff 2',\n ... 'stuff 3',\n ... 'stuff 5',\n ... ],\n ... })\n\n .. output::\n https://share.streamlit.io/streamlit/docs/main/python/api-examples-source/data.json.py\n height: 385px\n\n \"\"\"\n import streamlit as st\n\n if isinstance(body, AutoSessionState):\n body = body.to_dict()\n\n if not isinstance(body, str):\n try:\n body = json.dumps(body, default=repr)\n except TypeError as err:\n st.warning(\n \"Warning: this data structure was not fully serializable as \"\n \"JSON due to one or more unexpected keys. (Error was: %s)\" % err\n )\n body = json.dumps(body, skipkeys=True, default=repr)\n\n json_proto = JsonProto()\n json_proto.body = body\n return self.dg._enqueue(\"json\", json_proto)\n\n @property\n def dg(self) -> \"streamlit.delta_generator.DeltaGenerator\":\n \"\"\"Get our DeltaGenerator.\"\"\"\n return cast(\"streamlit.delta_generator.DeltaGenerator\", self)\n", "path": "lib/streamlit/elements/json.py"}]}
1,579
337
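The two trailing integers of each record (`num_tokens_prompt` and `num_tokens_diff`, e.g. 1,579 and 337 above) are token counts, but the dump does not say which tokenizer produced them. As a hedged stand-in, the counts can be recomputed with an off-the-shelf encoding; expect the numbers to match the stored fields only if the original tokenizer happens to be the same.

```python
# Recomputing token counts with tiktoken's cl100k_base as a stand-in.
# Which tokenizer produced num_tokens_prompt / num_tokens_diff is an
# open assumption; cl100k_base is only a common default.
import tiktoken

enc = tiktoken.get_encoding("cl100k_base")

def count_tokens(text: str) -> int:
    return len(enc.encode(text))

# Compare against the stored fields for one row:
#   count_tokens(row["prompt"])      vs. row["num_tokens_prompt"]
#   count_tokens(row["golden_diff"]) vs. row["num_tokens_diff"]
```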
gh_patches_debug_21934
rasdani/github-patches
git_diff
Project-MONAI__MONAI-2254
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> ASPP type hints need to be updated In ASPP (https://github.com/Project-MONAI/MONAI/blob/dev/monai/networks/blocks/aspp.py), the type hints of `acti_type` and `norm_type` are missing. </issue> <code> [start of monai/networks/blocks/aspp.py] 1 # Copyright 2020 - 2021 MONAI Consortium 2 # Licensed under the Apache License, Version 2.0 (the "License"); 3 # you may not use this file except in compliance with the License. 4 # You may obtain a copy of the License at 5 # http://www.apache.org/licenses/LICENSE-2.0 6 # Unless required by applicable law or agreed to in writing, software 7 # distributed under the License is distributed on an "AS IS" BASIS, 8 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 9 # See the License for the specific language governing permissions and 10 # limitations under the License. 11 12 from typing import Sequence 13 14 import torch 15 import torch.nn as nn 16 17 from monai.networks.blocks.convolutions import Convolution 18 from monai.networks.layers import same_padding 19 from monai.networks.layers.factories import Act, Conv, Norm 20 21 22 class SimpleASPP(nn.Module): 23 """ 24 A simplified version of the atrous spatial pyramid pooling (ASPP) module. 25 26 Chen et al., Encoder-Decoder with Atrous Separable Convolution for Semantic Image Segmentation. 27 https://arxiv.org/abs/1802.02611 28 29 Wang et al., A Noise-robust Framework for Automatic Segmentation of COVID-19 Pneumonia Lesions 30 from CT Images. https://ieeexplore.ieee.org/document/9109297 31 """ 32 33 def __init__( 34 self, 35 spatial_dims: int, 36 in_channels: int, 37 conv_out_channels: int, 38 kernel_sizes: Sequence[int] = (1, 3, 3, 3), 39 dilations: Sequence[int] = (1, 2, 4, 6), 40 norm_type=Norm.BATCH, 41 acti_type=Act.LEAKYRELU, 42 ) -> None: 43 """ 44 Args: 45 spatial_dims: number of spatial dimensions, could be 1, 2, or 3. 46 in_channels: number of input channels. 47 conv_out_channels: number of output channels of each atrous conv. 48 The final number of output channels is conv_out_channels * len(kernel_sizes). 49 kernel_sizes: a sequence of four convolutional kernel sizes. 50 Defaults to (1, 3, 3, 3) for four (dilated) convolutions. 51 dilations: a sequence of four convolutional dilation parameters. 52 Defaults to (1, 2, 4, 6) for four (dilated) convolutions. 53 norm_type: final kernel-size-one convolution normalization type. 54 Defaults to batch norm. 55 acti_type: final kernel-size-one convolution activation type. 56 Defaults to leaky ReLU. 57 58 Raises: 59 ValueError: When ``kernel_sizes`` length differs from ``dilations``. 60 61 See also: 62 63 :py:class:`monai.networks.layers.Act` 64 :py:class:`monai.networks.layers.Conv` 65 :py:class:`monai.networks.layers.Norm` 66 67 """ 68 super().__init__() 69 if len(kernel_sizes) != len(dilations): 70 raise ValueError( 71 "kernel_sizes and dilations length must match, " 72 f"got kernel_sizes={len(kernel_sizes)} dilations={len(dilations)}." 73 ) 74 pads = tuple(same_padding(k, d) for k, d in zip(kernel_sizes, dilations)) 75 76 self.convs = nn.ModuleList() 77 for k, d, p in zip(kernel_sizes, dilations, pads): 78 _conv = Conv[Conv.CONV, spatial_dims]( 79 in_channels=in_channels, out_channels=conv_out_channels, kernel_size=k, dilation=d, padding=p 80 ) 81 self.convs.append(_conv) 82 83 out_channels = conv_out_channels * len(pads) # final conv. 
output channels 84 self.conv_k1 = Convolution( 85 dimensions=spatial_dims, 86 in_channels=out_channels, 87 out_channels=out_channels, 88 kernel_size=1, 89 act=acti_type, 90 norm=norm_type, 91 ) 92 93 def forward(self, x: torch.Tensor) -> torch.Tensor: 94 """ 95 Args: 96 x: in shape (batch, channel, spatial_1[, spatial_2, ...]). 97 """ 98 x_out = torch.cat([conv(x) for conv in self.convs], dim=1) 99 x_out = self.conv_k1(x_out) 100 return x_out 101 [end of monai/networks/blocks/aspp.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/monai/networks/blocks/aspp.py b/monai/networks/blocks/aspp.py --- a/monai/networks/blocks/aspp.py +++ b/monai/networks/blocks/aspp.py @@ -9,14 +9,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Sequence +from typing import Optional, Sequence, Tuple, Union import torch import torch.nn as nn from monai.networks.blocks.convolutions import Convolution from monai.networks.layers import same_padding -from monai.networks.layers.factories import Act, Conv, Norm +from monai.networks.layers.factories import Conv class SimpleASPP(nn.Module): @@ -37,8 +37,8 @@ conv_out_channels: int, kernel_sizes: Sequence[int] = (1, 3, 3, 3), dilations: Sequence[int] = (1, 2, 4, 6), - norm_type=Norm.BATCH, - acti_type=Act.LEAKYRELU, + norm_type: Optional[Union[Tuple, str]] = "BATCH", + acti_type: Optional[Union[Tuple, str]] = "LEAKYRELU", ) -> None: """ Args:
{"golden_diff": "diff --git a/monai/networks/blocks/aspp.py b/monai/networks/blocks/aspp.py\n--- a/monai/networks/blocks/aspp.py\n+++ b/monai/networks/blocks/aspp.py\n@@ -9,14 +9,14 @@\n # See the License for the specific language governing permissions and\n # limitations under the License.\n \n-from typing import Sequence\n+from typing import Optional, Sequence, Tuple, Union\n \n import torch\n import torch.nn as nn\n \n from monai.networks.blocks.convolutions import Convolution\n from monai.networks.layers import same_padding\n-from monai.networks.layers.factories import Act, Conv, Norm\n+from monai.networks.layers.factories import Conv\n \n \n class SimpleASPP(nn.Module):\n@@ -37,8 +37,8 @@\n conv_out_channels: int,\n kernel_sizes: Sequence[int] = (1, 3, 3, 3),\n dilations: Sequence[int] = (1, 2, 4, 6),\n- norm_type=Norm.BATCH,\n- acti_type=Act.LEAKYRELU,\n+ norm_type: Optional[Union[Tuple, str]] = \"BATCH\",\n+ acti_type: Optional[Union[Tuple, str]] = \"LEAKYRELU\",\n ) -> None:\n \"\"\"\n Args:\n", "issue": "ASPP type hints need to be updated\nIn ASPP (https://github.com/Project-MONAI/MONAI/blob/dev/monai/networks/blocks/aspp.py), the type hints of `acti_type` and `norm_type` are missing.\n", "before_files": [{"content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Sequence\n\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.blocks.convolutions import Convolution\nfrom monai.networks.layers import same_padding\nfrom monai.networks.layers.factories import Act, Conv, Norm\n\n\nclass SimpleASPP(nn.Module):\n \"\"\"\n A simplified version of the atrous spatial pyramid pooling (ASPP) module.\n\n Chen et al., Encoder-Decoder with Atrous Separable Convolution for Semantic Image Segmentation.\n https://arxiv.org/abs/1802.02611\n\n Wang et al., A Noise-robust Framework for Automatic Segmentation of COVID-19 Pneumonia Lesions\n from CT Images. 
https://ieeexplore.ieee.org/document/9109297\n \"\"\"\n\n def __init__(\n self,\n spatial_dims: int,\n in_channels: int,\n conv_out_channels: int,\n kernel_sizes: Sequence[int] = (1, 3, 3, 3),\n dilations: Sequence[int] = (1, 2, 4, 6),\n norm_type=Norm.BATCH,\n acti_type=Act.LEAKYRELU,\n ) -> None:\n \"\"\"\n Args:\n spatial_dims: number of spatial dimensions, could be 1, 2, or 3.\n in_channels: number of input channels.\n conv_out_channels: number of output channels of each atrous conv.\n The final number of output channels is conv_out_channels * len(kernel_sizes).\n kernel_sizes: a sequence of four convolutional kernel sizes.\n Defaults to (1, 3, 3, 3) for four (dilated) convolutions.\n dilations: a sequence of four convolutional dilation parameters.\n Defaults to (1, 2, 4, 6) for four (dilated) convolutions.\n norm_type: final kernel-size-one convolution normalization type.\n Defaults to batch norm.\n acti_type: final kernel-size-one convolution activation type.\n Defaults to leaky ReLU.\n\n Raises:\n ValueError: When ``kernel_sizes`` length differs from ``dilations``.\n\n See also:\n\n :py:class:`monai.networks.layers.Act`\n :py:class:`monai.networks.layers.Conv`\n :py:class:`monai.networks.layers.Norm`\n\n \"\"\"\n super().__init__()\n if len(kernel_sizes) != len(dilations):\n raise ValueError(\n \"kernel_sizes and dilations length must match, \"\n f\"got kernel_sizes={len(kernel_sizes)} dilations={len(dilations)}.\"\n )\n pads = tuple(same_padding(k, d) for k, d in zip(kernel_sizes, dilations))\n\n self.convs = nn.ModuleList()\n for k, d, p in zip(kernel_sizes, dilations, pads):\n _conv = Conv[Conv.CONV, spatial_dims](\n in_channels=in_channels, out_channels=conv_out_channels, kernel_size=k, dilation=d, padding=p\n )\n self.convs.append(_conv)\n\n out_channels = conv_out_channels * len(pads) # final conv. output channels\n self.conv_k1 = Convolution(\n dimensions=spatial_dims,\n in_channels=out_channels,\n out_channels=out_channels,\n kernel_size=1,\n act=acti_type,\n norm=norm_type,\n )\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n x: in shape (batch, channel, spatial_1[, spatial_2, ...]).\n \"\"\"\n x_out = torch.cat([conv(x) for conv in self.convs], dim=1)\n x_out = self.conv_k1(x_out)\n return x_out\n", "path": "monai/networks/blocks/aspp.py"}]}
1,747
299
gh_patches_debug_25650
rasdani/github-patches
git_diff
astronomer__astro-sdk-1374
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> All connections tested even though one's been specified **Describe the bug** I ran `astro flow validate --connection=<connection_id>` and all connections were tested even though I passed one conn id specifically. **Version** * Astro Runtime: 7.0.0 * Astro CLI: 1.8.3 **To Reproduce** Steps to reproduce the behavior: 1. Update file `config/default/configuration.yml` as shown below: ``` connections: - conn_id: sqlite_conn conn_type: sqlite host: /Users/magdagultekin/magda-dev/data/imdb.db login: null password: null schema: null - conn_id: sqlite_default conn_type: sqlite host: /tmp/sqlite.db login: null password: null schema: null ``` 4. Run `astro flow validate --connection=sqlite_default` 5. See message: ``` Validating connection(s) for environment 'default' Validating connection sqlite_conn PASSED Validating connection sqlite_default FAILED ``` **Expected behavior** Only `sqlite_default` should be tested. **Screenshots** ![image](https://user-images.githubusercontent.com/97064715/205905971-7c71dc2d-36c8-44ac-9857-9e98180c5ef4.png) </issue> <code> [start of sql-cli/sql_cli/connections.py] 1 from __future__ import annotations 2 3 import os 4 from pathlib import Path 5 6 from airflow.models import Connection 7 8 from sql_cli.utils.rich import rprint 9 10 CONNECTION_ID_OUTPUT_STRING_WIDTH = 25 11 12 13 def validate_connections(connections: list[Connection], connection_id: str | None = None) -> None: 14 """ 15 Validates that the given connections are valid and registers them to Airflow with replace policy for existing 16 connections. 17 """ 18 for connection in connections: 19 os.environ[f"AIRFLOW_CONN_{connection.conn_id.upper()}"] = connection.get_uri() 20 status = "[bold green]PASSED[/bold green]" if _is_valid(connection) else "[bold red]FAILED[/bold red]" 21 rprint(f"Validating connection {connection.conn_id:{CONNECTION_ID_OUTPUT_STRING_WIDTH}}", status) 22 23 if connection_id and not any(connection.conn_id == connection_id for connection in connections): 24 rprint("[bold red]Error: Config file does not contain given connection[/bold red]", connection_id) 25 26 27 def _is_valid(connection: Connection) -> bool: 28 # Sqlite automatically creates the file if it does not exist, 29 # but our users might not expect that. They are referencing a database they expect to exist. 30 if connection.conn_type == "sqlite" and not Path(connection.host).is_file(): 31 return False 32 33 success_status, _ = connection.test_connection() 34 return success_status 35 [end of sql-cli/sql_cli/connections.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/sql-cli/sql_cli/connections.py b/sql-cli/sql_cli/connections.py --- a/sql-cli/sql_cli/connections.py +++ b/sql-cli/sql_cli/connections.py @@ -15,14 +15,17 @@ Validates that the given connections are valid and registers them to Airflow with replace policy for existing connections. """ - for connection in connections: - os.environ[f"AIRFLOW_CONN_{connection.conn_id.upper()}"] = connection.get_uri() - status = "[bold green]PASSED[/bold green]" if _is_valid(connection) else "[bold red]FAILED[/bold red]" - rprint(f"Validating connection {connection.conn_id:{CONNECTION_ID_OUTPUT_STRING_WIDTH}}", status) - if connection_id and not any(connection.conn_id == connection_id for connection in connections): rprint("[bold red]Error: Config file does not contain given connection[/bold red]", connection_id) + for connection in connections: + if not connection_id or connection_id and connection.conn_id == connection_id: + os.environ[f"AIRFLOW_CONN_{connection.conn_id.upper()}"] = connection.get_uri() + status = ( + "[bold green]PASSED[/bold green]" if _is_valid(connection) else "[bold red]FAILED[/bold red]" + ) + rprint(f"Validating connection {connection.conn_id:{CONNECTION_ID_OUTPUT_STRING_WIDTH}}", status) + def _is_valid(connection: Connection) -> bool: # Sqlite automatically creates the file if it does not exist,
{"golden_diff": "diff --git a/sql-cli/sql_cli/connections.py b/sql-cli/sql_cli/connections.py\n--- a/sql-cli/sql_cli/connections.py\n+++ b/sql-cli/sql_cli/connections.py\n@@ -15,14 +15,17 @@\n Validates that the given connections are valid and registers them to Airflow with replace policy for existing\n connections.\n \"\"\"\n- for connection in connections:\n- os.environ[f\"AIRFLOW_CONN_{connection.conn_id.upper()}\"] = connection.get_uri()\n- status = \"[bold green]PASSED[/bold green]\" if _is_valid(connection) else \"[bold red]FAILED[/bold red]\"\n- rprint(f\"Validating connection {connection.conn_id:{CONNECTION_ID_OUTPUT_STRING_WIDTH}}\", status)\n-\n if connection_id and not any(connection.conn_id == connection_id for connection in connections):\n rprint(\"[bold red]Error: Config file does not contain given connection[/bold red]\", connection_id)\n \n+ for connection in connections:\n+ if not connection_id or connection_id and connection.conn_id == connection_id:\n+ os.environ[f\"AIRFLOW_CONN_{connection.conn_id.upper()}\"] = connection.get_uri()\n+ status = (\n+ \"[bold green]PASSED[/bold green]\" if _is_valid(connection) else \"[bold red]FAILED[/bold red]\"\n+ )\n+ rprint(f\"Validating connection {connection.conn_id:{CONNECTION_ID_OUTPUT_STRING_WIDTH}}\", status)\n+\n \n def _is_valid(connection: Connection) -> bool:\n # Sqlite automatically creates the file if it does not exist,\n", "issue": "All connections tested even though one's been specified\n**Describe the bug**\r\nI ran `astro flow validate --connection=<connection_id>` and all connections were tested even though I passed one conn id specifically. \r\n\r\n**Version**\r\n* Astro Runtime: 7.0.0\r\n* Astro CLI: 1.8.3\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Update file `config/default/configuration.yml` as shown below:\r\n```\r\nconnections:\r\n - conn_id: sqlite_conn\r\n conn_type: sqlite\r\n host: /Users/magdagultekin/magda-dev/data/imdb.db\r\n login: null\r\n password: null\r\n schema: null\r\n - conn_id: sqlite_default\r\n conn_type: sqlite\r\n host: /tmp/sqlite.db\r\n login: null\r\n password: null\r\n schema: null\r\n```\r\n4. Run `astro flow validate --connection=sqlite_default`\r\n5. 
See message:\r\n```\r\nValidating connection(s) for environment 'default'\r\nValidating connection sqlite_conn PASSED\r\nValidating connection sqlite_default FAILED\r\n```\r\n\r\n**Expected behavior**\r\nOnly `sqlite_default` should be tested.\r\n\r\n**Screenshots**\r\n![image](https://user-images.githubusercontent.com/97064715/205905971-7c71dc2d-36c8-44ac-9857-9e98180c5ef4.png)\r\n\r\n\n", "before_files": [{"content": "from __future__ import annotations\n\nimport os\nfrom pathlib import Path\n\nfrom airflow.models import Connection\n\nfrom sql_cli.utils.rich import rprint\n\nCONNECTION_ID_OUTPUT_STRING_WIDTH = 25\n\n\ndef validate_connections(connections: list[Connection], connection_id: str | None = None) -> None:\n \"\"\"\n Validates that the given connections are valid and registers them to Airflow with replace policy for existing\n connections.\n \"\"\"\n for connection in connections:\n os.environ[f\"AIRFLOW_CONN_{connection.conn_id.upper()}\"] = connection.get_uri()\n status = \"[bold green]PASSED[/bold green]\" if _is_valid(connection) else \"[bold red]FAILED[/bold red]\"\n rprint(f\"Validating connection {connection.conn_id:{CONNECTION_ID_OUTPUT_STRING_WIDTH}}\", status)\n\n if connection_id and not any(connection.conn_id == connection_id for connection in connections):\n rprint(\"[bold red]Error: Config file does not contain given connection[/bold red]\", connection_id)\n\n\ndef _is_valid(connection: Connection) -> bool:\n # Sqlite automatically creates the file if it does not exist,\n # but our users might not expect that. They are referencing a database they expect to exist.\n if connection.conn_type == \"sqlite\" and not Path(connection.host).is_file():\n return False\n\n success_status, _ = connection.test_connection()\n return success_status\n", "path": "sql-cli/sql_cli/connections.py"}]}
1,220
339
gh_patches_debug_23145
rasdani/github-patches
git_diff
alltheplaces__alltheplaces-881
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Jewel-Osco https://local.jewelosco.com/index.html Looks like it can probably just be added as a start url in the albertsons.py spider. </issue> <code> [start of locations/spiders/albertsons.py] 1 import scrapy 2 import re 3 import json 4 from locations.items import GeojsonPointItem 5 6 DAY_MAPPING = { 7 'M': 'Mo', 8 'T': 'Tu', 9 'W': 'We', 10 'F': 'Fr', 11 'Sat': 'Sa', 12 'Sun': 'Su' 13 } 14 15 16 class AlbertsonsSpider(scrapy.Spider): 17 18 name = "albertsons" 19 allowed_domains = ["local.albertsons.com"] 20 download_delay = 0.5 21 start_urls = ( 22 'https://local.albertsons.com/index.html', 23 ) 24 25 def parse_stores(self, response): 26 ref = re.findall(r"[^(\/)]+.html$" ,response.url) 27 map_data = response.xpath('normalize-space(//script[@id="js-map-config-dir-map-desktop"]/text())').extract_first() 28 map_json= json.loads(map_data) 29 if(len(ref)>0): 30 ref = ref[0].split('.')[0] 31 properties = { 32 'addr_full': response.xpath('normalize-space(//span[@itemprop="streetAddress"]/span/text())').extract_first(), 33 'phone': response.xpath('normalize-space(//span[@itemprop="telephone"]/text())').extract_first(), 34 'city': response.xpath('normalize-space(//span[@itemprop="addressLocality"]/text())').extract_first(), 35 'state': response.xpath('normalize-space(//abbr[@itemprop="addressRegion"]/text())').extract_first(), 36 'postcode': response.xpath('normalize-space(//span[@itemprop="postalCode"]/text())').extract_first(), 37 'ref': ref, 38 'website': response.url, 39 'lat': float(map_json['locs'][0]['latitude']), 40 'lon': float(map_json['locs'][0]['longitude']), 41 } 42 hours = response.xpath('//div[@class="LocationInfo-right"]/div[1]/div[@class="LocationInfo-hoursTable"]/div[@class="c-location-hours-details-wrapper js-location-hours"]/table/tbody/tr/@content').extract() 43 if hours: 44 properties['opening_hours'] = " ;".join(hours) 45 yield GeojsonPointItem(**properties) 46 47 def parse_city_stores(self ,response): 48 stores = response.xpath('//div[@class="Teaser-content"]/h2/a/@href').extract() 49 for store in stores: 50 yield scrapy.Request(response.urljoin(store), callback=self.parse_stores) 51 52 def parse_state(self, response): 53 urls = response.xpath('//div[@class="c-directory-list-content-wrapper"]/ul/li/a/@href').extract() 54 for path in urls: 55 pattern = re.compile("^[a-z]{2}\/[^()]+\/[^()]+.html$") 56 if (pattern.match(path.strip())): 57 yield scrapy.Request(response.urljoin(path), callback=self.parse_stores) 58 else: 59 yield scrapy.Request(response.urljoin(path), callback=self.parse_city_stores) 60 61 def parse(self, response): 62 urls = response.xpath('//div[@class="c-directory-list-content-wrapper"]/ul/li/a/@href').extract() 63 for path in urls: 64 pattern = re.compile("^[a-z]{2}.html$") 65 pattern1 = re.compile("^[a-z]{2}\/[^()]+\/[^()]+.html$") 66 if(pattern.match(path.strip())): 67 yield scrapy.Request(response.urljoin(path), callback=self.parse_state) 68 elif(pattern1.match(path.strip())): 69 yield scrapy.Request(response.urljoin(path), callback=self.parse_stores) 70 else: 71 yield scrapy.Request(response.urljoin(path), callback=self.parse_city_stores) 72 [end of locations/spiders/albertsons.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/locations/spiders/albertsons.py b/locations/spiders/albertsons.py --- a/locations/spiders/albertsons.py +++ b/locations/spiders/albertsons.py @@ -16,10 +16,14 @@ class AlbertsonsSpider(scrapy.Spider): name = "albertsons" - allowed_domains = ["local.albertsons.com"] download_delay = 0.5 + allowed_domains = [ + "local.albertsons.com", + "local.jewelosco.com", + ] start_urls = ( 'https://local.albertsons.com/index.html', + 'https://local.jewelosco.com/index.html', ) def parse_stores(self, response): @@ -41,7 +45,7 @@ } hours = response.xpath('//div[@class="LocationInfo-right"]/div[1]/div[@class="LocationInfo-hoursTable"]/div[@class="c-location-hours-details-wrapper js-location-hours"]/table/tbody/tr/@content').extract() if hours: - properties['opening_hours'] = " ;".join(hours) + properties['opening_hours'] = "; ".join(hours) yield GeojsonPointItem(**properties) def parse_city_stores(self ,response):
{"golden_diff": "diff --git a/locations/spiders/albertsons.py b/locations/spiders/albertsons.py\n--- a/locations/spiders/albertsons.py\n+++ b/locations/spiders/albertsons.py\n@@ -16,10 +16,14 @@\n class AlbertsonsSpider(scrapy.Spider):\n \n name = \"albertsons\"\n- allowed_domains = [\"local.albertsons.com\"]\n download_delay = 0.5\n+ allowed_domains = [\n+ \"local.albertsons.com\",\n+ \"local.jewelosco.com\",\n+ ]\n start_urls = (\n 'https://local.albertsons.com/index.html',\n+ 'https://local.jewelosco.com/index.html',\n )\n \n def parse_stores(self, response):\n@@ -41,7 +45,7 @@\n }\n hours = response.xpath('//div[@class=\"LocationInfo-right\"]/div[1]/div[@class=\"LocationInfo-hoursTable\"]/div[@class=\"c-location-hours-details-wrapper js-location-hours\"]/table/tbody/tr/@content').extract()\n if hours:\n- properties['opening_hours'] = \" ;\".join(hours)\n+ properties['opening_hours'] = \"; \".join(hours)\n yield GeojsonPointItem(**properties)\n \n def parse_city_stores(self ,response):\n", "issue": "Jewel-Osco\nhttps://local.jewelosco.com/index.html\r\n\r\nLooks like it can probably just be added as a start url in the albertsons.py spider.\n", "before_files": [{"content": "import scrapy\nimport re\nimport json\nfrom locations.items import GeojsonPointItem\n\nDAY_MAPPING = {\n 'M': 'Mo',\n 'T': 'Tu',\n 'W': 'We',\n 'F': 'Fr',\n 'Sat': 'Sa',\n 'Sun': 'Su'\n}\n\n\nclass AlbertsonsSpider(scrapy.Spider):\n\n name = \"albertsons\"\n allowed_domains = [\"local.albertsons.com\"]\n download_delay = 0.5\n start_urls = (\n 'https://local.albertsons.com/index.html',\n )\n\n def parse_stores(self, response):\n ref = re.findall(r\"[^(\\/)]+.html$\" ,response.url)\n map_data = response.xpath('normalize-space(//script[@id=\"js-map-config-dir-map-desktop\"]/text())').extract_first()\n map_json= json.loads(map_data)\n if(len(ref)>0):\n ref = ref[0].split('.')[0]\n properties = {\n 'addr_full': response.xpath('normalize-space(//span[@itemprop=\"streetAddress\"]/span/text())').extract_first(),\n 'phone': response.xpath('normalize-space(//span[@itemprop=\"telephone\"]/text())').extract_first(),\n 'city': response.xpath('normalize-space(//span[@itemprop=\"addressLocality\"]/text())').extract_first(),\n 'state': response.xpath('normalize-space(//abbr[@itemprop=\"addressRegion\"]/text())').extract_first(),\n 'postcode': response.xpath('normalize-space(//span[@itemprop=\"postalCode\"]/text())').extract_first(),\n 'ref': ref,\n 'website': response.url,\n 'lat': float(map_json['locs'][0]['latitude']),\n 'lon': float(map_json['locs'][0]['longitude']),\n }\n hours = response.xpath('//div[@class=\"LocationInfo-right\"]/div[1]/div[@class=\"LocationInfo-hoursTable\"]/div[@class=\"c-location-hours-details-wrapper js-location-hours\"]/table/tbody/tr/@content').extract()\n if hours:\n properties['opening_hours'] = \" ;\".join(hours)\n yield GeojsonPointItem(**properties)\n\n def parse_city_stores(self ,response):\n stores = response.xpath('//div[@class=\"Teaser-content\"]/h2/a/@href').extract()\n for store in stores:\n yield scrapy.Request(response.urljoin(store), callback=self.parse_stores)\n\n def parse_state(self, response):\n urls = response.xpath('//div[@class=\"c-directory-list-content-wrapper\"]/ul/li/a/@href').extract()\n for path in urls:\n pattern = re.compile(\"^[a-z]{2}\\/[^()]+\\/[^()]+.html$\")\n if (pattern.match(path.strip())):\n yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)\n else:\n yield scrapy.Request(response.urljoin(path), callback=self.parse_city_stores)\n\n def 
parse(self, response):\n urls = response.xpath('//div[@class=\"c-directory-list-content-wrapper\"]/ul/li/a/@href').extract()\n for path in urls:\n pattern = re.compile(\"^[a-z]{2}.html$\")\n pattern1 = re.compile(\"^[a-z]{2}\\/[^()]+\\/[^()]+.html$\")\n if(pattern.match(path.strip())):\n yield scrapy.Request(response.urljoin(path), callback=self.parse_state)\n elif(pattern1.match(path.strip())):\n yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)\n else:\n yield scrapy.Request(response.urljoin(path), callback=self.parse_city_stores)\n", "path": "locations/spiders/albertsons.py"}]}
1,472
285
gh_patches_debug_8006
rasdani/github-patches
git_diff
tournesol-app__tournesol-1713
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [feat] Make `exclude_compared` configurable in user settings </issue> <code> [start of backend/core/serializers/user_settings.py] 1 from django.utils.translation import gettext_lazy as _ 2 from rest_framework import serializers 3 from rest_framework.serializers import ValidationError 4 5 from tournesol.models.poll import Poll 6 from tournesol.utils.video_language import ACCEPTED_LANGUAGE_CODES 7 8 9 class GeneralUserSettingsSerializer(serializers.Serializer): 10 """ 11 The general user settings that are not related to Tournesol polls. 12 """ 13 14 # The first element of the tuple should be an ISO 639-1 code. 15 NOTIFICATIONS_LANG = [ 16 ("en", "en"), 17 ("fr", "fr"), 18 ] 19 20 notifications__lang = serializers.ChoiceField( 21 choices=NOTIFICATIONS_LANG, required=False 22 ) 23 notifications_email__research = serializers.BooleanField(required=False) 24 notifications_email__new_features = serializers.BooleanField(required=False) 25 26 27 class GenericPollUserSettingsSerializer(serializers.Serializer): 28 """ 29 The settings common to each poll. 30 """ 31 32 COMPONENT_DISPLAY_STATE = [ 33 ("ALWAYS", "always"), 34 ("EMBEDDED_ONLY", "embedded_only"), 35 ("WEBSITE_ONLY", "website_only"), 36 ("NEVER", "never"), 37 ] 38 39 comparison__criteria_order = serializers.ListField( 40 child=serializers.CharField(), required=False 41 ) 42 43 comparison__fill_entity_selector = serializers.BooleanField(required=False) 44 45 comparison_ui__weekly_collective_goal_display = serializers.ChoiceField( 46 choices=COMPONENT_DISPLAY_STATE, allow_blank=True, required=False 47 ) 48 49 rate_later__auto_remove = serializers.IntegerField(required=False) 50 51 def validate_comparison__criteria_order(self, criteria): 52 poll_name = self.context.get("poll_name", self._context["poll_name"]) 53 poll = Poll.objects.get(name=poll_name) 54 55 if poll.main_criteria in criteria: 56 raise ValidationError(_("The main criterion cannot be in the list.")) 57 58 if len(criteria) != len(set(criteria)): 59 raise ValidationError(_("The list cannot contain duplicates.")) 60 61 for criterion in criteria: 62 if criterion not in poll.criterias_list: 63 raise ValidationError( 64 _("Unknown criterion: %(criterion)s.") % {"criterion": criterion} 65 ) 66 67 return criteria 68 69 def validate_rate_later__auto_remove(self, value): 70 if value < 1: 71 raise ValidationError(_("This parameter cannot be lower than 1.")) 72 return value 73 74 75 class VideosPollUserSettingsSerializer(GenericPollUserSettingsSerializer): 76 """ 77 The settings specific to the `videos` poll. 78 79 Also inherit the settings common to each poll. 
80 """ 81 82 DEFAULT_DATE_CHOICES = [ 83 ("TODAY", "today"), 84 ("WEEK", "week"), 85 ("MONTH", "month"), 86 ("YEAR", "year"), 87 ("ALL_TIME", "all_time"), 88 ] 89 90 recommendations__default_date = serializers.ChoiceField( 91 choices=DEFAULT_DATE_CHOICES, allow_blank=True, required=False 92 ) 93 recommendations__default_languages = serializers.ListField( 94 child=serializers.CharField(), allow_empty=True, required=False 95 ) 96 recommendations__default_unsafe = serializers.BooleanField(required=False) 97 98 def validate_recommendations__default_languages(self, default_languages): 99 for lang in default_languages: 100 if lang not in ACCEPTED_LANGUAGE_CODES: 101 raise ValidationError(_("Unknown language code: %(lang)s.") % {"lang": lang}) 102 103 return default_languages 104 105 106 class TournesolUserSettingsSerializer(serializers.Serializer): 107 """ 108 A representation of all user settings of the Tournesol project. 109 110 This representation includes poll-agnostic settings in addition to the 111 specific settings of each poll. 112 """ 113 114 general = GeneralUserSettingsSerializer(required=False) 115 videos = VideosPollUserSettingsSerializer(required=False, context={"poll_name": "videos"}) 116 117 def create(self, validated_data): 118 return validated_data 119 120 def update(self, instance, validated_data): 121 for scope, settings in self.validated_data.items(): 122 if scope not in instance: 123 instance[scope] = {} 124 instance[scope].update(settings) 125 return instance 126 [end of backend/core/serializers/user_settings.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/backend/core/serializers/user_settings.py b/backend/core/serializers/user_settings.py --- a/backend/core/serializers/user_settings.py +++ b/backend/core/serializers/user_settings.py @@ -94,6 +94,7 @@ child=serializers.CharField(), allow_empty=True, required=False ) recommendations__default_unsafe = serializers.BooleanField(required=False) + recommendations__default_exclude_compared_entities = serializers.BooleanField(required=False) def validate_recommendations__default_languages(self, default_languages): for lang in default_languages:
{"golden_diff": "diff --git a/backend/core/serializers/user_settings.py b/backend/core/serializers/user_settings.py\n--- a/backend/core/serializers/user_settings.py\n+++ b/backend/core/serializers/user_settings.py\n@@ -94,6 +94,7 @@\n child=serializers.CharField(), allow_empty=True, required=False\n )\n recommendations__default_unsafe = serializers.BooleanField(required=False)\n+ recommendations__default_exclude_compared_entities = serializers.BooleanField(required=False)\n \n def validate_recommendations__default_languages(self, default_languages):\n for lang in default_languages:\n", "issue": "[feat] Make `exclude_compared` configurable in user settings\n\n", "before_files": [{"content": "from django.utils.translation import gettext_lazy as _\nfrom rest_framework import serializers\nfrom rest_framework.serializers import ValidationError\n\nfrom tournesol.models.poll import Poll\nfrom tournesol.utils.video_language import ACCEPTED_LANGUAGE_CODES\n\n\nclass GeneralUserSettingsSerializer(serializers.Serializer):\n \"\"\"\n The general user settings that are not related to Tournesol polls.\n \"\"\"\n\n # The first element of the tuple should be an ISO 639-1 code.\n NOTIFICATIONS_LANG = [\n (\"en\", \"en\"),\n (\"fr\", \"fr\"),\n ]\n\n notifications__lang = serializers.ChoiceField(\n choices=NOTIFICATIONS_LANG, required=False\n )\n notifications_email__research = serializers.BooleanField(required=False)\n notifications_email__new_features = serializers.BooleanField(required=False)\n\n\nclass GenericPollUserSettingsSerializer(serializers.Serializer):\n \"\"\"\n The settings common to each poll.\n \"\"\"\n\n COMPONENT_DISPLAY_STATE = [\n (\"ALWAYS\", \"always\"),\n (\"EMBEDDED_ONLY\", \"embedded_only\"),\n (\"WEBSITE_ONLY\", \"website_only\"),\n (\"NEVER\", \"never\"),\n ]\n\n comparison__criteria_order = serializers.ListField(\n child=serializers.CharField(), required=False\n )\n\n comparison__fill_entity_selector = serializers.BooleanField(required=False)\n\n comparison_ui__weekly_collective_goal_display = serializers.ChoiceField(\n choices=COMPONENT_DISPLAY_STATE, allow_blank=True, required=False\n )\n\n rate_later__auto_remove = serializers.IntegerField(required=False)\n\n def validate_comparison__criteria_order(self, criteria):\n poll_name = self.context.get(\"poll_name\", self._context[\"poll_name\"])\n poll = Poll.objects.get(name=poll_name)\n\n if poll.main_criteria in criteria:\n raise ValidationError(_(\"The main criterion cannot be in the list.\"))\n\n if len(criteria) != len(set(criteria)):\n raise ValidationError(_(\"The list cannot contain duplicates.\"))\n\n for criterion in criteria:\n if criterion not in poll.criterias_list:\n raise ValidationError(\n _(\"Unknown criterion: %(criterion)s.\") % {\"criterion\": criterion}\n )\n\n return criteria\n\n def validate_rate_later__auto_remove(self, value):\n if value < 1:\n raise ValidationError(_(\"This parameter cannot be lower than 1.\"))\n return value\n\n\nclass VideosPollUserSettingsSerializer(GenericPollUserSettingsSerializer):\n \"\"\"\n The settings specific to the `videos` poll.\n\n Also inherit the settings common to each poll.\n \"\"\"\n\n DEFAULT_DATE_CHOICES = [\n (\"TODAY\", \"today\"),\n (\"WEEK\", \"week\"),\n (\"MONTH\", \"month\"),\n (\"YEAR\", \"year\"),\n (\"ALL_TIME\", \"all_time\"),\n ]\n\n recommendations__default_date = serializers.ChoiceField(\n choices=DEFAULT_DATE_CHOICES, allow_blank=True, required=False\n )\n recommendations__default_languages = serializers.ListField(\n child=serializers.CharField(), 
allow_empty=True, required=False\n )\n recommendations__default_unsafe = serializers.BooleanField(required=False)\n\n def validate_recommendations__default_languages(self, default_languages):\n for lang in default_languages:\n if lang not in ACCEPTED_LANGUAGE_CODES:\n raise ValidationError(_(\"Unknown language code: %(lang)s.\") % {\"lang\": lang})\n\n return default_languages\n\n\nclass TournesolUserSettingsSerializer(serializers.Serializer):\n \"\"\"\n A representation of all user settings of the Tournesol project.\n\n This representation includes poll-agnostic settings in addition to the\n specific settings of each poll.\n \"\"\"\n\n general = GeneralUserSettingsSerializer(required=False)\n videos = VideosPollUserSettingsSerializer(required=False, context={\"poll_name\": \"videos\"})\n\n def create(self, validated_data):\n return validated_data\n\n def update(self, instance, validated_data):\n for scope, settings in self.validated_data.items():\n if scope not in instance:\n instance[scope] = {}\n instance[scope].update(settings)\n return instance\n", "path": "backend/core/serializers/user_settings.py"}]}
1,673
121
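For context on the record above: the golden diff registers one additional optional boolean field on a Django REST Framework serializer. Below is a minimal sketch of how that field behaves once added. The serializer is trimmed to the two relevant fields, and the `settings.configure()` bootstrap exists only so the snippet runs outside a real Django project; neither detail comes from the repository itself.

```python
# Sketch only: assumes Django and djangorestframework are installed.
import django
from django.conf import settings

settings.configure()   # minimal settings so DRF imports cleanly outside a project
django.setup()

from rest_framework import serializers


class VideosPollUserSettingsSerializer(serializers.Serializer):
    # Only the fields relevant to the golden diff are reproduced here.
    recommendations__default_unsafe = serializers.BooleanField(required=False)
    # Field introduced by the golden diff:
    recommendations__default_exclude_compared_entities = serializers.BooleanField(
        required=False
    )


s = VideosPollUserSettingsSerializer(
    data={"recommendations__default_exclude_compared_entities": True}
)
assert s.is_valid(), s.errors
assert s.validated_data["recommendations__default_exclude_compared_entities"] is True
# required=False means the setting may also be omitted entirely:
assert VideosPollUserSettingsSerializer(data={}).is_valid()
```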
gh_patches_debug_5138
rasdani/github-patches
git_diff
dask__dask-2634
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> String Accessors in Converted DataFrame Columns Whenever I try to access strings in a dataframe which are previously converted to strings (ie. datall[['A']] = datall[['A']].applymap(str) and then datall['A']=datall['A'].str[:5]) I get a TypeError: 'StringAccessor' object has no attribute '__getitem__'. This is reproducible as follows: ``` import pandas as pd import dask.dataframe as dd import numpy as np def float_apply(x): try: return float(x) except ValueError: return float('nan') def string_apply(x): try: return str(x) except ValueError: return str('nan') df = pd.DataFrame(np.random.random_integers(0,6,size=(20, 6)), columns=list('ABCDEF')) data = dd.from_pandas(df, npartitions = 2) data=data.applymap(float_apply) data[['A']] = data[['A']].applymap(string_apply) data['A'] = data['A'].str[:1] print data.compute() ``` This will work with pandas dataframes. If .compute() is run prior to this then it works fine but that is probably suboptimal for large datasets. </issue> <code> [start of dask/dataframe/accessor.py] 1 from __future__ import absolute_import, division, print_function 2 3 import numpy as np 4 import pandas as pd 5 from toolz import partial 6 7 from ..utils import derived_from 8 9 10 def maybe_wrap_pandas(obj, x): 11 if isinstance(x, np.ndarray): 12 if isinstance(obj, pd.Series): 13 return pd.Series(x, index=obj.index, dtype=x.dtype) 14 return pd.Index(x) 15 return x 16 17 18 class Accessor(object): 19 """ 20 Base class for pandas Accessor objects cat, dt, and str. 21 22 Notes 23 ----- 24 Subclasses should define the following attributes: 25 26 * _accessor 27 * _accessor_name 28 """ 29 _not_implemented = set() 30 31 def __init__(self, series): 32 from .core import Series 33 if not isinstance(series, Series): 34 raise ValueError('Accessor cannot be initialized') 35 self._validate(series) 36 self._series = series 37 38 def _validate(self, series): 39 pass 40 41 @staticmethod 42 def _delegate_property(obj, accessor, attr): 43 out = getattr(getattr(obj, accessor, obj), attr) 44 return maybe_wrap_pandas(obj, out) 45 46 @staticmethod 47 def _delegate_method(obj, accessor, attr, args, kwargs): 48 out = getattr(getattr(obj, accessor, obj), attr)(*args, **kwargs) 49 return maybe_wrap_pandas(obj, out) 50 51 def _property_map(self, attr): 52 meta = self._delegate_property(self._series._meta, 53 self._accessor_name, attr) 54 token = '%s-%s' % (self._accessor_name, attr) 55 return self._series.map_partitions(self._delegate_property, 56 self._accessor_name, attr, 57 token=token, meta=meta) 58 59 def _function_map(self, attr, *args, **kwargs): 60 meta = self._delegate_method(self._series._meta_nonempty, 61 self._accessor_name, attr, args, kwargs) 62 token = '%s-%s' % (self._accessor_name, attr) 63 return self._series.map_partitions(self._delegate_method, 64 self._accessor_name, attr, args, 65 kwargs, meta=meta, token=token) 66 67 @property 68 def _delegates(self): 69 return set(dir(self._accessor)).difference(self._not_implemented) 70 71 def __dir__(self): 72 o = self._delegates 73 o.update(self.__dict__) 74 o.update(dir(type(self))) 75 return list(o) 76 77 def __getattr__(self, key): 78 if key in self._delegates: 79 if isinstance(getattr(self._accessor, key), property): 80 return self._property_map(key) 81 else: 82 return partial(self._function_map, key) 83 else: 84 raise AttributeError(key) 85 86 87 class DatetimeAccessor(Accessor): 88 """ Accessor object for datetimelike properties 
of the Series values. 89 90 Examples 91 -------- 92 93 >>> s.dt.microsecond # doctest: +SKIP 94 """ 95 _accessor = pd.Series.dt 96 _accessor_name = 'dt' 97 98 99 class StringAccessor(Accessor): 100 """ Accessor object for string properties of the Series values. 101 102 Examples 103 -------- 104 105 >>> s.str.lower() # doctest: +SKIP 106 """ 107 _accessor = pd.Series.str 108 _accessor_name = 'str' 109 _not_implemented = {'get_dummies'} 110 111 def _validate(self, series): 112 if not series.dtype == 'object': 113 raise AttributeError("Can only use .str accessor with object dtype") 114 115 @derived_from(pd.core.strings.StringMethods) 116 def split(self, pat=None, n=-1): 117 return self._function_map('split', pat=pat, n=n) 118 [end of dask/dataframe/accessor.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/dask/dataframe/accessor.py b/dask/dataframe/accessor.py --- a/dask/dataframe/accessor.py +++ b/dask/dataframe/accessor.py @@ -115,3 +115,12 @@ @derived_from(pd.core.strings.StringMethods) def split(self, pat=None, n=-1): return self._function_map('split', pat=pat, n=n) + + def __getitem__(self, index): + return self._series.map_partitions(str_get, index, + meta=self._series._meta) + + +def str_get(series, index): + """ Implements series.str[index] """ + return series.str[index]
{"golden_diff": "diff --git a/dask/dataframe/accessor.py b/dask/dataframe/accessor.py\n--- a/dask/dataframe/accessor.py\n+++ b/dask/dataframe/accessor.py\n@@ -115,3 +115,12 @@\n @derived_from(pd.core.strings.StringMethods)\n def split(self, pat=None, n=-1):\n return self._function_map('split', pat=pat, n=n)\n+\n+ def __getitem__(self, index):\n+ return self._series.map_partitions(str_get, index,\n+ meta=self._series._meta)\n+\n+\n+def str_get(series, index):\n+ \"\"\" Implements series.str[index] \"\"\"\n+ return series.str[index]\n", "issue": "String Accessors in Converted DataFrame Columns\nWhenever I try to access strings in a dataframe which are previously converted to strings (ie. datall[['A']] = datall[['A']].applymap(str) and then datall['A']=datall['A'].str[:5]) I get a TypeError: 'StringAccessor' object has no attribute '__getitem__'.\r\n\r\nThis is reproducible as follows:\r\n\r\n```\r\nimport pandas as pd\r\nimport dask.dataframe as dd\r\nimport numpy as np\r\n\r\ndef float_apply(x):\r\n try:\r\n return float(x)\r\n except ValueError:\r\n return float('nan')\r\n\r\ndef string_apply(x):\r\n try:\r\n return str(x)\r\n except ValueError:\r\n return str('nan')\r\n\r\ndf = pd.DataFrame(np.random.random_integers(0,6,size=(20, 6)), columns=list('ABCDEF'))\r\n\r\ndata = dd.from_pandas(df, npartitions = 2)\r\n\r\ndata=data.applymap(float_apply)\r\n\r\ndata[['A']] = data[['A']].applymap(string_apply)\r\ndata['A'] = data['A'].str[:1]\r\n\r\nprint data.compute()\r\n\r\n```\r\n\r\nThis will work with pandas dataframes. If .compute() is run prior to this then it works fine but that is probably suboptimal for large datasets.\n", "before_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nimport numpy as np\nimport pandas as pd\nfrom toolz import partial\n\nfrom ..utils import derived_from\n\n\ndef maybe_wrap_pandas(obj, x):\n if isinstance(x, np.ndarray):\n if isinstance(obj, pd.Series):\n return pd.Series(x, index=obj.index, dtype=x.dtype)\n return pd.Index(x)\n return x\n\n\nclass Accessor(object):\n \"\"\"\n Base class for pandas Accessor objects cat, dt, and str.\n\n Notes\n -----\n Subclasses should define the following attributes:\n\n * _accessor\n * _accessor_name\n \"\"\"\n _not_implemented = set()\n\n def __init__(self, series):\n from .core import Series\n if not isinstance(series, Series):\n raise ValueError('Accessor cannot be initialized')\n self._validate(series)\n self._series = series\n\n def _validate(self, series):\n pass\n\n @staticmethod\n def _delegate_property(obj, accessor, attr):\n out = getattr(getattr(obj, accessor, obj), attr)\n return maybe_wrap_pandas(obj, out)\n\n @staticmethod\n def _delegate_method(obj, accessor, attr, args, kwargs):\n out = getattr(getattr(obj, accessor, obj), attr)(*args, **kwargs)\n return maybe_wrap_pandas(obj, out)\n\n def _property_map(self, attr):\n meta = self._delegate_property(self._series._meta,\n self._accessor_name, attr)\n token = '%s-%s' % (self._accessor_name, attr)\n return self._series.map_partitions(self._delegate_property,\n self._accessor_name, attr,\n token=token, meta=meta)\n\n def _function_map(self, attr, *args, **kwargs):\n meta = self._delegate_method(self._series._meta_nonempty,\n self._accessor_name, attr, args, kwargs)\n token = '%s-%s' % (self._accessor_name, attr)\n return self._series.map_partitions(self._delegate_method,\n self._accessor_name, attr, args,\n kwargs, meta=meta, token=token)\n\n @property\n def _delegates(self):\n return 
set(dir(self._accessor)).difference(self._not_implemented)\n\n def __dir__(self):\n o = self._delegates\n o.update(self.__dict__)\n o.update(dir(type(self)))\n return list(o)\n\n def __getattr__(self, key):\n if key in self._delegates:\n if isinstance(getattr(self._accessor, key), property):\n return self._property_map(key)\n else:\n return partial(self._function_map, key)\n else:\n raise AttributeError(key)\n\n\nclass DatetimeAccessor(Accessor):\n \"\"\" Accessor object for datetimelike properties of the Series values.\n\n Examples\n --------\n\n >>> s.dt.microsecond # doctest: +SKIP\n \"\"\"\n _accessor = pd.Series.dt\n _accessor_name = 'dt'\n\n\nclass StringAccessor(Accessor):\n \"\"\" Accessor object for string properties of the Series values.\n\n Examples\n --------\n\n >>> s.str.lower() # doctest: +SKIP\n \"\"\"\n _accessor = pd.Series.str\n _accessor_name = 'str'\n _not_implemented = {'get_dummies'}\n\n def _validate(self, series):\n if not series.dtype == 'object':\n raise AttributeError(\"Can only use .str accessor with object dtype\")\n\n @derived_from(pd.core.strings.StringMethods)\n def split(self, pat=None, n=-1):\n return self._function_map('split', pat=pat, n=n)\n", "path": "dask/dataframe/accessor.py"}]}
1,864
153
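To make the dask fix concrete: once `StringAccessor.__getitem__` exists, the slicing idiom from the issue is dispatched per partition instead of raising. A minimal sketch, assuming a dask version that includes the patch above:

```python
import pandas as pd
import dask.dataframe as dd

pdf = pd.DataFrame({"A": ["123", "456", "789"]})
ddf = dd.from_pandas(pdf, npartitions=2)

# Before the patch this raised, because StringAccessor had no __getitem__;
# after it, series.str[index] is applied lazily to every partition.
ddf["A"] = ddf["A"].str[:1]
assert ddf.compute()["A"].tolist() == ["1", "4", "7"]
```

Routing through `map_partitions` keeps the operation lazy, so nothing runs until `.compute()` is called, matching how the other accessor methods in the same file behave.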
gh_patches_debug_8046
rasdani/github-patches
git_diff
conda__conda-build-526
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> AppVeyor: Commit message with braces -> failed build https://ci.appveyor.com/project/mpi4py/mpi4py/build/2.0.0a0-13/job/0q0w2g5o32qk3m94#L522 PS: I got a warning about conda-build being outdated. Isn't `conda update --all` supposed to update it? Maybe conflicting versions with dependencies? </issue> <code> [start of conda_build/windows.py] 1 from __future__ import absolute_import, division, print_function 2 3 import os 4 import sys 5 import shutil 6 from os.path import dirname, isdir, isfile, join, exists 7 8 import conda.config as cc 9 from conda.compat import iteritems 10 11 from conda_build.config import config 12 from conda_build import environ 13 from conda_build import source 14 from conda_build.utils import _check_call 15 16 try: 17 import psutil 18 except ImportError: 19 psutil = None 20 21 assert sys.platform == 'win32' 22 23 24 def fix_staged_scripts(): 25 """ 26 Fixes scripts which have been installed unix-style to have a .bat 27 helper 28 """ 29 scripts_dir = join(config.build_prefix, 'Scripts') 30 if not isdir(scripts_dir): 31 return 32 for fn in os.listdir(scripts_dir): 33 # process all the extensionless files 34 if not isfile(join(scripts_dir, fn)) or '.' in fn: 35 continue 36 37 with open(join(scripts_dir, fn)) as f: 38 line = f.readline().lower() 39 # If it's a #!python script 40 if not (line.startswith('#!') and 'python' in line.lower()): 41 continue 42 print('Adjusting unix-style #! script %s, ' 43 'and adding a .bat file for it' % fn) 44 # copy it with a .py extension (skipping that first #! line) 45 with open(join(scripts_dir, fn + '-script.py'), 'w') as fo: 46 fo.write(f.read()) 47 # now create the .exe file 48 shutil.copyfile(join(dirname(__file__), 49 'cli-%d.exe' % (8 * tuple.__itemsize__)), 50 join(scripts_dir, fn + '.exe')) 51 52 # remove the original script 53 os.remove(join(scripts_dir, fn)) 54 55 56 def msvc_env_cmd(): 57 if 'ProgramFiles(x86)' in os.environ: 58 program_files = os.environ['ProgramFiles(x86)'] 59 else: 60 program_files = os.environ['ProgramFiles'] 61 62 localappdata = os.environ.get("localappdata") 63 64 if config.PY3K: 65 vcvarsall = os.path.join(program_files, 66 r'Microsoft Visual Studio 10.0' 67 r'\VC\vcvarsall.bat') 68 else: 69 vcvarsall = os.path.join(program_files, 70 r'Microsoft Visual Studio 9.0' 71 r'\VC\vcvarsall.bat') 72 73 # Try the Microsoft Visual C++ Compiler for Python 2.7 74 if not isfile(vcvarsall) and localappdata and not config.PY3K: 75 vcvarsall = os.path.join(localappdata, "Programs", "Common", 76 "Microsoft", "Visual C++ for Python", "9.0", "vcvarsall.bat") 77 if not isfile(vcvarsall) and program_files and not config.PY3K: 78 vcvarsall = os.path.join(program_files, 'Common Files', 79 'Microsoft', 'Visual C++ for Python', "9.0", "vcvarsall.bat") 80 if not isfile(vcvarsall): 81 print("Warning: Couldn't find Visual Studio: %r" % vcvarsall) 82 return '' 83 84 return '''\ 85 call "%s" %s 86 ''' % (vcvarsall, {32: 'x86', 64: 'amd64'}[cc.bits]) 87 88 89 def kill_processes(): 90 if psutil is None: 91 return 92 for n in psutil.get_pid_list(): 93 try: 94 p = psutil.Process(n) 95 if p.name.lower() == 'msbuild.exe': 96 print('Terminating:', p.name) 97 p.terminate() 98 except: 99 continue 100 101 102 def build(m): 103 env = dict(os.environ) 104 env.update(environ.get_dict(m)) 105 106 for name in 'BIN', 'INC', 'LIB': 107 path = env['LIBRARY_' + name] 108 if not isdir(path): 109 os.makedirs(path) 110 111 src_dir = 
source.get_dir() 112 bld_bat = join(m.path, 'bld.bat') 113 if exists(bld_bat): 114 with open(bld_bat) as fi: 115 data = fi.read() 116 with open(join(src_dir, 'bld.bat'), 'w') as fo: 117 fo.write(msvc_env_cmd()) 118 for kv in iteritems(env): 119 fo.write('set %s=%s\n' % kv) 120 # more debuggable with echo on 121 fo.write('@echo on\n') 122 fo.write("REM ===== end generated header =====\n") 123 fo.write(data) 124 125 cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat'] 126 _check_call(cmd, cwd=src_dir) 127 kill_processes() 128 fix_staged_scripts() 129 [end of conda_build/windows.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/conda_build/windows.py b/conda_build/windows.py --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -116,7 +116,7 @@ with open(join(src_dir, 'bld.bat'), 'w') as fo: fo.write(msvc_env_cmd()) for kv in iteritems(env): - fo.write('set %s=%s\n' % kv) + fo.write('set "%s=%s"\n' % kv) # more debuggable with echo on fo.write('@echo on\n') fo.write("REM ===== end generated header =====\n")
{"golden_diff": "diff --git a/conda_build/windows.py b/conda_build/windows.py\n--- a/conda_build/windows.py\n+++ b/conda_build/windows.py\n@@ -116,7 +116,7 @@\n with open(join(src_dir, 'bld.bat'), 'w') as fo:\n fo.write(msvc_env_cmd())\n for kv in iteritems(env):\n- fo.write('set %s=%s\\n' % kv)\n+ fo.write('set \"%s=%s\"\\n' % kv)\n # more debuggable with echo on\n fo.write('@echo on\\n')\n fo.write(\"REM ===== end generated header =====\\n\")\n", "issue": "AppVeyor: Commit message with braces -> failed build\nhttps://ci.appveyor.com/project/mpi4py/mpi4py/build/2.0.0a0-13/job/0q0w2g5o32qk3m94#L522\n\nPS: I got a warning about conda-build being outdated. Isn't `conda update --all` supposed to update it? Maybe conflicting versions with dependencies?\n\n", "before_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nimport os\nimport sys\nimport shutil\nfrom os.path import dirname, isdir, isfile, join, exists\n\nimport conda.config as cc\nfrom conda.compat import iteritems\n\nfrom conda_build.config import config\nfrom conda_build import environ\nfrom conda_build import source\nfrom conda_build.utils import _check_call\n\ntry:\n import psutil\nexcept ImportError:\n psutil = None\n\nassert sys.platform == 'win32'\n\n\ndef fix_staged_scripts():\n \"\"\"\n Fixes scripts which have been installed unix-style to have a .bat\n helper\n \"\"\"\n scripts_dir = join(config.build_prefix, 'Scripts')\n if not isdir(scripts_dir):\n return\n for fn in os.listdir(scripts_dir):\n # process all the extensionless files\n if not isfile(join(scripts_dir, fn)) or '.' in fn:\n continue\n\n with open(join(scripts_dir, fn)) as f:\n line = f.readline().lower()\n # If it's a #!python script\n if not (line.startswith('#!') and 'python' in line.lower()):\n continue\n print('Adjusting unix-style #! script %s, '\n 'and adding a .bat file for it' % fn)\n # copy it with a .py extension (skipping that first #! 
line)\n with open(join(scripts_dir, fn + '-script.py'), 'w') as fo:\n fo.write(f.read())\n # now create the .exe file\n shutil.copyfile(join(dirname(__file__),\n 'cli-%d.exe' % (8 * tuple.__itemsize__)),\n join(scripts_dir, fn + '.exe'))\n\n # remove the original script\n os.remove(join(scripts_dir, fn))\n\n\ndef msvc_env_cmd():\n if 'ProgramFiles(x86)' in os.environ:\n program_files = os.environ['ProgramFiles(x86)']\n else:\n program_files = os.environ['ProgramFiles']\n\n localappdata = os.environ.get(\"localappdata\")\n\n if config.PY3K:\n vcvarsall = os.path.join(program_files,\n r'Microsoft Visual Studio 10.0'\n r'\\VC\\vcvarsall.bat')\n else:\n vcvarsall = os.path.join(program_files,\n r'Microsoft Visual Studio 9.0'\n r'\\VC\\vcvarsall.bat')\n\n # Try the Microsoft Visual C++ Compiler for Python 2.7\n if not isfile(vcvarsall) and localappdata and not config.PY3K:\n vcvarsall = os.path.join(localappdata, \"Programs\", \"Common\",\n \"Microsoft\", \"Visual C++ for Python\", \"9.0\", \"vcvarsall.bat\")\n if not isfile(vcvarsall) and program_files and not config.PY3K:\n vcvarsall = os.path.join(program_files, 'Common Files',\n 'Microsoft', 'Visual C++ for Python', \"9.0\", \"vcvarsall.bat\")\n if not isfile(vcvarsall):\n print(\"Warning: Couldn't find Visual Studio: %r\" % vcvarsall)\n return ''\n\n return '''\\\ncall \"%s\" %s\n''' % (vcvarsall, {32: 'x86', 64: 'amd64'}[cc.bits])\n\n\ndef kill_processes():\n if psutil is None:\n return\n for n in psutil.get_pid_list():\n try:\n p = psutil.Process(n)\n if p.name.lower() == 'msbuild.exe':\n print('Terminating:', p.name)\n p.terminate()\n except:\n continue\n\n\ndef build(m):\n env = dict(os.environ)\n env.update(environ.get_dict(m))\n\n for name in 'BIN', 'INC', 'LIB':\n path = env['LIBRARY_' + name]\n if not isdir(path):\n os.makedirs(path)\n\n src_dir = source.get_dir()\n bld_bat = join(m.path, 'bld.bat')\n if exists(bld_bat):\n with open(bld_bat) as fi:\n data = fi.read()\n with open(join(src_dir, 'bld.bat'), 'w') as fo:\n fo.write(msvc_env_cmd())\n for kv in iteritems(env):\n fo.write('set %s=%s\\n' % kv)\n # more debuggable with echo on\n fo.write('@echo on\\n')\n fo.write(\"REM ===== end generated header =====\\n\")\n fo.write(data)\n\n cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat']\n _check_call(cmd, cwd=src_dir)\n kill_processes()\n fix_staged_scripts()\n", "path": "conda_build/windows.py"}]}
1,958
142
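The one-character-pair change in the golden diff matters because `cmd.exe` treats unquoted metacharacters such as `&`, `|`, and parentheses in a `set NAME=value` line as syntax rather than as part of the value, and conda-build writes environment values (including ones derived from commit messages) into the generated `bld.bat`. A standalone sketch of the generated lines before and after the fix; the example values are hypothetical:

```python
# Hypothetical environment values that break an unquoted `set` line on Windows.
env = {"GIT_DESCRIBE_TAG": "2.0.0a0-(13)", "COMMIT_NOTE": "fix & polish"}

before = ["set %s=%s" % kv for kv in env.items()]    # old template: fragile
after = ['set "%s=%s"' % kv for kv in env.items()]   # patched template: safe

print("\n".join(after))
# set "GIT_DESCRIBE_TAG=2.0.0a0-(13)"
# set "COMMIT_NOTE=fix & polish"
```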
gh_patches_debug_4542
rasdani/github-patches
git_diff
open-mmlab__mmpretrain-122
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> mmcls/models/losses/eval_metrics.py confusion_matrix confusion_matrix[target_label.long(), pred_label.long()] += 1 I think this code is wrong, 【target_label.long(), pred_label.long()】 will list all the coordinates that need + 1, but only once + 1 will work it should be: `for t, p in zip(target_label, pred_label): confusion_matrix[t.long(), p.long()] += 1 ` </issue> <code> [start of mmcls/models/losses/eval_metrics.py] 1 import numpy as np 2 import torch 3 4 5 def calculate_confusion_matrix(pred, target): 6 if isinstance(pred, np.ndarray) and isinstance(target, np.ndarray): 7 pred = torch.from_numpy(pred) 8 target = torch.from_numpy(target) 9 elif not (isinstance(pred, torch.Tensor) 10 and isinstance(target, torch.Tensor)): 11 raise TypeError('pred and target should both be' 12 'torch.Tensor or np.ndarray') 13 _, pred_label = pred.topk(1, dim=1) 14 num_classes = pred.size(1) 15 pred_label = pred_label.view(-1) 16 target_label = target.view(-1) 17 assert len(pred_label) == len(target_label) 18 confusion_matrix = torch.zeros(num_classes, num_classes) 19 with torch.no_grad(): 20 confusion_matrix[target_label.long(), pred_label.long()] += 1 21 return confusion_matrix 22 23 24 def precision(pred, target): 25 """Calculate macro-averaged precision according to the prediction and target 26 27 Args: 28 pred (torch.Tensor | np.array): The model prediction. 29 target (torch.Tensor | np.array): The target of each prediction. 30 31 Returns: 32 float: The function will return a single float as precision. 33 """ 34 confusion_matrix = calculate_confusion_matrix(pred, target) 35 with torch.no_grad(): 36 res = confusion_matrix.diag() / torch.clamp( 37 confusion_matrix.sum(0), min=1) 38 res = res.mean().item() * 100 39 return res 40 41 42 def recall(pred, target): 43 """Calculate macro-averaged recall according to the prediction and target 44 45 Args: 46 pred (torch.Tensor | np.array): The model prediction. 47 target (torch.Tensor | np.array): The target of each prediction. 48 49 Returns: 50 float: The function will return a single float as recall. 51 """ 52 confusion_matrix = calculate_confusion_matrix(pred, target) 53 with torch.no_grad(): 54 res = confusion_matrix.diag() / torch.clamp( 55 confusion_matrix.sum(1), min=1) 56 res = res.mean().item() * 100 57 return res 58 59 60 def f1_score(pred, target): 61 """Calculate macro-averaged F1 score according to the prediction and target 62 63 Args: 64 pred (torch.Tensor | np.array): The model prediction. 65 target (torch.Tensor | np.array): The target of each prediction. 66 67 Returns: 68 float: The function will return a single float as F1 score. 69 """ 70 confusion_matrix = calculate_confusion_matrix(pred, target) 71 with torch.no_grad(): 72 precision = confusion_matrix.diag() / torch.clamp( 73 confusion_matrix.sum(1), min=1) 74 recall = confusion_matrix.diag() / torch.clamp( 75 confusion_matrix.sum(0), min=1) 76 res = 2 * precision * recall / torch.clamp( 77 precision + recall, min=1e-20) 78 res = torch.where(torch.isnan(res), torch.full_like(res, 0), res) 79 res = res.mean().item() * 100 80 return res 81 [end of mmcls/models/losses/eval_metrics.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/mmcls/models/losses/eval_metrics.py b/mmcls/models/losses/eval_metrics.py --- a/mmcls/models/losses/eval_metrics.py +++ b/mmcls/models/losses/eval_metrics.py @@ -17,7 +17,8 @@ assert len(pred_label) == len(target_label) confusion_matrix = torch.zeros(num_classes, num_classes) with torch.no_grad(): - confusion_matrix[target_label.long(), pred_label.long()] += 1 + for t, p in zip(target_label, pred_label): + confusion_matrix[t.long(), p.long()] += 1 return confusion_matrix
{"golden_diff": "diff --git a/mmcls/models/losses/eval_metrics.py b/mmcls/models/losses/eval_metrics.py\n--- a/mmcls/models/losses/eval_metrics.py\n+++ b/mmcls/models/losses/eval_metrics.py\n@@ -17,7 +17,8 @@\n assert len(pred_label) == len(target_label)\n confusion_matrix = torch.zeros(num_classes, num_classes)\n with torch.no_grad():\n- confusion_matrix[target_label.long(), pred_label.long()] += 1\n+ for t, p in zip(target_label, pred_label):\n+ confusion_matrix[t.long(), p.long()] += 1\n return confusion_matrix\n", "issue": "mmcls/models/losses/eval_metrics.py confusion_matrix\nconfusion_matrix[target_label.long(), pred_label.long()] += 1\r\nI think this code is wrong, \u3010target_label.long(), pred_label.long()\u3011 will list all the coordinates that need + 1, but only once + 1 will work\r\nit should be:\r\n`for t, p in zip(target_label, pred_label):\r\n confusion_matrix[t.long(), p.long()] += 1 `\r\n\n", "before_files": [{"content": "import numpy as np\nimport torch\n\n\ndef calculate_confusion_matrix(pred, target):\n if isinstance(pred, np.ndarray) and isinstance(target, np.ndarray):\n pred = torch.from_numpy(pred)\n target = torch.from_numpy(target)\n elif not (isinstance(pred, torch.Tensor)\n and isinstance(target, torch.Tensor)):\n raise TypeError('pred and target should both be'\n 'torch.Tensor or np.ndarray')\n _, pred_label = pred.topk(1, dim=1)\n num_classes = pred.size(1)\n pred_label = pred_label.view(-1)\n target_label = target.view(-1)\n assert len(pred_label) == len(target_label)\n confusion_matrix = torch.zeros(num_classes, num_classes)\n with torch.no_grad():\n confusion_matrix[target_label.long(), pred_label.long()] += 1\n return confusion_matrix\n\n\ndef precision(pred, target):\n \"\"\"Calculate macro-averaged precision according to the prediction and target\n\n Args:\n pred (torch.Tensor | np.array): The model prediction.\n target (torch.Tensor | np.array): The target of each prediction.\n\n Returns:\n float: The function will return a single float as precision.\n \"\"\"\n confusion_matrix = calculate_confusion_matrix(pred, target)\n with torch.no_grad():\n res = confusion_matrix.diag() / torch.clamp(\n confusion_matrix.sum(0), min=1)\n res = res.mean().item() * 100\n return res\n\n\ndef recall(pred, target):\n \"\"\"Calculate macro-averaged recall according to the prediction and target\n\n Args:\n pred (torch.Tensor | np.array): The model prediction.\n target (torch.Tensor | np.array): The target of each prediction.\n\n Returns:\n float: The function will return a single float as recall.\n \"\"\"\n confusion_matrix = calculate_confusion_matrix(pred, target)\n with torch.no_grad():\n res = confusion_matrix.diag() / torch.clamp(\n confusion_matrix.sum(1), min=1)\n res = res.mean().item() * 100\n return res\n\n\ndef f1_score(pred, target):\n \"\"\"Calculate macro-averaged F1 score according to the prediction and target\n\n Args:\n pred (torch.Tensor | np.array): The model prediction.\n target (torch.Tensor | np.array): The target of each prediction.\n\n Returns:\n float: The function will return a single float as F1 score.\n \"\"\"\n confusion_matrix = calculate_confusion_matrix(pred, target)\n with torch.no_grad():\n precision = confusion_matrix.diag() / torch.clamp(\n confusion_matrix.sum(1), min=1)\n recall = confusion_matrix.diag() / torch.clamp(\n confusion_matrix.sum(0), min=1)\n res = 2 * precision * recall / torch.clamp(\n precision + recall, min=1e-20)\n res = torch.where(torch.isnan(res), torch.full_like(res, 0), res)\n res = res.mean().item() * 100\n 
return res\n", "path": "mmcls/models/losses/eval_metrics.py"}]}
1,450
143
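The issue's diagnosis can be verified directly: PyTorch advanced-index assignment is a scatter, so duplicate `(target, pred)` pairs are counted only once, whereas the explicit loop from the golden diff accumulates every pair. A small self-contained check:

```python
import torch

target = torch.tensor([0, 0, 1])
pred = torch.tensor([0, 0, 1])   # the pair (0, 0) occurs twice

buggy = torch.zeros(2, 2)
buggy[target.long(), pred.long()] += 1   # scatter: duplicate indices collapse

fixed = torch.zeros(2, 2)
for t, p in zip(target, pred):           # loop from the golden diff
    fixed[t.long(), p.long()] += 1

assert buggy[0, 0] == 1   # wrong: two samples, only one increment applied
assert fixed[0, 0] == 2   # correct count
```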
gh_patches_debug_28395
rasdani/github-patches
git_diff
pantsbuild__pants-16264
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Not able to load resources when using pants vs sbt **Describe the bug** When using sbt we are able to call `Thread.currentThread().getContextClassLoader().getResources` and get a list of URLs. When using pants the list is empty. This at the moment limits us from using Flyway with pants. **Pants version** 2.13.0a1 and main. **OS** MacOS **Additional info** Example repo to reproduce the issue: https://github.com/somdoron/test-pants-resources I think the issue is, that pants only compress files in the resources zip file and not the directories. </issue> <code> [start of src/python/pants/jvm/resources.py] 1 # Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). 2 # Licensed under the Apache License, Version 2.0 (see LICENSE). 3 4 import itertools 5 import logging 6 from itertools import chain 7 8 from pants.core.target_types import ResourcesFieldSet, ResourcesGeneratorFieldSet 9 from pants.core.util_rules import stripped_source_files 10 from pants.core.util_rules.source_files import SourceFilesRequest 11 from pants.core.util_rules.stripped_source_files import StrippedSourceFiles 12 from pants.core.util_rules.system_binaries import ZipBinary 13 from pants.engine.fs import Digest, MergeDigests 14 from pants.engine.internals.selectors import MultiGet 15 from pants.engine.process import Process, ProcessResult 16 from pants.engine.rules import Get, collect_rules, rule 17 from pants.engine.target import SourcesField 18 from pants.engine.unions import UnionRule 19 from pants.jvm import compile 20 from pants.jvm.compile import ( 21 ClasspathDependenciesRequest, 22 ClasspathEntry, 23 ClasspathEntryRequest, 24 ClasspathEntryRequests, 25 CompileResult, 26 FallibleClasspathEntries, 27 FallibleClasspathEntry, 28 ) 29 from pants.jvm.strip_jar.strip_jar import StripJarRequest 30 from pants.jvm.subsystems import JvmSubsystem 31 from pants.util.logging import LogLevel 32 33 logger = logging.getLogger(__name__) 34 35 36 class JvmResourcesRequest(ClasspathEntryRequest): 37 field_sets = ( 38 ResourcesFieldSet, 39 ResourcesGeneratorFieldSet, 40 ) 41 42 43 @rule(desc="Assemble resources") 44 async def assemble_resources_jar( 45 zip: ZipBinary, 46 jvm: JvmSubsystem, 47 request: JvmResourcesRequest, 48 ) -> FallibleClasspathEntry: 49 # Request the component's direct dependency classpath, and additionally any prerequisite. 50 # Filter out any dependencies that are generated by our current target so that each resource 51 # only appears in a single input JAR. 52 # NOTE: Generated dependencies will have the same dependencies as the current target, so we 53 # don't need to inspect those dependencies. 
54 optional_prereq_request = [*((request.prerequisite,) if request.prerequisite else ())] 55 fallibles = await MultiGet( 56 Get(FallibleClasspathEntries, ClasspathEntryRequests(optional_prereq_request)), 57 Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request, ignore_generated=True)), 58 ) 59 direct_dependency_classpath_entries = FallibleClasspathEntries( 60 itertools.chain(*fallibles) 61 ).if_all_succeeded() 62 63 if direct_dependency_classpath_entries is None: 64 return FallibleClasspathEntry( 65 description=str(request.component), 66 result=CompileResult.DEPENDENCY_FAILED, 67 output=None, 68 exit_code=1, 69 ) 70 71 source_files = await Get( 72 StrippedSourceFiles, 73 SourceFilesRequest([tgt.get(SourcesField) for tgt in request.component.members]), 74 ) 75 76 output_filename = f"{request.component.representative.address.path_safe_spec}.resources.jar" 77 output_files = [output_filename] 78 79 resources_jar_input_digest = source_files.snapshot.digest 80 resources_jar_result = await Get( 81 ProcessResult, 82 Process( 83 argv=[ 84 zip.path, 85 output_filename, 86 *source_files.snapshot.files, 87 ], 88 description="Build resources JAR for {request.component}", 89 input_digest=resources_jar_input_digest, 90 output_files=output_files, 91 level=LogLevel.DEBUG, 92 ), 93 ) 94 95 output_digest = resources_jar_result.output_digest 96 if jvm.reproducible_jars: 97 output_digest = await Get(Digest, StripJarRequest(output_digest, tuple(output_files))) 98 cpe = ClasspathEntry(output_digest, output_files, []) 99 100 merged_cpe_digest = await Get( 101 Digest, 102 MergeDigests(chain((cpe.digest,), (i.digest for i in direct_dependency_classpath_entries))), 103 ) 104 105 merged_cpe = ClasspathEntry.merge( 106 digest=merged_cpe_digest, entries=[cpe, *direct_dependency_classpath_entries] 107 ) 108 109 return FallibleClasspathEntry(output_filename, CompileResult.SUCCEEDED, merged_cpe, 0) 110 111 112 def rules(): 113 return [ 114 *collect_rules(), 115 *compile.rules(), 116 *stripped_source_files.rules(), 117 UnionRule(ClasspathEntryRequest, JvmResourcesRequest), 118 ] 119 [end of src/python/pants/jvm/resources.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/python/pants/jvm/resources.py b/src/python/pants/jvm/resources.py --- a/src/python/pants/jvm/resources.py +++ b/src/python/pants/jvm/resources.py @@ -4,6 +4,7 @@ import itertools import logging from itertools import chain +from pathlib import Path from pants.core.target_types import ResourcesFieldSet, ResourcesGeneratorFieldSet from pants.core.util_rules import stripped_source_files @@ -76,6 +77,13 @@ output_filename = f"{request.component.representative.address.path_safe_spec}.resources.jar" output_files = [output_filename] + # #16231: Valid JAR files need the directories of each resource file as well as the files + # themselves. + + paths = {Path(filename) for filename in source_files.snapshot.files} + directories = {parent for path in paths for parent in path.parents} + input_files = {str(path) for path in chain(paths, directories)} + resources_jar_input_digest = source_files.snapshot.digest resources_jar_result = await Get( ProcessResult, @@ -83,7 +91,7 @@ argv=[ zip.path, output_filename, - *source_files.snapshot.files, + *sorted(input_files), ], description="Build resources JAR for {request.component}", input_digest=resources_jar_input_digest,
{"golden_diff": "diff --git a/src/python/pants/jvm/resources.py b/src/python/pants/jvm/resources.py\n--- a/src/python/pants/jvm/resources.py\n+++ b/src/python/pants/jvm/resources.py\n@@ -4,6 +4,7 @@\n import itertools\n import logging\n from itertools import chain\n+from pathlib import Path\n \n from pants.core.target_types import ResourcesFieldSet, ResourcesGeneratorFieldSet\n from pants.core.util_rules import stripped_source_files\n@@ -76,6 +77,13 @@\n output_filename = f\"{request.component.representative.address.path_safe_spec}.resources.jar\"\n output_files = [output_filename]\n \n+ # #16231: Valid JAR files need the directories of each resource file as well as the files\n+ # themselves.\n+\n+ paths = {Path(filename) for filename in source_files.snapshot.files}\n+ directories = {parent for path in paths for parent in path.parents}\n+ input_files = {str(path) for path in chain(paths, directories)}\n+\n resources_jar_input_digest = source_files.snapshot.digest\n resources_jar_result = await Get(\n ProcessResult,\n@@ -83,7 +91,7 @@\n argv=[\n zip.path,\n output_filename,\n- *source_files.snapshot.files,\n+ *sorted(input_files),\n ],\n description=\"Build resources JAR for {request.component}\",\n input_digest=resources_jar_input_digest,\n", "issue": "Not able to load resources when using pants vs sbt\n**Describe the bug**\r\nWhen using sbt we are able to call `Thread.currentThread().getContextClassLoader().getResources` and get a list of URLs. When using pants the list is empty. \r\n\r\nThis at the moment limits us from using Flyway with pants.\r\n\r\n**Pants version**\r\n2.13.0a1 and main.\r\n\r\n**OS**\r\nMacOS\r\n\r\n**Additional info**\r\nExample repo to reproduce the issue:\r\nhttps://github.com/somdoron/test-pants-resources\r\n\r\nI think the issue is, that pants only compress files in the resources zip file and not the directories.\n", "before_files": [{"content": "# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\nimport itertools\nimport logging\nfrom itertools import chain\n\nfrom pants.core.target_types import ResourcesFieldSet, ResourcesGeneratorFieldSet\nfrom pants.core.util_rules import stripped_source_files\nfrom pants.core.util_rules.source_files import SourceFilesRequest\nfrom pants.core.util_rules.stripped_source_files import StrippedSourceFiles\nfrom pants.core.util_rules.system_binaries import ZipBinary\nfrom pants.engine.fs import Digest, MergeDigests\nfrom pants.engine.internals.selectors import MultiGet\nfrom pants.engine.process import Process, ProcessResult\nfrom pants.engine.rules import Get, collect_rules, rule\nfrom pants.engine.target import SourcesField\nfrom pants.engine.unions import UnionRule\nfrom pants.jvm import compile\nfrom pants.jvm.compile import (\n ClasspathDependenciesRequest,\n ClasspathEntry,\n ClasspathEntryRequest,\n ClasspathEntryRequests,\n CompileResult,\n FallibleClasspathEntries,\n FallibleClasspathEntry,\n)\nfrom pants.jvm.strip_jar.strip_jar import StripJarRequest\nfrom pants.jvm.subsystems import JvmSubsystem\nfrom pants.util.logging import LogLevel\n\nlogger = logging.getLogger(__name__)\n\n\nclass JvmResourcesRequest(ClasspathEntryRequest):\n field_sets = (\n ResourcesFieldSet,\n ResourcesGeneratorFieldSet,\n )\n\n\n@rule(desc=\"Assemble resources\")\nasync def assemble_resources_jar(\n zip: ZipBinary,\n jvm: JvmSubsystem,\n request: JvmResourcesRequest,\n) -> FallibleClasspathEntry:\n # Request the component's direct dependency classpath, and 
additionally any prerequisite.\n # Filter out any dependencies that are generated by our current target so that each resource\n # only appears in a single input JAR.\n # NOTE: Generated dependencies will have the same dependencies as the current target, so we\n # don't need to inspect those dependencies.\n optional_prereq_request = [*((request.prerequisite,) if request.prerequisite else ())]\n fallibles = await MultiGet(\n Get(FallibleClasspathEntries, ClasspathEntryRequests(optional_prereq_request)),\n Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request, ignore_generated=True)),\n )\n direct_dependency_classpath_entries = FallibleClasspathEntries(\n itertools.chain(*fallibles)\n ).if_all_succeeded()\n\n if direct_dependency_classpath_entries is None:\n return FallibleClasspathEntry(\n description=str(request.component),\n result=CompileResult.DEPENDENCY_FAILED,\n output=None,\n exit_code=1,\n )\n\n source_files = await Get(\n StrippedSourceFiles,\n SourceFilesRequest([tgt.get(SourcesField) for tgt in request.component.members]),\n )\n\n output_filename = f\"{request.component.representative.address.path_safe_spec}.resources.jar\"\n output_files = [output_filename]\n\n resources_jar_input_digest = source_files.snapshot.digest\n resources_jar_result = await Get(\n ProcessResult,\n Process(\n argv=[\n zip.path,\n output_filename,\n *source_files.snapshot.files,\n ],\n description=\"Build resources JAR for {request.component}\",\n input_digest=resources_jar_input_digest,\n output_files=output_files,\n level=LogLevel.DEBUG,\n ),\n )\n\n output_digest = resources_jar_result.output_digest\n if jvm.reproducible_jars:\n output_digest = await Get(Digest, StripJarRequest(output_digest, tuple(output_files)))\n cpe = ClasspathEntry(output_digest, output_files, [])\n\n merged_cpe_digest = await Get(\n Digest,\n MergeDigests(chain((cpe.digest,), (i.digest for i in direct_dependency_classpath_entries))),\n )\n\n merged_cpe = ClasspathEntry.merge(\n digest=merged_cpe_digest, entries=[cpe, *direct_dependency_classpath_entries]\n )\n\n return FallibleClasspathEntry(output_filename, CompileResult.SUCCEEDED, merged_cpe, 0)\n\n\ndef rules():\n return [\n *collect_rules(),\n *compile.rules(),\n *stripped_source_files.rules(),\n UnionRule(ClasspathEntryRequest, JvmResourcesRequest),\n ]\n", "path": "src/python/pants/jvm/resources.py"}]}
1,822
308
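The key observation behind the golden diff is that a JAR needs explicit directory entries for `ClassLoader.getResources` to enumerate anything, so each resource path is expanded into its parent directories before being handed to `zip`. The expansion logic in isolation, using the same names as the patch; the resource filename is an illustrative Flyway-style placeholder:

```python
from itertools import chain
from pathlib import Path

files = ["db/migration/V1__init.sql"]   # hypothetical resource file
paths = {Path(f) for f in files}
directories = {parent for path in paths for parent in path.parents}
input_files = sorted(str(p) for p in chain(paths, directories))

print(input_files)
# ['.', 'db', 'db/migration', 'db/migration/V1__init.sql']
```

Note that `Path.parents` on a relative path includes `.`, and the patch sorts the combined set so the zip invocation stays deterministic.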
gh_patches_debug_20578
rasdani/github-patches
git_diff
google__osv.dev-482
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> PURLs for scoped NPM packages are invalid The package URLs for scoped NPM packages (e.g. [`@hapi/hoek`](https://osv.dev/vulnerability/GHSA-22h7-7wwg-qmgg)) are invalid. Parsing them with any package URL library fails. According to [the spec](https://github.com/package-url/purl-spec/blob/master/PURL-SPECIFICATION.rst#rules-for-each-purl-component), segments in the namespace (here: `@hapi`) must be percent-encdoded. So ``` pkg:npm/@hapi/hoek ``` should be ``` pkg:npm/%40hapi/hoek ``` On the same note, the name segment must be percent-encoded, too. I haven't encountered a PURL in OSV where the package name contains characters that'd need encoding, but if this is done for the namespace, it should be considered for the name as well. </issue> <code> [start of lib/osv/purl_helpers.py] 1 # Copyright 2022 Google LLC 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 """PURL conversion utilities.""" 15 16 PURL_ECOSYSTEMS = { 17 'crates.io': 'cargo', 18 'Hex': 'hex', 19 'Go': 'golang', 20 'Maven': 'maven', 21 'NuGet': 'nuget', 22 'npm': 'npm', 23 'Packagist': 'composer', 24 'OSS-Fuzz': 'generic', 25 'PyPI': 'pypi', 26 'RubyGems': 'gem', 27 } 28 29 30 def package_to_purl(ecosystem, package_name): 31 """Convert a ecosystem and package name to PURL.""" 32 purl_type = PURL_ECOSYSTEMS.get(ecosystem) 33 if not purl_type: 34 return None 35 36 if purl_type == 'maven': 37 # PURLs use / to separate the group ID and the artifact ID. 38 package_name = package_name.replace(':', '/', 1) 39 40 return f'pkg:{purl_type}/{package_name}' 41 [end of lib/osv/purl_helpers.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/lib/osv/purl_helpers.py b/lib/osv/purl_helpers.py --- a/lib/osv/purl_helpers.py +++ b/lib/osv/purl_helpers.py @@ -13,6 +13,8 @@ # limitations under the License. """PURL conversion utilities.""" +from urllib.parse import quote + PURL_ECOSYSTEMS = { 'crates.io': 'cargo', 'Hex': 'hex', @@ -27,6 +29,12 @@ } +def _url_encode(package_name): + """URL encode a PURL `namespace/name` or `name`.""" + parts = package_name.split('/') + return '/'.join(quote(p) for p in parts) + + def package_to_purl(ecosystem, package_name): """Convert a ecosystem and package name to PURL.""" purl_type = PURL_ECOSYSTEMS.get(ecosystem) @@ -37,4 +45,4 @@ # PURLs use / to separate the group ID and the artifact ID. package_name = package_name.replace(':', '/', 1) - return f'pkg:{purl_type}/{package_name}' + return f'pkg:{purl_type}/{_url_encode(package_name)}'
{"golden_diff": "diff --git a/lib/osv/purl_helpers.py b/lib/osv/purl_helpers.py\n--- a/lib/osv/purl_helpers.py\n+++ b/lib/osv/purl_helpers.py\n@@ -13,6 +13,8 @@\n # limitations under the License.\n \"\"\"PURL conversion utilities.\"\"\"\n \n+from urllib.parse import quote\n+\n PURL_ECOSYSTEMS = {\n 'crates.io': 'cargo',\n 'Hex': 'hex',\n@@ -27,6 +29,12 @@\n }\n \n \n+def _url_encode(package_name):\n+ \"\"\"URL encode a PURL `namespace/name` or `name`.\"\"\"\n+ parts = package_name.split('/')\n+ return '/'.join(quote(p) for p in parts)\n+\n+\n def package_to_purl(ecosystem, package_name):\n \"\"\"Convert a ecosystem and package name to PURL.\"\"\"\n purl_type = PURL_ECOSYSTEMS.get(ecosystem)\n@@ -37,4 +45,4 @@\n # PURLs use / to separate the group ID and the artifact ID.\n package_name = package_name.replace(':', '/', 1)\n \n- return f'pkg:{purl_type}/{package_name}'\n+ return f'pkg:{purl_type}/{_url_encode(package_name)}'\n", "issue": "PURLs for scoped NPM packages are invalid\nThe package URLs for scoped NPM packages (e.g. [`@hapi/hoek`](https://osv.dev/vulnerability/GHSA-22h7-7wwg-qmgg)) are invalid. Parsing them with any package URL library fails.\r\n\r\nAccording to [the spec](https://github.com/package-url/purl-spec/blob/master/PURL-SPECIFICATION.rst#rules-for-each-purl-component), segments in the namespace (here: `@hapi`) must be percent-encdoded.\r\n\r\nSo\r\n\r\n```\r\npkg:npm/@hapi/hoek\r\n```\r\n\r\nshould be\r\n\r\n```\r\npkg:npm/%40hapi/hoek\r\n```\r\n\r\nOn the same note, the name segment must be percent-encoded, too. I haven't encountered a PURL in OSV where the package name contains characters that'd need encoding, but if this is done for the namespace, it should be considered for the name as well.\n", "before_files": [{"content": "# Copyright 2022 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"PURL conversion utilities.\"\"\"\n\nPURL_ECOSYSTEMS = {\n 'crates.io': 'cargo',\n 'Hex': 'hex',\n 'Go': 'golang',\n 'Maven': 'maven',\n 'NuGet': 'nuget',\n 'npm': 'npm',\n 'Packagist': 'composer',\n 'OSS-Fuzz': 'generic',\n 'PyPI': 'pypi',\n 'RubyGems': 'gem',\n}\n\n\ndef package_to_purl(ecosystem, package_name):\n \"\"\"Convert a ecosystem and package name to PURL.\"\"\"\n purl_type = PURL_ECOSYSTEMS.get(ecosystem)\n if not purl_type:\n return None\n\n if purl_type == 'maven':\n # PURLs use / to separate the group ID and the artifact ID.\n package_name = package_name.replace(':', '/', 1)\n\n return f'pkg:{purl_type}/{package_name}'\n", "path": "lib/osv/purl_helpers.py"}]}
1,162
277
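The encoding helper added above is small enough to check by hand. Per the PURL spec cited in the issue, each `/`-separated segment is percent-encoded individually, so the separator between namespace and name survives while `@` becomes `%40`:

```python
from urllib.parse import quote


def _url_encode(package_name):
    """URL-encode each segment of a PURL `namespace/name` or `name`."""
    return "/".join(quote(p) for p in package_name.split("/"))


assert _url_encode("@hapi/hoek") == "%40hapi/hoek"
assert _url_encode("hoek") == "hoek"   # unscoped names pass through unchanged
# With the golden diff applied:
#   package_to_purl("npm", "@hapi/hoek") -> "pkg:npm/%40hapi/hoek"
```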
gh_patches_debug_21469
rasdani/github-patches
git_diff
cupy__cupy-1999
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Device-to-host copy in `examples/stream/cupy_memcpy.py` is not asynchronous I've interested in asynchronous memcpy for better performance. I checked the `cupy_memcpy.py` sample, and noticed that the last line `x_pinned_cpu = x_gpu.get()` [1] shouldn't work asynchronously. Unfortunately I don't know how to properly fix it. One issue is trivial: this line re-binds the variable `x_pinned_cpu`, instead of updating the value bound to this variable. But there is another more tricky problem. The function `cupy.ndarray.get` creates the new `numpy.ndarray` instance by calling `numpy.empty`, and passes the new host pointer to the `copy_to_host_async` function [2]. IIUC, as the new array not allocated in pinned memory, the copy couldn't be asynchronous with other computations at the GPU. * [1] https://github.com/cupy/cupy/blob/v5.0.0rc1/examples/stream/cupy_memcpy.py#L24 * [2] https://github.com/cupy/cupy/blob/v5.0.0rc1/cupy/core/core.pyx#L1805 </issue> <code> [start of examples/stream/cupy_memcpy.py] 1 # nvprof --print-gpu-trace python examples/stream/cupy_memcpy.py 2 import cupy 3 import numpy 4 5 pinned_memory_pool = cupy.cuda.PinnedMemoryPool() 6 cupy.cuda.set_pinned_memory_allocator(pinned_memory_pool.malloc) 7 8 9 def _pin_memory(array): 10 mem = cupy.cuda.alloc_pinned_memory(array.nbytes) 11 ret = numpy.frombuffer(mem, array.dtype, array.size).reshape(array.shape) 12 ret[...] = array 13 return ret 14 15 16 x_cpu = numpy.array([1, 2, 3], dtype=numpy.float32) 17 x_pinned_cpu = _pin_memory(x_cpu) 18 x_gpu = cupy.core.ndarray((3,), dtype=numpy.float32) 19 with cupy.cuda.stream.Stream(): 20 x_gpu.set(x_pinned_cpu) 21 22 stream = cupy.cuda.stream.Stream() 23 stream.use() 24 x_pinned_cpu = x_gpu.get() 25 [end of examples/stream/cupy_memcpy.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/examples/stream/cupy_memcpy.py b/examples/stream/cupy_memcpy.py --- a/examples/stream/cupy_memcpy.py +++ b/examples/stream/cupy_memcpy.py @@ -13,12 +13,38 @@ return ret -x_cpu = numpy.array([1, 2, 3], dtype=numpy.float32) -x_pinned_cpu = _pin_memory(x_cpu) -x_gpu = cupy.core.ndarray((3,), dtype=numpy.float32) -with cupy.cuda.stream.Stream(): - x_gpu.set(x_pinned_cpu) - -stream = cupy.cuda.stream.Stream() -stream.use() -x_pinned_cpu = x_gpu.get() +SIZE = 1024 * 1024 +x_cpu_src = numpy.arange(SIZE, dtype=numpy.float32) +x_gpu_src = cupy.arange(SIZE, dtype=numpy.float32) + + +# synchronous +stream = cupy.cuda.Stream.null +start = stream.record() +x_gpu_dst = cupy.empty(x_cpu_src.shape, x_cpu_src.dtype) +x_gpu_dst.set(x_cpu_src) +x_cpu_dst = x_gpu_src.get() +end = stream.record() + +print('Synchronous Device to Host / Host to Device (ms)') +print(cupy.cuda.get_elapsed_time(start, end)) + + +# asynchronous +x_gpu_dst = cupy.empty(x_cpu_src.shape, x_cpu_src.dtype) +x_cpu_dst = numpy.empty(x_gpu_src.shape, x_gpu_src.dtype) + +x_pinned_cpu_src = _pin_memory(x_cpu_src) +x_pinned_cpu_dst = _pin_memory(x_cpu_dst) + +with cupy.cuda.stream.Stream() as stream_htod: + start = stream_htod.record() + x_gpu_dst.set(x_pinned_cpu_src) + with cupy.cuda.stream.Stream() as stream_dtoh: + x_gpu_src.get(out=x_pinned_cpu_dst) + stream_dtoh.synchronize() + stream_htod.synchronize() + end = stream_htod.record() + +print('Asynchronous Device to Host / Host to Device (ms)') +print(cupy.cuda.get_elapsed_time(start, end))
{"golden_diff": "diff --git a/examples/stream/cupy_memcpy.py b/examples/stream/cupy_memcpy.py\n--- a/examples/stream/cupy_memcpy.py\n+++ b/examples/stream/cupy_memcpy.py\n@@ -13,12 +13,38 @@\n return ret\n \n \n-x_cpu = numpy.array([1, 2, 3], dtype=numpy.float32)\n-x_pinned_cpu = _pin_memory(x_cpu)\n-x_gpu = cupy.core.ndarray((3,), dtype=numpy.float32)\n-with cupy.cuda.stream.Stream():\n- x_gpu.set(x_pinned_cpu)\n-\n-stream = cupy.cuda.stream.Stream()\n-stream.use()\n-x_pinned_cpu = x_gpu.get()\n+SIZE = 1024 * 1024\n+x_cpu_src = numpy.arange(SIZE, dtype=numpy.float32)\n+x_gpu_src = cupy.arange(SIZE, dtype=numpy.float32)\n+\n+\n+# synchronous\n+stream = cupy.cuda.Stream.null\n+start = stream.record()\n+x_gpu_dst = cupy.empty(x_cpu_src.shape, x_cpu_src.dtype)\n+x_gpu_dst.set(x_cpu_src)\n+x_cpu_dst = x_gpu_src.get()\n+end = stream.record()\n+\n+print('Synchronous Device to Host / Host to Device (ms)')\n+print(cupy.cuda.get_elapsed_time(start, end))\n+\n+\n+# asynchronous\n+x_gpu_dst = cupy.empty(x_cpu_src.shape, x_cpu_src.dtype)\n+x_cpu_dst = numpy.empty(x_gpu_src.shape, x_gpu_src.dtype)\n+\n+x_pinned_cpu_src = _pin_memory(x_cpu_src)\n+x_pinned_cpu_dst = _pin_memory(x_cpu_dst)\n+\n+with cupy.cuda.stream.Stream() as stream_htod:\n+ start = stream_htod.record()\n+ x_gpu_dst.set(x_pinned_cpu_src)\n+ with cupy.cuda.stream.Stream() as stream_dtoh:\n+ x_gpu_src.get(out=x_pinned_cpu_dst)\n+ stream_dtoh.synchronize()\n+ stream_htod.synchronize()\n+ end = stream_htod.record()\n+\n+print('Asynchronous Device to Host / Host to Device (ms)')\n+print(cupy.cuda.get_elapsed_time(start, end))\n", "issue": "Device-to-host copy in `examples/stream/cupy_memcpy.py` is not asynchronous\nI've interested in asynchronous memcpy for better performance. I checked the `cupy_memcpy.py` sample, and noticed that the last line `x_pinned_cpu = x_gpu.get()` [1] shouldn't work asynchronously. Unfortunately I don't know how to properly fix it.\r\n\r\nOne issue is trivial: this line re-binds the variable `x_pinned_cpu`, instead of updating the value bound to this variable.\r\n\r\nBut there is another more tricky problem. The function `cupy.ndarray.get` creates the new `numpy.ndarray` instance by calling `numpy.empty`, and passes the new host pointer to the `copy_to_host_async` function [2]. IIUC, as the new array not allocated in pinned memory, the copy couldn't be asynchronous with other computations at the GPU.\r\n\r\n* [1] https://github.com/cupy/cupy/blob/v5.0.0rc1/examples/stream/cupy_memcpy.py#L24\r\n* [2] https://github.com/cupy/cupy/blob/v5.0.0rc1/cupy/core/core.pyx#L1805\n", "before_files": [{"content": "# nvprof --print-gpu-trace python examples/stream/cupy_memcpy.py\nimport cupy\nimport numpy\n\npinned_memory_pool = cupy.cuda.PinnedMemoryPool()\ncupy.cuda.set_pinned_memory_allocator(pinned_memory_pool.malloc)\n\n\ndef _pin_memory(array):\n mem = cupy.cuda.alloc_pinned_memory(array.nbytes)\n ret = numpy.frombuffer(mem, array.dtype, array.size).reshape(array.shape)\n ret[...] = array\n return ret\n\n\nx_cpu = numpy.array([1, 2, 3], dtype=numpy.float32)\nx_pinned_cpu = _pin_memory(x_cpu)\nx_gpu = cupy.core.ndarray((3,), dtype=numpy.float32)\nwith cupy.cuda.stream.Stream():\n x_gpu.set(x_pinned_cpu)\n\nstream = cupy.cuda.stream.Stream()\nstream.use()\nx_pinned_cpu = x_gpu.get()\n", "path": "examples/stream/cupy_memcpy.py"}]}
1011
461
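The golden diff above makes the transfers in `examples/stream/cupy_memcpy.py` genuinely asynchronous by (1) keeping both host buffers page-locked and (2) issuing the host-to-device and device-to-host copies on separate streams. Below is a standalone sketch of that pattern; it assumes a CUDA-capable device, and the buffer/stream names are illustrative rather than part of the CuPy API.

```python
import cupy
import numpy


def pin_memory(array):
    # Page-locked host memory is required for copies to overlap with
    # other work instead of falling back to synchronous transfers.
    mem = cupy.cuda.alloc_pinned_memory(array.nbytes)
    ret = numpy.frombuffer(mem, array.dtype, array.size).reshape(array.shape)
    ret[...] = array
    return ret


n = 1 << 20
src_cpu = pin_memory(numpy.arange(n, dtype=numpy.float32))
dst_cpu = pin_memory(numpy.empty(n, dtype=numpy.float32))
src_gpu = cupy.arange(n, dtype=numpy.float32)
dst_gpu = cupy.empty(n, dtype=numpy.float32)

# Two streams let the H2D and D2H copies overlap; each copy is also
# asynchronous w.r.t. the host because the CPU buffers are pinned.
with cupy.cuda.stream.Stream() as stream_htod:
    dst_gpu.set(src_cpu)            # host -> device on stream_htod
    with cupy.cuda.stream.Stream() as stream_dtoh:
        src_gpu.get(out=dst_cpu)    # device -> host on stream_dtoh
        stream_dtoh.synchronize()
    stream_htod.synchronize()
```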
gh_patches_debug_14223
rasdani/github-patches
git_diff
ibis-project__ibis-2556
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> CLN: Remove or consolidate dev dependencies from setup.py and environment.yml I noticed in https://github.com/ibis-project/ibis/pull/2547#issue-529169508 that the dev dependencies are not in sync in https://github.com/ibis-project/ibis/blob/master/setup.py#L63 and https://github.com/ibis-project/ibis/blob/master/environment.yml#L24 `environment.yml` looks more up to date; the dev dependencies in `setup.py` should either be synced with that file or just removed. </issue> <code> [start of setup.py] 1 #!/usr/bin/env python 2 """Ibis setup module.""" 3 import pathlib 4 import sys 5 6 from setuptools import find_packages, setup 7 8 import versioneer 9 10 LONG_DESCRIPTION = """ 11 Ibis is a productivity-centric Python big data framework. 12 13 See http://ibis-project.org 14 """ 15 16 VERSION = sys.version_info.major, sys.version_info.minor 17 18 impala_requires = ['hdfs>=2.0.16', 'sqlalchemy>=1.1,<1.3.7', 'requests'] 19 impala_requires.append('impyla[kerberos]>=0.15.0') 20 21 sqlite_requires = ['sqlalchemy>=1.1,<1.3.7'] 22 postgres_requires = sqlite_requires + ['psycopg2'] 23 mysql_requires = sqlite_requires + ['pymysql'] 24 25 omniscidb_requires = ['pymapd==0.24', 'pyarrow'] 26 kerberos_requires = ['requests-kerberos'] 27 visualization_requires = ['graphviz'] 28 clickhouse_requires = [ 29 'clickhouse-driver>=0.1.3', 30 'clickhouse-cityhash', 31 ] 32 bigquery_requires = [ 33 'google-cloud-bigquery[bqstorage,pandas]>=1.12.0,<2.0.0dev', 34 'pydata-google-auth', 35 ] 36 hdf5_requires = ['tables>=3.0.0'] 37 38 parquet_requires = ['pyarrow>=0.12.0'] 39 spark_requires = ['pyspark>=2.4.3'] 40 41 geospatial_requires = ['geoalchemy2', 'geopandas', 'shapely'] 42 43 dask_requires = [ 44 'dask[dataframe, array]', 45 ] 46 47 all_requires = ( 48 impala_requires 49 + postgres_requires 50 + omniscidb_requires 51 + mysql_requires 52 + kerberos_requires 53 + visualization_requires 54 + clickhouse_requires 55 + bigquery_requires 56 + hdf5_requires 57 + parquet_requires 58 + spark_requires 59 + geospatial_requires 60 + dask_requires 61 ) 62 63 develop_requires = all_requires + [ 64 'black', 65 'click', 66 'pydocstyle==4.0.1', 67 'flake8', 68 'isort', 69 'mypy', 70 'pre-commit', 71 'pygit2', 72 'pytest>=4.5', 73 ] 74 75 install_requires = [ 76 line.strip() 77 for line in pathlib.Path(__file__) 78 .parent.joinpath('requirements.txt') 79 .read_text() 80 .splitlines() 81 ] 82 83 setup( 84 name='ibis-framework', 85 url='https://github.com/ibis-project/ibis', 86 packages=find_packages(), 87 version=versioneer.get_version(), 88 cmdclass=versioneer.get_cmdclass(), 89 install_requires=install_requires, 90 python_requires='>=3.7', 91 extras_require={ 92 'all': all_requires, 93 'develop': develop_requires, 94 'impala': impala_requires, 95 'kerberos': kerberos_requires, 96 'postgres': postgres_requires, 97 'omniscidb': omniscidb_requires, 98 'mysql': mysql_requires, 99 'sqlite': sqlite_requires, 100 'visualization': visualization_requires, 101 'clickhouse': clickhouse_requires, 102 'bigquery': bigquery_requires, 103 'hdf5': hdf5_requires, 104 'parquet': parquet_requires, 105 'spark': spark_requires, 106 'geospatial': geospatial_requires, 107 'dask': dask_requires, 108 }, 109 description="Productivity-centric Python Big Data Framework", 110 long_description=LONG_DESCRIPTION, 111 classifiers=[ 112 'Development Status :: 4 - Beta', 113 'Operating System :: OS Independent', 114 'Intended Audience :: Science/Research', 115 
'Programming Language :: Python', 116 'Programming Language :: Python :: 3', 117 'Topic :: Scientific/Engineering', 118 ], 119 license='Apache License, Version 2.0', 120 maintainer="Phillip Cloud", 121 maintainer_email="[email protected]", 122 ) 123 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -60,18 +60,6 @@ + dask_requires ) -develop_requires = all_requires + [ - 'black', - 'click', - 'pydocstyle==4.0.1', - 'flake8', - 'isort', - 'mypy', - 'pre-commit', - 'pygit2', - 'pytest>=4.5', -] - install_requires = [ line.strip() for line in pathlib.Path(__file__) @@ -90,7 +78,6 @@ python_requires='>=3.7', extras_require={ 'all': all_requires, - 'develop': develop_requires, 'impala': impala_requires, 'kerberos': kerberos_requires, 'postgres': postgres_requires,
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -60,18 +60,6 @@\n + dask_requires\n )\n \n-develop_requires = all_requires + [\n- 'black',\n- 'click',\n- 'pydocstyle==4.0.1',\n- 'flake8',\n- 'isort',\n- 'mypy',\n- 'pre-commit',\n- 'pygit2',\n- 'pytest>=4.5',\n-]\n-\n install_requires = [\n line.strip()\n for line in pathlib.Path(__file__)\n@@ -90,7 +78,6 @@\n python_requires='>=3.7',\n extras_require={\n 'all': all_requires,\n- 'develop': develop_requires,\n 'impala': impala_requires,\n 'kerberos': kerberos_requires,\n 'postgres': postgres_requires,\n", "issue": "CLN: Remove or consolidate dev dependencies from setup.py and environment.yml\nI noticed in https://github.com/ibis-project/ibis/pull/2547#issue-529169508 that the dev dependencies are not in sync in https://github.com/ibis-project/ibis/blob/master/setup.py#L63 and https://github.com/ibis-project/ibis/blob/master/environment.yml#L24\r\n\r\n`environment.yml` looks more up to date; the dev dependencies in `setup.py` should either be synced with that file or just removed.\n", "before_files": [{"content": "#!/usr/bin/env python\n\"\"\"Ibis setup module.\"\"\"\nimport pathlib\nimport sys\n\nfrom setuptools import find_packages, setup\n\nimport versioneer\n\nLONG_DESCRIPTION = \"\"\"\nIbis is a productivity-centric Python big data framework.\n\nSee http://ibis-project.org\n\"\"\"\n\nVERSION = sys.version_info.major, sys.version_info.minor\n\nimpala_requires = ['hdfs>=2.0.16', 'sqlalchemy>=1.1,<1.3.7', 'requests']\nimpala_requires.append('impyla[kerberos]>=0.15.0')\n\nsqlite_requires = ['sqlalchemy>=1.1,<1.3.7']\npostgres_requires = sqlite_requires + ['psycopg2']\nmysql_requires = sqlite_requires + ['pymysql']\n\nomniscidb_requires = ['pymapd==0.24', 'pyarrow']\nkerberos_requires = ['requests-kerberos']\nvisualization_requires = ['graphviz']\nclickhouse_requires = [\n 'clickhouse-driver>=0.1.3',\n 'clickhouse-cityhash',\n]\nbigquery_requires = [\n 'google-cloud-bigquery[bqstorage,pandas]>=1.12.0,<2.0.0dev',\n 'pydata-google-auth',\n]\nhdf5_requires = ['tables>=3.0.0']\n\nparquet_requires = ['pyarrow>=0.12.0']\nspark_requires = ['pyspark>=2.4.3']\n\ngeospatial_requires = ['geoalchemy2', 'geopandas', 'shapely']\n\ndask_requires = [\n 'dask[dataframe, array]',\n]\n\nall_requires = (\n impala_requires\n + postgres_requires\n + omniscidb_requires\n + mysql_requires\n + kerberos_requires\n + visualization_requires\n + clickhouse_requires\n + bigquery_requires\n + hdf5_requires\n + parquet_requires\n + spark_requires\n + geospatial_requires\n + dask_requires\n)\n\ndevelop_requires = all_requires + [\n 'black',\n 'click',\n 'pydocstyle==4.0.1',\n 'flake8',\n 'isort',\n 'mypy',\n 'pre-commit',\n 'pygit2',\n 'pytest>=4.5',\n]\n\ninstall_requires = [\n line.strip()\n for line in pathlib.Path(__file__)\n .parent.joinpath('requirements.txt')\n .read_text()\n .splitlines()\n]\n\nsetup(\n name='ibis-framework',\n url='https://github.com/ibis-project/ibis',\n packages=find_packages(),\n version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(),\n install_requires=install_requires,\n python_requires='>=3.7',\n extras_require={\n 'all': all_requires,\n 'develop': develop_requires,\n 'impala': impala_requires,\n 'kerberos': kerberos_requires,\n 'postgres': postgres_requires,\n 'omniscidb': omniscidb_requires,\n 'mysql': mysql_requires,\n 'sqlite': sqlite_requires,\n 'visualization': visualization_requires,\n 'clickhouse': clickhouse_requires,\n 'bigquery': bigquery_requires,\n 'hdf5': hdf5_requires,\n 
'parquet': parquet_requires,\n 'spark': spark_requires,\n 'geospatial': geospatial_requires,\n 'dask': dask_requires,\n },\n description=\"Productivity-centric Python Big Data Framework\",\n long_description=LONG_DESCRIPTION,\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Operating System :: OS Independent',\n 'Intended Audience :: Science/Research',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Topic :: Scientific/Engineering',\n ],\n license='Apache License, Version 2.0',\n maintainer=\"Phillip Cloud\",\n maintainer_email=\"[email protected]\",\n)\n", "path": "setup.py"}]}
1768
197
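After this record's patch, `setup.py` keeps only runtime and per-backend extras, while development tooling lives solely in `environment.yml`. A minimal sketch of the resulting single-source-of-truth layout — the package name is a placeholder, not the real project:

```python
import pathlib

from setuptools import setup

# Runtime requirements stay in requirements.txt; dev-only tools
# (black, flake8, mypy, ...) are managed by conda via environment.yml
# and are deliberately absent from extras_require.
install_requires = [
    line.strip()
    for line in pathlib.Path("requirements.txt").read_text().splitlines()
    if line.strip()
]

setup(
    name="example-package",            # placeholder
    install_requires=install_requires,
    extras_require={},                 # no 'develop' extra anymore
)
```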
gh_patches_debug_16105
rasdani/github-patches
git_diff
comic__grand-challenge.org-1812
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Increase width of algorithm result table The table on the algorithm results page can become wider than the page container if the name of the scan is very long. The user then has to scroll to the right to see the "Open Result in Viewer" button, which is quite confusing. ![image](https://user-images.githubusercontent.com/773597/112984872-ba647b00-915f-11eb-9837-560159d3a314.png) </issue> <code> [start of app/grandchallenge/core/context_processors.py] 1 import logging 2 3 from django.conf import settings 4 from guardian.shortcuts import get_perms 5 from guardian.utils import get_anonymous_user 6 7 from grandchallenge.blogs.models import Post 8 from grandchallenge.policies.models import Policy 9 10 logger = logging.getLogger(__name__) 11 12 13 def challenge(request): 14 try: 15 challenge = request.challenge 16 17 if challenge is None: 18 return {} 19 20 except AttributeError: 21 logger.warning(f"Could not get challenge for request: {request}") 22 return {} 23 24 try: 25 user = request.user 26 except AttributeError: 27 user = get_anonymous_user() 28 29 return { 30 "challenge": challenge, 31 "challenge_perms": get_perms(user, challenge), 32 "user_is_participant": challenge.is_participant(user), 33 "pages": challenge.page_set.all(), 34 } 35 36 37 def deployment_info(*_, **__): 38 return { 39 "google_analytics_id": settings.GOOGLE_ANALYTICS_ID, 40 "geochart_api_key": settings.GOOGLE_MAPS_API_KEY, 41 "COMMIT_ID": settings.COMMIT_ID, 42 } 43 44 45 def debug(*_, **__): 46 return { 47 "DEBUG": settings.DEBUG, 48 "ACTSTREAM_ENABLE": settings.ACTSTREAM_ENABLE, 49 } 50 51 52 def sentry_dsn(*_, **__): 53 return { 54 "SENTRY_DSN": settings.SENTRY_DSN, 55 "SENTRY_ENABLE_JS_REPORTING": settings.SENTRY_ENABLE_JS_REPORTING, 56 } 57 58 59 def footer_links(*_, **__): 60 return { 61 "policy_pages": Policy.objects.all(), 62 "blog_posts": Post.objects.filter(published=True), 63 } 64 [end of app/grandchallenge/core/context_processors.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/app/grandchallenge/core/context_processors.py b/app/grandchallenge/core/context_processors.py --- a/app/grandchallenge/core/context_processors.py +++ b/app/grandchallenge/core/context_processors.py @@ -5,6 +5,7 @@ from guardian.utils import get_anonymous_user from grandchallenge.blogs.models import Post +from grandchallenge.participants.models import RegistrationRequest from grandchallenge.policies.models import Policy logger = logging.getLogger(__name__) @@ -31,6 +32,9 @@ "challenge_perms": get_perms(user, challenge), "user_is_participant": challenge.is_participant(user), "pages": challenge.page_set.all(), + "pending_requests": challenge.registrationrequest_set.filter( + status=RegistrationRequest.PENDING + ), }
{"golden_diff": "diff --git a/app/grandchallenge/core/context_processors.py b/app/grandchallenge/core/context_processors.py\n--- a/app/grandchallenge/core/context_processors.py\n+++ b/app/grandchallenge/core/context_processors.py\n@@ -5,6 +5,7 @@\n from guardian.utils import get_anonymous_user\n \n from grandchallenge.blogs.models import Post\n+from grandchallenge.participants.models import RegistrationRequest\n from grandchallenge.policies.models import Policy\n \n logger = logging.getLogger(__name__)\n@@ -31,6 +32,9 @@\n \"challenge_perms\": get_perms(user, challenge),\n \"user_is_participant\": challenge.is_participant(user),\n \"pages\": challenge.page_set.all(),\n+ \"pending_requests\": challenge.registrationrequest_set.filter(\n+ status=RegistrationRequest.PENDING\n+ ),\n }\n", "issue": "Increase width of algorithm result table\nThe table on the algorithm results page can become wider than the page container if the name of the scan is very long. The user then has to scroll to the right to see the \"Open Result in Viewer\" button, which is quite confusing.\r\n\r\n![image](https://user-images.githubusercontent.com/773597/112984872-ba647b00-915f-11eb-9837-560159d3a314.png)\n", "before_files": [{"content": "import logging\n\nfrom django.conf import settings\nfrom guardian.shortcuts import get_perms\nfrom guardian.utils import get_anonymous_user\n\nfrom grandchallenge.blogs.models import Post\nfrom grandchallenge.policies.models import Policy\n\nlogger = logging.getLogger(__name__)\n\n\ndef challenge(request):\n try:\n challenge = request.challenge\n\n if challenge is None:\n return {}\n\n except AttributeError:\n logger.warning(f\"Could not get challenge for request: {request}\")\n return {}\n\n try:\n user = request.user\n except AttributeError:\n user = get_anonymous_user()\n\n return {\n \"challenge\": challenge,\n \"challenge_perms\": get_perms(user, challenge),\n \"user_is_participant\": challenge.is_participant(user),\n \"pages\": challenge.page_set.all(),\n }\n\n\ndef deployment_info(*_, **__):\n return {\n \"google_analytics_id\": settings.GOOGLE_ANALYTICS_ID,\n \"geochart_api_key\": settings.GOOGLE_MAPS_API_KEY,\n \"COMMIT_ID\": settings.COMMIT_ID,\n }\n\n\ndef debug(*_, **__):\n return {\n \"DEBUG\": settings.DEBUG,\n \"ACTSTREAM_ENABLE\": settings.ACTSTREAM_ENABLE,\n }\n\n\ndef sentry_dsn(*_, **__):\n return {\n \"SENTRY_DSN\": settings.SENTRY_DSN,\n \"SENTRY_ENABLE_JS_REPORTING\": settings.SENTRY_ENABLE_JS_REPORTING,\n }\n\n\ndef footer_links(*_, **__):\n return {\n \"policy_pages\": Policy.objects.all(),\n \"blog_posts\": Post.objects.filter(published=True),\n }\n", "path": "app/grandchallenge/core/context_processors.py"}]}
1121
171
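The golden diff for this record extends the challenge context processor with a `pending_requests` queryset. Here is that pattern in isolation, assuming the `RegistrationRequest` model imported in the diff; this is a sketch of the added logic, not a drop-in replacement for the full file:

```python
from grandchallenge.participants.models import RegistrationRequest


def challenge(request):
    challenge = getattr(request, "challenge", None)
    if challenge is None:
        return {}
    return {
        "challenge": challenge,
        # Reverse foreign-key lookup from Challenge to its registration
        # requests, narrowed to the ones still awaiting review.
        "pending_requests": challenge.registrationrequest_set.filter(
            status=RegistrationRequest.PENDING
        ),
    }
```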
gh_patches_debug_25878
rasdani/github-patches
git_diff
alltheplaces__alltheplaces-7567
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> primanti_bros_us: switch to YextSpider as Where2GetIt seemingly no longer used The store locator at `https://restaurants.primantibros.com/search` now uses Yext APIs for querying store locations, not Where2GetIt. </issue> <code> [start of locations/spiders/primanti_bros_us.py] 1 from locations.categories import Extras, apply_yes_no 2 from locations.hours import DAYS_FULL, OpeningHours 3 from locations.storefinders.where2getit import Where2GetItSpider 4 5 6 class PrimantiBrosUSSpider(Where2GetItSpider): 7 name = "primanti_bros_us" 8 item_attributes = {"brand": "Primanti Bros", "brand_wikidata": "Q7243049"} 9 api_brand_name = "primantibros" 10 api_key = "7CDBB1A2-4AC6-11EB-932C-8917919C4603" 11 12 def parse_item(self, item, location): 13 item["ref"] = location["uid"] 14 item["street_address"] = ", ".join(filter(None, [location.get("address1"), location.get("address2")])) 15 item["website"] = location.get("menuurl") 16 item["opening_hours"] = OpeningHours() 17 hours_string = "" 18 for day_name in DAYS_FULL: 19 hours_string = f"{hours_string} {day_name}: " + location["{}hours".format(day_name.lower())] 20 item["opening_hours"].add_ranges_from_string(hours_string) 21 apply_yes_no(Extras.DRIVE_THROUGH, item, location["has_drive_through"] == "1", False) 22 yield item 23 [end of locations/spiders/primanti_bros_us.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/locations/spiders/primanti_bros_us.py b/locations/spiders/primanti_bros_us.py --- a/locations/spiders/primanti_bros_us.py +++ b/locations/spiders/primanti_bros_us.py @@ -1,22 +1,18 @@ -from locations.categories import Extras, apply_yes_no -from locations.hours import DAYS_FULL, OpeningHours -from locations.storefinders.where2getit import Where2GetItSpider +from locations.categories import Categories +from locations.storefinders.yext import YextSpider -class PrimantiBrosUSSpider(Where2GetItSpider): +class PrimantiBrosUSSpider(YextSpider): name = "primanti_bros_us" - item_attributes = {"brand": "Primanti Bros", "brand_wikidata": "Q7243049"} - api_brand_name = "primantibros" - api_key = "7CDBB1A2-4AC6-11EB-932C-8917919C4603" + item_attributes = {"brand": "Primanti Bros", "brand_wikidata": "Q7243049", "extras": Categories.RESTAURANT.value} + api_key = "7515c25fc685bbdd7c5975b6573c6912" + api_version = "20220511" def parse_item(self, item, location): - item["ref"] = location["uid"] - item["street_address"] = ", ".join(filter(None, [location.get("address1"), location.get("address2")])) - item["website"] = location.get("menuurl") - item["opening_hours"] = OpeningHours() - hours_string = "" - for day_name in DAYS_FULL: - hours_string = f"{hours_string} {day_name}: " + location["{}hours".format(day_name.lower())] - item["opening_hours"].add_ranges_from_string(hours_string) - apply_yes_no(Extras.DRIVE_THROUGH, item, location["has_drive_through"] == "1", False) + if "test-location" in item["ref"]: + return + item["ref"] = location.get("c_pagesURL") + item["name"] = location.get("c_searchName") + item["website"] = location.get("c_pagesURL") + item.pop("twitter", None) yield item
{"golden_diff": "diff --git a/locations/spiders/primanti_bros_us.py b/locations/spiders/primanti_bros_us.py\n--- a/locations/spiders/primanti_bros_us.py\n+++ b/locations/spiders/primanti_bros_us.py\n@@ -1,22 +1,18 @@\n-from locations.categories import Extras, apply_yes_no\n-from locations.hours import DAYS_FULL, OpeningHours\n-from locations.storefinders.where2getit import Where2GetItSpider\n+from locations.categories import Categories\n+from locations.storefinders.yext import YextSpider\n \n \n-class PrimantiBrosUSSpider(Where2GetItSpider):\n+class PrimantiBrosUSSpider(YextSpider):\n name = \"primanti_bros_us\"\n- item_attributes = {\"brand\": \"Primanti Bros\", \"brand_wikidata\": \"Q7243049\"}\n- api_brand_name = \"primantibros\"\n- api_key = \"7CDBB1A2-4AC6-11EB-932C-8917919C4603\"\n+ item_attributes = {\"brand\": \"Primanti Bros\", \"brand_wikidata\": \"Q7243049\", \"extras\": Categories.RESTAURANT.value}\n+ api_key = \"7515c25fc685bbdd7c5975b6573c6912\"\n+ api_version = \"20220511\"\n \n def parse_item(self, item, location):\n- item[\"ref\"] = location[\"uid\"]\n- item[\"street_address\"] = \", \".join(filter(None, [location.get(\"address1\"), location.get(\"address2\")]))\n- item[\"website\"] = location.get(\"menuurl\")\n- item[\"opening_hours\"] = OpeningHours()\n- hours_string = \"\"\n- for day_name in DAYS_FULL:\n- hours_string = f\"{hours_string} {day_name}: \" + location[\"{}hours\".format(day_name.lower())]\n- item[\"opening_hours\"].add_ranges_from_string(hours_string)\n- apply_yes_no(Extras.DRIVE_THROUGH, item, location[\"has_drive_through\"] == \"1\", False)\n+ if \"test-location\" in item[\"ref\"]:\n+ return\n+ item[\"ref\"] = location.get(\"c_pagesURL\")\n+ item[\"name\"] = location.get(\"c_searchName\")\n+ item[\"website\"] = location.get(\"c_pagesURL\")\n+ item.pop(\"twitter\", None)\n yield item\n", "issue": "primanti_bros_us: switch to YextSpider as Where2GetIt seemingly no longer used\nThe store locator at `https://restaurants.primantibros.com/search` now uses Yext APIs for querying store locations, not Where2GetIt.\n", "before_files": [{"content": "from locations.categories import Extras, apply_yes_no\nfrom locations.hours import DAYS_FULL, OpeningHours\nfrom locations.storefinders.where2getit import Where2GetItSpider\n\n\nclass PrimantiBrosUSSpider(Where2GetItSpider):\n name = \"primanti_bros_us\"\n item_attributes = {\"brand\": \"Primanti Bros\", \"brand_wikidata\": \"Q7243049\"}\n api_brand_name = \"primantibros\"\n api_key = \"7CDBB1A2-4AC6-11EB-932C-8917919C4603\"\n\n def parse_item(self, item, location):\n item[\"ref\"] = location[\"uid\"]\n item[\"street_address\"] = \", \".join(filter(None, [location.get(\"address1\"), location.get(\"address2\")]))\n item[\"website\"] = location.get(\"menuurl\")\n item[\"opening_hours\"] = OpeningHours()\n hours_string = \"\"\n for day_name in DAYS_FULL:\n hours_string = f\"{hours_string} {day_name}: \" + location[\"{}hours\".format(day_name.lower())]\n item[\"opening_hours\"].add_ranges_from_string(hours_string)\n apply_yes_no(Extras.DRIVE_THROUGH, item, location[\"has_drive_through\"] == \"1\", False)\n yield item\n", "path": "locations/spiders/primanti_bros_us.py"}]}
930
565
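For readability, here is the spider as it stands after this record's golden diff is applied — the full post-patch file rather than hunks. `YextSpider` and `Categories` are helpers internal to the alltheplaces repository, so the snippet only runs inside that codebase:

```python
from locations.categories import Categories
from locations.storefinders.yext import YextSpider


class PrimantiBrosUSSpider(YextSpider):
    name = "primanti_bros_us"
    item_attributes = {
        "brand": "Primanti Bros",
        "brand_wikidata": "Q7243049",
        "extras": Categories.RESTAURANT.value,
    }
    api_key = "7515c25fc685bbdd7c5975b6573c6912"
    api_version = "20220511"

    def parse_item(self, item, location):
        if "test-location" in item["ref"]:
            return  # skip Yext's test fixture locations
        item["ref"] = location.get("c_pagesURL")
        item["name"] = location.get("c_searchName")
        item["website"] = location.get("c_pagesURL")
        item.pop("twitter", None)  # brand-level handle, not per-store
        yield item
```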
gh_patches_debug_20381
rasdani/github-patches
git_diff
scoutapp__scout_apm_python-663
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Track when an exception occurs in a Celery task Similar to how we do this in other libraries `tracked_request.tag("error", "true")` </issue> <code> [start of src/scout_apm/celery.py] 1 # coding=utf-8 2 from __future__ import absolute_import, division, print_function, unicode_literals 3 4 import datetime as dt 5 6 from celery.signals import before_task_publish, task_postrun, task_prerun 7 8 import scout_apm.core 9 from scout_apm.compat import datetime_to_timestamp 10 from scout_apm.core.config import scout_config 11 from scout_apm.core.tracked_request import TrackedRequest 12 13 14 def before_task_publish_callback(headers=None, properties=None, **kwargs): 15 if "scout_task_start" not in headers: 16 headers["scout_task_start"] = datetime_to_timestamp(dt.datetime.utcnow()) 17 18 19 def task_prerun_callback(task=None, **kwargs): 20 tracked_request = TrackedRequest.instance() 21 tracked_request.is_real_request = True 22 23 start = getattr(task.request, "scout_task_start", None) 24 if start is not None: 25 now = datetime_to_timestamp(dt.datetime.utcnow()) 26 try: 27 queue_time = now - start 28 except TypeError: 29 pass 30 else: 31 tracked_request.tag("queue_time", queue_time) 32 33 task_id = getattr(task.request, "id", None) 34 if task_id: 35 tracked_request.tag("task_id", task_id) 36 parent_task_id = getattr(task.request, "parent_id", None) 37 if parent_task_id: 38 tracked_request.tag("parent_task_id", parent_task_id) 39 40 delivery_info = task.request.delivery_info 41 tracked_request.tag("is_eager", delivery_info.get("is_eager", False)) 42 tracked_request.tag("exchange", delivery_info.get("exchange", "unknown")) 43 tracked_request.tag("priority", delivery_info.get("priority", "unknown")) 44 tracked_request.tag("routing_key", delivery_info.get("routing_key", "unknown")) 45 tracked_request.tag("queue", delivery_info.get("queue", "unknown")) 46 47 tracked_request.start_span(operation=("Job/" + task.name)) 48 49 50 def task_postrun_callback(task=None, **kwargs): 51 tracked_request = TrackedRequest.instance() 52 tracked_request.stop_span() 53 54 55 def install(app=None): 56 if app is not None: 57 copy_configuration(app) 58 59 installed = scout_apm.core.install() 60 if not installed: 61 return 62 63 before_task_publish.connect(before_task_publish_callback) 64 task_prerun.connect(task_prerun_callback) 65 task_postrun.connect(task_postrun_callback) 66 67 68 def copy_configuration(app): 69 prefix = "scout_" 70 prefix_len = len(prefix) 71 72 to_set = {} 73 for key, value in app.conf.items(): 74 key_lower = key.lower() 75 if key_lower.startswith(prefix) and len(key_lower) > prefix_len: 76 scout_key = key_lower[prefix_len:] 77 to_set[scout_key] = value 78 79 scout_config.set(**to_set) 80 81 82 def uninstall(): 83 before_task_publish.disconnect(before_task_publish_callback) 84 task_prerun.disconnect(task_prerun_callback) 85 task_postrun.disconnect(task_postrun_callback) 86 [end of src/scout_apm/celery.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/scout_apm/celery.py b/src/scout_apm/celery.py --- a/src/scout_apm/celery.py +++ b/src/scout_apm/celery.py @@ -3,7 +3,7 @@ import datetime as dt -from celery.signals import before_task_publish, task_postrun, task_prerun +from celery.signals import before_task_publish, task_failure, task_postrun, task_prerun import scout_apm.core from scout_apm.compat import datetime_to_timestamp @@ -52,6 +52,11 @@ tracked_request.stop_span() +def task_failure_callback(task_id=None, **kwargs): + tracked_request = TrackedRequest.instance() + tracked_request.tag("error", "true") + + def install(app=None): if app is not None: copy_configuration(app) @@ -62,6 +67,7 @@ before_task_publish.connect(before_task_publish_callback) task_prerun.connect(task_prerun_callback) + task_failure.connect(task_failure_callback) task_postrun.connect(task_postrun_callback)
{"golden_diff": "diff --git a/src/scout_apm/celery.py b/src/scout_apm/celery.py\n--- a/src/scout_apm/celery.py\n+++ b/src/scout_apm/celery.py\n@@ -3,7 +3,7 @@\n \n import datetime as dt\n \n-from celery.signals import before_task_publish, task_postrun, task_prerun\n+from celery.signals import before_task_publish, task_failure, task_postrun, task_prerun\n \n import scout_apm.core\n from scout_apm.compat import datetime_to_timestamp\n@@ -52,6 +52,11 @@\n tracked_request.stop_span()\n \n \n+def task_failure_callback(task_id=None, **kwargs):\n+ tracked_request = TrackedRequest.instance()\n+ tracked_request.tag(\"error\", \"true\")\n+\n+\n def install(app=None):\n if app is not None:\n copy_configuration(app)\n@@ -62,6 +67,7 @@\n \n before_task_publish.connect(before_task_publish_callback)\n task_prerun.connect(task_prerun_callback)\n+ task_failure.connect(task_failure_callback)\n task_postrun.connect(task_postrun_callback)\n", "issue": "Track when an exception occurs in a Celery task\nSimilar to how we do this in other libraries\r\n`tracked_request.tag(\"error\", \"true\")`\r\n\n", "before_files": [{"content": "# coding=utf-8\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport datetime as dt\n\nfrom celery.signals import before_task_publish, task_postrun, task_prerun\n\nimport scout_apm.core\nfrom scout_apm.compat import datetime_to_timestamp\nfrom scout_apm.core.config import scout_config\nfrom scout_apm.core.tracked_request import TrackedRequest\n\n\ndef before_task_publish_callback(headers=None, properties=None, **kwargs):\n if \"scout_task_start\" not in headers:\n headers[\"scout_task_start\"] = datetime_to_timestamp(dt.datetime.utcnow())\n\n\ndef task_prerun_callback(task=None, **kwargs):\n tracked_request = TrackedRequest.instance()\n tracked_request.is_real_request = True\n\n start = getattr(task.request, \"scout_task_start\", None)\n if start is not None:\n now = datetime_to_timestamp(dt.datetime.utcnow())\n try:\n queue_time = now - start\n except TypeError:\n pass\n else:\n tracked_request.tag(\"queue_time\", queue_time)\n\n task_id = getattr(task.request, \"id\", None)\n if task_id:\n tracked_request.tag(\"task_id\", task_id)\n parent_task_id = getattr(task.request, \"parent_id\", None)\n if parent_task_id:\n tracked_request.tag(\"parent_task_id\", parent_task_id)\n\n delivery_info = task.request.delivery_info\n tracked_request.tag(\"is_eager\", delivery_info.get(\"is_eager\", False))\n tracked_request.tag(\"exchange\", delivery_info.get(\"exchange\", \"unknown\"))\n tracked_request.tag(\"priority\", delivery_info.get(\"priority\", \"unknown\"))\n tracked_request.tag(\"routing_key\", delivery_info.get(\"routing_key\", \"unknown\"))\n tracked_request.tag(\"queue\", delivery_info.get(\"queue\", \"unknown\"))\n\n tracked_request.start_span(operation=(\"Job/\" + task.name))\n\n\ndef task_postrun_callback(task=None, **kwargs):\n tracked_request = TrackedRequest.instance()\n tracked_request.stop_span()\n\n\ndef install(app=None):\n if app is not None:\n copy_configuration(app)\n\n installed = scout_apm.core.install()\n if not installed:\n return\n\n before_task_publish.connect(before_task_publish_callback)\n task_prerun.connect(task_prerun_callback)\n task_postrun.connect(task_postrun_callback)\n\n\ndef copy_configuration(app):\n prefix = \"scout_\"\n prefix_len = len(prefix)\n\n to_set = {}\n for key, value in app.conf.items():\n key_lower = key.lower()\n if key_lower.startswith(prefix) and len(key_lower) > prefix_len:\n scout_key = 
key_lower[prefix_len:]\n to_set[scout_key] = value\n\n scout_config.set(**to_set)\n\n\ndef uninstall():\n before_task_publish.disconnect(before_task_publish_callback)\n task_prerun.disconnect(task_prerun_callback)\n task_postrun.disconnect(task_postrun_callback)\n", "path": "src/scout_apm/celery.py"}]}
1373
249
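The fix for this record is small enough to show end-to-end: register one extra Celery signal handler that tags the tracked request when a task raises. A standalone sketch follows; `TrackedRequest` is scout_apm's own request-local object, exactly as imported in the diff:

```python
from celery.signals import task_failure

from scout_apm.core.tracked_request import TrackedRequest


def task_failure_callback(task_id=None, **kwargs):
    # Celery also passes exception/einfo details in **kwargs; for
    # Scout's purposes it is enough to mark the request as errored.
    TrackedRequest.instance().tag("error", "true")


task_failure.connect(task_failure_callback)
```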
gh_patches_debug_5122
rasdani/github-patches
git_diff
liqd__a4-meinberlin-3044
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [dev/stage] auto-fill-in overwrites my bplan-name **URL:** https://meinberlin-stage.liqd.net/dashboard/projects/caro-testing-new-bplan-mail-2/bplan/ **user:** initiator addin bplan **expected behaviour:** I can use autofill to add my mail-address **behaviour:** if I do so, the title of bplan is overwritten by my name but as it is far up the form I don't notice it. **important screensize:** **device & browser:** mac, chrome **Comment/Question:** is that even something we can influence? Screenshot? <img width="673" alt="Bildschirmfoto 2020-07-10 um 11 02 30" src="https://user-images.githubusercontent.com/35491681/87137579-6b0eaf80-c29d-11ea-928f-c888dc8eb430.png"> <img width="673" alt="Bildschirmfoto 2020-07-10 um 11 06 10" src="https://user-images.githubusercontent.com/35491681/87137586-6cd87300-c29d-11ea-965d-74b4ecba8bc8.png"> </issue> <code> [start of meinberlin/apps/bplan/forms.py] 1 from django import forms 2 3 from meinberlin.apps.extprojects.forms import ExternalProjectCreateForm 4 from meinberlin.apps.extprojects.forms import ExternalProjectForm 5 6 from . import models 7 8 9 class StatementForm(forms.ModelForm): 10 class Meta: 11 model = models.Statement 12 fields = ['name', 'email', 'statement', 13 'street_number', 'postal_code_city'] 14 15 16 class BplanProjectCreateForm(ExternalProjectCreateForm): 17 18 class Meta: 19 model = models.Bplan 20 fields = ['name', 'description', 'tile_image', 'tile_image_copyright'] 21 22 23 class BplanProjectForm(ExternalProjectForm): 24 25 class Meta: 26 model = models.Bplan 27 fields = ['name', 'identifier', 'url', 'description', 'tile_image', 28 'tile_image_copyright', 'is_archived', 'office_worker_email', 29 'start_date', 'end_date'] 30 required_for_project_publish = ['name', 'url', 'description', 31 'office_worker_email'] 32 [end of meinberlin/apps/bplan/forms.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/meinberlin/apps/bplan/forms.py b/meinberlin/apps/bplan/forms.py --- a/meinberlin/apps/bplan/forms.py +++ b/meinberlin/apps/bplan/forms.py @@ -29,3 +29,9 @@ 'start_date', 'end_date'] required_for_project_publish = ['name', 'url', 'description', 'office_worker_email'] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fields['name'].widget.attrs.update({ + 'autocomplete': 'off', 'autofill': 'off' + })
{"golden_diff": "diff --git a/meinberlin/apps/bplan/forms.py b/meinberlin/apps/bplan/forms.py\n--- a/meinberlin/apps/bplan/forms.py\n+++ b/meinberlin/apps/bplan/forms.py\n@@ -29,3 +29,9 @@\n 'start_date', 'end_date']\n required_for_project_publish = ['name', 'url', 'description',\n 'office_worker_email']\n+\n+ def __init__(self, *args, **kwargs):\n+ super().__init__(*args, **kwargs)\n+ self.fields['name'].widget.attrs.update({\n+ 'autocomplete': 'off', 'autofill': 'off'\n+ })\n", "issue": "[dev/stage] auto-fill-in overwrites my bplan-name\n**URL:** https://meinberlin-stage.liqd.net/dashboard/projects/caro-testing-new-bplan-mail-2/bplan/\r\n**user:** initiator addin bplan\r\n**expected behaviour:** I can use autofill to add my mail-address\r\n**behaviour:** if I do so, the title of bplan is overwritten by my name but as it is far up the form I don't notice it.\r\n**important screensize:**\r\n**device & browser:** mac, chrome\r\n**Comment/Question:** is that even something we can influence?\r\n\r\nScreenshot?\r\n<img width=\"673\" alt=\"Bildschirmfoto 2020-07-10 um 11 02 30\" src=\"https://user-images.githubusercontent.com/35491681/87137579-6b0eaf80-c29d-11ea-928f-c888dc8eb430.png\">\r\n<img width=\"673\" alt=\"Bildschirmfoto 2020-07-10 um 11 06 10\" src=\"https://user-images.githubusercontent.com/35491681/87137586-6cd87300-c29d-11ea-965d-74b4ecba8bc8.png\">\r\n\r\n\n", "before_files": [{"content": "from django import forms\n\nfrom meinberlin.apps.extprojects.forms import ExternalProjectCreateForm\nfrom meinberlin.apps.extprojects.forms import ExternalProjectForm\n\nfrom . import models\n\n\nclass StatementForm(forms.ModelForm):\n class Meta:\n model = models.Statement\n fields = ['name', 'email', 'statement',\n 'street_number', 'postal_code_city']\n\n\nclass BplanProjectCreateForm(ExternalProjectCreateForm):\n\n class Meta:\n model = models.Bplan\n fields = ['name', 'description', 'tile_image', 'tile_image_copyright']\n\n\nclass BplanProjectForm(ExternalProjectForm):\n\n class Meta:\n model = models.Bplan\n fields = ['name', 'identifier', 'url', 'description', 'tile_image',\n 'tile_image_copyright', 'is_archived', 'office_worker_email',\n 'start_date', 'end_date']\n required_for_project_publish = ['name', 'url', 'description',\n 'office_worker_email']\n", "path": "meinberlin/apps/bplan/forms.py"}]}
1133
147
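The one-form fix above generalizes to any Django form whose text field gets clobbered by browser autofill: turn autofill off in `__init__` via widget attrs. A minimal stand-in form (not the project's real `BplanProjectForm`):

```python
from django import forms


class BplanNameForm(forms.Form):
    name = forms.CharField()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # 'autocomplete' is the standard HTML attribute; the patch also
        # sets a non-standard 'autofill' attribute for good measure.
        self.fields["name"].widget.attrs.update(
            {"autocomplete": "off", "autofill": "off"}
        )
```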
gh_patches_debug_5041
rasdani/github-patches
git_diff
dask__dask-256
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> dot_graph does not work in stable version I try to generate visual graphs as [described in documentation](http://dask.pydata.org/en/latest/inspect.html), but get: `'module' object has no attribute 'to_pydot'` The graphviz is installed with homebrew. Dask is installed from conda (latest stable release): ``` In [15]: dask.__version__ Out[15]: '0.5.0' ``` The code and traceback are below (I had to replace `blockshape` with `chunks`, otherwise it did not create task graph): ``` python In [1]: import dask.array as da from dask.dot import dot_graph In [2]: x = da.ones((5, 15), chunks=(5, 5)) In [5]: d = (x + 1).dask In [6]: dot_graph(d) --------------------------------------------------------------------------- AttributeError Traceback (most recent call last) <ipython-input-6-c797e633866d> in <module>() ----> 1 dot_graph(d) /Users/koldunov/miniconda/lib/python2.7/site-packages/dask/dot.pyc in dot_graph(d, filename, **kwargs) 73 def dot_graph(d, filename='mydask', **kwargs): 74 dg = to_networkx(d, **kwargs) ---> 75 write_networkx_to_dot(dg, filename=filename) 76 77 /Users/koldunov/miniconda/lib/python2.7/site-packages/dask/dot.pyc in write_networkx_to_dot(dg, filename) 61 def write_networkx_to_dot(dg, filename='mydask'): 62 import os ---> 63 p = nx.to_pydot(dg) 64 p.set_rankdir('BT') 65 with open(filename + '.dot', 'w') as f: AttributeError: 'module' object has no attribute 'to_pydot' ``` </issue> <code> [start of dask/dot.py] 1 from __future__ import absolute_import, division, print_function 2 3 import networkx as nx 4 from dask.core import istask, get_dependencies 5 6 7 def make_hashable(x): 8 try: 9 hash(x) 10 return x 11 except TypeError: 12 return hash(str(x)) 13 14 15 def lower(func): 16 while hasattr(func, 'func'): 17 func = func.func 18 return func 19 20 def name(func): 21 try: 22 return lower(func).__name__ 23 except AttributeError: 24 return 'func' 25 26 27 def to_networkx(d, data_attributes=None, function_attributes=None): 28 if data_attributes is None: 29 data_attributes = dict() 30 if function_attributes is None: 31 function_attributes = dict() 32 33 g = nx.DiGraph() 34 35 for k, v in sorted(d.items(), key=lambda x: x[0]): 36 g.add_node(k, shape='box', **data_attributes.get(k, dict())) 37 if istask(v): 38 func, args = v[0], v[1:] 39 func_node = make_hashable((v, 'function')) 40 g.add_node(func_node, 41 shape='circle', 42 label=name(func), 43 **function_attributes.get(k, dict())) 44 g.add_edge(func_node, k) 45 for dep in sorted(get_dependencies(d, k)): 46 arg2 = make_hashable(dep) 47 g.add_node(arg2, 48 label=str(dep), 49 shape='box', 50 **data_attributes.get(dep, dict())) 51 g.add_edge(arg2, func_node) 52 else: 53 if v not in d: 54 g.add_node(k, label='%s=%s' % (k, v), **data_attributes.get(k, dict())) 55 else: # alias situation 56 g.add_edge(v, k) 57 58 return g 59 60 61 def write_networkx_to_dot(dg, filename='mydask'): 62 import os 63 p = nx.to_pydot(dg) 64 p.set_rankdir('BT') 65 with open(filename + '.dot', 'w') as f: 66 f.write(p.to_string()) 67 68 os.system('dot -Tpdf %s.dot -o %s.pdf' % (filename, filename)) 69 os.system('dot -Tpng %s.dot -o %s.png' % (filename, filename)) 70 print("Writing graph to %s.pdf" % filename) 71 72 73 def dot_graph(d, filename='mydask', **kwargs): 74 dg = to_networkx(d, **kwargs) 75 write_networkx_to_dot(dg, filename=filename) 76 77 78 if __name__ == '__main__': 79 def add(x, y): 80 return x + y 81 def inc(x): 82 return x + 1 83 84 dsk = {'x': 
1, 'y': (inc, 'x'), 85 'a': 2, 'b': (inc, 'a'), 86 'z': (add, 'y', 'b')} 87 88 dot_graph(dsk) 89 [end of dask/dot.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/dask/dot.py b/dask/dot.py --- a/dask/dot.py +++ b/dask/dot.py @@ -60,7 +60,11 @@ def write_networkx_to_dot(dg, filename='mydask'): import os - p = nx.to_pydot(dg) + try: + p = nx.to_pydot(dg) + except AttributeError: + raise ImportError("Can not find pydot module. Please install.\n" + " pip install pydot") p.set_rankdir('BT') with open(filename + '.dot', 'w') as f: f.write(p.to_string())
{"golden_diff": "diff --git a/dask/dot.py b/dask/dot.py\n--- a/dask/dot.py\n+++ b/dask/dot.py\n@@ -60,7 +60,11 @@\n \n def write_networkx_to_dot(dg, filename='mydask'):\n import os\n- p = nx.to_pydot(dg)\n+ try:\n+ p = nx.to_pydot(dg)\n+ except AttributeError:\n+ raise ImportError(\"Can not find pydot module. Please install.\\n\"\n+ \" pip install pydot\")\n p.set_rankdir('BT')\n with open(filename + '.dot', 'w') as f:\n f.write(p.to_string())\n", "issue": "dot_graph does not work in stable version\nI try to generate visual graphs as [described in documentation](http://dask.pydata.org/en/latest/inspect.html), but get:\n`'module' object has no attribute 'to_pydot'`\n\nThe graphviz is installed with homebrew. Dask is installed from conda (latest stable release):\n\n```\nIn [15]: dask.__version__\n\nOut[15]: '0.5.0'\n```\n\nThe code and traceback are below (I had to replace `blockshape` with `chunks`, otherwise it did not create task graph):\n\n``` python\nIn [1]: \nimport dask.array as da\nfrom dask.dot import dot_graph\nIn [2]:\n\nx = da.ones((5, 15), chunks=(5, 5))\nIn [5]:\n\nd = (x + 1).dask\nIn [6]:\n\ndot_graph(d)\n---------------------------------------------------------------------------\nAttributeError Traceback (most recent call last)\n<ipython-input-6-c797e633866d> in <module>()\n----> 1 dot_graph(d)\n\n/Users/koldunov/miniconda/lib/python2.7/site-packages/dask/dot.pyc in dot_graph(d, filename, **kwargs)\n 73 def dot_graph(d, filename='mydask', **kwargs):\n 74 dg = to_networkx(d, **kwargs)\n---> 75 write_networkx_to_dot(dg, filename=filename)\n 76 \n 77 \n\n/Users/koldunov/miniconda/lib/python2.7/site-packages/dask/dot.pyc in write_networkx_to_dot(dg, filename)\n 61 def write_networkx_to_dot(dg, filename='mydask'):\n 62 import os\n---> 63 p = nx.to_pydot(dg)\n 64 p.set_rankdir('BT')\n 65 with open(filename + '.dot', 'w') as f:\n\nAttributeError: 'module' object has no attribute 'to_pydot'\n```\n\n", "before_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nimport networkx as nx\nfrom dask.core import istask, get_dependencies\n\n\ndef make_hashable(x):\n try:\n hash(x)\n return x\n except TypeError:\n return hash(str(x))\n\n\ndef lower(func):\n while hasattr(func, 'func'):\n func = func.func\n return func\n\ndef name(func):\n try:\n return lower(func).__name__\n except AttributeError:\n return 'func'\n\n\ndef to_networkx(d, data_attributes=None, function_attributes=None):\n if data_attributes is None:\n data_attributes = dict()\n if function_attributes is None:\n function_attributes = dict()\n\n g = nx.DiGraph()\n\n for k, v in sorted(d.items(), key=lambda x: x[0]):\n g.add_node(k, shape='box', **data_attributes.get(k, dict()))\n if istask(v):\n func, args = v[0], v[1:]\n func_node = make_hashable((v, 'function'))\n g.add_node(func_node,\n shape='circle',\n label=name(func),\n **function_attributes.get(k, dict()))\n g.add_edge(func_node, k)\n for dep in sorted(get_dependencies(d, k)):\n arg2 = make_hashable(dep)\n g.add_node(arg2,\n label=str(dep),\n shape='box',\n **data_attributes.get(dep, dict()))\n g.add_edge(arg2, func_node)\n else:\n if v not in d:\n g.add_node(k, label='%s=%s' % (k, v), **data_attributes.get(k, dict()))\n else: # alias situation\n g.add_edge(v, k)\n\n return g\n\n\ndef write_networkx_to_dot(dg, filename='mydask'):\n import os\n p = nx.to_pydot(dg)\n p.set_rankdir('BT')\n with open(filename + '.dot', 'w') as f:\n f.write(p.to_string())\n\n os.system('dot -Tpdf %s.dot -o %s.pdf' % (filename, filename))\n os.system('dot 
-Tpng %s.dot -o %s.png' % (filename, filename))\n print(\"Writing graph to %s.pdf\" % filename)\n\n\ndef dot_graph(d, filename='mydask', **kwargs):\n dg = to_networkx(d, **kwargs)\n write_networkx_to_dot(dg, filename=filename)\n\n\nif __name__ == '__main__':\n def add(x, y):\n return x + y\n def inc(x):\n return x + 1\n\n dsk = {'x': 1, 'y': (inc, 'x'),\n 'a': 2, 'b': (inc, 'a'),\n 'z': (add, 'y', 'b')}\n\n dot_graph(dsk)\n", "path": "dask/dot.py"}]}
1782
152
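The dask patch converts a confusing `AttributeError` (newer networkx releases dropped the top-level `nx.to_pydot`) into an `ImportError` with an install hint. The guard in isolation — note that on current networkx the pydot helpers live in `networkx.drawing.nx_pydot`, so this sketch reproduces the historical fix rather than today's API:

```python
import networkx as nx


def graph_to_pydot(dg):
    try:
        return nx.to_pydot(dg)  # absent in newer networkx releases
    except AttributeError:
        raise ImportError(
            "Can not find pydot module. Please install.\n"
            "    pip install pydot"
        )
```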
gh_patches_debug_20616
rasdani/github-patches
git_diff
rasterio__rasterio-1259
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> examples/total.py won't run in Python3 The line `total /= 3` should read instead, `total = total / 3`. </issue> <code> [start of examples/sieve.py] 1 #!/usr/bin/env python 2 # 3 # sieve: demonstrate sieving and polygonizing of raster features. 4 5 import subprocess 6 7 import numpy as np 8 import rasterio 9 from rasterio.features import sieve, shapes 10 11 12 # Register GDAL and OGR drivers. 13 with rasterio.Env(): 14 15 # Read a raster to be sieved. 16 with rasterio.open('tests/data/shade.tif') as src: 17 shade = src.read(1) 18 19 # Print the number of shapes in the source raster. 20 print("Slope shapes: %d" % len(list(shapes(shade)))) 21 22 # Sieve out features 13 pixels or smaller. 23 sieved = sieve(shade, 13, out=np.zeros(src.shape, src.dtypes[0])) 24 25 # Print the number of shapes in the sieved raster. 26 print("Sieved (13) shapes: %d" % len(list(shapes(sieved)))) 27 28 # Write out the sieved raster. 29 kwargs = src.meta 30 kwargs['transform'] = kwargs.pop('affine') 31 with rasterio.open('example-sieved.tif', 'w', **kwargs) as dst: 32 dst.write(sieved, indexes=1) 33 34 # Dump out gdalinfo's report card and open (or "eog") the TIFF. 35 print(subprocess.check_output( 36 ['gdalinfo', '-stats', 'example-sieved.tif'])) 37 subprocess.call(['open', 'example-sieved.tif']) 38 [end of examples/sieve.py] [start of examples/total.py] 1 import numpy as np 2 import rasterio 3 import subprocess 4 5 with rasterio.Env(CPL_DEBUG=True): 6 7 # Read raster bands directly to Numpy arrays. 8 with rasterio.open('tests/data/RGB.byte.tif') as src: 9 r, g, b = src.read() 10 11 # Combine arrays using the 'iadd' ufunc. Expecting that the sum will 12 # exceed the 8-bit integer range, initialize it as 16-bit. Adding other 13 # arrays to it in-place converts those arrays up and preserves the type 14 # of the total array. 15 total = np.zeros(r.shape, dtype=rasterio.uint16) 16 for band in (r, g, b): 17 total += band 18 total /= 3 19 20 # Write the product as a raster band to a new 8-bit file. For keyword 21 # arguments, we start with the meta attributes of the source file, but 22 # then change the band count to 1, set the dtype to uint8, and specify 23 # LZW compression. 24 kwargs = src.meta 25 kwargs.update( 26 dtype=rasterio.uint8, 27 count=1, 28 compress='lzw') 29 30 with rasterio.open('example-total.tif', 'w', **kwargs) as dst: 31 dst.write(total.astype(rasterio.uint8), indexes=1) 32 33 # Dump out gdalinfo's report card and open the image. 34 info = subprocess.check_output( 35 ['gdalinfo', '-stats', 'example-total.tif']) 36 print(info) 37 subprocess.call(['open', 'example-total.tif']) 38 [end of examples/total.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/examples/sieve.py b/examples/sieve.py --- a/examples/sieve.py +++ b/examples/sieve.py @@ -27,7 +27,7 @@ # Write out the sieved raster. kwargs = src.meta - kwargs['transform'] = kwargs.pop('affine') + kwargs['transform'] = rasterio.transform.guard_transform(kwargs['transform']) with rasterio.open('example-sieved.tif', 'w', **kwargs) as dst: dst.write(sieved, indexes=1) diff --git a/examples/total.py b/examples/total.py --- a/examples/total.py +++ b/examples/total.py @@ -1,3 +1,4 @@ +from __future__ import division import numpy as np import rasterio import subprocess @@ -15,7 +16,7 @@ total = np.zeros(r.shape, dtype=rasterio.uint16) for band in (r, g, b): total += band - total /= 3 + total = total // 3 # Write the product as a raster band to a new 8-bit file. For keyword # arguments, we start with the meta attributes of the source file, but
{"golden_diff": "diff --git a/examples/sieve.py b/examples/sieve.py\n--- a/examples/sieve.py\n+++ b/examples/sieve.py\n@@ -27,7 +27,7 @@\n \n # Write out the sieved raster.\n kwargs = src.meta\n- kwargs['transform'] = kwargs.pop('affine')\n+ kwargs['transform'] = rasterio.transform.guard_transform(kwargs['transform'])\n with rasterio.open('example-sieved.tif', 'w', **kwargs) as dst:\n dst.write(sieved, indexes=1)\n \ndiff --git a/examples/total.py b/examples/total.py\n--- a/examples/total.py\n+++ b/examples/total.py\n@@ -1,3 +1,4 @@\n+from __future__ import division\n import numpy as np\n import rasterio\n import subprocess\n@@ -15,7 +16,7 @@\n total = np.zeros(r.shape, dtype=rasterio.uint16)\n for band in (r, g, b):\n total += band\n- total /= 3\n+ total = total // 3\n \n # Write the product as a raster band to a new 8-bit file. For keyword\n # arguments, we start with the meta attributes of the source file, but\n", "issue": "examples/total.py won't run in Python3\nThe line `total /= 3` should read instead, `total = total / 3`.\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n#\n# sieve: demonstrate sieving and polygonizing of raster features.\n\nimport subprocess\n\nimport numpy as np\nimport rasterio\nfrom rasterio.features import sieve, shapes\n\n\n# Register GDAL and OGR drivers.\nwith rasterio.Env():\n\n # Read a raster to be sieved.\n with rasterio.open('tests/data/shade.tif') as src:\n shade = src.read(1)\n\n # Print the number of shapes in the source raster.\n print(\"Slope shapes: %d\" % len(list(shapes(shade))))\n\n # Sieve out features 13 pixels or smaller.\n sieved = sieve(shade, 13, out=np.zeros(src.shape, src.dtypes[0]))\n\n # Print the number of shapes in the sieved raster.\n print(\"Sieved (13) shapes: %d\" % len(list(shapes(sieved))))\n\n # Write out the sieved raster.\n kwargs = src.meta\n kwargs['transform'] = kwargs.pop('affine')\n with rasterio.open('example-sieved.tif', 'w', **kwargs) as dst:\n dst.write(sieved, indexes=1)\n\n# Dump out gdalinfo's report card and open (or \"eog\") the TIFF.\nprint(subprocess.check_output(\n ['gdalinfo', '-stats', 'example-sieved.tif']))\nsubprocess.call(['open', 'example-sieved.tif'])\n", "path": "examples/sieve.py"}, {"content": "import numpy as np\nimport rasterio\nimport subprocess\n\nwith rasterio.Env(CPL_DEBUG=True):\n\n # Read raster bands directly to Numpy arrays.\n with rasterio.open('tests/data/RGB.byte.tif') as src:\n r, g, b = src.read()\n\n # Combine arrays using the 'iadd' ufunc. Expecting that the sum will\n # exceed the 8-bit integer range, initialize it as 16-bit. Adding other\n # arrays to it in-place converts those arrays up and preserves the type\n # of the total array.\n total = np.zeros(r.shape, dtype=rasterio.uint16)\n for band in (r, g, b):\n total += band\n total /= 3\n\n # Write the product as a raster band to a new 8-bit file. For keyword\n # arguments, we start with the meta attributes of the source file, but\n # then change the band count to 1, set the dtype to uint8, and specify\n # LZW compression.\n kwargs = src.meta\n kwargs.update(\n dtype=rasterio.uint8,\n count=1,\n compress='lzw')\n\n with rasterio.open('example-total.tif', 'w', **kwargs) as dst:\n dst.write(total.astype(rasterio.uint8), indexes=1)\n\n# Dump out gdalinfo's report card and open the image.\ninfo = subprocess.check_output(\n ['gdalinfo', '-stats', 'example-total.tif'])\nprint(info)\nsubprocess.call(['open', 'example-total.tif'])\n", "path": "examples/total.py"}]}
1,366
272
gh_patches_debug_58650
rasdani/github-patches
git_diff
googleapis__google-api-python-client-295
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> BatchError is unprintable using default constructor (one string) This one should be pretty simple, I hope. Here's the constructor signature: `def __init__(self, reason, resp=None, content=None):`, which doesn't require `resp` to be defined, and I can see it is not defined most of the time, for example, in googleapiclient/http.py. Then, given the representation method: ``` def __repr__(self): return '<BatchError %s "%s">' % (self.resp.status, self.reason) ``` Which is also the string method: ``` __str__ = __repr__ ``` This results in unprintable exceptions where `resp` is undefined, which is not very helpful when attempting to understand the error (e.g. #164). </issue> <code> [start of googleapiclient/errors.py] 1 # Copyright 2014 Google Inc. All Rights Reserved. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 """Errors for the library. 16 17 All exceptions defined by the library 18 should be defined in this file. 19 """ 20 from __future__ import absolute_import 21 22 __author__ = '[email protected] (Joe Gregorio)' 23 24 import json 25 26 # Oauth2client < 3 has the positional helper in 'util', >= 3 has it 27 # in '_helpers'. 
28 try: 29 from oauth2client import util 30 except ImportError: 31 from oauth2client import _helpers as util 32 33 34 class Error(Exception): 35 """Base error for this module.""" 36 pass 37 38 39 class HttpError(Error): 40 """HTTP data was invalid or unexpected.""" 41 42 @util.positional(3) 43 def __init__(self, resp, content, uri=None): 44 self.resp = resp 45 if not isinstance(content, bytes): 46 raise TypeError("HTTP content should be bytes") 47 self.content = content 48 self.uri = uri 49 50 def _get_reason(self): 51 """Calculate the reason for the error from the response content.""" 52 reason = self.resp.reason 53 try: 54 data = json.loads(self.content.decode('utf-8')) 55 if isinstance(data, dict): 56 reason = data['error']['message'] 57 elif isinstance(data, list) and len(data) > 0: 58 first_error = data[0] 59 reason = first_error['error']['message'] 60 except (ValueError, KeyError, TypeError): 61 pass 62 if reason is None: 63 reason = '' 64 return reason 65 66 def __repr__(self): 67 if self.uri: 68 return '<HttpError %s when requesting %s returned "%s">' % ( 69 self.resp.status, self.uri, self._get_reason().strip()) 70 else: 71 return '<HttpError %s "%s">' % (self.resp.status, self._get_reason()) 72 73 __str__ = __repr__ 74 75 76 class InvalidJsonError(Error): 77 """The JSON returned could not be parsed.""" 78 pass 79 80 81 class UnknownFileType(Error): 82 """File type unknown or unexpected.""" 83 pass 84 85 86 class UnknownLinkType(Error): 87 """Link type unknown or unexpected.""" 88 pass 89 90 91 class UnknownApiNameOrVersion(Error): 92 """No API with that name and version exists.""" 93 pass 94 95 96 class UnacceptableMimeTypeError(Error): 97 """That is an unacceptable mimetype for this operation.""" 98 pass 99 100 101 class MediaUploadSizeError(Error): 102 """Media is larger than the method can accept.""" 103 pass 104 105 106 class ResumableUploadError(HttpError): 107 """Error occured during resumable upload.""" 108 pass 109 110 111 class InvalidChunkSizeError(Error): 112 """The given chunksize is not valid.""" 113 pass 114 115 class InvalidNotificationError(Error): 116 """The channel Notification is invalid.""" 117 pass 118 119 class BatchError(HttpError): 120 """Error occured during batch operations.""" 121 122 @util.positional(2) 123 def __init__(self, reason, resp=None, content=None): 124 self.resp = resp 125 self.content = content 126 self.reason = reason 127 128 def __repr__(self): 129 return '<BatchError %s "%s">' % (self.resp.status, self.reason) 130 131 __str__ = __repr__ 132 133 134 class UnexpectedMethodError(Error): 135 """Exception raised by RequestMockBuilder on unexpected calls.""" 136 137 @util.positional(1) 138 def __init__(self, methodId=None): 139 """Constructor for an UnexpectedMethodError.""" 140 super(UnexpectedMethodError, self).__init__( 141 'Received unexpected call %s' % methodId) 142 143 144 class UnexpectedBodyError(Error): 145 """Exception raised by RequestMockBuilder on unexpected bodies.""" 146 147 def __init__(self, expected, provided): 148 """Constructor for an UnexpectedMethodError.""" 149 super(UnexpectedBodyError, self).__init__( 150 'Expected: [%s] - Provided: [%s]' % (expected, provided)) 151 [end of googleapiclient/errors.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/googleapiclient/errors.py b/googleapiclient/errors.py --- a/googleapiclient/errors.py +++ b/googleapiclient/errors.py @@ -126,6 +126,9 @@ self.reason = reason def __repr__(self): + if getattr(self.resp, 'status', None) is None: + return '<BatchError "%s">' % (self.reason) + else: return '<BatchError %s "%s">' % (self.resp.status, self.reason) __str__ = __repr__
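A note on the failure mode this patch targets: the original `__repr__` dereferences `self.resp.status` even though the constructor accepts `resp=None`, so merely printing the exception raises a second exception. A self-contained sketch of the before/after behaviour (classes condensed from the `errors.py` shown above, not imported from the real package):

```python
class BatchErrorBefore(Exception):
    """Pre-patch behaviour: __repr__ assumes self.resp is always set."""
    def __init__(self, reason, resp=None, content=None):
        self.resp = resp
        self.content = content
        self.reason = reason

    def __repr__(self):
        return '<BatchError %s "%s">' % (self.resp.status, self.reason)

    __str__ = __repr__


class BatchErrorAfter(BatchErrorBefore):
    """Post-patch behaviour: fall back to a resp-less message."""
    def __repr__(self):
        if getattr(self.resp, 'status', None) is None:
            return '<BatchError "%s">' % (self.reason,)
        return '<BatchError %s "%s">' % (self.resp.status, self.reason)

    __str__ = __repr__


try:
    str(BatchErrorBefore('Request failed'))   # resp defaulted to None
except AttributeError as exc:
    print('unprintable:', exc)                # 'NoneType' object has no attribute 'status'

print(BatchErrorAfter('Request failed'))      # <BatchError "Request failed">
```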
{"golden_diff": "diff --git a/googleapiclient/errors.py b/googleapiclient/errors.py\n--- a/googleapiclient/errors.py\n+++ b/googleapiclient/errors.py\n@@ -126,6 +126,9 @@\n self.reason = reason\n \n def __repr__(self):\n+ if getattr(self.resp, 'status', None) is None:\n+ return '<BatchError \"%s\">' % (self.reason)\n+ else:\n return '<BatchError %s \"%s\">' % (self.resp.status, self.reason)\n \n __str__ = __repr__\n", "issue": "BatchError is unprintable using default constructor (one string)\nThis one should be pretty simple, I hope.\n\nHere's the constructor signature: `def __init__(self, reason, resp=None, content=None):`, which doesn't require `resp` to be defined, and I can see it is not defined most of the time, for example, in googleapiclient/http.py.\n\nThen, given the representation method:\n\n```\ndef __repr__(self):\n return '<BatchError %s \"%s\">' % (self.resp.status, self.reason)\n```\n\nWhich is also the string method:\n\n```\n__str__ = __repr__\n```\n\nThis results in unprintable exceptions where `resp` is undefined, which is not very helpful when attempting to understand the error (e.g. #164).\n\n", "before_files": [{"content": "# Copyright 2014 Google Inc. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Errors for the library.\n\nAll exceptions defined by the library\nshould be defined in this file.\n\"\"\"\nfrom __future__ import absolute_import\n\n__author__ = '[email protected] (Joe Gregorio)'\n\nimport json\n\n# Oauth2client < 3 has the positional helper in 'util', >= 3 has it\n# in '_helpers'.\ntry:\n from oauth2client import util\nexcept ImportError:\n from oauth2client import _helpers as util\n\n\nclass Error(Exception):\n \"\"\"Base error for this module.\"\"\"\n pass\n\n\nclass HttpError(Error):\n \"\"\"HTTP data was invalid or unexpected.\"\"\"\n\n @util.positional(3)\n def __init__(self, resp, content, uri=None):\n self.resp = resp\n if not isinstance(content, bytes):\n raise TypeError(\"HTTP content should be bytes\")\n self.content = content\n self.uri = uri\n\n def _get_reason(self):\n \"\"\"Calculate the reason for the error from the response content.\"\"\"\n reason = self.resp.reason\n try:\n data = json.loads(self.content.decode('utf-8'))\n if isinstance(data, dict):\n reason = data['error']['message']\n elif isinstance(data, list) and len(data) > 0:\n first_error = data[0]\n reason = first_error['error']['message']\n except (ValueError, KeyError, TypeError):\n pass\n if reason is None:\n reason = ''\n return reason\n\n def __repr__(self):\n if self.uri:\n return '<HttpError %s when requesting %s returned \"%s\">' % (\n self.resp.status, self.uri, self._get_reason().strip())\n else:\n return '<HttpError %s \"%s\">' % (self.resp.status, self._get_reason())\n\n __str__ = __repr__\n\n\nclass InvalidJsonError(Error):\n \"\"\"The JSON returned could not be parsed.\"\"\"\n pass\n\n\nclass UnknownFileType(Error):\n \"\"\"File type unknown or unexpected.\"\"\"\n pass\n\n\nclass UnknownLinkType(Error):\n \"\"\"Link type unknown or 
unexpected.\"\"\"\n pass\n\n\nclass UnknownApiNameOrVersion(Error):\n \"\"\"No API with that name and version exists.\"\"\"\n pass\n\n\nclass UnacceptableMimeTypeError(Error):\n \"\"\"That is an unacceptable mimetype for this operation.\"\"\"\n pass\n\n\nclass MediaUploadSizeError(Error):\n \"\"\"Media is larger than the method can accept.\"\"\"\n pass\n\n\nclass ResumableUploadError(HttpError):\n \"\"\"Error occured during resumable upload.\"\"\"\n pass\n\n\nclass InvalidChunkSizeError(Error):\n \"\"\"The given chunksize is not valid.\"\"\"\n pass\n\nclass InvalidNotificationError(Error):\n \"\"\"The channel Notification is invalid.\"\"\"\n pass\n\nclass BatchError(HttpError):\n \"\"\"Error occured during batch operations.\"\"\"\n\n @util.positional(2)\n def __init__(self, reason, resp=None, content=None):\n self.resp = resp\n self.content = content\n self.reason = reason\n\n def __repr__(self):\n return '<BatchError %s \"%s\">' % (self.resp.status, self.reason)\n\n __str__ = __repr__\n\n\nclass UnexpectedMethodError(Error):\n \"\"\"Exception raised by RequestMockBuilder on unexpected calls.\"\"\"\n\n @util.positional(1)\n def __init__(self, methodId=None):\n \"\"\"Constructor for an UnexpectedMethodError.\"\"\"\n super(UnexpectedMethodError, self).__init__(\n 'Received unexpected call %s' % methodId)\n\n\nclass UnexpectedBodyError(Error):\n \"\"\"Exception raised by RequestMockBuilder on unexpected bodies.\"\"\"\n\n def __init__(self, expected, provided):\n \"\"\"Constructor for an UnexpectedMethodError.\"\"\"\n super(UnexpectedBodyError, self).__init__(\n 'Expected: [%s] - Provided: [%s]' % (expected, provided))\n", "path": "googleapiclient/errors.py"}]}
2,007
125
gh_patches_debug_7422
rasdani/github-patches
git_diff
Lightning-AI__pytorch-lightning-1091
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Update CHANGELOG for 0.7.x ## 🐛 Bug <!-- A clear and concise description of what the bug is. --> Updated CHANGELOG according to the reset changes (about last two weeks) especially deprecated items like `data_loader` or `xxxxx_end` ### Additional context <!-- Add any other context about the problem here. --> https://github.com/PyTorchLightning/pytorch-lightning/milestone/4 </issue> <code> [start of pytorch_lightning/core/decorators.py] 1 import traceback 2 from functools import wraps 3 import warnings 4 5 6 def data_loader(fn): 7 """Decorator to make any fx with this use the lazy property. 8 9 :param fn: 10 :return: 11 """ 12 w = 'data_loader decorator deprecated in 0.7.0. Will remove 0.9.0' 13 warnings.warn(w) 14 15 def inner_fx(self): 16 return fn(self) 17 return inner_fx 18 [end of pytorch_lightning/core/decorators.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pytorch_lightning/core/decorators.py b/pytorch_lightning/core/decorators.py --- a/pytorch_lightning/core/decorators.py +++ b/pytorch_lightning/core/decorators.py @@ -6,11 +6,10 @@ def data_loader(fn): """Decorator to make any fx with this use the lazy property. - :param fn: - :return: + Warnings: + This decorator deprecated in v0.7.0 and it will be removed v0.9.0. """ - w = 'data_loader decorator deprecated in 0.7.0. Will remove 0.9.0' - warnings.warn(w) + warnings.warn('`data_loader` decorator deprecated in v0.7.0. Will be removed v0.9.0', DeprecationWarning) def inner_fx(self): return fn(self)
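A side effect of the fix worth spelling out: passing `DeprecationWarning` as the category lets callers filter or assert on the warning instead of receiving an uncategorised `UserWarning`. A runnable sketch (decorator body copied from the patched version, trimmed of unused imports):

```python
import warnings

def data_loader(fn):
    warnings.warn('`data_loader` decorator deprecated in v0.7.0. '
                  'Will be removed v0.9.0', DeprecationWarning)
    def inner_fx(self):
        return fn(self)
    return inner_fx

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')   # DeprecationWarning is ignored by default

    @data_loader
    def train_dataloader(self):       # decoration itself triggers the warning
        return []

assert issubclass(caught[0].category, DeprecationWarning)
print(caught[0].message)
```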
{"golden_diff": "diff --git a/pytorch_lightning/core/decorators.py b/pytorch_lightning/core/decorators.py\n--- a/pytorch_lightning/core/decorators.py\n+++ b/pytorch_lightning/core/decorators.py\n@@ -6,11 +6,10 @@\n def data_loader(fn):\n \"\"\"Decorator to make any fx with this use the lazy property.\n \n- :param fn:\n- :return:\n+ Warnings:\n+ This decorator deprecated in v0.7.0 and it will be removed v0.9.0.\n \"\"\"\n- w = 'data_loader decorator deprecated in 0.7.0. Will remove 0.9.0'\n- warnings.warn(w)\n+ warnings.warn('`data_loader` decorator deprecated in v0.7.0. Will be removed v0.9.0', DeprecationWarning)\n \n def inner_fx(self):\n return fn(self)\n", "issue": "Update CHANGELOG for 0.7.x\n## \ud83d\udc1b Bug\r\n\r\n<!-- A clear and concise description of what the bug is. -->\r\n\r\nUpdated CHANGELOG according to the reset changes (about last two weeks) especially deprecated items like `data_loader` or `xxxxx_end`\r\n\r\n### Additional context\r\n\r\n<!-- Add any other context about the problem here. -->\r\n\r\nhttps://github.com/PyTorchLightning/pytorch-lightning/milestone/4\n", "before_files": [{"content": "import traceback\nfrom functools import wraps\nimport warnings\n\n\ndef data_loader(fn):\n \"\"\"Decorator to make any fx with this use the lazy property.\n\n :param fn:\n :return:\n \"\"\"\n w = 'data_loader decorator deprecated in 0.7.0. Will remove 0.9.0'\n warnings.warn(w)\n\n def inner_fx(self):\n return fn(self)\n return inner_fx\n", "path": "pytorch_lightning/core/decorators.py"}]}
754
201
gh_patches_debug_54565
rasdani/github-patches
git_diff
dbt-labs__dbt-core-2832
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> set colorama upper bound to <0.4.4 colorama v0.4.4 (released in the last 24 hours) is missing an sdist, which trips up the homebrew packaging step of our [dbt release flow](https://github.com/fishtown-analytics/dbt-release/runs/1249693542). Let's set the [upper bound](https://github.com/fishtown-analytics/dbt/blob/dev/kiyoshi-kuromiya/core/setup.py#L67) to <0.4.4 instead of <0.5 for now. </issue> <code> [start of core/setup.py] 1 #!/usr/bin/env python 2 import os 3 import sys 4 5 if sys.version_info < (3, 6): 6 print('Error: dbt does not support this version of Python.') 7 print('Please upgrade to Python 3.6 or higher.') 8 sys.exit(1) 9 10 11 from setuptools import setup 12 try: 13 from setuptools import find_namespace_packages 14 except ImportError: 15 # the user has a downlevel version of setuptools. 16 print('Error: dbt requires setuptools v40.1.0 or higher.') 17 print('Please upgrade setuptools with "pip install --upgrade setuptools" ' 18 'and try again') 19 sys.exit(1) 20 21 22 def read(fname): 23 return open(os.path.join(os.path.dirname(__file__), fname)).read() 24 25 26 package_name = "dbt-core" 27 package_version = "0.18.1rc1" 28 description = """dbt (data build tool) is a command line tool that helps \ 29 analysts and engineers transform data in their warehouse more effectively""" 30 31 32 setup( 33 name=package_name, 34 version=package_version, 35 description=description, 36 long_description=description, 37 author="Fishtown Analytics", 38 author_email="[email protected]", 39 url="https://github.com/fishtown-analytics/dbt", 40 packages=find_namespace_packages(include=['dbt', 'dbt.*']), 41 package_data={ 42 'dbt': [ 43 'include/index.html', 44 'include/global_project/dbt_project.yml', 45 'include/global_project/docs/*.md', 46 'include/global_project/macros/*.sql', 47 'include/global_project/macros/**/*.sql', 48 'include/global_project/macros/**/**/*.sql', 49 'py.typed', 50 ] 51 }, 52 test_suite='test', 53 entry_points={ 54 'console_scripts': [ 55 'dbt = dbt.main:main', 56 ], 57 }, 58 scripts=[ 59 'scripts/dbt', 60 ], 61 install_requires=[ 62 'Jinja2==2.11.2', 63 'PyYAML>=3.11', 64 'sqlparse>=0.2.3,<0.4', 65 'networkx>=2.3,<3', 66 'minimal-snowplow-tracker==0.0.2', 67 'colorama>=0.3.9,<0.5', 68 'agate>=1.6,<2', 69 'isodate>=0.6,<0.7', 70 'json-rpc>=1.12,<2', 71 'werkzeug>=0.15,<0.17', 72 'dataclasses==0.6;python_version<"3.7"', 73 'hologram==0.0.10', 74 'logbook>=1.5,<1.6', 75 'typing-extensions>=3.7.4,<3.8', 76 # the following are all to match snowflake-connector-python 77 'requests>=2.18.0,<2.24.0', 78 'idna<2.10', 79 'cffi>=1.9,<1.15', 80 ], 81 zip_safe=False, 82 classifiers=[ 83 'Development Status :: 5 - Production/Stable', 84 85 'License :: OSI Approved :: Apache Software License', 86 87 'Operating System :: Microsoft :: Windows', 88 'Operating System :: MacOS :: MacOS X', 89 'Operating System :: POSIX :: Linux', 90 91 'Programming Language :: Python :: 3.6', 92 'Programming Language :: Python :: 3.7', 93 'Programming Language :: Python :: 3.8', 94 ], 95 python_requires=">=3.6.3", 96 ) 97 [end of core/setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/core/setup.py b/core/setup.py --- a/core/setup.py +++ b/core/setup.py @@ -64,7 +64,7 @@ 'sqlparse>=0.2.3,<0.4', 'networkx>=2.3,<3', 'minimal-snowplow-tracker==0.0.2', - 'colorama>=0.3.9,<0.5', + 'colorama>=0.3.9,<0.4.4', 'agate>=1.6,<2', 'isodate>=0.6,<0.7', 'json-rpc>=1.12,<2',
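To see exactly what the tightened bound excludes, the `packaging` library (an assumption here, it is not part of the dbt code shown above) can evaluate both specifiers:

```python
from packaging.specifiers import SpecifierSet

old = SpecifierSet(">=0.3.9,<0.5")
new = SpecifierSet(">=0.3.9,<0.4.4")

for candidate in ("0.3.9", "0.4.3", "0.4.4"):
    print(candidate, candidate in old, candidate in new)
# 0.3.9 True True
# 0.4.3 True True
# 0.4.4 True False  <- the sdist-less release the new pin excludes
```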
{"golden_diff": "diff --git a/core/setup.py b/core/setup.py\n--- a/core/setup.py\n+++ b/core/setup.py\n@@ -64,7 +64,7 @@\n 'sqlparse>=0.2.3,<0.4',\n 'networkx>=2.3,<3',\n 'minimal-snowplow-tracker==0.0.2',\n- 'colorama>=0.3.9,<0.5',\n+ 'colorama>=0.3.9,<0.4.4',\n 'agate>=1.6,<2',\n 'isodate>=0.6,<0.7',\n 'json-rpc>=1.12,<2',\n", "issue": "set colorama upper bound to <0.4.4\ncolorama v0.4.4 (released in the last 24 hours) is missing an sdist, which trips up the homebrew packaging step of our [dbt release flow](https://github.com/fishtown-analytics/dbt-release/runs/1249693542). Let's set the [upper bound](https://github.com/fishtown-analytics/dbt/blob/dev/kiyoshi-kuromiya/core/setup.py#L67) to <0.4.4 instead of <0.5 for now.\n", "before_files": [{"content": "#!/usr/bin/env python\nimport os\nimport sys\n\nif sys.version_info < (3, 6):\n print('Error: dbt does not support this version of Python.')\n print('Please upgrade to Python 3.6 or higher.')\n sys.exit(1)\n\n\nfrom setuptools import setup\ntry:\n from setuptools import find_namespace_packages\nexcept ImportError:\n # the user has a downlevel version of setuptools.\n print('Error: dbt requires setuptools v40.1.0 or higher.')\n print('Please upgrade setuptools with \"pip install --upgrade setuptools\" '\n 'and try again')\n sys.exit(1)\n\n\ndef read(fname):\n return open(os.path.join(os.path.dirname(__file__), fname)).read()\n\n\npackage_name = \"dbt-core\"\npackage_version = \"0.18.1rc1\"\ndescription = \"\"\"dbt (data build tool) is a command line tool that helps \\\nanalysts and engineers transform data in their warehouse more effectively\"\"\"\n\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=description,\n author=\"Fishtown Analytics\",\n author_email=\"[email protected]\",\n url=\"https://github.com/fishtown-analytics/dbt\",\n packages=find_namespace_packages(include=['dbt', 'dbt.*']),\n package_data={\n 'dbt': [\n 'include/index.html',\n 'include/global_project/dbt_project.yml',\n 'include/global_project/docs/*.md',\n 'include/global_project/macros/*.sql',\n 'include/global_project/macros/**/*.sql',\n 'include/global_project/macros/**/**/*.sql',\n 'py.typed',\n ]\n },\n test_suite='test',\n entry_points={\n 'console_scripts': [\n 'dbt = dbt.main:main',\n ],\n },\n scripts=[\n 'scripts/dbt',\n ],\n install_requires=[\n 'Jinja2==2.11.2',\n 'PyYAML>=3.11',\n 'sqlparse>=0.2.3,<0.4',\n 'networkx>=2.3,<3',\n 'minimal-snowplow-tracker==0.0.2',\n 'colorama>=0.3.9,<0.5',\n 'agate>=1.6,<2',\n 'isodate>=0.6,<0.7',\n 'json-rpc>=1.12,<2',\n 'werkzeug>=0.15,<0.17',\n 'dataclasses==0.6;python_version<\"3.7\"',\n 'hologram==0.0.10',\n 'logbook>=1.5,<1.6',\n 'typing-extensions>=3.7.4,<3.8',\n # the following are all to match snowflake-connector-python\n 'requests>=2.18.0,<2.24.0',\n 'idna<2.10',\n 'cffi>=1.9,<1.15',\n ],\n zip_safe=False,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n\n 'License :: OSI Approved :: Apache Software License',\n\n 'Operating System :: Microsoft :: Windows',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: POSIX :: Linux',\n\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n ],\n python_requires=\">=3.6.3\",\n)\n", "path": "core/setup.py"}]}
1,622
149
gh_patches_debug_14724
rasdani/github-patches
git_diff
scikit-hep__pyhf-235
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> loosen numpy requirements for non-extra installs # Description we are pretty restrictive in the numpy version range due to trying to conform to TF's valid range, but TF is only one of the backends. If just installing `pip install pyhf` we should not force users to a speciic range unless we require the APIs `numpy>=1.14.0` should be enough unless i'm missing something. @kratsg since you changed this last, any reason you see to restrict numpy further? </issue> <code> [start of setup.py] 1 from setuptools import setup, find_packages 2 setup( 3 name = 'pyhf', 4 version = '0.0.15', 5 description = '(partial) pure python histfactory implementation', 6 url = '', 7 author = 'Lukas Heinrich', 8 author_email = '[email protected]', 9 packages = find_packages(), 10 include_package_data = True, 11 install_requires = [ 12 'numpy<=1.14.5,>=1.14.3', # required by tensorflow, mxnet, and us 13 'scipy', 14 'click>=6.0', # for console scripts, 15 'tqdm', # for readxml 16 'six', # for modifiers 17 'jsonschema>=v3.0.0a2', # for utils, alpha-release for draft 6 18 ], 19 extras_require = { 20 'xmlimport': [ 21 'uproot', 22 ], 23 'torch': [ 24 'torch>=0.4.0' 25 ], 26 'mxnet':[ 27 'mxnet>=1.0.0', 28 'requests<2.19.0,>=2.18.4', 29 'numpy<1.15.0,>=1.8.2', 30 'requests<2.19.0,>=2.18.4', 31 ], 32 'tensorflow':[ 33 'tensorflow>=1.10.0', 34 'numpy<=1.14.5,>=1.13.3', 35 'setuptools<=39.1.0', 36 ], 37 'develop': [ 38 'pyflakes', 39 'pytest>=3.5.1', 40 'pytest-cov>=2.5.1', 41 'pytest-benchmark[histogram]', 42 'pytest-console-scripts', 43 'python-coveralls', 44 'coverage>=4.0', # coveralls 45 'matplotlib', 46 'jupyter', 47 'uproot', 48 'papermill', 49 'graphviz', 50 'sphinx', 51 'sphinxcontrib-bibtex', 52 'sphinxcontrib-napoleon', 53 'sphinx_rtd_theme', 54 'nbsphinx', 55 'jsonpatch' 56 ] 57 }, 58 entry_points = { 59 'console_scripts': ['pyhf=pyhf.commandline:pyhf'] 60 }, 61 dependency_links = [ 62 ] 63 ) 64 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -9,8 +9,7 @@ packages = find_packages(), include_package_data = True, install_requires = [ - 'numpy<=1.14.5,>=1.14.3', # required by tensorflow, mxnet, and us - 'scipy', + 'scipy', # requires numpy, which is required by pyhf, tensorflow, and mxnet 'click>=6.0', # for console scripts, 'tqdm', # for readxml 'six', # for modifiers @@ -31,7 +30,7 @@ ], 'tensorflow':[ 'tensorflow>=1.10.0', - 'numpy<=1.14.5,>=1.13.3', + 'numpy<=1.14.5,>=1.14.0', # Lower of 1.14.0 instead of 1.13.3 to ensure doctest pass 'setuptools<=39.1.0', ], 'develop': [
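The shape of the fix generalises: keep the base `install_requires` loose and confine backend-specific pins to `extras_require`, so a pin only bites when that extra is requested. A minimal hypothetical `setup.py` illustrating the pattern (package name and versions are placeholders, not pyhf's actual metadata):

```python
from setuptools import setup

setup(
    name='mypkg',                      # placeholder package
    version='0.0.1',
    install_requires=[
        'scipy',                       # pulls a compatible numpy transitively
    ],
    extras_require={
        'tensorflow': [
            'tensorflow>=1.10.0',
            'numpy<=1.14.5,>=1.14.0',  # pin applies only with this extra
        ],
    },
)
# `pip install mypkg` leaves numpy to scipy's metadata;
# `pip install mypkg[tensorflow]` layers on the tighter pin.
```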
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -9,8 +9,7 @@\n packages = find_packages(),\n include_package_data = True,\n install_requires = [\n- 'numpy<=1.14.5,>=1.14.3', # required by tensorflow, mxnet, and us\n- 'scipy',\n+ 'scipy', # requires numpy, which is required by pyhf, tensorflow, and mxnet\n 'click>=6.0', # for console scripts,\n 'tqdm', # for readxml\n 'six', # for modifiers\n@@ -31,7 +30,7 @@\n ],\n 'tensorflow':[\n 'tensorflow>=1.10.0',\n- 'numpy<=1.14.5,>=1.13.3',\n+ 'numpy<=1.14.5,>=1.14.0', # Lower of 1.14.0 instead of 1.13.3 to ensure doctest pass\n 'setuptools<=39.1.0',\n ],\n 'develop': [\n", "issue": "loosen numpy requirements for non-extra installs\n# Description\r\n\r\nwe are pretty restrictive in the numpy version range due to trying to conform to TF's valid range, but TF is only one of the backends. If just installing `pip install pyhf` we should not force users to a speciic range unless we require the APIs\r\n\r\n`numpy>=1.14.0` should be enough unless i'm missing something. @kratsg since you changed this last, any reason you see to restrict numpy further?\n", "before_files": [{"content": "from setuptools import setup, find_packages\nsetup(\n name = 'pyhf',\n version = '0.0.15',\n description = '(partial) pure python histfactory implementation',\n url = '',\n author = 'Lukas Heinrich',\n author_email = '[email protected]',\n packages = find_packages(),\n include_package_data = True,\n install_requires = [\n 'numpy<=1.14.5,>=1.14.3', # required by tensorflow, mxnet, and us\n 'scipy',\n 'click>=6.0', # for console scripts,\n 'tqdm', # for readxml\n 'six', # for modifiers\n 'jsonschema>=v3.0.0a2', # for utils, alpha-release for draft 6\n ],\n extras_require = {\n 'xmlimport': [\n 'uproot',\n ],\n 'torch': [\n 'torch>=0.4.0'\n ],\n 'mxnet':[\n 'mxnet>=1.0.0',\n 'requests<2.19.0,>=2.18.4',\n 'numpy<1.15.0,>=1.8.2',\n 'requests<2.19.0,>=2.18.4',\n ],\n 'tensorflow':[\n 'tensorflow>=1.10.0',\n 'numpy<=1.14.5,>=1.13.3',\n 'setuptools<=39.1.0',\n ],\n 'develop': [\n 'pyflakes',\n 'pytest>=3.5.1',\n 'pytest-cov>=2.5.1',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'python-coveralls',\n 'coverage>=4.0', # coveralls\n 'matplotlib',\n 'jupyter',\n 'uproot',\n 'papermill',\n 'graphviz',\n 'sphinx',\n 'sphinxcontrib-bibtex',\n 'sphinxcontrib-napoleon',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'jsonpatch'\n ]\n },\n entry_points = {\n 'console_scripts': ['pyhf=pyhf.commandline:pyhf']\n },\n dependency_links = [\n ]\n)\n", "path": "setup.py"}]}
1,269
261
gh_patches_debug_1393
rasdani/github-patches
git_diff
pytorch__audio-1583
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Use of deprecated `AutoNonVariableTypeMode`. `AutoNonVariableTypeMode` is deprecated and will be removed in PyTorch 1.10. https://github.com/pytorch/audio/search?q=AutoNonVariableTypeMode Migration: https://github.com/pytorch/pytorch/blob/master/docs/cpp/source/notes/inference_mode.rst#migration-guide-from-autononvariabletypemode cc @carolineechen </issue> <code> [start of torchaudio/__init__.py] 1 from . import extension # noqa: F401 2 from torchaudio._internal import module_utils as _mod_utils # noqa: F401 3 from torchaudio import ( 4 compliance, 5 datasets, 6 functional, 7 kaldi_io, 8 utils, 9 sox_effects, 10 transforms, 11 ) 12 13 from torchaudio.backend import ( 14 list_audio_backends, 15 get_audio_backend, 16 set_audio_backend, 17 ) 18 19 try: 20 from .version import __version__, git_version # noqa: F401 21 except ImportError: 22 pass 23 24 __all__ = [ 25 'compliance', 26 'datasets', 27 'functional', 28 'kaldi_io', 29 'utils', 30 'sox_effects', 31 'transforms', 32 'list_audio_backends', 33 'get_audio_backend', 34 'set_audio_backend', 35 'save_encinfo', 36 'sox_signalinfo_t', 37 'sox_encodinginfo_t', 38 'get_sox_option_t', 39 'get_sox_encoding_t', 40 'get_sox_bool', 41 'SignalInfo', 42 'EncodingInfo', 43 ] 44 [end of torchaudio/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/torchaudio/__init__.py b/torchaudio/__init__.py --- a/torchaudio/__init__.py +++ b/torchaudio/__init__.py @@ -32,12 +32,4 @@ 'list_audio_backends', 'get_audio_backend', 'set_audio_backend', - 'save_encinfo', - 'sox_signalinfo_t', - 'sox_encodinginfo_t', - 'get_sox_option_t', - 'get_sox_encoding_t', - 'get_sox_bool', - 'SignalInfo', - 'EncodingInfo', ]
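The diff prunes `__all__` entries for sox-era helpers that, as part of the deprecation work this issue belongs to, presumably no longer exist as module attributes; a stale `__all__` makes `from torchaudio import *` fail outright. A small diagnostic sketch, assuming torchaudio is installed:

```python
import torchaudio

# Names advertised in __all__ but not actually defined on the module.
stale = [name for name in torchaudio.__all__ if not hasattr(torchaudio, name)]
print(stale)
# On a pre-patch build this is expected to list entries such as
# 'save_encinfo'; after the patch it should print [].
```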
{"golden_diff": "diff --git a/torchaudio/__init__.py b/torchaudio/__init__.py\n--- a/torchaudio/__init__.py\n+++ b/torchaudio/__init__.py\n@@ -32,12 +32,4 @@\n 'list_audio_backends',\n 'get_audio_backend',\n 'set_audio_backend',\n- 'save_encinfo',\n- 'sox_signalinfo_t',\n- 'sox_encodinginfo_t',\n- 'get_sox_option_t',\n- 'get_sox_encoding_t',\n- 'get_sox_bool',\n- 'SignalInfo',\n- 'EncodingInfo',\n ]\n", "issue": "Use of deprecated `AutoNonVariableTypeMode`.\n`AutoNonVariableTypeMode` is deprecated and will be removed in PyTorch 1.10.\r\n\r\nhttps://github.com/pytorch/audio/search?q=AutoNonVariableTypeMode\r\n\r\nMigration: https://github.com/pytorch/pytorch/blob/master/docs/cpp/source/notes/inference_mode.rst#migration-guide-from-autononvariabletypemode\r\n\r\ncc @carolineechen \n", "before_files": [{"content": "from . import extension # noqa: F401\nfrom torchaudio._internal import module_utils as _mod_utils # noqa: F401\nfrom torchaudio import (\n compliance,\n datasets,\n functional,\n kaldi_io,\n utils,\n sox_effects,\n transforms,\n)\n\nfrom torchaudio.backend import (\n list_audio_backends,\n get_audio_backend,\n set_audio_backend,\n)\n\ntry:\n from .version import __version__, git_version # noqa: F401\nexcept ImportError:\n pass\n\n__all__ = [\n 'compliance',\n 'datasets',\n 'functional',\n 'kaldi_io',\n 'utils',\n 'sox_effects',\n 'transforms',\n 'list_audio_backends',\n 'get_audio_backend',\n 'set_audio_backend',\n 'save_encinfo',\n 'sox_signalinfo_t',\n 'sox_encodinginfo_t',\n 'get_sox_option_t',\n 'get_sox_encoding_t',\n 'get_sox_bool',\n 'SignalInfo',\n 'EncodingInfo',\n]\n", "path": "torchaudio/__init__.py"}]}
942
141
gh_patches_debug_4807
rasdani/github-patches
git_diff
bridgecrewio__checkov-5045
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Update CKV_AZURE_43 `each.` **Describe the issue** CKV_AZURE_43 StorageAccountName.py VARIABLE_REFS list does not include the `each.` used with for_each meta argument to return UNKNOWN and currently returns FAILED check which is incorrect. **Examples** ``` module "bootstrap" { source = "../../modules/bootstrap" for_each = var.bootstrap_storage create_storage_account = try(each.value.create_storage, true) name = each.value.name resource_group_name = try(each.value.resource_group_name, local.resource_group.name) location = var.location storage_acl = try(each.value.storage_acl, false) tags = var.tags } ``` Within the bootstrap module - we use the `azurerm_storage_account` : ``` resource "azurerm_storage_account" "this" { count = var.create_storage_account ? 1 : 0 name = var.name location = var.location resource_group_name = var.resource_group_name min_tls_version = var.min_tls_version account_replication_type = "LRS" account_tier = "Standard" tags = var.tags queue_properties { logging { delete = true read = true write = true version = "1.0" retention_policy_days = var.retention_policy_days } } network_rules { default_action = var.storage_acl == true ? "Deny" : "Allow" ip_rules = var.storage_acl == true ? var.storage_allow_inbound_public_ips : null virtual_network_subnet_ids = var.storage_acl == true ? var.storage_allow_vnet_subnets : null } } ``` And Checkov returns this : ``` Check: CKV_AZURE_43: "Ensure Storage Accounts adhere to the naming rules" FAILED for resource: module.bootstrap.azurerm_storage_account.this File: /modules/bootstrap/main.tf:1-25 Calling File: /examples/standalone_vm/main.tf:192-204 Guide: https://docs.bridgecrew.io/docs/ensure-storage-accounts-adhere-to-the-naming-rules ``` **Version (please complete the following information):** - Checkov Version 2.2.125 **Additional context** </issue> <code> [start of checkov/terraform/checks/resource/azure/StorageAccountName.py] 1 import re 2 from typing import List, Dict, Any 3 4 from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck 5 from checkov.common.models.enums import CheckResult, CheckCategories 6 7 STO_NAME_REGEX = re.compile(r"^[a-z0-9]{3,24}$") 8 VARIABLE_REFS = ("local.", "module.", "var.", "random_string.", "random_id.", "random_integer.", "random_pet.", 9 "azurecaf_name") 10 11 12 class StorageAccountName(BaseResourceCheck): 13 def __init__(self) -> None: 14 name = "Ensure Storage Accounts adhere to the naming rules" 15 id = "CKV_AZURE_43" 16 supported_resources = ["azurerm_storage_account"] 17 categories = [CheckCategories.CONVENTION] 18 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) 19 20 def scan_resource_conf(self, conf: Dict[str, Any]) -> CheckResult: 21 """ 22 The Storage Account naming reference: 23 https://docs.microsoft.com/en-us/azure/storage/common/storage-account-overview#naming-storage-accounts 24 :param conf: azurerm_storage_account configuration 25 :return: <CheckResult> 26 """ 27 name = conf.get("name") 28 if name: 29 name = str(name[0]) 30 if any(x in name for x in VARIABLE_REFS): 31 # in the case we couldn't evaluate the name, just ignore 32 return CheckResult.UNKNOWN 33 if re.findall(STO_NAME_REGEX, str(conf["name"][0])): 34 return CheckResult.PASSED 35 36 return CheckResult.FAILED 37 38 def get_evaluated_keys(self) -> List[str]: 39 return ["name"] 40 41 42 check = StorageAccountName() 43 [end of 
checkov/terraform/checks/resource/azure/StorageAccountName.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/checkov/terraform/checks/resource/azure/StorageAccountName.py b/checkov/terraform/checks/resource/azure/StorageAccountName.py --- a/checkov/terraform/checks/resource/azure/StorageAccountName.py +++ b/checkov/terraform/checks/resource/azure/StorageAccountName.py @@ -6,7 +6,7 @@ STO_NAME_REGEX = re.compile(r"^[a-z0-9]{3,24}$") VARIABLE_REFS = ("local.", "module.", "var.", "random_string.", "random_id.", "random_integer.", "random_pet.", - "azurecaf_name") + "azurecaf_name", "each.") class StorageAccountName(BaseResourceCheck):
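The behavioural core of the change, isolated: once `"each."` is in `VARIABLE_REFS`, a name that flows in from a `for_each` expression short-circuits to UNKNOWN instead of failing the regex. A standalone re-implementation of just that logic (the unresolved-name string below is illustrative, not checkov's exact rendering):

```python
import re

STO_NAME_REGEX = re.compile(r"^[a-z0-9]{3,24}$")
VARIABLE_REFS = ("local.", "module.", "var.", "random_string.", "random_id.",
                 "random_integer.", "random_pet.", "azurecaf_name", "each.")

def scan(name: str) -> str:
    if any(ref in name for ref in VARIABLE_REFS):
        return "UNKNOWN"    # name cannot be evaluated statically
    return "PASSED" if STO_NAME_REGEX.match(name) else "FAILED"

print(scan("${each.value.name}"))  # UNKNOWN (FAILED before the patch)
print(scan("mystorageacct01"))     # PASSED
print(scan("Invalid-Name"))        # FAILED
```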
{"golden_diff": "diff --git a/checkov/terraform/checks/resource/azure/StorageAccountName.py b/checkov/terraform/checks/resource/azure/StorageAccountName.py\n--- a/checkov/terraform/checks/resource/azure/StorageAccountName.py\n+++ b/checkov/terraform/checks/resource/azure/StorageAccountName.py\n@@ -6,7 +6,7 @@\n \n STO_NAME_REGEX = re.compile(r\"^[a-z0-9]{3,24}$\")\n VARIABLE_REFS = (\"local.\", \"module.\", \"var.\", \"random_string.\", \"random_id.\", \"random_integer.\", \"random_pet.\",\n- \"azurecaf_name\")\n+ \"azurecaf_name\", \"each.\")\n \n \n class StorageAccountName(BaseResourceCheck):\n", "issue": "Update CKV_AZURE_43 `each.`\n**Describe the issue**\r\nCKV_AZURE_43 StorageAccountName.py VARIABLE_REFS list does not include the `each.` used with for_each meta argument to return UNKNOWN and currently returns FAILED check which is incorrect.\r\n\r\n**Examples**\r\n\r\n```\r\nmodule \"bootstrap\" {\r\n source = \"../../modules/bootstrap\"\r\n\r\n for_each = var.bootstrap_storage\r\n\r\n create_storage_account = try(each.value.create_storage, true)\r\n name = each.value.name\r\n resource_group_name = try(each.value.resource_group_name, local.resource_group.name)\r\n location = var.location\r\n storage_acl = try(each.value.storage_acl, false)\r\n\r\n tags = var.tags\r\n}\r\n```\r\n\r\nWithin the bootstrap module - we use the `azurerm_storage_account` :\r\n\r\n```\r\nresource \"azurerm_storage_account\" \"this\" {\r\n count = var.create_storage_account ? 1 : 0\r\n\r\n name = var.name\r\n location = var.location\r\n resource_group_name = var.resource_group_name\r\n min_tls_version = var.min_tls_version\r\n account_replication_type = \"LRS\"\r\n account_tier = \"Standard\"\r\n tags = var.tags\r\n queue_properties {\r\n logging {\r\n delete = true\r\n read = true\r\n write = true\r\n version = \"1.0\"\r\n retention_policy_days = var.retention_policy_days\r\n }\r\n }\r\n network_rules {\r\n default_action = var.storage_acl == true ? \"Deny\" : \"Allow\"\r\n ip_rules = var.storage_acl == true ? var.storage_allow_inbound_public_ips : null\r\n virtual_network_subnet_ids = var.storage_acl == true ? 
var.storage_allow_vnet_subnets : null\r\n }\r\n}\r\n```\r\n\r\nAnd Checkov returns this :\r\n\r\n```\r\nCheck: CKV_AZURE_43: \"Ensure Storage Accounts adhere to the naming rules\"\r\n FAILED for resource: module.bootstrap.azurerm_storage_account.this\r\n File: /modules/bootstrap/main.tf:1-25\r\n Calling File: /examples/standalone_vm/main.tf:192-204\r\n Guide: https://docs.bridgecrew.io/docs/ensure-storage-accounts-adhere-to-the-naming-rules\r\n```\r\n\r\n**Version (please complete the following information):**\r\n - Checkov Version 2.2.125\r\n\r\n**Additional context**\r\n\n", "before_files": [{"content": "import re\nfrom typing import List, Dict, Any\n\nfrom checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\nfrom checkov.common.models.enums import CheckResult, CheckCategories\n\nSTO_NAME_REGEX = re.compile(r\"^[a-z0-9]{3,24}$\")\nVARIABLE_REFS = (\"local.\", \"module.\", \"var.\", \"random_string.\", \"random_id.\", \"random_integer.\", \"random_pet.\",\n \"azurecaf_name\")\n\n\nclass StorageAccountName(BaseResourceCheck):\n def __init__(self) -> None:\n name = \"Ensure Storage Accounts adhere to the naming rules\"\n id = \"CKV_AZURE_43\"\n supported_resources = [\"azurerm_storage_account\"]\n categories = [CheckCategories.CONVENTION]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf: Dict[str, Any]) -> CheckResult:\n \"\"\"\n The Storage Account naming reference:\n https://docs.microsoft.com/en-us/azure/storage/common/storage-account-overview#naming-storage-accounts\n :param conf: azurerm_storage_account configuration\n :return: <CheckResult>\n \"\"\"\n name = conf.get(\"name\")\n if name:\n name = str(name[0])\n if any(x in name for x in VARIABLE_REFS):\n # in the case we couldn't evaluate the name, just ignore\n return CheckResult.UNKNOWN\n if re.findall(STO_NAME_REGEX, str(conf[\"name\"][0])):\n return CheckResult.PASSED\n\n return CheckResult.FAILED\n\n def get_evaluated_keys(self) -> List[str]:\n return [\"name\"]\n\n\ncheck = StorageAccountName()\n", "path": "checkov/terraform/checks/resource/azure/StorageAccountName.py"}]}
1,520
157
gh_patches_debug_29165
rasdani/github-patches
git_diff
spack__spack-7545
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> gcc v5.4.0 build fails due to mpfr patching problem There seems to be a patch application issue in the mpfr-3.1.5 build procedure I was expecting something like my previous build: ``` ==> Installing mpfr ==> Fetching file://MIRROR_DIR/mirror/mpfr/mpfr-3.1.5.tar.bz2 ==> Staging archive: WORKING_DIR/var/spack/stage/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5/mpfr-3.1.5.tar.bz2 ==> Created stage in WORKING_DIR/var/spack/stage/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5 ==> Applied patch vasprintf.patch ==> Applied patch strtofr.patch ==> Building mpfr [AutotoolsPackage] ==> Executing phase: 'autoreconf' ==> Executing phase: 'configure' ==> Executing phase: 'build' ==> Executing phase: 'install' ==> Successfully installed mpfr Fetch: 0.04s. Build: 9.54s. Total: 9.58s. [+] WORKING_DIR/opt/spack/linux-centos7-x86_64/gcc-4.8.5/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5 ``` When I tried to build the gcc compiler yesterday (and again this morning) the results were strange: ``` ==> Installing mpfr 1 out of 1 hunk FAILED -- saving rejects to file VERSION.rej 1 out of 1 hunk FAILED -- saving rejects to file src/mpfr.h.rej 1 out of 1 hunk FAILED -- saving rejects to file src/version.c.rej ==> Fetching file://MIRROR_DIR/mirror/mpfr/mpfr-3.1.5.tar.bz2 ==> Staging archive: WORKING_DIR/sat/spack/var/spack/stage/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5/mpfr-3.1.5.tar.bz2 ==> Created stage in WORKING_DIR/sat/spack/var/spack/stage/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5 ==> Patch strtofr.patch failed. ==> Error: ProcessError: Command exited with status 1: '/usr/bin/patch' '-s' '-p' '1' '-i' 'WORKING_DIR/sat/spack/var/spack/repos/builtin/packages/mpfr/strtofr.patch' '-d' '.' ==> Error: [Errno 2] No such file or directory: 'WORKING_DIR/sat/spack/var/spack/stage/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5/mpfr-3.1.5/spack-build.out' ``` Not only the error, but the order of the messages seem strange. A clean clone of the spack repo made no difference ```console $ spack install [email protected] ``` Default environment: ```linux-centos7-x86_64/gcc-4.8.5``` </issue> <code> [start of var/spack/repos/builtin/packages/mpfr/package.py] 1 ############################################################################## 2 # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. 3 # Produced at the Lawrence Livermore National Laboratory. 4 # 5 # This file is part of Spack. 6 # Created by Todd Gamblin, [email protected], All rights reserved. 7 # LLNL-CODE-647188 8 # 9 # For details, see https://github.com/spack/spack 10 # Please also see the NOTICE and LICENSE files for our notice and the LGPL. 11 # 12 # This program is free software; you can redistribute it and/or modify 13 # it under the terms of the GNU Lesser General Public License (as 14 # published by the Free Software Foundation) version 2.1, February 1999. 15 # 16 # This program is distributed in the hope that it will be useful, but 17 # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF 18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and 19 # conditions of the GNU Lesser General Public License for more details. 
20 # 21 # You should have received a copy of the GNU Lesser General Public 22 # License along with this program; if not, write to the Free Software 23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA 24 ############################################################################## 25 from spack import * 26 27 28 class Mpfr(AutotoolsPackage): 29 """The MPFR library is a C library for multiple-precision 30 floating-point computations with correct rounding.""" 31 32 homepage = "http://www.mpfr.org" 33 url = "https://ftp.gnu.org/gnu/mpfr/mpfr-3.1.5.tar.bz2" 34 35 version('3.1.5', 'b1d23a55588e3b2a13e3be66bc69fd8d') 36 version('3.1.4', 'b8a2f6b0e68bef46e53da2ac439e1cf4') 37 version('3.1.3', '5fdfa3cfa5c86514ee4a241a1affa138') 38 version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19') 39 40 # mpir is a drop-in replacement for gmp 41 depends_on('[email protected]:') # 4.2.3 or higher is recommended 42 43 patch('vasprintf.patch', when='@3.1.5') 44 patch('strtofr.patch', when='@3.1.5') 45 46 def configure_args(self): 47 args = [ 48 '--with-gmp=' + self.spec['gmp'].prefix, 49 ] 50 return args 51 [end of var/spack/repos/builtin/packages/mpfr/package.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/var/spack/repos/builtin/packages/mpfr/package.py b/var/spack/repos/builtin/packages/mpfr/package.py --- a/var/spack/repos/builtin/packages/mpfr/package.py +++ b/var/spack/repos/builtin/packages/mpfr/package.py @@ -30,18 +30,33 @@ floating-point computations with correct rounding.""" homepage = "http://www.mpfr.org" - url = "https://ftp.gnu.org/gnu/mpfr/mpfr-3.1.5.tar.bz2" + url = "https://ftp.gnu.org/gnu/mpfr/mpfr-4.0.1.tar.bz2" + version('4.0.1', '8c21d8ac7460493b2b9f3ef3cc610454') + version('4.0.0', 'ef619f3bb68039e35c4a219e06be72d0') + version('3.1.6', '320c28198def956aeacdb240b46b8969') version('3.1.5', 'b1d23a55588e3b2a13e3be66bc69fd8d') version('3.1.4', 'b8a2f6b0e68bef46e53da2ac439e1cf4') version('3.1.3', '5fdfa3cfa5c86514ee4a241a1affa138') version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19') # mpir is a drop-in replacement for gmp - depends_on('[email protected]:') # 4.2.3 or higher is recommended + depends_on('[email protected]:') # 4.2.3 or higher is recommended + depends_on('[email protected]:', when='@4.0.0:') # http://www.mpfr.org/mpfr-4.0.0/ - patch('vasprintf.patch', when='@3.1.5') - patch('strtofr.patch', when='@3.1.5') + # Check the Bugs section of old release pages for patches. + # http://www.mpfr.org/mpfr-X.Y.Z/#bugs + patches = { + '3.1.6': '66a5d58364113a21405fc53f4a48f4e8', + '3.1.5': '1dc5fe65feb5607b89fe0f410d53b627', + '3.1.4': 'd124381573404fe83654c7d5a79aeabf', + '3.1.3': 'ebd1d835e0ae2fd8a9339210ccd1d0a8', + '3.1.2': '9f96a5c7cac1d6cd983ed9cf7d997074', + } + + for ver, checksum in patches.items(): + patch('http://www.mpfr.org/mpfr-{0}/allpatches'.format(ver), + when='@' + ver, sha256=checksum) def configure_args(self): args = [
{"golden_diff": "diff --git a/var/spack/repos/builtin/packages/mpfr/package.py b/var/spack/repos/builtin/packages/mpfr/package.py\n--- a/var/spack/repos/builtin/packages/mpfr/package.py\n+++ b/var/spack/repos/builtin/packages/mpfr/package.py\n@@ -30,18 +30,33 @@\n floating-point computations with correct rounding.\"\"\"\n \n homepage = \"http://www.mpfr.org\"\n- url = \"https://ftp.gnu.org/gnu/mpfr/mpfr-3.1.5.tar.bz2\"\n+ url = \"https://ftp.gnu.org/gnu/mpfr/mpfr-4.0.1.tar.bz2\"\n \n+ version('4.0.1', '8c21d8ac7460493b2b9f3ef3cc610454')\n+ version('4.0.0', 'ef619f3bb68039e35c4a219e06be72d0')\n+ version('3.1.6', '320c28198def956aeacdb240b46b8969')\n version('3.1.5', 'b1d23a55588e3b2a13e3be66bc69fd8d')\n version('3.1.4', 'b8a2f6b0e68bef46e53da2ac439e1cf4')\n version('3.1.3', '5fdfa3cfa5c86514ee4a241a1affa138')\n version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19')\n \n # mpir is a drop-in replacement for gmp\n- depends_on('[email protected]:') # 4.2.3 or higher is recommended\n+ depends_on('[email protected]:') # 4.2.3 or higher is recommended\n+ depends_on('[email protected]:', when='@4.0.0:') # http://www.mpfr.org/mpfr-4.0.0/\n \n- patch('vasprintf.patch', when='@3.1.5')\n- patch('strtofr.patch', when='@3.1.5')\n+ # Check the Bugs section of old release pages for patches.\n+ # http://www.mpfr.org/mpfr-X.Y.Z/#bugs\n+ patches = {\n+ '3.1.6': '66a5d58364113a21405fc53f4a48f4e8',\n+ '3.1.5': '1dc5fe65feb5607b89fe0f410d53b627',\n+ '3.1.4': 'd124381573404fe83654c7d5a79aeabf',\n+ '3.1.3': 'ebd1d835e0ae2fd8a9339210ccd1d0a8',\n+ '3.1.2': '9f96a5c7cac1d6cd983ed9cf7d997074',\n+ }\n+\n+ for ver, checksum in patches.items():\n+ patch('http://www.mpfr.org/mpfr-{0}/allpatches'.format(ver),\n+ when='@' + ver, sha256=checksum)\n \n def configure_args(self):\n args = [\n", "issue": "gcc v5.4.0 build fails due to mpfr patching problem\nThere seems to be a patch application issue in the mpfr-3.1.5 build procedure\r\n\r\nI was expecting something like my previous build:\r\n```\r\n==> Installing mpfr\r\n==> Fetching file://MIRROR_DIR/mirror/mpfr/mpfr-3.1.5.tar.bz2\r\n==> Staging archive: WORKING_DIR/var/spack/stage/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5/mpfr-3.1.5.tar.bz2\r\n==> Created stage in WORKING_DIR/var/spack/stage/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5\r\n==> Applied patch vasprintf.patch\r\n==> Applied patch strtofr.patch\r\n==> Building mpfr [AutotoolsPackage]\r\n==> Executing phase: 'autoreconf'\r\n==> Executing phase: 'configure'\r\n==> Executing phase: 'build'\r\n==> Executing phase: 'install'\r\n==> Successfully installed mpfr\r\n Fetch: 0.04s. Build: 9.54s. 
Total: 9.58s.\r\n[+] WORKING_DIR/opt/spack/linux-centos7-x86_64/gcc-4.8.5/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5\r\n```\r\nWhen I tried to build the gcc compiler yesterday (and again this morning) the results were strange:\r\n```\r\n==> Installing mpfr\r\n1 out of 1 hunk FAILED -- saving rejects to file VERSION.rej\r\n1 out of 1 hunk FAILED -- saving rejects to file src/mpfr.h.rej\r\n1 out of 1 hunk FAILED -- saving rejects to file src/version.c.rej\r\n==> Fetching file://MIRROR_DIR/mirror/mpfr/mpfr-3.1.5.tar.bz2\r\n==> Staging archive: WORKING_DIR/sat/spack/var/spack/stage/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5/mpfr-3.1.5.tar.bz2\r\n==> Created stage in WORKING_DIR/sat/spack/var/spack/stage/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5\r\n==> Patch strtofr.patch failed.\r\n==> Error: ProcessError: Command exited with status 1:\r\n '/usr/bin/patch' '-s' '-p' '1' '-i' 'WORKING_DIR/sat/spack/var/spack/repos/builtin/packages/mpfr/strtofr.patch' '-d' '.'\r\n==> Error: [Errno 2] No such file or directory: 'WORKING_DIR/sat/spack/var/spack/stage/mpfr-3.1.5-rmi7bmi3oaqduvjown2v46snr6ps2zr5/mpfr-3.1.5/spack-build.out'\r\n```\r\nNot only the error, but the order of the messages seem strange.\r\n\r\nA clean clone of the spack repo made no difference\r\n```console\r\n$ spack install [email protected]\r\n```\r\n\r\nDefault environment:\r\n```linux-centos7-x86_64/gcc-4.8.5```\n", "before_files": [{"content": "##############################################################################\n# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.\n# Produced at the Lawrence Livermore National Laboratory.\n#\n# This file is part of Spack.\n# Created by Todd Gamblin, [email protected], All rights reserved.\n# LLNL-CODE-647188\n#\n# For details, see https://github.com/spack/spack\n# Please also see the NOTICE and LICENSE files for our notice and the LGPL.\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License (as\n# published by the Free Software Foundation) version 2.1, February 1999.\n#\n# This program is distributed in the hope that it will be useful, but\n# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and\n# conditions of the GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public\n# License along with this program; if not, write to the Free Software\n# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA\n##############################################################################\nfrom spack import *\n\n\nclass Mpfr(AutotoolsPackage):\n \"\"\"The MPFR library is a C library for multiple-precision\n floating-point computations with correct rounding.\"\"\"\n\n homepage = \"http://www.mpfr.org\"\n url = \"https://ftp.gnu.org/gnu/mpfr/mpfr-3.1.5.tar.bz2\"\n\n version('3.1.5', 'b1d23a55588e3b2a13e3be66bc69fd8d')\n version('3.1.4', 'b8a2f6b0e68bef46e53da2ac439e1cf4')\n version('3.1.3', '5fdfa3cfa5c86514ee4a241a1affa138')\n version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19')\n\n # mpir is a drop-in replacement for gmp\n depends_on('[email protected]:') # 4.2.3 or higher is recommended\n\n patch('vasprintf.patch', when='@3.1.5')\n patch('strtofr.patch', when='@3.1.5')\n\n def configure_args(self):\n args = [\n '--with-gmp=' + self.spec['gmp'].prefix,\n ]\n return args\n", "path": "var/spack/repos/builtin/packages/mpfr/package.py"}]}
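The table this patch introduces maps each mpfr release to a digest of its upstream `allpatches` file. A stdlib-only sketch of how such a table could be regenerated (assumes network access to mpfr.org; the 32-hex-character values in the diff are MD5-length, so MD5 is what is printed here):

```python
import hashlib
import urllib.request

for ver in ('3.1.6', '3.1.5', '3.1.4', '3.1.3', '3.1.2'):
    url = 'http://www.mpfr.org/mpfr-{0}/allpatches'.format(ver)
    data = urllib.request.urlopen(url).read()   # cumulative upstream patch
    print("'%s': '%s'," % (ver, hashlib.md5(data).hexdigest()))
```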
2,012
854
gh_patches_debug_35167
rasdani/github-patches
git_diff
translate__pootle-4148
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Translation of the Report Email I would like to translate the words of the report email, if you could integrate this kind of template on the po file, it would be amazing... naturally title of the email included, which it would be `[(name-site)] Unit #(num) ((lang))` ``` Username: (username) Current URL: (url) IP address: (ip_address) User-Agent: (user_agent) Unit: (url_string) Source: (source_string) Current translation: Your question or comment: ``` Thx in advance ;) </issue> <code> [start of pootle/apps/contact/views.py] 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 # 4 # Copyright (C) Pootle contributors. 5 # 6 # This file is a part of the Pootle project. It is distributed under the GPL3 7 # or later license. See the LICENSE file for a copy of the license and the 8 # AUTHORS file for copyright and authorship information. 9 10 from django.core.urlresolvers import reverse 11 from django.views.generic import TemplateView 12 13 from contact_form.views import ContactFormView as OriginalContactFormView 14 15 from pootle.core.views import AjaxResponseMixin 16 17 from .forms import ContactForm, ReportForm 18 19 20 SUBJECT_TEMPLATE = 'Unit #%d (%s)' 21 BODY_TEMPLATE = ''' 22 Unit: %s 23 24 Source: %s 25 26 Current translation: %s 27 28 Your question or comment: 29 ''' 30 31 32 class ContactFormTemplateView(TemplateView): 33 template_name = 'contact_form/contact_form.html' 34 35 36 class ContactFormView(AjaxResponseMixin, OriginalContactFormView): 37 form_class = ContactForm 38 template_name = 'contact_form/xhr_contact_form.html' 39 40 def get_context_data(self, **kwargs): 41 ctx = super(ContactFormView, self).get_context_data(**kwargs) 42 # Provide the form action URL to use in the template that renders the 43 # contact dialog. 44 ctx.update({ 45 'contact_form_url': reverse('pootle-contact-xhr'), 46 }) 47 return ctx 48 49 def get_initial(self): 50 initial = super(ContactFormView, self).get_initial() 51 52 user = self.request.user 53 if user.is_authenticated(): 54 initial.update({ 55 'name': user.full_name, 56 'email': user.email, 57 }) 58 59 return initial 60 61 def get_success_url(self): 62 # XXX: This is unused. We don't need a `/contact/sent/` URL, but 63 # the parent :cls:`ContactView` enforces us to set some value here 64 return reverse('pootle-contact') 65 66 67 class ReportFormView(ContactFormView): 68 form_class = ReportForm 69 70 def get_context_data(self, **kwargs): 71 ctx = super(ReportFormView, self).get_context_data(**kwargs) 72 # Provide the form action URL to use in the template that renders the 73 # contact dialog. 
74 ctx.update({ 75 'contact_form_url': reverse('pootle-contact-report-error'), 76 }) 77 return ctx 78 79 def get_initial(self): 80 initial = super(ReportFormView, self).get_initial() 81 82 report = self.request.GET.get('report', False) 83 if report: 84 try: 85 from pootle_store.models import Unit 86 uid = int(report) 87 try: 88 unit = Unit.objects.select_related( 89 'store__translation_project__project', 90 ).get(id=uid) 91 if unit.is_accessible_by(self.request.user): 92 unit_absolute_url = self.request.build_absolute_uri( 93 unit.get_translate_url() 94 ) 95 initial.update({ 96 'subject': SUBJECT_TEMPLATE % ( 97 unit.id, 98 unit.store.translation_project.language.code 99 ), 100 'body': BODY_TEMPLATE % ( 101 unit_absolute_url, 102 unit.source, 103 unit.target 104 ), 105 'report_email': unit.store.translation_project \ 106 .project.report_email, 107 }) 108 except Unit.DoesNotExist: 109 pass 110 except ValueError: 111 pass 112 113 return initial 114 [end of pootle/apps/contact/views.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pootle/apps/contact/views.py b/pootle/apps/contact/views.py --- a/pootle/apps/contact/views.py +++ b/pootle/apps/contact/views.py @@ -8,6 +8,7 @@ # AUTHORS file for copyright and authorship information. from django.core.urlresolvers import reverse +from django.template.loader import render_to_string from django.views.generic import TemplateView from contact_form.views import ContactFormView as OriginalContactFormView @@ -17,18 +18,6 @@ from .forms import ContactForm, ReportForm -SUBJECT_TEMPLATE = 'Unit #%d (%s)' -BODY_TEMPLATE = ''' -Unit: %s - -Source: %s - -Current translation: %s - -Your question or comment: -''' - - class ContactFormTemplateView(TemplateView): template_name = 'contact_form/contact_form.html' @@ -93,15 +82,18 @@ unit.get_translate_url() ) initial.update({ - 'subject': SUBJECT_TEMPLATE % ( - unit.id, - unit.store.translation_project.language.code - ), - 'body': BODY_TEMPLATE % ( - unit_absolute_url, - unit.source, - unit.target - ), + 'subject': render_to_string( + 'contact_form/report_form_subject.txt', { + 'unit': unit, + 'language': unit.store \ + .translation_project \ + .language.code, + }), + 'body': render_to_string( + 'contact_form/report_form_body.txt', { + 'unit': unit, + 'unit_absolute_url': unit_absolute_url, + }), 'report_email': unit.store.translation_project \ .project.report_email, })
{"golden_diff": "diff --git a/pootle/apps/contact/views.py b/pootle/apps/contact/views.py\n--- a/pootle/apps/contact/views.py\n+++ b/pootle/apps/contact/views.py\n@@ -8,6 +8,7 @@\n # AUTHORS file for copyright and authorship information.\n \n from django.core.urlresolvers import reverse\n+from django.template.loader import render_to_string\n from django.views.generic import TemplateView\n \n from contact_form.views import ContactFormView as OriginalContactFormView\n@@ -17,18 +18,6 @@\n from .forms import ContactForm, ReportForm\n \n \n-SUBJECT_TEMPLATE = 'Unit #%d (%s)'\n-BODY_TEMPLATE = '''\n-Unit: %s\n-\n-Source: %s\n-\n-Current translation: %s\n-\n-Your question or comment:\n-'''\n-\n-\n class ContactFormTemplateView(TemplateView):\n template_name = 'contact_form/contact_form.html'\n \n@@ -93,15 +82,18 @@\n unit.get_translate_url()\n )\n initial.update({\n- 'subject': SUBJECT_TEMPLATE % (\n- unit.id,\n- unit.store.translation_project.language.code\n- ),\n- 'body': BODY_TEMPLATE % (\n- unit_absolute_url,\n- unit.source,\n- unit.target\n- ),\n+ 'subject': render_to_string(\n+ 'contact_form/report_form_subject.txt', {\n+ 'unit': unit,\n+ 'language': unit.store \\\n+ .translation_project \\\n+ .language.code,\n+ }),\n+ 'body': render_to_string(\n+ 'contact_form/report_form_body.txt', {\n+ 'unit': unit,\n+ 'unit_absolute_url': unit_absolute_url,\n+ }),\n 'report_email': unit.store.translation_project \\\n .project.report_email,\n })\n", "issue": "Translation of the Report Email\nI would like to translate the words of the report email, if you could integrate this kind of template on the po file, it would be amazing... naturally title of the email included, which it would be `[(name-site)] Unit #(num) ((lang))`\n\n```\nUsername: (username)\nCurrent URL: (url)\nIP address: (ip_address)\nUser-Agent: (user_agent)\n\nUnit: (url_string)\n\nSource: (source_string)\n\nCurrent translation: \n\nYour question or comment:\n```\n\nThx in advance ;)\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.core.urlresolvers import reverse\nfrom django.views.generic import TemplateView\n\nfrom contact_form.views import ContactFormView as OriginalContactFormView\n\nfrom pootle.core.views import AjaxResponseMixin\n\nfrom .forms import ContactForm, ReportForm\n\n\nSUBJECT_TEMPLATE = 'Unit #%d (%s)'\nBODY_TEMPLATE = '''\nUnit: %s\n\nSource: %s\n\nCurrent translation: %s\n\nYour question or comment:\n'''\n\n\nclass ContactFormTemplateView(TemplateView):\n template_name = 'contact_form/contact_form.html'\n\n\nclass ContactFormView(AjaxResponseMixin, OriginalContactFormView):\n form_class = ContactForm\n template_name = 'contact_form/xhr_contact_form.html'\n\n def get_context_data(self, **kwargs):\n ctx = super(ContactFormView, self).get_context_data(**kwargs)\n # Provide the form action URL to use in the template that renders the\n # contact dialog.\n ctx.update({\n 'contact_form_url': reverse('pootle-contact-xhr'),\n })\n return ctx\n\n def get_initial(self):\n initial = super(ContactFormView, self).get_initial()\n\n user = self.request.user\n if user.is_authenticated():\n initial.update({\n 'name': user.full_name,\n 'email': user.email,\n })\n\n return initial\n\n def get_success_url(self):\n # XXX: This is unused. 
We don't need a `/contact/sent/` URL, but\n # the parent :cls:`ContactView` enforces us to set some value here\n return reverse('pootle-contact')\n\n\nclass ReportFormView(ContactFormView):\n form_class = ReportForm\n\n def get_context_data(self, **kwargs):\n ctx = super(ReportFormView, self).get_context_data(**kwargs)\n # Provide the form action URL to use in the template that renders the\n # contact dialog.\n ctx.update({\n 'contact_form_url': reverse('pootle-contact-report-error'),\n })\n return ctx\n\n def get_initial(self):\n initial = super(ReportFormView, self).get_initial()\n\n report = self.request.GET.get('report', False)\n if report:\n try:\n from pootle_store.models import Unit\n uid = int(report)\n try:\n unit = Unit.objects.select_related(\n 'store__translation_project__project',\n ).get(id=uid)\n if unit.is_accessible_by(self.request.user):\n unit_absolute_url = self.request.build_absolute_uri(\n unit.get_translate_url()\n )\n initial.update({\n 'subject': SUBJECT_TEMPLATE % (\n unit.id,\n unit.store.translation_project.language.code\n ),\n 'body': BODY_TEMPLATE % (\n unit_absolute_url,\n unit.source,\n unit.target\n ),\n 'report_email': unit.store.translation_project \\\n .project.report_email,\n })\n except Unit.DoesNotExist:\n pass\n except ValueError:\n pass\n\n return initial\n", "path": "pootle/apps/contact/views.py"}]}
1,604
386
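The golden diff above replaces the hard-coded `SUBJECT_TEMPLATE`/`BODY_TEMPLATE` strings with Django templates rendered via `render_to_string`, which is what makes the report email translatable. A small sketch of the rendering mechanic in standalone Django; the template names in the diff (`contact_form/report_form_subject.txt`) are real, but the inline template string and the `Unit` stand-in below are illustrative:

```python
import django
from django.conf import settings
from django.template import Context, Template

settings.configure(TEMPLATES=[
    {"BACKEND": "django.template.backends.django.DjangoTemplates"}
])
django.setup()

# Stand-in for the contents of contact_form/report_form_subject.txt; in the real
# template the literal text would sit inside {% trans %} tags (with {% load i18n %})
# so translators can localise it.
subject = Template("Unit #{{ unit.id }} ({{ language }})").render(
    Context({"unit": type("Unit", (), {"id": 42}), "language": "fr"})
)
print(subject)  # -> Unit #42 (fr)
```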
gh_patches_debug_16638
rasdani/github-patches
git_diff
python-poetry__poetry-6338
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> `poetry cache clear` no longer respects `--no-interaction` flag <!-- Hi there! Thank you for discovering and submitting an issue. Before you submit this; let's make sure of a few things. Please make sure the following boxes are ticked if they are correct. If not, please try and fulfill these first. --> <!-- Checked checkbox should look like this: [x] --> - [x] I am on the [latest](https://github.com/python-poetry/poetry/releases/latest) Poetry version. - [x] I have searched the [issues](https://github.com/python-poetry/poetry/issues) of this repo and believe that this is not a duplicate. - [x] If an exception occurs when executing a command, I executed it again in debug mode (`-vvv` option). <!-- Once those are done, if you're able to fill in the following list with your information, it'd be very helpful to whoever handles the issue. --> - **OS version and name**: Ubuntu 22.04 - **Poetry version**: 1.2.0 - **Link of a [Gist](https://gist.github.com/) with the contents of your pyproject.toml file**: <!-- Gist Link Here --> ## Issue <!-- Now feel free to write your issue, but please be descriptive! Thanks again 🙌 ❤️ --> Since poetry version 1.2.0, the `poetry cache clear` command no longer respects the `--no-interaction` flag: ``` $ poetry cache clear --all --no-interaction . Delete 1882 entries? (yes/no) [no] ^C ``` </issue> <code> [start of src/poetry/console/commands/cache/clear.py] 1 from __future__ import annotations 2 3 import os 4 5 from cleo.helpers import argument 6 from cleo.helpers import option 7 8 from poetry.config.config import Config 9 from poetry.console.commands.command import Command 10 11 12 class CacheClearCommand(Command): 13 name = "cache clear" 14 description = "Clears Poetry's cache." 15 16 arguments = [argument("cache", description="The name of the cache to clear.")] 17 options = [option("all", description="Clear all entries in the cache.")] 18 19 def handle(self) -> int: 20 from cachy import CacheManager 21 22 cache = self.argument("cache") 23 24 parts = cache.split(":") 25 root = parts[0] 26 27 config = Config.create() 28 cache_dir = config.repository_cache_directory / root 29 30 try: 31 cache_dir.relative_to(config.repository_cache_directory) 32 except ValueError: 33 raise ValueError(f"{root} is not a valid repository cache") 34 35 cache = CacheManager( 36 { 37 "default": parts[0], 38 "serializer": "json", 39 "stores": {parts[0]: {"driver": "file", "path": str(cache_dir)}}, 40 } 41 ) 42 43 if len(parts) == 1: 44 if not self.option("all"): 45 raise RuntimeError( 46 f"Add the --all option if you want to clear all {parts[0]} caches" 47 ) 48 49 if not cache_dir.exists(): 50 self.line(f"No cache entries for {parts[0]}") 51 return 0 52 53 # Calculate number of entries 54 entries_count = sum( 55 len(files) for _path, _dirs, files in os.walk(str(cache_dir)) 56 ) 57 58 delete = self.confirm(f"<question>Delete {entries_count} entries?</>") 59 if not delete: 60 return 0 61 62 cache.flush() 63 elif len(parts) == 2: 64 raise RuntimeError( 65 "Only specifying the package name is not yet supported. 
" 66 "Add a specific version to clear" 67 ) 68 elif len(parts) == 3: 69 package = parts[1] 70 version = parts[2] 71 72 if not cache.has(f"{package}:{version}"): 73 self.line(f"No cache entries for {package}:{version}") 74 return 0 75 76 delete = self.confirm(f"Delete cache entry {package}:{version}") 77 if not delete: 78 return 0 79 80 cache.forget(f"{package}:{version}") 81 else: 82 raise ValueError("Invalid cache key") 83 84 return 0 85 [end of src/poetry/console/commands/cache/clear.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/poetry/console/commands/cache/clear.py b/src/poetry/console/commands/cache/clear.py --- a/src/poetry/console/commands/cache/clear.py +++ b/src/poetry/console/commands/cache/clear.py @@ -55,7 +55,7 @@ len(files) for _path, _dirs, files in os.walk(str(cache_dir)) ) - delete = self.confirm(f"<question>Delete {entries_count} entries?</>") + delete = self.confirm(f"<question>Delete {entries_count} entries?</>", True) if not delete: return 0 @@ -73,7 +73,7 @@ self.line(f"No cache entries for {package}:{version}") return 0 - delete = self.confirm(f"Delete cache entry {package}:{version}") + delete = self.confirm(f"Delete cache entry {package}:{version}", True) if not delete: return 0
{"golden_diff": "diff --git a/src/poetry/console/commands/cache/clear.py b/src/poetry/console/commands/cache/clear.py\n--- a/src/poetry/console/commands/cache/clear.py\n+++ b/src/poetry/console/commands/cache/clear.py\n@@ -55,7 +55,7 @@\n len(files) for _path, _dirs, files in os.walk(str(cache_dir))\n )\n \n- delete = self.confirm(f\"<question>Delete {entries_count} entries?</>\")\n+ delete = self.confirm(f\"<question>Delete {entries_count} entries?</>\", True)\n if not delete:\n return 0\n \n@@ -73,7 +73,7 @@\n self.line(f\"No cache entries for {package}:{version}\")\n return 0\n \n- delete = self.confirm(f\"Delete cache entry {package}:{version}\")\n+ delete = self.confirm(f\"Delete cache entry {package}:{version}\", True)\n if not delete:\n return 0\n", "issue": "`poetry cache clear` no longer respects `--no-interaction` flag\n<!--\r\n Hi there! Thank you for discovering and submitting an issue.\r\n\r\n Before you submit this; let's make sure of a few things.\r\n Please make sure the following boxes are ticked if they are correct.\r\n If not, please try and fulfill these first.\r\n-->\r\n\r\n<!-- Checked checkbox should look like this: [x] -->\r\n- [x] I am on the [latest](https://github.com/python-poetry/poetry/releases/latest) Poetry version.\r\n- [x] I have searched the [issues](https://github.com/python-poetry/poetry/issues) of this repo and believe that this is not a duplicate.\r\n- [x] If an exception occurs when executing a command, I executed it again in debug mode (`-vvv` option).\r\n\r\n<!--\r\n Once those are done, if you're able to fill in the following list with your information,\r\n it'd be very helpful to whoever handles the issue.\r\n-->\r\n\r\n- **OS version and name**: Ubuntu 22.04\r\n- **Poetry version**: 1.2.0\r\n- **Link of a [Gist](https://gist.github.com/) with the contents of your pyproject.toml file**: <!-- Gist Link Here -->\r\n\r\n## Issue\r\n<!-- Now feel free to write your issue, but please be descriptive! Thanks again \ud83d\ude4c \u2764\ufe0f -->\r\nSince poetry version 1.2.0, the `poetry cache clear` command no longer respects the `--no-interaction` flag:\r\n\r\n```\r\n$ poetry cache clear --all --no-interaction .\r\nDelete 1882 entries? 
(yes/no) [no] ^C\r\n```\r\n\r\n\r\n\r\n\n", "before_files": [{"content": "from __future__ import annotations\n\nimport os\n\nfrom cleo.helpers import argument\nfrom cleo.helpers import option\n\nfrom poetry.config.config import Config\nfrom poetry.console.commands.command import Command\n\n\nclass CacheClearCommand(Command):\n name = \"cache clear\"\n description = \"Clears Poetry's cache.\"\n\n arguments = [argument(\"cache\", description=\"The name of the cache to clear.\")]\n options = [option(\"all\", description=\"Clear all entries in the cache.\")]\n\n def handle(self) -> int:\n from cachy import CacheManager\n\n cache = self.argument(\"cache\")\n\n parts = cache.split(\":\")\n root = parts[0]\n\n config = Config.create()\n cache_dir = config.repository_cache_directory / root\n\n try:\n cache_dir.relative_to(config.repository_cache_directory)\n except ValueError:\n raise ValueError(f\"{root} is not a valid repository cache\")\n\n cache = CacheManager(\n {\n \"default\": parts[0],\n \"serializer\": \"json\",\n \"stores\": {parts[0]: {\"driver\": \"file\", \"path\": str(cache_dir)}},\n }\n )\n\n if len(parts) == 1:\n if not self.option(\"all\"):\n raise RuntimeError(\n f\"Add the --all option if you want to clear all {parts[0]} caches\"\n )\n\n if not cache_dir.exists():\n self.line(f\"No cache entries for {parts[0]}\")\n return 0\n\n # Calculate number of entries\n entries_count = sum(\n len(files) for _path, _dirs, files in os.walk(str(cache_dir))\n )\n\n delete = self.confirm(f\"<question>Delete {entries_count} entries?</>\")\n if not delete:\n return 0\n\n cache.flush()\n elif len(parts) == 2:\n raise RuntimeError(\n \"Only specifying the package name is not yet supported. \"\n \"Add a specific version to clear\"\n )\n elif len(parts) == 3:\n package = parts[1]\n version = parts[2]\n\n if not cache.has(f\"{package}:{version}\"):\n self.line(f\"No cache entries for {package}:{version}\")\n return 0\n\n delete = self.confirm(f\"Delete cache entry {package}:{version}\")\n if not delete:\n return 0\n\n cache.forget(f\"{package}:{version}\")\n else:\n raise ValueError(\"Invalid cache key\")\n\n return 0\n", "path": "src/poetry/console/commands/cache/clear.py"}]}
1,601
211
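Why adding `True` as the second argument to `confirm` fixes the `--no-interaction` regression above: in non-interactive mode no prompt can be answered, so the default is returned as-is, and a default of `no` silently vetoes the deletion. A small sketch of that mechanic, independent of cleo's real implementation (names and signature here are illustrative):

```python
def confirm(question: str, default: bool = False, interactive: bool = True) -> bool:
    if not interactive:
        # No prompt is shown; the caller gets the default back unchanged.
        return default
    hint = "yes" if default else "no"
    answer = input(f"{question} (yes/no) [{hint}] ").strip().lower()
    return answer.startswith("y") if answer else default

# With --no-interaction, the old call confirm("Delete 1882 entries?") resolved to
# False and the cache was never cleared; passing default=True makes it proceed:
assert confirm("Delete 1882 entries?", default=True, interactive=False) is True
```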
gh_patches_debug_890
rasdani/github-patches
git_diff
falconry__falcon-801
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Default OPTIONS responder does not set Content-Length to "0" Per RFC 7231: > A server MUST generate a Content-Length field with a value of "0" if no payload body is to be sent in the response. </issue> <code> [start of falcon/responders.py] 1 # Copyright 2013 by Rackspace Hosting, Inc. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 from falcon.errors import HTTPBadRequest 16 from falcon.errors import HTTPNotFound 17 from falcon.status_codes import HTTP_204 18 from falcon.status_codes import HTTP_405 19 20 21 def path_not_found(req, resp, **kwargs): 22 """Raise 404 HTTPNotFound error""" 23 raise HTTPNotFound() 24 25 26 def bad_request(req, resp, **kwargs): 27 """Raise 400 HTTPBadRequest error""" 28 raise HTTPBadRequest('Bad request', 'Invalid HTTP method') 29 30 31 def create_method_not_allowed(allowed_methods): 32 """Creates a responder for "405 Method Not Allowed" 33 34 Args: 35 allowed_methods: A list of HTTP methods (uppercase) that should be 36 returned in the Allow header. 37 38 """ 39 allowed = ', '.join(allowed_methods) 40 41 def method_not_allowed(req, resp, **kwargs): 42 resp.status = HTTP_405 43 resp.set_header('Allow', allowed) 44 45 return method_not_allowed 46 47 48 def create_default_options(allowed_methods): 49 """Creates a default responder for the OPTIONS method 50 51 Args: 52 allowed_methods: A list of HTTP methods (uppercase) that should be 53 returned in the Allow header. 54 55 """ 56 allowed = ', '.join(allowed_methods) 57 58 def on_options(req, resp, **kwargs): 59 resp.status = HTTP_204 60 resp.set_header('Allow', allowed) 61 62 return on_options 63 [end of falcon/responders.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/falcon/responders.py b/falcon/responders.py --- a/falcon/responders.py +++ b/falcon/responders.py @@ -58,5 +58,6 @@ def on_options(req, resp, **kwargs): resp.status = HTTP_204 resp.set_header('Allow', allowed) + resp.set_header('Content-Length', '0') return on_options
{"golden_diff": "diff --git a/falcon/responders.py b/falcon/responders.py\n--- a/falcon/responders.py\n+++ b/falcon/responders.py\n@@ -58,5 +58,6 @@\n def on_options(req, resp, **kwargs):\n resp.status = HTTP_204\n resp.set_header('Allow', allowed)\n+ resp.set_header('Content-Length', '0')\n \n return on_options\n", "issue": "Default OPTIONS responder does not set Content-Length to \"0\"\nPer RFC 7231:\n\n> A server MUST generate a Content-Length field with a value of \"0\" if no payload body is to be sent in the response.\n\n", "before_files": [{"content": "# Copyright 2013 by Rackspace Hosting, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom falcon.errors import HTTPBadRequest\nfrom falcon.errors import HTTPNotFound\nfrom falcon.status_codes import HTTP_204\nfrom falcon.status_codes import HTTP_405\n\n\ndef path_not_found(req, resp, **kwargs):\n \"\"\"Raise 404 HTTPNotFound error\"\"\"\n raise HTTPNotFound()\n\n\ndef bad_request(req, resp, **kwargs):\n \"\"\"Raise 400 HTTPBadRequest error\"\"\"\n raise HTTPBadRequest('Bad request', 'Invalid HTTP method')\n\n\ndef create_method_not_allowed(allowed_methods):\n \"\"\"Creates a responder for \"405 Method Not Allowed\"\n\n Args:\n allowed_methods: A list of HTTP methods (uppercase) that should be\n returned in the Allow header.\n\n \"\"\"\n allowed = ', '.join(allowed_methods)\n\n def method_not_allowed(req, resp, **kwargs):\n resp.status = HTTP_405\n resp.set_header('Allow', allowed)\n\n return method_not_allowed\n\n\ndef create_default_options(allowed_methods):\n \"\"\"Creates a default responder for the OPTIONS method\n\n Args:\n allowed_methods: A list of HTTP methods (uppercase) that should be\n returned in the Allow header.\n\n \"\"\"\n allowed = ', '.join(allowed_methods)\n\n def on_options(req, resp, **kwargs):\n resp.status = HTTP_204\n resp.set_header('Allow', allowed)\n\n return on_options\n", "path": "falcon/responders.py"}]}
1,144
93
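A quick way to see the RFC 7231 requirement from the falcon diff above in action, using a throwaway response stand-in rather than Falcon's real `Response` class (the `FakeResponse` class is purely illustrative):

```python
HTTP_204 = "204 No Content"

class FakeResponse:
    def __init__(self):
        self.status = None
        self.headers = {}

    def set_header(self, name, value):
        self.headers[name] = value

def create_default_options(allowed_methods):
    allowed = ", ".join(allowed_methods)

    def on_options(req, resp, **kwargs):
        resp.status = HTTP_204
        resp.set_header("Allow", allowed)
        resp.set_header("Content-Length", "0")  # RFC 7231: explicit zero-length body

    return on_options

resp = FakeResponse()
create_default_options(["GET", "OPTIONS"])(None, resp)
assert resp.headers["Content-Length"] == "0"
```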
gh_patches_debug_13547
rasdani/github-patches
git_diff
kartoza__prj.app-263
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Disqus functionality is currently broken There should be disqus inline chat widgets on each version page and each entry page. Currently these are not working - can we work to fix it. </issue> <code> [start of django_project/core/settings/project.py] 1 # coding=utf-8 2 3 """Project level settings. 4 5 Adjust these values as needed but don't commit passwords etc. to any public 6 repository! 7 """ 8 9 import os # noqa 10 from django.utils.translation import ugettext_lazy as _ 11 from .utils import absolute_path 12 from .contrib import * # noqa 13 14 # Project apps 15 INSTALLED_APPS += ( 16 'base', 17 'changes', 18 'github_issue', 19 'vota', 20 'disqus', 21 ) 22 23 # Due to profile page does not available, this will redirect to home page after login 24 LOGIN_REDIRECT_URL = '/' 25 26 # How many versions to list in each project box 27 PROJECT_VERSION_LIST_SIZE = 10 28 29 # Set debug to false for production 30 DEBUG = TEMPLATE_DEBUG = False 31 32 SOUTH_TESTS_MIGRATE = False 33 34 35 # Set languages which want to be translated 36 LANGUAGES = ( 37 ('en', _('English')), 38 ('af', _('Afrikaans')), 39 ('id', _('Indonesian')), 40 ('ko', _('Korean')), 41 ) 42 43 # Set storage path for the translation files 44 LOCALE_PATHS = (absolute_path('locale'),) 45 46 47 MIDDLEWARE_CLASSES = ( 48 # For nav bar generation 49 'core.custom_middleware.NavContextMiddleware', 50 ) + MIDDLEWARE_CLASSES 51 52 # Project specific javascript files to be pipelined 53 # For third party libs like jquery should go in contrib.py 54 PIPELINE_JS['project'] = { 55 'source_filenames': ( 56 'js/csrf-ajax.js', 57 'js/changelog.js', 58 'js/github-issue.js' 59 ), 60 'output_filename': 'js/project.js', 61 } 62 63 # Project specific css files to be pipelined 64 # For third party libs like bootstrap should go in contrib.py 65 PIPELINE_CSS['project'] = { 66 'source_filenames': ( 67 'css/changelog.css', 68 ), 69 'output_filename': 'css/project.css', 70 'extra_context': { 71 'media': 'screen, projection', 72 }, 73 } 74 [end of django_project/core/settings/project.py] [start of django_project/core/settings/contrib.py] 1 # coding=utf-8 2 """ 3 core.settings.contrib 4 """ 5 from .base import * # noqa 6 7 # Extra installed apps - grapelli needs to be added before others 8 INSTALLED_APPS = ( 9 'grappelli', 10 ) + INSTALLED_APPS 11 12 INSTALLED_APPS += ( 13 'raven.contrib.django.raven_compat', # enable Raven plugin 14 'crispy_forms', 15 'widget_tweaks', # lets us add some bootstrap css to form elements 16 'easy_thumbnails', 17 'reversion', 18 'rosetta', 19 'embed_video', 20 'django_hashedfilenamestorage', 21 'django_countries', # for sponsor addresses 22 # 'user_map', 23 ) 24 25 26 MIGRATION_MODULES = {'accounts': 'core.migration'} 27 28 GRAPPELLI_ADMIN_TITLE = 'Site administration panel' 29 30 STOP_WORDS = ( 31 'a', 'an', 'and', 'if', 'is', 'the', 'in', 'i', 'you', 'other', 32 'this', 'that' 33 ) 34 35 CRISPY_TEMPLATE_PACK = 'bootstrap3' 36 37 # Easy-thumbnails options 38 THUMBNAIL_SUBDIR = 'thumbnails' 39 THUMBNAIL_ALIASES = { 40 '': { 41 'entry': {'size': (50, 50), 'crop': True}, 42 'medium-entry': {'size': (100, 100), 'crop': True}, 43 'large-entry': {'size': (400, 300), 'crop': True}, 44 'thumb300x200': {'size': (300, 200), 'crop': True}, 45 }, 46 } 47 48 # Pipeline related settings 49 50 INSTALLED_APPS += ( 51 'pipeline',) 52 53 MIDDLEWARE_CLASSES += ( 54 # For rosetta localisation 55 'django.middleware.locale.LocaleMiddleware', 56 ) 57 
58 DEFAULT_FILE_STORAGE = ( 59 'django_hashedfilenamestorage.storage.HashedFilenameFileSystemStorage') 60 61 # use underscore template function 62 PIPELINE_TEMPLATE_FUNC = '_.template' 63 64 # enable cached storage - requires uglify.js (node.js) 65 STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage' 66 67 # Contributed / third party js libs for pipeline compression 68 # For hand rolled js for this app, use project.py 69 PIPELINE_JS = {} 70 71 # Contributed / third party css for pipeline compression 72 # For hand rolled css for this app, use project.py 73 PIPELINE_CSS = {} 74 75 # These get enabled in prod.py 76 PIPELINE_ENABLED = False 77 PIPELINE_CSS_COMPRESSOR = None 78 PIPELINE_JS_COMPRESSOR = None 79 [end of django_project/core/settings/contrib.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/django_project/core/settings/contrib.py b/django_project/core/settings/contrib.py --- a/django_project/core/settings/contrib.py +++ b/django_project/core/settings/contrib.py @@ -20,8 +20,12 @@ 'django_hashedfilenamestorage', 'django_countries', # for sponsor addresses # 'user_map', + 'disqus', ) +# Set disqus and shortname +# noinspection PyUnresolvedReferences +from .secret import DISQUS_WEBSITE_SHORTNAME # noqa MIGRATION_MODULES = {'accounts': 'core.migration'} diff --git a/django_project/core/settings/project.py b/django_project/core/settings/project.py --- a/django_project/core/settings/project.py +++ b/django_project/core/settings/project.py @@ -17,7 +17,6 @@ 'changes', 'github_issue', 'vota', - 'disqus', ) # Due to profile page does not available, this will redirect to home page after login
{"golden_diff": "diff --git a/django_project/core/settings/contrib.py b/django_project/core/settings/contrib.py\n--- a/django_project/core/settings/contrib.py\n+++ b/django_project/core/settings/contrib.py\n@@ -20,8 +20,12 @@\n 'django_hashedfilenamestorage',\n 'django_countries', # for sponsor addresses\n # 'user_map',\n+ 'disqus',\n )\n \n+# Set disqus and shortname\n+# noinspection PyUnresolvedReferences\n+from .secret import DISQUS_WEBSITE_SHORTNAME # noqa\n \n MIGRATION_MODULES = {'accounts': 'core.migration'}\n \ndiff --git a/django_project/core/settings/project.py b/django_project/core/settings/project.py\n--- a/django_project/core/settings/project.py\n+++ b/django_project/core/settings/project.py\n@@ -17,7 +17,6 @@\n 'changes',\n 'github_issue',\n 'vota',\n- 'disqus',\n )\n \n # Due to profile page does not available, this will redirect to home page after login\n", "issue": "Disqus functionality is currently broken\nThere should be disqus inline chat widgets on each version page and each entry page. Currently these are not working - can we work to fix it.\n\n", "before_files": [{"content": "# coding=utf-8\n\n\"\"\"Project level settings.\n\nAdjust these values as needed but don't commit passwords etc. to any public\nrepository!\n\"\"\"\n\nimport os # noqa\nfrom django.utils.translation import ugettext_lazy as _\nfrom .utils import absolute_path\nfrom .contrib import * # noqa\n\n# Project apps\nINSTALLED_APPS += (\n 'base',\n 'changes',\n 'github_issue',\n 'vota',\n 'disqus',\n)\n\n# Due to profile page does not available, this will redirect to home page after login\nLOGIN_REDIRECT_URL = '/'\n\n# How many versions to list in each project box\nPROJECT_VERSION_LIST_SIZE = 10\n\n# Set debug to false for production\nDEBUG = TEMPLATE_DEBUG = False\n\nSOUTH_TESTS_MIGRATE = False\n\n\n# Set languages which want to be translated\nLANGUAGES = (\n ('en', _('English')),\n ('af', _('Afrikaans')),\n ('id', _('Indonesian')),\n ('ko', _('Korean')),\n)\n\n# Set storage path for the translation files\nLOCALE_PATHS = (absolute_path('locale'),)\n\n\nMIDDLEWARE_CLASSES = (\n # For nav bar generation\n 'core.custom_middleware.NavContextMiddleware',\n) + MIDDLEWARE_CLASSES\n\n# Project specific javascript files to be pipelined\n# For third party libs like jquery should go in contrib.py\nPIPELINE_JS['project'] = {\n 'source_filenames': (\n 'js/csrf-ajax.js',\n 'js/changelog.js',\n 'js/github-issue.js'\n ),\n 'output_filename': 'js/project.js',\n}\n\n# Project specific css files to be pipelined\n# For third party libs like bootstrap should go in contrib.py\nPIPELINE_CSS['project'] = {\n 'source_filenames': (\n 'css/changelog.css',\n ),\n 'output_filename': 'css/project.css',\n 'extra_context': {\n 'media': 'screen, projection',\n },\n}\n", "path": "django_project/core/settings/project.py"}, {"content": "# coding=utf-8\n\"\"\"\ncore.settings.contrib\n\"\"\"\nfrom .base import * # noqa\n\n# Extra installed apps - grapelli needs to be added before others\nINSTALLED_APPS = (\n 'grappelli',\n) + INSTALLED_APPS\n\nINSTALLED_APPS += (\n 'raven.contrib.django.raven_compat', # enable Raven plugin\n 'crispy_forms',\n 'widget_tweaks', # lets us add some bootstrap css to form elements\n 'easy_thumbnails',\n 'reversion',\n 'rosetta',\n 'embed_video',\n 'django_hashedfilenamestorage',\n 'django_countries', # for sponsor addresses\n # 'user_map',\n)\n\n\nMIGRATION_MODULES = {'accounts': 'core.migration'}\n\nGRAPPELLI_ADMIN_TITLE = 'Site administration panel'\n\nSTOP_WORDS = (\n 'a', 'an', 'and', 'if', 'is', 'the', 'in', 
'i', 'you', 'other',\n 'this', 'that'\n)\n\nCRISPY_TEMPLATE_PACK = 'bootstrap3'\n\n# Easy-thumbnails options\nTHUMBNAIL_SUBDIR = 'thumbnails'\nTHUMBNAIL_ALIASES = {\n '': {\n 'entry': {'size': (50, 50), 'crop': True},\n 'medium-entry': {'size': (100, 100), 'crop': True},\n 'large-entry': {'size': (400, 300), 'crop': True},\n 'thumb300x200': {'size': (300, 200), 'crop': True},\n },\n}\n\n# Pipeline related settings\n\nINSTALLED_APPS += (\n 'pipeline',)\n\nMIDDLEWARE_CLASSES += (\n # For rosetta localisation\n 'django.middleware.locale.LocaleMiddleware',\n)\n\nDEFAULT_FILE_STORAGE = (\n 'django_hashedfilenamestorage.storage.HashedFilenameFileSystemStorage')\n\n# use underscore template function\nPIPELINE_TEMPLATE_FUNC = '_.template'\n\n# enable cached storage - requires uglify.js (node.js)\nSTATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'\n\n# Contributed / third party js libs for pipeline compression\n# For hand rolled js for this app, use project.py\nPIPELINE_JS = {}\n\n# Contributed / third party css for pipeline compression\n# For hand rolled css for this app, use project.py\nPIPELINE_CSS = {}\n\n# These get enabled in prod.py\nPIPELINE_ENABLED = False\nPIPELINE_CSS_COMPRESSOR = None\nPIPELINE_JS_COMPRESSOR = None\n", "path": "django_project/core/settings/contrib.py"}]}
1,865
229
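The fix above moves `disqus` into the contrib app list and sources `DISQUS_WEBSITE_SHORTNAME` from a secrets module, which is the setting django-disqus needs to render its widgets. A hedged sketch of a defensive variant of that import for a settings fragment; the `ImportError` fallback is an addition of mine for local checkouts without `secret.py`, not part of the project's actual settings:

```python
# Settings fragment (not standalone-runnable): DISQUS_WEBSITE_SHORTNAME drives django-disqus.
try:
    # noinspection PyUnresolvedReferences
    from .secret import DISQUS_WEBSITE_SHORTNAME  # noqa
except ImportError:
    # Hypothetical fallback so a checkout without secret.py still boots;
    # comment widgets simply will not render with an empty shortname.
    DISQUS_WEBSITE_SHORTNAME = ""
```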
gh_patches_debug_20418
rasdani/github-patches
git_diff
nonebot__nonebot2-238
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Bug: 内置的single_session插件有一些bug **描述问题:** 内置的`single_session`插件只能处理有`get_session_id`方法的`event`,如果一个`matcher`监听了`metaevent`,那么其中的`run_preprocessor`会报错 **如何复现?** [这一行](https://github.com/nonebot/nonebot2/blob/93ffc93a80cf9e3103eb4a164e7b32ab3cdd0882/nonebot/plugins/single_session.py#L13)限制了只能监听有`get_session_id`的事件,但是对没有这个方法的事件没有做额外的处理,导致报错。 除此之外,下面的[判断语句](https://github.com/nonebot/nonebot2/blob/93ffc93a80cf9e3103eb4a164e7b32ab3cdd0882/nonebot/plugins/single_session.py#L16)也有问题,如果这个事件第一次遇到的话不应该被忽略 **期望的结果** 插件正常使用 ```` </issue> <code> [start of nonebot/plugins/single_session.py] 1 from typing import Dict, Optional 2 3 from nonebot.typing import T_State 4 from nonebot.matcher import Matcher 5 from nonebot.adapters import Bot, Event 6 from nonebot.message import run_preprocessor, run_postprocessor, IgnoredException 7 8 _running_matcher: Dict[str, int] = {} 9 10 11 @run_preprocessor 12 async def _(matcher: Matcher, bot: Bot, event: Event, state: T_State): 13 session_id = event.get_session_id() 14 event_id = id(event) 15 16 if _running_matcher.get(session_id, None) != event_id: 17 raise IgnoredException("Annother matcher running") 18 19 _running_matcher[session_id] = event_id 20 21 22 @run_postprocessor 23 async def _(matcher: Matcher, exception: Optional[Exception], bot: Bot, event: Event, state: T_State): 24 session_id = event.get_session_id() 25 if session_id in _running_matcher: 26 del _running_matcher[session_id] 27 [end of nonebot/plugins/single_session.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/nonebot/plugins/single_session.py b/nonebot/plugins/single_session.py --- a/nonebot/plugins/single_session.py +++ b/nonebot/plugins/single_session.py @@ -10,17 +10,23 @@ @run_preprocessor async def _(matcher: Matcher, bot: Bot, event: Event, state: T_State): - session_id = event.get_session_id() - event_id = id(event) - - if _running_matcher.get(session_id, None) != event_id: + try: + session_id = event.get_session_id() + except Exception: + return + current_event_id = id(event) + event_id = _running_matcher.get(session_id, None) + if event_id and event_id != current_event_id: raise IgnoredException("Annother matcher running") - _running_matcher[session_id] = event_id + _running_matcher[session_id] = current_event_id @run_postprocessor async def _(matcher: Matcher, exception: Optional[Exception], bot: Bot, event: Event, state: T_State): - session_id = event.get_session_id() + try: + session_id = event.get_session_id() + except Exception: + return if session_id in _running_matcher: del _running_matcher[session_id]
{"golden_diff": "diff --git a/nonebot/plugins/single_session.py b/nonebot/plugins/single_session.py\n--- a/nonebot/plugins/single_session.py\n+++ b/nonebot/plugins/single_session.py\n@@ -10,17 +10,23 @@\n \n @run_preprocessor\n async def _(matcher: Matcher, bot: Bot, event: Event, state: T_State):\n- session_id = event.get_session_id()\n- event_id = id(event)\n-\n- if _running_matcher.get(session_id, None) != event_id:\n+ try:\n+ session_id = event.get_session_id()\n+ except Exception:\n+ return\n+ current_event_id = id(event)\n+ event_id = _running_matcher.get(session_id, None)\n+ if event_id and event_id != current_event_id:\n raise IgnoredException(\"Annother matcher running\")\n \n- _running_matcher[session_id] = event_id\n+ _running_matcher[session_id] = current_event_id\n \n \n @run_postprocessor\n async def _(matcher: Matcher, exception: Optional[Exception], bot: Bot, event: Event, state: T_State):\n- session_id = event.get_session_id()\n+ try:\n+ session_id = event.get_session_id()\n+ except Exception:\n+ return\n if session_id in _running_matcher:\n del _running_matcher[session_id]\n", "issue": "Bug: \u5185\u7f6e\u7684single_session\u63d2\u4ef6\u6709\u4e00\u4e9bbug\n**\u63cf\u8ff0\u95ee\u9898\uff1a**\r\n\r\n\u5185\u7f6e\u7684`single_session`\u63d2\u4ef6\u53ea\u80fd\u5904\u7406\u6709`get_session_id`\u65b9\u6cd5\u7684`event`\uff0c\u5982\u679c\u4e00\u4e2a`matcher`\u76d1\u542c\u4e86`metaevent`\uff0c\u90a3\u4e48\u5176\u4e2d\u7684`run_preprocessor`\u4f1a\u62a5\u9519\r\n\r\n**\u5982\u4f55\u590d\u73b0\uff1f**\r\n\r\n[\u8fd9\u4e00\u884c](https://github.com/nonebot/nonebot2/blob/93ffc93a80cf9e3103eb4a164e7b32ab3cdd0882/nonebot/plugins/single_session.py#L13)\u9650\u5236\u4e86\u53ea\u80fd\u76d1\u542c\u6709`get_session_id`\u7684\u4e8b\u4ef6\uff0c\u4f46\u662f\u5bf9\u6ca1\u6709\u8fd9\u4e2a\u65b9\u6cd5\u7684\u4e8b\u4ef6\u6ca1\u6709\u505a\u989d\u5916\u7684\u5904\u7406\uff0c\u5bfc\u81f4\u62a5\u9519\u3002\r\n\u9664\u6b64\u4e4b\u5916\uff0c\u4e0b\u9762\u7684[\u5224\u65ad\u8bed\u53e5](https://github.com/nonebot/nonebot2/blob/93ffc93a80cf9e3103eb4a164e7b32ab3cdd0882/nonebot/plugins/single_session.py#L16)\u4e5f\u6709\u95ee\u9898\uff0c\u5982\u679c\u8fd9\u4e2a\u4e8b\u4ef6\u7b2c\u4e00\u6b21\u9047\u5230\u7684\u8bdd\u4e0d\u5e94\u8be5\u88ab\u5ffd\u7565\r\n\r\n**\u671f\u671b\u7684\u7ed3\u679c**\r\n\u63d2\u4ef6\u6b63\u5e38\u4f7f\u7528\r\n\r\n````\r\n\n", "before_files": [{"content": "from typing import Dict, Optional\n\nfrom nonebot.typing import T_State\nfrom nonebot.matcher import Matcher\nfrom nonebot.adapters import Bot, Event\nfrom nonebot.message import run_preprocessor, run_postprocessor, IgnoredException\n\n_running_matcher: Dict[str, int] = {}\n\n\n@run_preprocessor\nasync def _(matcher: Matcher, bot: Bot, event: Event, state: T_State):\n session_id = event.get_session_id()\n event_id = id(event)\n\n if _running_matcher.get(session_id, None) != event_id:\n raise IgnoredException(\"Annother matcher running\")\n\n _running_matcher[session_id] = event_id\n\n\n@run_postprocessor\nasync def _(matcher: Matcher, exception: Optional[Exception], bot: Bot, event: Event, state: T_State):\n session_id = event.get_session_id()\n if session_id in _running_matcher:\n del _running_matcher[session_id]\n", "path": "nonebot/plugins/single_session.py"}]}
1,051
303
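The core of the nonebot fix above is twofold: tolerate events that have no `get_session_id()` (meta events), and only reject an event when a *different* event already holds the session, so a first encounter is never ignored. The same guard pattern, reduced to a self-contained sketch:

```python
_running = {}  # session_id -> id() of the event currently holding it

def try_acquire(event) -> bool:
    try:
        session_id = event.get_session_id()
    except Exception:
        return True  # events without a session (e.g. meta events) are never blocked
    holder = _running.get(session_id)
    if holder is not None and holder != id(event):
        return False  # another event is mid-flight for this session
    _running[session_id] = id(event)
    return True

def release(event) -> None:
    try:
        _running.pop(event.get_session_id(), None)
    except Exception:
        pass
```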
gh_patches_debug_607
rasdani/github-patches
git_diff
pex-tool__pex-1446
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Release 2.1.49 On the docket: + [ ] Avoid re-using old ~/.pex/code/ caches. #1444 </issue> <code> [start of pex/version.py] 1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). 2 # Licensed under the Apache License, Version 2.0 (see LICENSE). 3 4 __version__ = "2.1.48" 5 [end of pex/version.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pex/version.py b/pex/version.py --- a/pex/version.py +++ b/pex/version.py @@ -1,4 +1,4 @@ # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). -__version__ = "2.1.48" +__version__ = "2.1.49"
{"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.48\"\n+__version__ = \"2.1.49\"\n", "issue": "Release 2.1.49\nOn the docket:\r\n+ [ ] Avoid re-using old ~/.pex/code/ caches. #1444 \n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.48\"\n", "path": "pex/version.py"}]}
618
97
gh_patches_debug_33430
rasdani/github-patches
git_diff
Mailu__Mailu-1268
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Make roundcube log to the console Roundcube is currently hard to debug because it logs into a special folder (/var/www/html/logs). It should log to stdout/stderr instead. </issue> <code> [start of webmails/roundcube/start.py] 1 #!/usr/bin/python3 2 3 import os 4 import logging as log 5 import sys 6 from socrate import conf 7 8 log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING")) 9 10 os.environ["MAX_FILESIZE"] = str(int(int(os.environ.get("MESSAGE_SIZE_LIMIT"))*0.66/1048576)) 11 12 conf.jinja("/php.ini", os.environ, "/usr/local/etc/php/conf.d/roundcube.ini") 13 14 # Fix some permissions 15 os.system("mkdir -p /data/gpg") 16 os.system("chown -R www-data:www-data /data") 17 18 # Run apache 19 os.execv("/usr/local/bin/apache2-foreground", ["apache2-foreground"]) 20 [end of webmails/roundcube/start.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/webmails/roundcube/start.py b/webmails/roundcube/start.py --- a/webmails/roundcube/start.py +++ b/webmails/roundcube/start.py @@ -4,16 +4,61 @@ import logging as log import sys from socrate import conf +import subprocess log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING")) os.environ["MAX_FILESIZE"] = str(int(int(os.environ.get("MESSAGE_SIZE_LIMIT"))*0.66/1048576)) +db_flavor=os.environ.get("ROUNDCUBE_DB_FLAVOR",os.environ.get("DB_FLAVOR","sqlite")) +if db_flavor=="sqlite": + os.environ["DB_DSNW"]="sqlite:////data/roundcube.db" +elif db_flavor=="mysql": + os.environ["DB_DSNW"]="mysql://%s:%s@%s/%s" % ( + os.environ.get("ROUNDCUBE_DB_USER","roundcube"), + os.environ.get("ROUNDCUBE_DB_PW"), + os.environ.get("ROUNDCUBE_DB_HOST",os.environ.get("DB_HOST","database")), + os.environ.get("ROUNDCUBE_DB_NAME","roundcube") + ) +elif db_flavor=="postgresql": + os.environ["DB_DSNW"]="pgsql://%s:%s@%s/%s" % ( + os.environ.get("ROUNDCUBE_DB_USER","roundcube"), + os.environ.get("ROUNDCUBE_DB_PW"), + os.environ.get("ROUNDCUBE_DB_HOST",os.environ.get("DB_HOST","database")), + os.environ.get("ROUNDCUBE_DB_NAME","roundcube") + ) +else: + print("Unknown ROUNDCUBE_DB_FLAVOR: %s",db_flavor) + exit(1) + + + conf.jinja("/php.ini", os.environ, "/usr/local/etc/php/conf.d/roundcube.ini") # Fix some permissions -os.system("mkdir -p /data/gpg") -os.system("chown -R www-data:www-data /data") +os.system("mkdir -p /data/gpg /var/www/html/logs") +os.system("touch /var/www/html/logs/errors") +os.system("chown -R www-data:www-data /data /var/www/html/logs") + +try: + print("Initializing database") + result=subprocess.check_output(["/var/www/html/bin/initdb.sh","--dir","/var/www/html/SQL"],stderr=subprocess.STDOUT) + print(result.decode()) +except subprocess.CalledProcessError as e: + if "already exists" in e.stdout.decode(): + print("Already initialzed") + else: + print(e.stdout.decode()) + quit(1) + +try: + print("Upgrading database") + subprocess.check_call(["/var/www/html/bin/update.sh","--version=?","-y"],stderr=subprocess.STDOUT) +except subprocess.CalledProcessError as e: + quit(1) + +# Tail roundcube logs +subprocess.Popen(["tail","-f","-n","0","/var/www/html/logs/errors"]) # Run apache os.execv("/usr/local/bin/apache2-foreground", ["apache2-foreground"])
{"golden_diff": "diff --git a/webmails/roundcube/start.py b/webmails/roundcube/start.py\n--- a/webmails/roundcube/start.py\n+++ b/webmails/roundcube/start.py\n@@ -4,16 +4,61 @@\n import logging as log\n import sys\n from socrate import conf\n+import subprocess\n \n log.basicConfig(stream=sys.stderr, level=os.environ.get(\"LOG_LEVEL\", \"WARNING\"))\n \n os.environ[\"MAX_FILESIZE\"] = str(int(int(os.environ.get(\"MESSAGE_SIZE_LIMIT\"))*0.66/1048576))\n \n+db_flavor=os.environ.get(\"ROUNDCUBE_DB_FLAVOR\",os.environ.get(\"DB_FLAVOR\",\"sqlite\"))\n+if db_flavor==\"sqlite\":\n+ os.environ[\"DB_DSNW\"]=\"sqlite:////data/roundcube.db\"\n+elif db_flavor==\"mysql\":\n+ os.environ[\"DB_DSNW\"]=\"mysql://%s:%s@%s/%s\" % (\n+ os.environ.get(\"ROUNDCUBE_DB_USER\",\"roundcube\"),\n+ os.environ.get(\"ROUNDCUBE_DB_PW\"),\n+ os.environ.get(\"ROUNDCUBE_DB_HOST\",os.environ.get(\"DB_HOST\",\"database\")),\n+ os.environ.get(\"ROUNDCUBE_DB_NAME\",\"roundcube\")\n+ )\n+elif db_flavor==\"postgresql\":\n+ os.environ[\"DB_DSNW\"]=\"pgsql://%s:%s@%s/%s\" % (\n+ os.environ.get(\"ROUNDCUBE_DB_USER\",\"roundcube\"),\n+ os.environ.get(\"ROUNDCUBE_DB_PW\"),\n+ os.environ.get(\"ROUNDCUBE_DB_HOST\",os.environ.get(\"DB_HOST\",\"database\")),\n+ os.environ.get(\"ROUNDCUBE_DB_NAME\",\"roundcube\")\n+ )\n+else:\n+ print(\"Unknown ROUNDCUBE_DB_FLAVOR: %s\",db_flavor)\n+ exit(1)\n+\n+\n+\n conf.jinja(\"/php.ini\", os.environ, \"/usr/local/etc/php/conf.d/roundcube.ini\")\n \n # Fix some permissions\n-os.system(\"mkdir -p /data/gpg\")\n-os.system(\"chown -R www-data:www-data /data\")\n+os.system(\"mkdir -p /data/gpg /var/www/html/logs\")\n+os.system(\"touch /var/www/html/logs/errors\")\n+os.system(\"chown -R www-data:www-data /data /var/www/html/logs\")\n+\n+try:\n+ print(\"Initializing database\")\n+ result=subprocess.check_output([\"/var/www/html/bin/initdb.sh\",\"--dir\",\"/var/www/html/SQL\"],stderr=subprocess.STDOUT)\n+ print(result.decode())\n+except subprocess.CalledProcessError as e:\n+ if \"already exists\" in e.stdout.decode():\n+ print(\"Already initialzed\")\n+ else:\n+ print(e.stdout.decode())\n+ quit(1)\n+\n+try:\n+ print(\"Upgrading database\")\n+ subprocess.check_call([\"/var/www/html/bin/update.sh\",\"--version=?\",\"-y\"],stderr=subprocess.STDOUT)\n+except subprocess.CalledProcessError as e:\n+ quit(1)\n+\n+# Tail roundcube logs\n+subprocess.Popen([\"tail\",\"-f\",\"-n\",\"0\",\"/var/www/html/logs/errors\"])\n \n # Run apache\n os.execv(\"/usr/local/bin/apache2-foreground\", [\"apache2-foreground\"])\n", "issue": "Make roundcube log to the console\nRoundcube is currently hard to debug because it logs into a special folder (/var/www/html/logs). It should log to stdout/stderr instead.\n", "before_files": [{"content": "#!/usr/bin/python3\n\nimport os\nimport logging as log\nimport sys\nfrom socrate import conf\n\nlog.basicConfig(stream=sys.stderr, level=os.environ.get(\"LOG_LEVEL\", \"WARNING\"))\n\nos.environ[\"MAX_FILESIZE\"] = str(int(int(os.environ.get(\"MESSAGE_SIZE_LIMIT\"))*0.66/1048576))\n\nconf.jinja(\"/php.ini\", os.environ, \"/usr/local/etc/php/conf.d/roundcube.ini\")\n\n# Fix some permissions\nos.system(\"mkdir -p /data/gpg\")\nos.system(\"chown -R www-data:www-data /data\")\n\n# Run apache\nos.execv(\"/usr/local/bin/apache2-foreground\", [\"apache2-foreground\"])\n", "path": "webmails/roundcube/start.py"}]}
766
722
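For the original Mailu complaint ("it should log to stdout/stderr"), the key line in the patch above is the `tail` subprocess: it streams Roundcube's file-based error log into the container's stdout so `docker logs` picks it up. A stripped-down sketch of that forwarding step, assuming the same log path and sufficient filesystem permissions:

```python
import os
import subprocess

LOG = "/var/www/html/logs/errors"

os.makedirs(os.path.dirname(LOG), exist_ok=True)
open(LOG, "a").close()  # make sure the file exists before tail attaches

# -n 0 skips any history; -f follows new lines; the child inherits our stdout/stderr,
# so everything Roundcube writes to the file shows up in the container log stream.
subprocess.Popen(["tail", "-f", "-n", "0", LOG])
```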
gh_patches_debug_13238
rasdani/github-patches
git_diff
mindsdb__mindsdb-2007
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Response contains 'nan' instead of `null` if do ``` select null, null, null from information_schema.tables limit 1; ``` then response will be: ``` +------+--------+--------+ | None | None_2 | None_3 | +------+--------+--------+ | nan | nan | nan | +------+--------+--------+ ``` row values must be `null` </issue> <code> [start of mindsdb/api/mysql/mysql_proxy/utilities/sql.py] 1 import duckdb 2 import pandas as pd 3 from mindsdb_sql import parse_sql 4 from mindsdb_sql.parser.ast import Select, Identifier, BinaryOperation, OrderBy 5 from mindsdb_sql.render.sqlalchemy_render import SqlalchemyRender 6 7 from mindsdb.utilities.log import log 8 9 10 def _remove_table_name(root): 11 if isinstance(root, BinaryOperation): 12 _remove_table_name(root.args[0]) 13 _remove_table_name(root.args[1]) 14 elif isinstance(root, Identifier): 15 root.parts = [root.parts[-1]] 16 17 18 def query_df(df, query): 19 """ Perform simple query ('select' from one table, without subqueries and joins) on DataFrame. 20 21 Args: 22 df (pandas.DataFrame): data 23 query (mindsdb_sql.parser.ast.Select | str): select query 24 25 Returns: 26 pandas.DataFrame 27 """ 28 29 if isinstance(query, str): 30 query_ast = parse_sql(query, dialect='mysql') 31 else: 32 query_ast = query 33 34 if isinstance(query_ast, Select) is False or isinstance(query_ast.from_table, Identifier) is False: 35 raise Exception("Only 'SELECT from TABLE' statements supported for internal query") 36 37 query_ast.from_table.parts = ['df_table'] 38 for identifier in query_ast.targets: 39 if isinstance(identifier, Identifier): 40 identifier.parts = [identifier.parts[-1]] 41 if isinstance(query_ast.order_by, list): 42 for orderby in query_ast.order_by: 43 if isinstance(orderby, OrderBy) and isinstance(orderby.field, Identifier): 44 orderby.field.parts = [orderby.field.parts[-1]] 45 _remove_table_name(query_ast.where) 46 47 render = SqlalchemyRender('postgres') 48 try: 49 query_str = render.get_string(query_ast, with_failback=False) 50 except Exception as e: 51 log.error(f"Exception during query casting to 'postgres' dialect. Query: {str(query)}. Error: {e}") 52 query_str = render.get_string(query_ast, with_failback=True) 53 54 res = duckdb.query_df(df, 'df_table', query_str) 55 result_df = res.df() 56 result_df = result_df.where(pd.notnull(result_df), None) 57 return result_df 58 [end of mindsdb/api/mysql/mysql_proxy/utilities/sql.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/mindsdb/api/mysql/mysql_proxy/utilities/sql.py b/mindsdb/api/mysql/mysql_proxy/utilities/sql.py --- a/mindsdb/api/mysql/mysql_proxy/utilities/sql.py +++ b/mindsdb/api/mysql/mysql_proxy/utilities/sql.py @@ -1,5 +1,5 @@ import duckdb -import pandas as pd +import numpy as np from mindsdb_sql import parse_sql from mindsdb_sql.parser.ast import Select, Identifier, BinaryOperation, OrderBy from mindsdb_sql.render.sqlalchemy_render import SqlalchemyRender @@ -53,5 +53,5 @@ res = duckdb.query_df(df, 'df_table', query_str) result_df = res.df() - result_df = result_df.where(pd.notnull(result_df), None) + result_df = result_df.replace({np.nan: None}) return result_df
{"golden_diff": "diff --git a/mindsdb/api/mysql/mysql_proxy/utilities/sql.py b/mindsdb/api/mysql/mysql_proxy/utilities/sql.py\n--- a/mindsdb/api/mysql/mysql_proxy/utilities/sql.py\n+++ b/mindsdb/api/mysql/mysql_proxy/utilities/sql.py\n@@ -1,5 +1,5 @@\n import duckdb\n-import pandas as pd\n+import numpy as np\n from mindsdb_sql import parse_sql\n from mindsdb_sql.parser.ast import Select, Identifier, BinaryOperation, OrderBy\n from mindsdb_sql.render.sqlalchemy_render import SqlalchemyRender\n@@ -53,5 +53,5 @@\n \n res = duckdb.query_df(df, 'df_table', query_str)\n result_df = res.df()\n- result_df = result_df.where(pd.notnull(result_df), None)\n+ result_df = result_df.replace({np.nan: None})\n return result_df\n", "issue": "Response contains 'nan' instead of `null`\nif do \r\n```\r\nselect null, null, null from information_schema.tables limit 1;\r\n```\r\nthen response will be:\r\n```\r\n+------+--------+--------+\r\n| None | None_2 | None_3 |\r\n+------+--------+--------+\r\n| nan | nan | nan |\r\n+------+--------+--------+\r\n```\r\nrow values must be `null`\r\n\n", "before_files": [{"content": "import duckdb\nimport pandas as pd\nfrom mindsdb_sql import parse_sql\nfrom mindsdb_sql.parser.ast import Select, Identifier, BinaryOperation, OrderBy\nfrom mindsdb_sql.render.sqlalchemy_render import SqlalchemyRender\n\nfrom mindsdb.utilities.log import log\n\n\ndef _remove_table_name(root):\n if isinstance(root, BinaryOperation):\n _remove_table_name(root.args[0])\n _remove_table_name(root.args[1])\n elif isinstance(root, Identifier):\n root.parts = [root.parts[-1]]\n\n\ndef query_df(df, query):\n \"\"\" Perform simple query ('select' from one table, without subqueries and joins) on DataFrame.\n\n Args:\n df (pandas.DataFrame): data\n query (mindsdb_sql.parser.ast.Select | str): select query\n\n Returns:\n pandas.DataFrame\n \"\"\"\n\n if isinstance(query, str):\n query_ast = parse_sql(query, dialect='mysql')\n else:\n query_ast = query\n\n if isinstance(query_ast, Select) is False or isinstance(query_ast.from_table, Identifier) is False:\n raise Exception(\"Only 'SELECT from TABLE' statements supported for internal query\")\n\n query_ast.from_table.parts = ['df_table']\n for identifier in query_ast.targets:\n if isinstance(identifier, Identifier):\n identifier.parts = [identifier.parts[-1]]\n if isinstance(query_ast.order_by, list):\n for orderby in query_ast.order_by:\n if isinstance(orderby, OrderBy) and isinstance(orderby.field, Identifier):\n orderby.field.parts = [orderby.field.parts[-1]]\n _remove_table_name(query_ast.where)\n\n render = SqlalchemyRender('postgres')\n try:\n query_str = render.get_string(query_ast, with_failback=False)\n except Exception as e:\n log.error(f\"Exception during query casting to 'postgres' dialect. Query: {str(query)}. Error: {e}\")\n query_str = render.get_string(query_ast, with_failback=True)\n\n res = duckdb.query_df(df, 'df_table', query_str)\n result_df = res.df()\n result_df = result_df.where(pd.notnull(result_df), None)\n return result_df\n", "path": "mindsdb/api/mysql/mysql_proxy/utilities/sql.py"}]}
1,204
191
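The mindsdb patch above hinges on a dtype subtlety: `DataFrame.where(pd.notnull(df), None)` keeps numeric columns numeric in many pandas versions, so the `None` it writes is coerced straight back to `NaN` before the MySQL proxy serialises the row, while `DataFrame.replace({np.nan: None})` upcasts the affected columns to `object`, letting a real `None` survive into the response. A minimal sketch of the difference (exact `where` behaviour varies across pandas versions, so the first printed value is illustrative):

```python
import numpy as np
import pandas as pd

df = pd.DataFrame({"a": [np.nan, 1.0]})  # float64 column: None is stored as NaN

kept_numeric = df.where(pd.notnull(df), None)  # may stay float64 -> NaN survives
as_object = df.replace({np.nan: None})         # upcast to object -> real None

print(kept_numeric.dtypes["a"], kept_numeric.iloc[0, 0])
print(as_object.dtypes["a"], as_object.iloc[0, 0])  # object None
```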
gh_patches_debug_28834
rasdani/github-patches
git_diff
mampfes__hacs_waste_collection_schedule-1837
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [Bug]: ART Trier Germany collecting no more Data ### I Have A Problem With: A specific source ### What's Your Problem ART Trier Germany collecting no more Data. It worked till yesterday. I think they have a new homepage. The Calender is now empty, only one Entry on February 26th: A.R.T. Wichtiger Hinweis! The link (https://www.art-trier.de/cms/abfuhrtermine-1002.html) in the Description for ART Trier doesn't work anymore. Get a 404 Error Page. Ver. 1.45.1 ### Source (if relevant) art_trier_de ### Logs ```Shell no relevant logs ``` ### Relevant Configuration ```YAML - name: art_trier_de args: district: "Fellerich" zip_code: "54456" ``` ### Checklist Source Error - [ ] Use the example parameters for your source (often available in the documentation) (don't forget to restart Home Assistant after changing the configuration) - [X] Checked that the website of your service provider is still working - [ ] Tested my attributes on the service provider website (if possible) - [X] I have tested with the latest version of the integration (master) (for HACS in the 3 dot menu of the integration click on "Redownload" and choose master as version) ### Checklist Sensor Error - [X] Checked in the Home Assistant Calendar tab if the event names match the types names (if types argument is used) ### Required - [X] I have searched past (closed AND opened) issues to see if this bug has already been reported, and it hasn't been. - [X] I understand that people give their precious time for free, and thus I've done my very best to make this problem as easy as possible to investigate. </issue> <code> [start of custom_components/waste_collection_schedule/waste_collection_schedule/source/art_trier_de.py] 1 import contextlib 2 from datetime import datetime 3 from typing import Optional 4 from urllib.parse import quote 5 6 import requests 7 from waste_collection_schedule import Collection # type: ignore[attr-defined] 8 from waste_collection_schedule.service.ICS import ICS 9 10 TITLE = "ART Trier" 11 DESCRIPTION = "Source for waste collection of ART Trier." 12 URL = "https://www.art-trier.de" 13 TEST_CASES = { 14 "Trier": { 15 "zip_code": "54296", 16 "district": "Stadt Trier, Universitätsring", 17 }, # # https://www.art-trier.de/ics-feed/54296_trier_universitaetsring_1-1800.ics 18 "Schweich": { 19 "zip_code": "54338", 20 "district": "Schweich (inkl. 
Issel)", 21 }, # https://www.art-trier.de/ics-feed/54338_schweich_inkl_issel_1-1800.ics 22 "Dreis": { 23 "zip_code": "54518", 24 "district": "Dreis", 25 }, # https://www.art-trier.de/ics-feed/54518_dreis_1-1800.ics 26 "Wittlich Marktplatz": { 27 "zip_code": "54516", 28 "district": "Wittlich, Marktplatz", 29 }, # https://www.art-trier.de/ics-feed/54516_wittlich_marktplatz_1-1800.ics 30 "Wittlich Wengerohr": { 31 "zip_code": "54516", 32 "district": "Wittlich-Wengerohr", 33 }, # https://www.art-trier.de/ics-feed/54516_wittlich%2Dwengerohr_1-1800.ics 34 } 35 36 API_URL = "https://www.art-trier.de/ics-feed" 37 REMINDER_DAY = ( 38 "0" # The calendar event should be on the same day as the waste collection 39 ) 40 REMINDER_TIME = "0600" # The calendar event should start on any hour of the correct day, so this does not matter much 41 ICON_MAP = { 42 "Altpapier": "mdi:package-variant", 43 "Restmüll": "mdi:trash-can", 44 "Gelber Sack": "mdi:recycle", 45 } 46 SPECIAL_CHARS = str.maketrans( 47 { 48 " ": "_", 49 "ä": "ae", 50 "ü": "ue", 51 "ö": "oe", 52 "ß": "ss", 53 "(": None, 54 ")": None, 55 ",": None, 56 ".": None, 57 } 58 ) 59 60 61 class Source: 62 def __init__(self, district: str, zip_code: str): 63 self._district = quote( 64 district.lower().removeprefix("stadt ").translate(SPECIAL_CHARS).strip() 65 ) 66 self._zip_code = zip_code 67 self._ics = ICS(regex=r"^A.R.T. Abfuhrtermin: (.*)", split_at=r" & ") 68 69 def fetch(self): 70 url = f"{API_URL}/{self._zip_code}_{self._district}_{REMINDER_DAY}-{REMINDER_TIME}.ics" 71 72 res = requests.get(url) 73 res.raise_for_status() 74 75 schedule = self._ics.convert(res.text) 76 77 return [ 78 Collection(date=entry[0], t=entry[1], icon=ICON_MAP.get(entry[1])) 79 for entry in schedule 80 ] 81 [end of custom_components/waste_collection_schedule/waste_collection_schedule/source/art_trier_de.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/custom_components/waste_collection_schedule/waste_collection_schedule/source/art_trier_de.py b/custom_components/waste_collection_schedule/waste_collection_schedule/source/art_trier_de.py --- a/custom_components/waste_collection_schedule/waste_collection_schedule/source/art_trier_de.py +++ b/custom_components/waste_collection_schedule/waste_collection_schedule/source/art_trier_de.py @@ -1,13 +1,11 @@ -import contextlib -from datetime import datetime -from typing import Optional +import logging from urllib.parse import quote import requests from waste_collection_schedule import Collection # type: ignore[attr-defined] from waste_collection_schedule.service.ICS import ICS -TITLE = "ART Trier" +TITLE = "ART Trier (Depreciated)" DESCRIPTION = "Source for waste collection of ART Trier." URL = "https://www.art-trier.de" TEST_CASES = { @@ -56,6 +54,7 @@ ".": None, } ) +LOGGER = logging.getLogger(__name__) class Source: @@ -67,7 +66,11 @@ self._ics = ICS(regex=r"^A.R.T. Abfuhrtermin: (.*)", split_at=r" & ") def fetch(self): - url = f"{API_URL}/{self._zip_code}_{self._district}_{REMINDER_DAY}-{REMINDER_TIME}.ics" + LOGGER.warning( + "The ART Trier source is deprecated and might not work with all addresses anymore." + " Please use the ICS instead: https://github.com/mampfes/hacs_waste_collection_schedule/blob/master/doc/ics/art_trier_de.md" + ) + url = f"{API_URL}/{self._zip_code}:{self._district}::@{REMINDER_DAY}-{REMINDER_TIME}.ics" res = requests.get(url) res.raise_for_status()
{"golden_diff": "diff --git a/custom_components/waste_collection_schedule/waste_collection_schedule/source/art_trier_de.py b/custom_components/waste_collection_schedule/waste_collection_schedule/source/art_trier_de.py\n--- a/custom_components/waste_collection_schedule/waste_collection_schedule/source/art_trier_de.py\n+++ b/custom_components/waste_collection_schedule/waste_collection_schedule/source/art_trier_de.py\n@@ -1,13 +1,11 @@\n-import contextlib\n-from datetime import datetime\n-from typing import Optional\n+import logging\n from urllib.parse import quote\n \n import requests\n from waste_collection_schedule import Collection # type: ignore[attr-defined]\n from waste_collection_schedule.service.ICS import ICS\n \n-TITLE = \"ART Trier\"\n+TITLE = \"ART Trier (Depreciated)\"\n DESCRIPTION = \"Source for waste collection of ART Trier.\"\n URL = \"https://www.art-trier.de\"\n TEST_CASES = {\n@@ -56,6 +54,7 @@\n \".\": None,\n }\n )\n+LOGGER = logging.getLogger(__name__)\n \n \n class Source:\n@@ -67,7 +66,11 @@\n self._ics = ICS(regex=r\"^A.R.T. Abfuhrtermin: (.*)\", split_at=r\" & \")\n \n def fetch(self):\n- url = f\"{API_URL}/{self._zip_code}_{self._district}_{REMINDER_DAY}-{REMINDER_TIME}.ics\"\n+ LOGGER.warning(\n+ \"The ART Trier source is deprecated and might not work with all addresses anymore.\"\n+ \" Please use the ICS instead: https://github.com/mampfes/hacs_waste_collection_schedule/blob/master/doc/ics/art_trier_de.md\"\n+ )\n+ url = f\"{API_URL}/{self._zip_code}:{self._district}::@{REMINDER_DAY}-{REMINDER_TIME}.ics\"\n \n res = requests.get(url)\n res.raise_for_status()\n", "issue": "[Bug]: ART Trier Germany collecting no more Data\n### I Have A Problem With:\n\nA specific source\n\n### What's Your Problem\n\nART Trier Germany collecting no more Data. It worked till yesterday. I think they have a new homepage.\r\nThe Calender is now empty, only one Entry on February 26th: A.R.T. Wichtiger Hinweis!\r\nThe link (https://www.art-trier.de/cms/abfuhrtermine-1002.html) in the Description for ART Trier doesn't work anymore. Get a 404 Error Page.\r\n\r\nVer. 
1.45.1\n\n### Source (if relevant)\n\nart_trier_de\n\n### Logs\n\n```Shell\nno relevant logs\n```\n\n\n### Relevant Configuration\n\n```YAML\n- name: art_trier_de\r\n args:\r\n district: \"Fellerich\"\r\n zip_code: \"54456\"\n```\n\n\n### Checklist Source Error\n\n- [ ] Use the example parameters for your source (often available in the documentation) (don't forget to restart Home Assistant after changing the configuration)\n- [X] Checked that the website of your service provider is still working\n- [ ] Tested my attributes on the service provider website (if possible)\n- [X] I have tested with the latest version of the integration (master) (for HACS in the 3 dot menu of the integration click on \"Redownload\" and choose master as version)\n\n### Checklist Sensor Error\n\n- [X] Checked in the Home Assistant Calendar tab if the event names match the types names (if types argument is used)\n\n### Required\n\n- [X] I have searched past (closed AND opened) issues to see if this bug has already been reported, and it hasn't been.\n- [X] I understand that people give their precious time for free, and thus I've done my very best to make this problem as easy as possible to investigate.\n", "before_files": [{"content": "import contextlib\nfrom datetime import datetime\nfrom typing import Optional\nfrom urllib.parse import quote\n\nimport requests\nfrom waste_collection_schedule import Collection # type: ignore[attr-defined]\nfrom waste_collection_schedule.service.ICS import ICS\n\nTITLE = \"ART Trier\"\nDESCRIPTION = \"Source for waste collection of ART Trier.\"\nURL = \"https://www.art-trier.de\"\nTEST_CASES = {\n \"Trier\": {\n \"zip_code\": \"54296\",\n \"district\": \"Stadt Trier, Universit\u00e4tsring\",\n }, # # https://www.art-trier.de/ics-feed/54296_trier_universitaetsring_1-1800.ics\n \"Schweich\": {\n \"zip_code\": \"54338\",\n \"district\": \"Schweich (inkl. Issel)\",\n }, # https://www.art-trier.de/ics-feed/54338_schweich_inkl_issel_1-1800.ics\n \"Dreis\": {\n \"zip_code\": \"54518\",\n \"district\": \"Dreis\",\n }, # https://www.art-trier.de/ics-feed/54518_dreis_1-1800.ics\n \"Wittlich Marktplatz\": {\n \"zip_code\": \"54516\",\n \"district\": \"Wittlich, Marktplatz\",\n }, # https://www.art-trier.de/ics-feed/54516_wittlich_marktplatz_1-1800.ics\n \"Wittlich Wengerohr\": {\n \"zip_code\": \"54516\",\n \"district\": \"Wittlich-Wengerohr\",\n }, # https://www.art-trier.de/ics-feed/54516_wittlich%2Dwengerohr_1-1800.ics\n}\n\nAPI_URL = \"https://www.art-trier.de/ics-feed\"\nREMINDER_DAY = (\n \"0\" # The calendar event should be on the same day as the waste collection\n)\nREMINDER_TIME = \"0600\" # The calendar event should start on any hour of the correct day, so this does not matter much\nICON_MAP = {\n \"Altpapier\": \"mdi:package-variant\",\n \"Restm\u00fcll\": \"mdi:trash-can\",\n \"Gelber Sack\": \"mdi:recycle\",\n}\nSPECIAL_CHARS = str.maketrans(\n {\n \" \": \"_\",\n \"\u00e4\": \"ae\",\n \"\u00fc\": \"ue\",\n \"\u00f6\": \"oe\",\n \"\u00df\": \"ss\",\n \"(\": None,\n \")\": None,\n \",\": None,\n \".\": None,\n }\n)\n\n\nclass Source:\n def __init__(self, district: str, zip_code: str):\n self._district = quote(\n district.lower().removeprefix(\"stadt \").translate(SPECIAL_CHARS).strip()\n )\n self._zip_code = zip_code\n self._ics = ICS(regex=r\"^A.R.T. 
Abfuhrtermin: (.*)\", split_at=r\" & \")\n\n def fetch(self):\n url = f\"{API_URL}/{self._zip_code}_{self._district}_{REMINDER_DAY}-{REMINDER_TIME}.ics\"\n\n res = requests.get(url)\n res.raise_for_status()\n\n schedule = self._ics.convert(res.text)\n\n return [\n Collection(date=entry[0], t=entry[1], icon=ICON_MAP.get(entry[1]))\n for entry in schedule\n ]\n", "path": "custom_components/waste_collection_schedule/waste_collection_schedule/source/art_trier_de.py"}]}
1,913
403
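The art_trier fix is essentially a URL-scheme migration plus a deprecation notice: the ICS feed path moves from `{zip}_{district}_{day}-{time}.ics` to `{zip}:{district}::@{day}-{time}.ics`, and a logger warning points users at the generic ICS source instead. A sketch of the new feed-URL construction, reusing the constants and quoting from the record (the provider has changed its site before, so treat the endpoint itself as an assumption):

```python
from urllib.parse import quote

API_URL = "https://www.art-trier.de/ics-feed"
REMINDER_DAY, REMINDER_TIME = "0", "0600"

zip_code = "54518"
district = quote("dreis")  # already lower-cased/char-mapped by the source's normaliser

url = f"{API_URL}/{zip_code}:{district}::@{REMINDER_DAY}-{REMINDER_TIME}.ics"
print(url)  # https://www.art-trier.de/ics-feed/54518:dreis::@0-0600.ics
```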
gh_patches_debug_9267
rasdani/github-patches
git_diff
pre-commit__pre-commit-1480
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> random.shuffle's random= argument got deprecated Related issue: [bpo-40465](https://bugs.python.org/issue40465). ``` black..................................................................../home/isidentical/.venv/lib/python3.10/site-packages/pre_commit/languages/helpers.py:95: DeprecationWarning: The *random* parameter to shuffle() has been deprecated since Python 3.9 and will be removed in a subsequent version. random.shuffle(seq, random=fixed_random.random) Passed ``` </issue> <code> [start of pre_commit/languages/helpers.py] 1 import multiprocessing 2 import os 3 import random 4 from typing import Any 5 from typing import List 6 from typing import Optional 7 from typing import overload 8 from typing import Sequence 9 from typing import Tuple 10 from typing import TYPE_CHECKING 11 12 import pre_commit.constants as C 13 from pre_commit.hook import Hook 14 from pre_commit.prefix import Prefix 15 from pre_commit.util import cmd_output_b 16 from pre_commit.xargs import xargs 17 18 if TYPE_CHECKING: 19 from typing import NoReturn 20 21 FIXED_RANDOM_SEED = 1542676186 22 23 24 def run_setup_cmd(prefix: Prefix, cmd: Tuple[str, ...]) -> None: 25 cmd_output_b(*cmd, cwd=prefix.prefix_dir) 26 27 28 @overload 29 def environment_dir(d: None, language_version: str) -> None: ... 30 @overload 31 def environment_dir(d: str, language_version: str) -> str: ... 32 33 34 def environment_dir(d: Optional[str], language_version: str) -> Optional[str]: 35 if d is None: 36 return None 37 else: 38 return f'{d}-{language_version}' 39 40 41 def assert_version_default(binary: str, version: str) -> None: 42 if version != C.DEFAULT: 43 raise AssertionError( 44 f'For now, pre-commit requires system-installed {binary}', 45 ) 46 47 48 def assert_no_additional_deps( 49 lang: str, 50 additional_deps: Sequence[str], 51 ) -> None: 52 if additional_deps: 53 raise AssertionError( 54 f'For now, pre-commit does not support ' 55 f'additional_dependencies for {lang}', 56 ) 57 58 59 def basic_get_default_version() -> str: 60 return C.DEFAULT 61 62 63 def basic_healthy(prefix: Prefix, language_version: str) -> bool: 64 return True 65 66 67 def no_install( 68 prefix: Prefix, 69 version: str, 70 additional_dependencies: Sequence[str], 71 ) -> 'NoReturn': 72 raise AssertionError('This type is not installable') 73 74 75 def target_concurrency(hook: Hook) -> int: 76 if hook.require_serial or 'PRE_COMMIT_NO_CONCURRENCY' in os.environ: 77 return 1 78 else: 79 # Travis appears to have a bunch of CPUs, but we can't use them all. 80 if 'TRAVIS' in os.environ: 81 return 2 82 else: 83 try: 84 return multiprocessing.cpu_count() 85 except NotImplementedError: 86 return 1 87 88 89 def _shuffled(seq: Sequence[str]) -> List[str]: 90 """Deterministically shuffle""" 91 fixed_random = random.Random() 92 fixed_random.seed(FIXED_RANDOM_SEED, version=1) 93 94 seq = list(seq) 95 random.shuffle(seq, random=fixed_random.random) 96 return seq 97 98 99 def run_xargs( 100 hook: Hook, 101 cmd: Tuple[str, ...], 102 file_args: Sequence[str], 103 **kwargs: Any, 104 ) -> Tuple[int, bytes]: 105 # Shuffle the files so that they more evenly fill out the xargs partitions, 106 # but do it deterministically in case a hook cares about ordering. 
107 file_args = _shuffled(file_args) 108 kwargs['target_concurrency'] = target_concurrency(hook) 109 return xargs(cmd, file_args, **kwargs) 110 [end of pre_commit/languages/helpers.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pre_commit/languages/helpers.py b/pre_commit/languages/helpers.py --- a/pre_commit/languages/helpers.py +++ b/pre_commit/languages/helpers.py @@ -18,7 +18,7 @@ if TYPE_CHECKING: from typing import NoReturn -FIXED_RANDOM_SEED = 1542676186 +FIXED_RANDOM_SEED = 1542676187 def run_setup_cmd(prefix: Prefix, cmd: Tuple[str, ...]) -> None: @@ -92,7 +92,7 @@ fixed_random.seed(FIXED_RANDOM_SEED, version=1) seq = list(seq) - random.shuffle(seq, random=fixed_random.random) + fixed_random.shuffle(seq) return seq
{"golden_diff": "diff --git a/pre_commit/languages/helpers.py b/pre_commit/languages/helpers.py\n--- a/pre_commit/languages/helpers.py\n+++ b/pre_commit/languages/helpers.py\n@@ -18,7 +18,7 @@\n if TYPE_CHECKING:\n from typing import NoReturn\n \n-FIXED_RANDOM_SEED = 1542676186\n+FIXED_RANDOM_SEED = 1542676187\n \n \n def run_setup_cmd(prefix: Prefix, cmd: Tuple[str, ...]) -> None:\n@@ -92,7 +92,7 @@\n fixed_random.seed(FIXED_RANDOM_SEED, version=1)\n \n seq = list(seq)\n- random.shuffle(seq, random=fixed_random.random)\n+ fixed_random.shuffle(seq)\n return seq\n", "issue": "random.shuffle's random= argument got deprecated\nRelated issue: [bpo-40465](https://bugs.python.org/issue40465).\r\n```\r\nblack..................................................................../home/isidentical/.venv/lib/python3.10/site-packages/pre_commit/languages/helpers.py:95: DeprecationWarning: The *random* parameter to shuffle() has been deprecated\r\nsince Python 3.9 and will be removed in a subsequent version.\r\n random.shuffle(seq, random=fixed_random.random)\r\nPassed\r\n```\r\n\r\n\n", "before_files": [{"content": "import multiprocessing\nimport os\nimport random\nfrom typing import Any\nfrom typing import List\nfrom typing import Optional\nfrom typing import overload\nfrom typing import Sequence\nfrom typing import Tuple\nfrom typing import TYPE_CHECKING\n\nimport pre_commit.constants as C\nfrom pre_commit.hook import Hook\nfrom pre_commit.prefix import Prefix\nfrom pre_commit.util import cmd_output_b\nfrom pre_commit.xargs import xargs\n\nif TYPE_CHECKING:\n from typing import NoReturn\n\nFIXED_RANDOM_SEED = 1542676186\n\n\ndef run_setup_cmd(prefix: Prefix, cmd: Tuple[str, ...]) -> None:\n cmd_output_b(*cmd, cwd=prefix.prefix_dir)\n\n\n@overload\ndef environment_dir(d: None, language_version: str) -> None: ...\n@overload\ndef environment_dir(d: str, language_version: str) -> str: ...\n\n\ndef environment_dir(d: Optional[str], language_version: str) -> Optional[str]:\n if d is None:\n return None\n else:\n return f'{d}-{language_version}'\n\n\ndef assert_version_default(binary: str, version: str) -> None:\n if version != C.DEFAULT:\n raise AssertionError(\n f'For now, pre-commit requires system-installed {binary}',\n )\n\n\ndef assert_no_additional_deps(\n lang: str,\n additional_deps: Sequence[str],\n) -> None:\n if additional_deps:\n raise AssertionError(\n f'For now, pre-commit does not support '\n f'additional_dependencies for {lang}',\n )\n\n\ndef basic_get_default_version() -> str:\n return C.DEFAULT\n\n\ndef basic_healthy(prefix: Prefix, language_version: str) -> bool:\n return True\n\n\ndef no_install(\n prefix: Prefix,\n version: str,\n additional_dependencies: Sequence[str],\n) -> 'NoReturn':\n raise AssertionError('This type is not installable')\n\n\ndef target_concurrency(hook: Hook) -> int:\n if hook.require_serial or 'PRE_COMMIT_NO_CONCURRENCY' in os.environ:\n return 1\n else:\n # Travis appears to have a bunch of CPUs, but we can't use them all.\n if 'TRAVIS' in os.environ:\n return 2\n else:\n try:\n return multiprocessing.cpu_count()\n except NotImplementedError:\n return 1\n\n\ndef _shuffled(seq: Sequence[str]) -> List[str]:\n \"\"\"Deterministically shuffle\"\"\"\n fixed_random = random.Random()\n fixed_random.seed(FIXED_RANDOM_SEED, version=1)\n\n seq = list(seq)\n random.shuffle(seq, random=fixed_random.random)\n return seq\n\n\ndef run_xargs(\n hook: Hook,\n cmd: Tuple[str, ...],\n file_args: Sequence[str],\n **kwargs: Any,\n) -> Tuple[int, bytes]:\n # Shuffle the files so that 
they more evenly fill out the xargs partitions,\n # but do it deterministically in case a hook cares about ordering.\n file_args = _shuffled(file_args)\n kwargs['target_concurrency'] = target_concurrency(hook)\n return xargs(cmd, file_args, **kwargs)\n", "path": "pre_commit/languages/helpers.py"}]}
1,556
175
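The pre-commit change swaps the deprecated module-level call `random.shuffle(seq, random=fixed_random.random)` for the bound method `fixed_random.shuffle(seq)`: the `random=` keyword was deprecated in Python 3.9 (bpo-40465), and a seeded `random.Random` instance already shuffles deterministically on its own. The two call paths consume the generator differently, which plausibly explains why the diff also bumps `FIXED_RANDOM_SEED`. A standalone sketch:

```python
import random

FIXED_RANDOM_SEED = 1542676187  # constant from the patched helpers module

def _shuffled(seq):
    """Deterministic shuffle without the deprecated random= keyword."""
    fixed_random = random.Random()
    fixed_random.seed(FIXED_RANDOM_SEED, version=1)
    out = list(seq)
    fixed_random.shuffle(out)  # method form; no deprecation warning on 3.9+
    return out

assert _shuffled(["a", "b", "c"]) == _shuffled(["a", "b", "c"])
```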
gh_patches_debug_20264
rasdani/github-patches
git_diff
svthalia__concrexit-3089
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Admin sales shift API should also return total_paid_revenue ### Is your feature request related to a problem? Please describe. The current admin sales shift api route only gives the total_revenue for a shift, but this might contain unpaid orders. We don't want those in certain scoreboards, like for the rag week. ### Describe the solution you'd like Add `total_paid_revenue` ### Motivation ### Describe alternatives you've considered ### Additional context </issue> <code> [start of website/sales/api/v2/admin/serializers/shift.py] 1 from rest_framework import serializers 2 3 from sales.models.product import ProductListItem 4 from sales.models.shift import Shift 5 6 7 class ProductListItemSerializer(serializers.ModelSerializer): 8 """Serializer for product list items.""" 9 10 class Meta: 11 model = ProductListItem 12 fields = ("name", "price", "age_restricted") 13 read_only_fields = ("name", "price", "age_restricted") 14 15 name = serializers.SerializerMethodField("_name") 16 age_restricted = serializers.SerializerMethodField("_age_restricted") 17 18 def _name(self, instance): 19 return instance.product.name 20 21 def _age_restricted(self, instance): 22 return instance.product.age_restricted 23 24 25 class ShiftSerializer(serializers.ModelSerializer): 26 """Serializer for shifts.""" 27 28 class Meta: 29 model = Shift 30 fields = ( 31 "pk", 32 "title", 33 "locked", 34 "active", 35 "start", 36 "end", 37 "products", 38 "total_revenue", 39 "num_orders", 40 "product_sales", 41 ) 42 43 total_revenue = serializers.DecimalField( 44 max_digits=10, decimal_places=2, min_value=0, read_only=True 45 ) 46 47 products = ProductListItemSerializer( 48 source="product_list.product_items", many=True, read_only=True 49 ) 50 51 title = serializers.SerializerMethodField("_get_title") 52 53 def _get_title(self, instance): 54 return instance.title 55 56 product_sales = serializers.JSONField() 57 [end of website/sales/api/v2/admin/serializers/shift.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/website/sales/api/v2/admin/serializers/shift.py b/website/sales/api/v2/admin/serializers/shift.py --- a/website/sales/api/v2/admin/serializers/shift.py +++ b/website/sales/api/v2/admin/serializers/shift.py @@ -1,5 +1,6 @@ from rest_framework import serializers +from payments.api.v2.serializers.payment_amount import PaymentAmountSerializer from sales.models.product import ProductListItem from sales.models.shift import Shift @@ -36,13 +37,13 @@ "end", "products", "total_revenue", + "total_revenue_paid", "num_orders", "product_sales", ) - total_revenue = serializers.DecimalField( - max_digits=10, decimal_places=2, min_value=0, read_only=True - ) + total_revenue = PaymentAmountSerializer(min_value=0, read_only=True) + total_revenue_paid = PaymentAmountSerializer(min_value=0, read_only=True) products = ProductListItemSerializer( source="product_list.product_items", many=True, read_only=True
{"golden_diff": "diff --git a/website/sales/api/v2/admin/serializers/shift.py b/website/sales/api/v2/admin/serializers/shift.py\n--- a/website/sales/api/v2/admin/serializers/shift.py\n+++ b/website/sales/api/v2/admin/serializers/shift.py\n@@ -1,5 +1,6 @@\n from rest_framework import serializers\n \n+from payments.api.v2.serializers.payment_amount import PaymentAmountSerializer\n from sales.models.product import ProductListItem\n from sales.models.shift import Shift\n \n@@ -36,13 +37,13 @@\n \"end\",\n \"products\",\n \"total_revenue\",\n+ \"total_revenue_paid\",\n \"num_orders\",\n \"product_sales\",\n )\n \n- total_revenue = serializers.DecimalField(\n- max_digits=10, decimal_places=2, min_value=0, read_only=True\n- )\n+ total_revenue = PaymentAmountSerializer(min_value=0, read_only=True)\n+ total_revenue_paid = PaymentAmountSerializer(min_value=0, read_only=True)\n \n products = ProductListItemSerializer(\n source=\"product_list.product_items\", many=True, read_only=True\n", "issue": "Admin sales shift API should also return total_paid_revenue\n### Is your feature request related to a problem? Please describe.\r\nThe current admin sales shift api route only gives the total_revenue for a shift, but this might contain unpaid orders. We don't want those in certain scoreboards, like for the rag week.\r\n\r\n### Describe the solution you'd like\r\nAdd `total_paid_revenue`\r\n\r\n### Motivation\r\n\r\n### Describe alternatives you've considered\r\n\r\n### Additional context\r\n\n", "before_files": [{"content": "from rest_framework import serializers\n\nfrom sales.models.product import ProductListItem\nfrom sales.models.shift import Shift\n\n\nclass ProductListItemSerializer(serializers.ModelSerializer):\n \"\"\"Serializer for product list items.\"\"\"\n\n class Meta:\n model = ProductListItem\n fields = (\"name\", \"price\", \"age_restricted\")\n read_only_fields = (\"name\", \"price\", \"age_restricted\")\n\n name = serializers.SerializerMethodField(\"_name\")\n age_restricted = serializers.SerializerMethodField(\"_age_restricted\")\n\n def _name(self, instance):\n return instance.product.name\n\n def _age_restricted(self, instance):\n return instance.product.age_restricted\n\n\nclass ShiftSerializer(serializers.ModelSerializer):\n \"\"\"Serializer for shifts.\"\"\"\n\n class Meta:\n model = Shift\n fields = (\n \"pk\",\n \"title\",\n \"locked\",\n \"active\",\n \"start\",\n \"end\",\n \"products\",\n \"total_revenue\",\n \"num_orders\",\n \"product_sales\",\n )\n\n total_revenue = serializers.DecimalField(\n max_digits=10, decimal_places=2, min_value=0, read_only=True\n )\n\n products = ProductListItemSerializer(\n source=\"product_list.product_items\", many=True, read_only=True\n )\n\n title = serializers.SerializerMethodField(\"_get_title\")\n\n def _get_title(self, instance):\n return instance.title\n\n product_sales = serializers.JSONField()\n", "path": "website/sales/api/v2/admin/serializers/shift.py"}]}
1,060
258
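The concrexit diff follows the standard DRF pattern for exposing a derived model attribute: declare a read-only serializer field whose name matches the attribute (`total_revenue_paid` on `Shift`) and let the serializer pull it via `getattr`. A minimal sketch using plain `DecimalField`s as stand-ins for the project's `PaymentAmountSerializer`, with a dummy object in place of the real `Shift` model:

```python
from django.conf import settings

settings.configure()  # minimal Django config so DRF fields can resolve settings

from rest_framework import serializers

class ShiftRevenueSerializer(serializers.Serializer):
    pk = serializers.IntegerField(read_only=True)
    total_revenue = serializers.DecimalField(
        max_digits=10, decimal_places=2, min_value=0, read_only=True
    )
    total_revenue_paid = serializers.DecimalField(
        max_digits=10, decimal_places=2, min_value=0, read_only=True
    )

class Shift:  # stand-in for sales.models.shift.Shift
    pk, total_revenue, total_revenue_paid = 1, "100.00", "80.00"

print(ShiftRevenueSerializer(Shift()).data)
# {'pk': 1, 'total_revenue': '100.00', 'total_revenue_paid': '80.00'}
```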
gh_patches_debug_8616
rasdani/github-patches
git_diff
googleapis__google-api-python-client-1271
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Remove duplicate docs generation In `synth.py` we have a `nox` session to generate the docs [here](https://github.com/googleapis/google-api-python-client/blob/master/synth.py#L36). The same python script is running as part of the Github action in #1187, so we should remove the `docs` session from `synth.py` and `noxfile.py`. </issue> <code> [start of noxfile.py] 1 2 # Copyright 2020 Google LLC 3 # 4 # Licensed under the Apache License, Version 2.0 (the "License"); 5 # you may not use this file except in compliance with the License. 6 # You may obtain a copy of the License at 7 # 8 # http://www.apache.org/licenses/LICENSE-2.0 9 # 10 # Unless required by applicable law or agreed to in writing, software 11 # distributed under the License is distributed on an "AS IS" BASIS, 12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 # See the License for the specific language governing permissions and 14 # limitations under the License. 15 16 import sys 17 18 import nox 19 20 test_dependencies = [ 21 "django>=2.0.0", 22 "google-auth", 23 "google-auth-httplib2", 24 "mox", 25 "parameterized", 26 "pyopenssl", 27 "pytest", 28 "pytest-cov", 29 "webtest", 30 "coverage", 31 "unittest2", 32 "mock", 33 ] 34 35 36 @nox.session(python=["3.7"]) 37 def lint(session): 38 session.install("flake8") 39 session.run( 40 "flake8", 41 "googleapiclient", 42 "tests", 43 "--count", 44 "--select=E9,F63,F7,F82", 45 "--show-source", 46 "--statistics", 47 ) 48 49 50 @nox.session(python=["3.6", "3.7", "3.8", "3.9"]) 51 @nox.parametrize( 52 "oauth2client", 53 [ 54 "oauth2client<2dev", 55 "oauth2client>=2,<=3dev", 56 "oauth2client>=3,<=4dev", 57 "oauth2client>=4,<=5dev", 58 ], 59 ) 60 def unit(session, oauth2client): 61 session.install(*test_dependencies) 62 session.install(oauth2client) 63 session.install('.') 64 65 # Run py.test against the unit tests. 66 session.run( 67 "py.test", 68 "--quiet", 69 "--cov=googleapiclient", 70 "--cov=tests", 71 "--cov-append", 72 "--cov-config=.coveragerc", 73 "--cov-report=", 74 "--cov-fail-under=85", 75 "tests", 76 *session.posargs, 77 ) 78 79 80 @nox.session(python="3.6") 81 def docs(session): 82 session.install('.') 83 session.run("python", "describe.py") [end of noxfile.py] [start of synth.py] 1 # Copyright 2020 Google LLC 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 import synthtool as s 16 from synthtool import gcp 17 18 19 common = gcp.CommonTemplates() 20 21 # ---------------------------------------------------------------------------- 22 # Add templated files 23 # ---------------------------------------------------------------------------- 24 templated_files = common.py_library() 25 26 # Copy kokoro configs. 27 # Docs are excluded as repo docs cannot currently be generated using sphinx. 
28 s.move(templated_files / '.kokoro', excludes=['**/docs/*', 'publish-docs.sh']) 29 30 # Also move issue templates 31 s.move(templated_files / '.github') 32 33 # ---------------------------------------------------------------------------- 34 # Generate docs 35 # ---------------------------------------------------------------------------- 36 s.shell.run(["nox", "-s", "docs"], hide_output=False) 37 [end of synth.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/noxfile.py b/noxfile.py --- a/noxfile.py +++ b/noxfile.py @@ -75,9 +75,3 @@ "tests", *session.posargs, ) - - [email protected](python="3.6") -def docs(session): - session.install('.') - session.run("python", "describe.py") \ No newline at end of file diff --git a/synth.py b/synth.py --- a/synth.py +++ b/synth.py @@ -29,8 +29,3 @@ # Also move issue templates s.move(templated_files / '.github') - -# ---------------------------------------------------------------------------- -# Generate docs -# ---------------------------------------------------------------------------- -s.shell.run(["nox", "-s", "docs"], hide_output=False)
{"golden_diff": "diff --git a/noxfile.py b/noxfile.py\n--- a/noxfile.py\n+++ b/noxfile.py\n@@ -75,9 +75,3 @@\n \"tests\",\n *session.posargs,\n )\n-\n-\[email protected](python=\"3.6\")\n-def docs(session):\n- session.install('.')\n- session.run(\"python\", \"describe.py\")\n\\ No newline at end of file\ndiff --git a/synth.py b/synth.py\n--- a/synth.py\n+++ b/synth.py\n@@ -29,8 +29,3 @@\n \n # Also move issue templates\n s.move(templated_files / '.github')\n-\n-# ----------------------------------------------------------------------------\n-# Generate docs\n-# ----------------------------------------------------------------------------\n-s.shell.run([\"nox\", \"-s\", \"docs\"], hide_output=False)\n", "issue": "Remove duplicate docs generation\nIn `synth.py` we have a `nox` session to generate the docs [here](https://github.com/googleapis/google-api-python-client/blob/master/synth.py#L36). The same python script is running as part of the Github action in #1187, so we should remove the `docs` session from `synth.py` and `noxfile.py`.\n", "before_files": [{"content": "\n# Copyright 2020 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport sys\n\nimport nox\n\ntest_dependencies = [\n \"django>=2.0.0\",\n \"google-auth\",\n \"google-auth-httplib2\",\n \"mox\",\n \"parameterized\",\n \"pyopenssl\",\n \"pytest\",\n \"pytest-cov\",\n \"webtest\",\n \"coverage\",\n \"unittest2\",\n \"mock\",\n]\n\n\[email protected](python=[\"3.7\"])\ndef lint(session):\n session.install(\"flake8\")\n session.run(\n \"flake8\",\n \"googleapiclient\",\n \"tests\",\n \"--count\",\n \"--select=E9,F63,F7,F82\",\n \"--show-source\",\n \"--statistics\",\n )\n\n\[email protected](python=[\"3.6\", \"3.7\", \"3.8\", \"3.9\"])\[email protected](\n \"oauth2client\",\n [\n \"oauth2client<2dev\",\n \"oauth2client>=2,<=3dev\",\n \"oauth2client>=3,<=4dev\",\n \"oauth2client>=4,<=5dev\",\n ],\n)\ndef unit(session, oauth2client):\n session.install(*test_dependencies)\n session.install(oauth2client)\n session.install('.')\n\n # Run py.test against the unit tests.\n session.run(\n \"py.test\",\n \"--quiet\",\n \"--cov=googleapiclient\",\n \"--cov=tests\",\n \"--cov-append\",\n \"--cov-config=.coveragerc\",\n \"--cov-report=\",\n \"--cov-fail-under=85\",\n \"tests\",\n *session.posargs,\n )\n\n\[email protected](python=\"3.6\")\ndef docs(session):\n session.install('.')\n session.run(\"python\", \"describe.py\")", "path": "noxfile.py"}, {"content": "# Copyright 2020 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the 
License.\n\nimport synthtool as s\nfrom synthtool import gcp\n\n\ncommon = gcp.CommonTemplates()\n\n# ----------------------------------------------------------------------------\n# Add templated files\n# ----------------------------------------------------------------------------\ntemplated_files = common.py_library()\n\n# Copy kokoro configs.\n# Docs are excluded as repo docs cannot currently be generated using sphinx.\ns.move(templated_files / '.kokoro', excludes=['**/docs/*', 'publish-docs.sh'])\n\n# Also move issue templates\ns.move(templated_files / '.github')\n\n# ----------------------------------------------------------------------------\n# Generate docs\n# ----------------------------------------------------------------------------\ns.shell.run([\"nox\", \"-s\", \"docs\"], hide_output=False)\n", "path": "synth.py"}]}
1,621
175
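The google-api-python-client cleanup deletes the `docs` nox session and the `s.shell.run(["nox", "-s", "docs"], ...)` line from synth.py, leaving the GitHub Action as the single owner of doc generation. The removed session only did two things — install the package and run `describe.py` — so the CI-side equivalent reduces to these steps, shown as a sketch that assumes it runs from the repository root with `pip` on the PATH:

```python
import subprocess

subprocess.check_call(["pip", "install", "."])    # what session.install('.') did
subprocess.check_call(["python", "describe.py"])  # what session.run(...) did
```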
gh_patches_debug_17236
rasdani/github-patches
git_diff
pyca__cryptography-3638
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Update release automation for new wheel builder Once #3636 is merged we need to update the release automation to trigger the new wheel builder and download the artifacts. </issue> <code> [start of release.py] 1 # This file is dual licensed under the terms of the Apache License, Version 2 # 2.0, and the BSD License. See the LICENSE file in the root of this repository 3 # for complete details. 4 5 from __future__ import absolute_import, division, print_function 6 7 import getpass 8 import io 9 import os 10 import subprocess 11 import time 12 13 import click 14 15 from clint.textui.progress import Bar as ProgressBar 16 17 import requests 18 19 20 JENKINS_URL = "https://jenkins.cryptography.io/job/cryptography-wheel-builder" 21 22 23 def run(*args, **kwargs): 24 kwargs.setdefault("stderr", subprocess.STDOUT) 25 subprocess.check_output(list(args), **kwargs) 26 27 28 def wait_for_build_completed(session): 29 # Wait 20 seconds before actually checking if the build is complete, to 30 # ensure that it had time to really start. 31 time.sleep(20) 32 while True: 33 response = session.get( 34 "{0}/lastBuild/api/json/".format(JENKINS_URL), 35 headers={ 36 "Accept": "application/json", 37 } 38 ) 39 response.raise_for_status() 40 if not response.json()["building"]: 41 assert response.json()["result"] == "SUCCESS" 42 break 43 time.sleep(0.1) 44 45 46 def download_artifacts(session): 47 response = session.get( 48 "{0}/lastBuild/api/json/".format(JENKINS_URL), 49 headers={ 50 "Accept": "application/json" 51 } 52 ) 53 response.raise_for_status() 54 assert not response.json()["building"] 55 assert response.json()["result"] == "SUCCESS" 56 57 paths = [] 58 59 last_build_number = response.json()["number"] 60 for run in response.json()["runs"]: 61 if run["number"] != last_build_number: 62 print( 63 "Skipping {0} as it is not from the latest build ({1})".format( 64 run["url"], last_build_number 65 ) 66 ) 67 continue 68 69 response = session.get( 70 run["url"] + "api/json/", 71 headers={ 72 "Accept": "application/json", 73 } 74 ) 75 response.raise_for_status() 76 for artifact in response.json()["artifacts"]: 77 response = session.get( 78 "{0}artifact/{1}".format(run["url"], artifact["relativePath"]), 79 stream=True 80 ) 81 assert response.headers["content-length"] 82 print("Downloading {0}".format(artifact["fileName"])) 83 bar = ProgressBar( 84 expected_size=int(response.headers["content-length"]), 85 filled_char="=" 86 ) 87 content = io.BytesIO() 88 for data in response.iter_content(chunk_size=8192): 89 content.write(data) 90 bar.show(content.tell()) 91 assert bar.expected_size == content.tell() 92 bar.done() 93 out_path = os.path.join( 94 os.path.dirname(__file__), 95 "dist", 96 artifact["fileName"], 97 ) 98 with open(out_path, "wb") as f: 99 f.write(content.getvalue()) 100 paths.append(out_path) 101 return paths 102 103 104 @click.command() 105 @click.argument("version") 106 def release(version): 107 """ 108 ``version`` should be a string like '0.4' or '1.0'. 
109 """ 110 run("git", "tag", "-s", version, "-m", "{0} release".format(version)) 111 run("git", "push", "--tags") 112 113 run("python", "setup.py", "sdist") 114 run("python", "setup.py", "sdist", "bdist_wheel", cwd="vectors/") 115 116 run( 117 "twine", "upload", "-s", "dist/cryptography-{0}*".format(version), 118 "vectors/dist/cryptography_vectors-{0}*".format(version), shell=True 119 ) 120 121 session = requests.Session() 122 123 # This tells the CDN to delete the cached response for the URL. We do this 124 # so that the Jenkins builders will see the new sdist immediately when they 125 # go to build the wheels. 126 response = session.request( 127 "PURGE", "https://pypi.python.org/simple/cryptography/" 128 ) 129 response.raise_for_status() 130 131 username = getpass.getpass("Input the GitHub/Jenkins username: ") 132 token = getpass.getpass("Input the Jenkins token: ") 133 response = session.post( 134 "{0}/build".format(JENKINS_URL), 135 auth=requests.auth.HTTPBasicAuth( 136 username, token 137 ), 138 params={ 139 "cause": "Building wheels for {0}".format(version) 140 } 141 ) 142 response.raise_for_status() 143 wait_for_build_completed(session) 144 paths = download_artifacts(session) 145 run("twine", "upload", " ".join(paths)) 146 147 148 if __name__ == "__main__": 149 release() 150 [end of release.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/release.py b/release.py --- a/release.py +++ b/release.py @@ -17,7 +17,10 @@ import requests -JENKINS_URL = "https://jenkins.cryptography.io/job/cryptography-wheel-builder" +JENKINS_URL = ( + "https://ci.cryptography.io/job/cryptography-support-jobs/" + "job/wheel-builder" +) def run(*args, **kwargs): @@ -128,14 +131,11 @@ ) response.raise_for_status() - username = getpass.getpass("Input the GitHub/Jenkins username: ") token = getpass.getpass("Input the Jenkins token: ") - response = session.post( + response = session.get( "{0}/build".format(JENKINS_URL), - auth=requests.auth.HTTPBasicAuth( - username, token - ), params={ + "token": token, "cause": "Building wheels for {0}".format(version) } )
{"golden_diff": "diff --git a/release.py b/release.py\n--- a/release.py\n+++ b/release.py\n@@ -17,7 +17,10 @@\n import requests\n \n \n-JENKINS_URL = \"https://jenkins.cryptography.io/job/cryptography-wheel-builder\"\n+JENKINS_URL = (\n+ \"https://ci.cryptography.io/job/cryptography-support-jobs/\"\n+ \"job/wheel-builder\"\n+)\n \n \n def run(*args, **kwargs):\n@@ -128,14 +131,11 @@\n )\n response.raise_for_status()\n \n- username = getpass.getpass(\"Input the GitHub/Jenkins username: \")\n token = getpass.getpass(\"Input the Jenkins token: \")\n- response = session.post(\n+ response = session.get(\n \"{0}/build\".format(JENKINS_URL),\n- auth=requests.auth.HTTPBasicAuth(\n- username, token\n- ),\n params={\n+ \"token\": token,\n \"cause\": \"Building wheels for {0}\".format(version)\n }\n )\n", "issue": "Update release automation for new wheel builder\nOnce #3636 is merged we need to update the release automation to trigger the new wheel builder and download the artifacts.\n", "before_files": [{"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport getpass\nimport io\nimport os\nimport subprocess\nimport time\n\nimport click\n\nfrom clint.textui.progress import Bar as ProgressBar\n\nimport requests\n\n\nJENKINS_URL = \"https://jenkins.cryptography.io/job/cryptography-wheel-builder\"\n\n\ndef run(*args, **kwargs):\n kwargs.setdefault(\"stderr\", subprocess.STDOUT)\n subprocess.check_output(list(args), **kwargs)\n\n\ndef wait_for_build_completed(session):\n # Wait 20 seconds before actually checking if the build is complete, to\n # ensure that it had time to really start.\n time.sleep(20)\n while True:\n response = session.get(\n \"{0}/lastBuild/api/json/\".format(JENKINS_URL),\n headers={\n \"Accept\": \"application/json\",\n }\n )\n response.raise_for_status()\n if not response.json()[\"building\"]:\n assert response.json()[\"result\"] == \"SUCCESS\"\n break\n time.sleep(0.1)\n\n\ndef download_artifacts(session):\n response = session.get(\n \"{0}/lastBuild/api/json/\".format(JENKINS_URL),\n headers={\n \"Accept\": \"application/json\"\n }\n )\n response.raise_for_status()\n assert not response.json()[\"building\"]\n assert response.json()[\"result\"] == \"SUCCESS\"\n\n paths = []\n\n last_build_number = response.json()[\"number\"]\n for run in response.json()[\"runs\"]:\n if run[\"number\"] != last_build_number:\n print(\n \"Skipping {0} as it is not from the latest build ({1})\".format(\n run[\"url\"], last_build_number\n )\n )\n continue\n\n response = session.get(\n run[\"url\"] + \"api/json/\",\n headers={\n \"Accept\": \"application/json\",\n }\n )\n response.raise_for_status()\n for artifact in response.json()[\"artifacts\"]:\n response = session.get(\n \"{0}artifact/{1}\".format(run[\"url\"], artifact[\"relativePath\"]),\n stream=True\n )\n assert response.headers[\"content-length\"]\n print(\"Downloading {0}\".format(artifact[\"fileName\"]))\n bar = ProgressBar(\n expected_size=int(response.headers[\"content-length\"]),\n filled_char=\"=\"\n )\n content = io.BytesIO()\n for data in response.iter_content(chunk_size=8192):\n content.write(data)\n bar.show(content.tell())\n assert bar.expected_size == content.tell()\n bar.done()\n out_path = os.path.join(\n os.path.dirname(__file__),\n \"dist\",\n artifact[\"fileName\"],\n )\n with open(out_path, \"wb\") as f:\n 
f.write(content.getvalue())\n paths.append(out_path)\n return paths\n\n\[email protected]()\[email protected](\"version\")\ndef release(version):\n \"\"\"\n ``version`` should be a string like '0.4' or '1.0'.\n \"\"\"\n run(\"git\", \"tag\", \"-s\", version, \"-m\", \"{0} release\".format(version))\n run(\"git\", \"push\", \"--tags\")\n\n run(\"python\", \"setup.py\", \"sdist\")\n run(\"python\", \"setup.py\", \"sdist\", \"bdist_wheel\", cwd=\"vectors/\")\n\n run(\n \"twine\", \"upload\", \"-s\", \"dist/cryptography-{0}*\".format(version),\n \"vectors/dist/cryptography_vectors-{0}*\".format(version), shell=True\n )\n\n session = requests.Session()\n\n # This tells the CDN to delete the cached response for the URL. We do this\n # so that the Jenkins builders will see the new sdist immediately when they\n # go to build the wheels.\n response = session.request(\n \"PURGE\", \"https://pypi.python.org/simple/cryptography/\"\n )\n response.raise_for_status()\n\n username = getpass.getpass(\"Input the GitHub/Jenkins username: \")\n token = getpass.getpass(\"Input the Jenkins token: \")\n response = session.post(\n \"{0}/build\".format(JENKINS_URL),\n auth=requests.auth.HTTPBasicAuth(\n username, token\n ),\n params={\n \"cause\": \"Building wheels for {0}\".format(version)\n }\n )\n response.raise_for_status()\n wait_for_build_completed(session)\n paths = download_artifacts(session)\n run(\"twine\", \"upload\", \" \".join(paths))\n\n\nif __name__ == \"__main__\":\n release()\n", "path": "release.py"}]}
1,919
231
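The cryptography release script migrates in two coupled steps: `JENKINS_URL` now points at the nested `cryptography-support-jobs/job/wheel-builder` job, and the build is triggered with Jenkins' remote build-token mechanism — an unauthenticated `GET` on `<job>/build` carrying `token` as a query parameter — instead of a basic-auth `POST`. A sketch of the trigger call with a placeholder token:

```python
import requests

JENKINS_URL = (
    "https://ci.cryptography.io/job/cryptography-support-jobs/"
    "job/wheel-builder"
)

session = requests.Session()
response = session.get(
    f"{JENKINS_URL}/build",
    params={"token": "<build-token>", "cause": "Building wheels for 1.0"},
)
response.raise_for_status()
```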
gh_patches_debug_24985
rasdani/github-patches
git_diff
comic__grand-challenge.org-2348
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Archive Serializers use `id` rather than `pk` Some of our serializers use `id` rather than `pk`, for consistency we should only use one and that should be `pk`. Check the other serializers and see if this occurs elsewhere. </issue> <code> [start of app/grandchallenge/archives/serializers.py] 1 from django.db.transaction import on_commit 2 from guardian.shortcuts import get_objects_for_user 3 from rest_framework import serializers 4 from rest_framework.fields import ReadOnlyField, URLField 5 from rest_framework.relations import HyperlinkedRelatedField 6 7 from grandchallenge.archives.models import Archive, ArchiveItem 8 from grandchallenge.archives.tasks import ( 9 start_archive_item_update_tasks, 10 update_archive_item_update_kwargs, 11 ) 12 from grandchallenge.components.serializers import ( 13 ComponentInterfaceValuePostSerializer, 14 ComponentInterfaceValueSerializer, 15 ) 16 from grandchallenge.hanging_protocols.serializers import ( 17 HangingProtocolSerializer, 18 ) 19 20 21 class ArchiveItemSerializer(serializers.ModelSerializer): 22 archive = HyperlinkedRelatedField( 23 read_only=True, view_name="api:archive-detail" 24 ) 25 values = ComponentInterfaceValueSerializer(many=True) 26 27 class Meta: 28 model = ArchiveItem 29 fields = ("id", "archive", "values") 30 31 32 class ArchiveSerializer(serializers.ModelSerializer): 33 algorithms = HyperlinkedRelatedField( 34 read_only=True, many=True, view_name="api:algorithm-detail" 35 ) 36 logo = URLField(source="logo.x20.url", read_only=True) 37 url = URLField(source="get_absolute_url", read_only=True) 38 # Include the read only name for legacy clients 39 name = ReadOnlyField() 40 hanging_protocol = HangingProtocolSerializer() 41 42 class Meta: 43 model = Archive 44 fields = ( 45 "id", 46 "name", 47 "title", 48 "algorithms", 49 "logo", 50 "description", 51 "api_url", 52 "url", 53 "hanging_protocol", 54 "view_content", 55 ) 56 57 58 class ArchiveItemPostSerializer(ArchiveItemSerializer): 59 archive = HyperlinkedRelatedField( 60 queryset=Archive.objects.none(), 61 view_name="api:archive-detail", 62 write_only=True, 63 ) 64 65 def __init__(self, *args, **kwargs): 66 super().__init__(*args, **kwargs) 67 self.fields["values"] = ComponentInterfaceValuePostSerializer( 68 many=True, context=self.context 69 ) 70 71 if "request" in self.context: 72 user = self.context["request"].user 73 74 self.fields["archive"].queryset = get_objects_for_user( 75 user, "archives.use_archive", accept_global_perms=False 76 ) 77 78 def update(self, instance, validated_data): 79 civs = validated_data.pop("values") 80 81 civ_pks_to_remove = set() 82 civ_pks_to_add = set() 83 upload_pks = {} 84 85 for civ in civs: 86 interface = civ.pop("interface", None) 87 upload_session = civ.pop("upload_session", None) 88 value = civ.pop("value", None) 89 image = civ.pop("image", None) 90 user_upload = civ.pop("user_upload", None) 91 92 update_archive_item_update_kwargs( 93 instance=instance, 94 interface=interface, 95 value=value, 96 image=image, 97 user_upload=user_upload, 98 upload_session=upload_session, 99 civ_pks_to_add=civ_pks_to_add, 100 civ_pks_to_remove=civ_pks_to_remove, 101 upload_pks=upload_pks, 102 ) 103 104 on_commit( 105 start_archive_item_update_tasks.signature( 106 kwargs={ 107 "archive_item_pk": instance.pk, 108 "civ_pks_to_add": list(civ_pks_to_add), 109 "civ_pks_to_remove": list(civ_pks_to_remove), 110 "upload_pks": upload_pks, 111 } 112 ).apply_async 113 ) 
114 115 return instance 116 [end of app/grandchallenge/archives/serializers.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/app/grandchallenge/archives/serializers.py b/app/grandchallenge/archives/serializers.py --- a/app/grandchallenge/archives/serializers.py +++ b/app/grandchallenge/archives/serializers.py @@ -11,7 +11,7 @@ ) from grandchallenge.components.serializers import ( ComponentInterfaceValuePostSerializer, - ComponentInterfaceValueSerializer, + HyperlinkedComponentInterfaceValueSerializer, ) from grandchallenge.hanging_protocols.serializers import ( HangingProtocolSerializer, @@ -22,11 +22,11 @@ archive = HyperlinkedRelatedField( read_only=True, view_name="api:archive-detail" ) - values = ComponentInterfaceValueSerializer(many=True) + values = HyperlinkedComponentInterfaceValueSerializer(many=True) class Meta: model = ArchiveItem - fields = ("id", "archive", "values") + fields = ("pk", "archive", "values") class ArchiveSerializer(serializers.ModelSerializer): @@ -42,7 +42,7 @@ class Meta: model = Archive fields = ( - "id", + "pk", "name", "title", "algorithms",
{"golden_diff": "diff --git a/app/grandchallenge/archives/serializers.py b/app/grandchallenge/archives/serializers.py\n--- a/app/grandchallenge/archives/serializers.py\n+++ b/app/grandchallenge/archives/serializers.py\n@@ -11,7 +11,7 @@\n )\n from grandchallenge.components.serializers import (\n ComponentInterfaceValuePostSerializer,\n- ComponentInterfaceValueSerializer,\n+ HyperlinkedComponentInterfaceValueSerializer,\n )\n from grandchallenge.hanging_protocols.serializers import (\n HangingProtocolSerializer,\n@@ -22,11 +22,11 @@\n archive = HyperlinkedRelatedField(\n read_only=True, view_name=\"api:archive-detail\"\n )\n- values = ComponentInterfaceValueSerializer(many=True)\n+ values = HyperlinkedComponentInterfaceValueSerializer(many=True)\n \n class Meta:\n model = ArchiveItem\n- fields = (\"id\", \"archive\", \"values\")\n+ fields = (\"pk\", \"archive\", \"values\")\n \n \n class ArchiveSerializer(serializers.ModelSerializer):\n@@ -42,7 +42,7 @@\n class Meta:\n model = Archive\n fields = (\n- \"id\",\n+ \"pk\",\n \"name\",\n \"title\",\n \"algorithms\",\n", "issue": "Archive Serializers use `id` rather than `pk`\nSome of our serializers use `id` rather than `pk`, for consistency we should only use one and that should be `pk`. Check the other serializers and see if this occurs elsewhere.\n", "before_files": [{"content": "from django.db.transaction import on_commit\nfrom guardian.shortcuts import get_objects_for_user\nfrom rest_framework import serializers\nfrom rest_framework.fields import ReadOnlyField, URLField\nfrom rest_framework.relations import HyperlinkedRelatedField\n\nfrom grandchallenge.archives.models import Archive, ArchiveItem\nfrom grandchallenge.archives.tasks import (\n start_archive_item_update_tasks,\n update_archive_item_update_kwargs,\n)\nfrom grandchallenge.components.serializers import (\n ComponentInterfaceValuePostSerializer,\n ComponentInterfaceValueSerializer,\n)\nfrom grandchallenge.hanging_protocols.serializers import (\n HangingProtocolSerializer,\n)\n\n\nclass ArchiveItemSerializer(serializers.ModelSerializer):\n archive = HyperlinkedRelatedField(\n read_only=True, view_name=\"api:archive-detail\"\n )\n values = ComponentInterfaceValueSerializer(many=True)\n\n class Meta:\n model = ArchiveItem\n fields = (\"id\", \"archive\", \"values\")\n\n\nclass ArchiveSerializer(serializers.ModelSerializer):\n algorithms = HyperlinkedRelatedField(\n read_only=True, many=True, view_name=\"api:algorithm-detail\"\n )\n logo = URLField(source=\"logo.x20.url\", read_only=True)\n url = URLField(source=\"get_absolute_url\", read_only=True)\n # Include the read only name for legacy clients\n name = ReadOnlyField()\n hanging_protocol = HangingProtocolSerializer()\n\n class Meta:\n model = Archive\n fields = (\n \"id\",\n \"name\",\n \"title\",\n \"algorithms\",\n \"logo\",\n \"description\",\n \"api_url\",\n \"url\",\n \"hanging_protocol\",\n \"view_content\",\n )\n\n\nclass ArchiveItemPostSerializer(ArchiveItemSerializer):\n archive = HyperlinkedRelatedField(\n queryset=Archive.objects.none(),\n view_name=\"api:archive-detail\",\n write_only=True,\n )\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.fields[\"values\"] = ComponentInterfaceValuePostSerializer(\n many=True, context=self.context\n )\n\n if \"request\" in self.context:\n user = self.context[\"request\"].user\n\n self.fields[\"archive\"].queryset = get_objects_for_user(\n user, \"archives.use_archive\", accept_global_perms=False\n )\n\n def update(self, instance, 
validated_data):\n civs = validated_data.pop(\"values\")\n\n civ_pks_to_remove = set()\n civ_pks_to_add = set()\n upload_pks = {}\n\n for civ in civs:\n interface = civ.pop(\"interface\", None)\n upload_session = civ.pop(\"upload_session\", None)\n value = civ.pop(\"value\", None)\n image = civ.pop(\"image\", None)\n user_upload = civ.pop(\"user_upload\", None)\n\n update_archive_item_update_kwargs(\n instance=instance,\n interface=interface,\n value=value,\n image=image,\n user_upload=user_upload,\n upload_session=upload_session,\n civ_pks_to_add=civ_pks_to_add,\n civ_pks_to_remove=civ_pks_to_remove,\n upload_pks=upload_pks,\n )\n\n on_commit(\n start_archive_item_update_tasks.signature(\n kwargs={\n \"archive_item_pk\": instance.pk,\n \"civ_pks_to_add\": list(civ_pks_to_add),\n \"civ_pks_to_remove\": list(civ_pks_to_remove),\n \"upload_pks\": upload_pks,\n }\n ).apply_async\n )\n\n return instance\n", "path": "app/grandchallenge/archives/serializers.py"}]}
1,581
269
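The `pk`-everywhere convention in the fix above is not purely cosmetic: Django's `Model.pk` is an alias for whatever field is the primary key, so a serializer declaring `"pk"` keeps working when a model's primary key is a UUID or a renamed column, while `"id"` only works for the default auto field. A minimal sketch of the distinction — `Widget` and its fields are hypothetical, not part of grand-challenge.org:

```python
import uuid

from django.db import models
from rest_framework import serializers


class Widget(models.Model):
    # Hypothetical model whose primary key is not an auto "id" column.
    widget_uuid = models.UUIDField(primary_key=True, default=uuid.uuid4)
    name = models.CharField(max_length=64)


class WidgetSerializer(serializers.ModelSerializer):
    class Meta:
        model = Widget
        # "pk" resolves to widget_uuid; declaring "id" here would fail,
        # because the model has no field named "id".
        fields = ("pk", "name")
```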
gh_patches_debug_32080
rasdani/github-patches
git_diff
ManageIQ__integration_tests-4789
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Freeze.py screws up test running The virtualenv that is left in requirments/ dir seems to interfere with normal operations so I always need to delete it, perhaps we need some ignore somewhere or need to place it elsewhere ``` ../default/lib/python2.7/site-packages/py/_path/common.py:367: in visit for x in Visitor(fil, rec, ignore, bf, sort).gen(self): ../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen for p in self.gen(subdir): ../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen for p in self.gen(subdir): ../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen for p in self.gen(subdir): ../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen for p in self.gen(subdir): ../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen for p in self.gen(subdir): ../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen for p in self.gen(subdir): ../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen for p in self.gen(subdir): ../default/lib/python2.7/site-packages/py/_path/common.py:406: in gen if p.check(dir=1) and (rec is None or rec(p))]) ../default/lib/python2.7/site-packages/_pytest/main.py:682: in _recurse ihook = self.gethookproxy(path) ../default/lib/python2.7/site-packages/_pytest/main.py:587: in gethookproxy my_conftestmodules = pm._getconftestmodules(fspath) ../default/lib/python2.7/site-packages/_pytest/config.py:339: in _getconftestmodules mod = self._importconftest(conftestpath) ../default/lib/python2.7/site-packages/_pytest/config.py:375: in _importconftest self.consider_conftest(mod) ../default/lib/python2.7/site-packages/_pytest/config.py:398: in consider_conftest if self.register(conftestmodule, name=conftestmodule.__file__): ../default/lib/python2.7/site-packages/_pytest/config.py:250: in register ret = super(PytestPluginManager, self).register(plugin, name) ../default/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py:371: in register hook._maybe_apply_history(hookimpl) ../default/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py:768: in _maybe_apply_history res = self._hookexec(self, [method], kwargs) ../default/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py:339: in _hookexec return self._inner_hookexec(hook, methods, kwargs) ../default/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py:334: in <lambda> _MultiCall(methods, kwargs, hook.spec_opts).execute() ../default/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py:614: in execute res = hook_impl.function(*args) requirements/temporary_venv/lib/python2.7/site-packages/tests/contrib/appengine/conftest.py:45: in pytest_configure if config.getoption('gae_sdk') is not None: ../default/lib/python2.7/site-packages/_pytest/config.py:1195: in getoption raise ValueError("no option named %r" % (name,)) E ValueError: no option named 'gae_sdk' ``` </issue> <code> [start of requirements/freeze.py] 1 #!/usr/bin/env python 2 """ 3 outputs the frozen packages 4 """ 5 import sys 6 import os 7 import argparse 8 import subprocess 9 parser = argparse.ArgumentParser(description=__doc__.strip()) 10 parser.add_argument('--venv', default='requirements/temporary_venv') 11 parser.add_argument( 12 "--template", default="requirements/template.txt",) 13 parser.add_argument( 14 "--out", default=sys.stdout, type=argparse.FileType('w'), 15 help='the file where packages should be 
written to') 16 17 18 def main(args): 19 if not os.path.isdir(args.venv): 20 subprocess.check_call([ 21 sys.executable, '-m', 'virtualenv', args.venv 22 ]) 23 subprocess.check_call([ 24 os.path.join(args.venv, 'bin/pip'), 25 'install', '-U', '-r', args.template]) 26 27 subprocess.check_call([ 28 os.path.join(args.venv, 'bin/pip'), 'freeze' 29 ], stdout=args.out) 30 31 32 if __name__ == '__main__': 33 main(parser.parse_args()) 34 [end of requirements/freeze.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/requirements/freeze.py b/requirements/freeze.py --- a/requirements/freeze.py +++ b/requirements/freeze.py @@ -2,31 +2,52 @@ """ outputs the frozen packages """ +from __future__ import print_function import sys import os import argparse import subprocess +import tempfile +import shutil parser = argparse.ArgumentParser(description=__doc__.strip()) -parser.add_argument('--venv', default='requirements/temporary_venv') +parser.add_argument('--venv', default=None) +parser.add_argument('--keep-venv', action='store_true') parser.add_argument( "--template", default="requirements/template.txt",) parser.add_argument( - "--out", default=sys.stdout, type=argparse.FileType('w'), + "--out", default=None, help='the file where packages should be written to') def main(args): - if not os.path.isdir(args.venv): + if args.venv is None: + args.venv = tempfile.mkdtemp(suffix='-miq-QE-rebuild-venv') + + try: + if not os.path.isdir(os.path.join(args.venv, 'bin')): + subprocess.check_call([ + sys.executable, '-m', 'virtualenv', args.venv + ]) subprocess.check_call([ - sys.executable, '-m', 'virtualenv', args.venv - ]) - subprocess.check_call([ - os.path.join(args.venv, 'bin/pip'), - 'install', '-U', '-r', args.template]) + os.path.join(args.venv, 'bin/pip'), + 'install', '-U', '-r', args.template]) + + if args.out is None: + subprocess.check_call([ + os.path.join(args.venv, 'bin/pip'), 'freeze' + ], stdout=sys.stdout) + else: + with open(args.out) as out: + subprocess.check_call([ + os.path.join(args.venv, 'bin/pip'), 'freeze' + ], stdout=out) - subprocess.check_call([ - os.path.join(args.venv, 'bin/pip'), 'freeze' - ], stdout=args.out) + subprocess.check_call([ + os.path.join(args.venv, 'bin/pip'), 'freeze' + ], stdout=args.out) + finally: + if not args.keep_venv: + shutil.rmtree(args.venv) if __name__ == '__main__':
{"golden_diff": "diff --git a/requirements/freeze.py b/requirements/freeze.py\n--- a/requirements/freeze.py\n+++ b/requirements/freeze.py\n@@ -2,31 +2,52 @@\n \"\"\"\n outputs the frozen packages\n \"\"\"\n+from __future__ import print_function\n import sys\n import os\n import argparse\n import subprocess\n+import tempfile\n+import shutil\n parser = argparse.ArgumentParser(description=__doc__.strip())\n-parser.add_argument('--venv', default='requirements/temporary_venv')\n+parser.add_argument('--venv', default=None)\n+parser.add_argument('--keep-venv', action='store_true')\n parser.add_argument(\n \"--template\", default=\"requirements/template.txt\",)\n parser.add_argument(\n- \"--out\", default=sys.stdout, type=argparse.FileType('w'),\n+ \"--out\", default=None,\n help='the file where packages should be written to')\n \n \n def main(args):\n- if not os.path.isdir(args.venv):\n+ if args.venv is None:\n+ args.venv = tempfile.mkdtemp(suffix='-miq-QE-rebuild-venv')\n+\n+ try:\n+ if not os.path.isdir(os.path.join(args.venv, 'bin')):\n+ subprocess.check_call([\n+ sys.executable, '-m', 'virtualenv', args.venv\n+ ])\n subprocess.check_call([\n- sys.executable, '-m', 'virtualenv', args.venv\n- ])\n- subprocess.check_call([\n- os.path.join(args.venv, 'bin/pip'),\n- 'install', '-U', '-r', args.template])\n+ os.path.join(args.venv, 'bin/pip'),\n+ 'install', '-U', '-r', args.template])\n+\n+ if args.out is None:\n+ subprocess.check_call([\n+ os.path.join(args.venv, 'bin/pip'), 'freeze'\n+ ], stdout=sys.stdout)\n+ else:\n+ with open(args.out) as out:\n+ subprocess.check_call([\n+ os.path.join(args.venv, 'bin/pip'), 'freeze'\n+ ], stdout=out)\n \n- subprocess.check_call([\n- os.path.join(args.venv, 'bin/pip'), 'freeze'\n- ], stdout=args.out)\n+ subprocess.check_call([\n+ os.path.join(args.venv, 'bin/pip'), 'freeze'\n+ ], stdout=args.out)\n+ finally:\n+ if not args.keep_venv:\n+ shutil.rmtree(args.venv)\n \n \n if __name__ == '__main__':\n", "issue": "Freeze.py screws up test running\nThe virtualenv that is left in requirments/ dir seems to interfere with normal operations so I always need to delete it, perhaps we need some ignore somewhere or need to place it elsewhere\r\n\r\n```\r\n../default/lib/python2.7/site-packages/py/_path/common.py:367: in visit\r\n for x in Visitor(fil, rec, ignore, bf, sort).gen(self):\r\n../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen\r\n for p in self.gen(subdir):\r\n../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen\r\n for p in self.gen(subdir):\r\n../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen\r\n for p in self.gen(subdir):\r\n../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen\r\n for p in self.gen(subdir):\r\n../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen\r\n for p in self.gen(subdir):\r\n../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen\r\n for p in self.gen(subdir):\r\n../default/lib/python2.7/site-packages/py/_path/common.py:416: in gen\r\n for p in self.gen(subdir):\r\n../default/lib/python2.7/site-packages/py/_path/common.py:406: in gen\r\n if p.check(dir=1) and (rec is None or rec(p))])\r\n../default/lib/python2.7/site-packages/_pytest/main.py:682: in _recurse\r\n ihook = self.gethookproxy(path)\r\n../default/lib/python2.7/site-packages/_pytest/main.py:587: in gethookproxy\r\n my_conftestmodules = pm._getconftestmodules(fspath)\r\n../default/lib/python2.7/site-packages/_pytest/config.py:339: in _getconftestmodules\r\n mod = 
self._importconftest(conftestpath)\r\n../default/lib/python2.7/site-packages/_pytest/config.py:375: in _importconftest\r\n self.consider_conftest(mod)\r\n../default/lib/python2.7/site-packages/_pytest/config.py:398: in consider_conftest\r\n if self.register(conftestmodule, name=conftestmodule.__file__):\r\n../default/lib/python2.7/site-packages/_pytest/config.py:250: in register\r\n ret = super(PytestPluginManager, self).register(plugin, name)\r\n../default/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py:371: in register\r\n hook._maybe_apply_history(hookimpl)\r\n../default/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py:768: in _maybe_apply_history\r\n res = self._hookexec(self, [method], kwargs)\r\n../default/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py:339: in _hookexec\r\n return self._inner_hookexec(hook, methods, kwargs)\r\n../default/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py:334: in <lambda>\r\n _MultiCall(methods, kwargs, hook.spec_opts).execute()\r\n../default/lib/python2.7/site-packages/_pytest/vendored_packages/pluggy.py:614: in execute\r\n res = hook_impl.function(*args)\r\nrequirements/temporary_venv/lib/python2.7/site-packages/tests/contrib/appengine/conftest.py:45: in pytest_configure\r\n if config.getoption('gae_sdk') is not None:\r\n../default/lib/python2.7/site-packages/_pytest/config.py:1195: in getoption\r\n raise ValueError(\"no option named %r\" % (name,))\r\nE ValueError: no option named 'gae_sdk'\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\n\"\"\"\noutputs the frozen packages\n\"\"\"\nimport sys\nimport os\nimport argparse\nimport subprocess\nparser = argparse.ArgumentParser(description=__doc__.strip())\nparser.add_argument('--venv', default='requirements/temporary_venv')\nparser.add_argument(\n \"--template\", default=\"requirements/template.txt\",)\nparser.add_argument(\n \"--out\", default=sys.stdout, type=argparse.FileType('w'),\n help='the file where packages should be written to')\n\n\ndef main(args):\n if not os.path.isdir(args.venv):\n subprocess.check_call([\n sys.executable, '-m', 'virtualenv', args.venv\n ])\n subprocess.check_call([\n os.path.join(args.venv, 'bin/pip'),\n 'install', '-U', '-r', args.template])\n\n subprocess.check_call([\n os.path.join(args.venv, 'bin/pip'), 'freeze'\n ], stdout=args.out)\n\n\nif __name__ == '__main__':\n main(parser.parse_args())\n", "path": "requirements/freeze.py"}]}
1,676
559
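The shape of the accepted fix is a reusable pattern: do the work in a `tempfile.mkdtemp()` directory and remove it in a `finally`, so no `temporary_venv/` is ever left inside the repository for pytest's conftest collection to stumble over. A condensed standalone sketch of that pattern — it uses the stdlib `venv` module rather than the project's `virtualenv` dependency, and the template path is illustrative:

```python
import os
import shutil
import subprocess
import sys
import tempfile


def freeze(template="requirements/template.txt"):
    """Install `template` into a throwaway venv and return `pip freeze` output."""
    venv_dir = tempfile.mkdtemp(suffix="-freeze-venv")  # lands outside the repo tree
    try:
        subprocess.check_call([sys.executable, "-m", "venv", venv_dir])
        pip = os.path.join(venv_dir, "bin", "pip")
        subprocess.check_call([pip, "install", "-U", "-r", template])
        return subprocess.check_output([pip, "freeze"], text=True)
    finally:
        shutil.rmtree(venv_dir)  # always clean up, even when a step fails
```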
gh_patches_debug_61039
rasdani/github-patches
git_diff
google-research__text-to-text-transfer-transformer-327
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Issue Running T5 in colab TPU Hi Team, I was trying to do a pre training of T5 from scratch on colab. I could see if i install t5 using (pip install t5[gcp]), and tried to connect to execute ` tf.tpu.experimental.initialize_tpu_system(tpu)`, getting below error. `InvalidArgumentError: NodeDef expected inputs 'string' do not match 0 inputs specified; Op<name=_Send; signature=tensor:T -> ; attr=T:type; attr=tensor_name:string; attr=send_device:string; attr=send_device_incarnation:int; attr=recv_device:string; attr=client_terminated:bool,default=false; is_stateful=true>; NodeDef: {{node _Send}}` If install/ upgrade tensorflow, it gets resolved, however import of t5 does not work as below. ` import t5` `NotFoundError: /usr/local/lib/python3.6/dist-packages/tensorflow_text/python/metrics/_text_similarity_metric_ops.so: undefined symbol: _ZN10tensorflow14kernel_factory17OpKernelRegistrar12InitInternalEPKNS_9KernelDefEN4absl11string_viewESt10unique_ptrINS0_15OpKernelFactoryESt14default_deleteIS8_EE` Please let me know how if there is a way to resolve this. Thanks. </issue> <code> [start of setup.py] 1 # Copyright 2020 The T5 Authors. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 """Install T5.""" 16 17 import os 18 import sys 19 import setuptools 20 21 # To enable importing version.py directly, we add its path to sys.path. 22 version_path = os.path.join(os.path.dirname(__file__), 't5') 23 sys.path.append(version_path) 24 from version import __version__ # pylint: disable=g-import-not-at-top 25 26 # Get the long description from the README file. 27 with open('README.md') as fp: 28 _LONG_DESCRIPTION = fp.read() 29 30 setuptools.setup( 31 name='t5', 32 version=__version__, 33 description='Text-to-text transfer transformer', 34 long_description=_LONG_DESCRIPTION, 35 long_description_content_type='text/markdown', 36 author='Google Inc.', 37 author_email='[email protected]', 38 url='http://github.com/google-research/text-to-text-transfer-transformer', 39 license='Apache 2.0', 40 packages=setuptools.find_packages(), 41 package_data={ 42 '': ['*.gin'], 43 }, 44 scripts=[], 45 install_requires=[ 46 'absl-py', 47 'babel', 48 'gin-config', 49 'mesh-tensorflow[transformer]>=0.1.13', 50 'nltk', 51 'numpy', 52 'pandas', 53 'rouge-score', 54 'sacrebleu', 55 'scikit-learn', 56 'scipy', 57 'sentencepiece', 58 'six>=1.14', # TODO(adarob): Remove once rouge-score is updated. 59 'tensorflow-text<2.3', # TODO(adarob): Unpin once #320 is resolved. 
60 'tfds-nightly', 61 'torch', 62 'transformers>=2.7.0', 63 ], 64 extras_require={ 65 'gcp': ['gevent', 'google-api-python-client', 'google-compute-engine', 66 'google-cloud-storage', 'oauth2client'], 67 'cache-tasks': ['apache-beam'], 68 'test': ['pytest'], 69 }, 70 entry_points={ 71 'console_scripts': [ 72 't5_mesh_transformer = t5.models.mesh_transformer_main:console_entry_point', 73 't5_cache_tasks = t5.data.cache_tasks_main:console_entry_point' 74 ], 75 }, 76 classifiers=[ 77 'Development Status :: 4 - Beta', 78 'Intended Audience :: Developers', 79 'Intended Audience :: Science/Research', 80 'License :: OSI Approved :: Apache Software License', 81 'Topic :: Scientific/Engineering :: Artificial Intelligence', 82 ], 83 keywords='text nlp machinelearning', 84 ) 85 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -56,7 +56,7 @@ 'scipy', 'sentencepiece', 'six>=1.14', # TODO(adarob): Remove once rouge-score is updated. - 'tensorflow-text<2.3', # TODO(adarob): Unpin once #320 is resolved. + 'tensorflow-text', 'tfds-nightly', 'torch', 'transformers>=2.7.0',
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -56,7 +56,7 @@\n 'scipy',\n 'sentencepiece',\n 'six>=1.14', # TODO(adarob): Remove once rouge-score is updated.\n- 'tensorflow-text<2.3', # TODO(adarob): Unpin once #320 is resolved.\n+ 'tensorflow-text',\n 'tfds-nightly',\n 'torch',\n 'transformers>=2.7.0',\n", "issue": "Issue Running T5 in colab TPU\nHi Team,\r\n\r\nI was trying to do a pre training of T5 from scratch on colab. I could see if i install t5 using (pip install t5[gcp]), and tried to connect to execute ` tf.tpu.experimental.initialize_tpu_system(tpu)`, getting below error.\r\n\r\n`InvalidArgumentError: NodeDef expected inputs 'string' do not match 0 inputs specified; Op<name=_Send; signature=tensor:T -> ; attr=T:type; attr=tensor_name:string; attr=send_device:string; attr=send_device_incarnation:int; attr=recv_device:string; attr=client_terminated:bool,default=false; is_stateful=true>; NodeDef: {{node _Send}}`\r\n\r\nIf install/ upgrade tensorflow, it gets resolved, however import of t5 does not work as below.\r\n`\r\nimport t5`\r\n\r\n`NotFoundError: /usr/local/lib/python3.6/dist-packages/tensorflow_text/python/metrics/_text_similarity_metric_ops.so: undefined symbol: _ZN10tensorflow14kernel_factory17OpKernelRegistrar12InitInternalEPKNS_9KernelDefEN4absl11string_viewESt10unique_ptrINS0_15OpKernelFactoryESt14default_deleteIS8_EE`\r\n\r\nPlease let me know how if there is a way to resolve this.\r\nThanks.\r\n\n", "before_files": [{"content": "# Copyright 2020 The T5 Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Install T5.\"\"\"\n\nimport os\nimport sys\nimport setuptools\n\n# To enable importing version.py directly, we add its path to sys.path.\nversion_path = os.path.join(os.path.dirname(__file__), 't5')\nsys.path.append(version_path)\nfrom version import __version__ # pylint: disable=g-import-not-at-top\n\n# Get the long description from the README file.\nwith open('README.md') as fp:\n _LONG_DESCRIPTION = fp.read()\n\nsetuptools.setup(\n name='t5',\n version=__version__,\n description='Text-to-text transfer transformer',\n long_description=_LONG_DESCRIPTION,\n long_description_content_type='text/markdown',\n author='Google Inc.',\n author_email='[email protected]',\n url='http://github.com/google-research/text-to-text-transfer-transformer',\n license='Apache 2.0',\n packages=setuptools.find_packages(),\n package_data={\n '': ['*.gin'],\n },\n scripts=[],\n install_requires=[\n 'absl-py',\n 'babel',\n 'gin-config',\n 'mesh-tensorflow[transformer]>=0.1.13',\n 'nltk',\n 'numpy',\n 'pandas',\n 'rouge-score',\n 'sacrebleu',\n 'scikit-learn',\n 'scipy',\n 'sentencepiece',\n 'six>=1.14', # TODO(adarob): Remove once rouge-score is updated.\n 'tensorflow-text<2.3', # TODO(adarob): Unpin once #320 is resolved.\n 'tfds-nightly',\n 'torch',\n 'transformers>=2.7.0',\n ],\n extras_require={\n 'gcp': ['gevent', 'google-api-python-client', 'google-compute-engine',\n 'google-cloud-storage', 'oauth2client'],\n 'cache-tasks': 
['apache-beam'],\n 'test': ['pytest'],\n },\n entry_points={\n 'console_scripts': [\n 't5_mesh_transformer = t5.models.mesh_transformer_main:console_entry_point',\n 't5_cache_tasks = t5.data.cache_tasks_main:console_entry_point'\n ],\n },\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n ],\n keywords='text nlp machinelearning',\n)\n", "path": "setup.py"}]}
1,650
121
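For context, the `undefined symbol` traceback in this issue is the usual signature of `tensorflow` and `tensorflow-text` wheels built against different TensorFlow ABIs; the two packages are released in lockstep and are expected to share a minor version. A hedged sanity check to run after pinning matched versions — on a mismatched install the `import tensorflow_text` line itself fails with the symbol error, so the assert never gets a chance to fire:

```python
import tensorflow as tf
import tensorflow_text as tf_text  # fails outright if the ABIs diverge

# tensorflow-text releases track TensorFlow minor versions (2.3.x <-> 2.3.x).
assert tf.__version__.split(".")[:2] == tf_text.__version__.split(".")[:2], (
    tf.__version__,
    tf_text.__version__,
)
```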
gh_patches_debug_6019
rasdani/github-patches
git_diff
cupy__cupy-3335
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> `around` result is different compared to numpy ``` >>> np.__version__ '1.16.4' >>> np.around([2.5]) array([2.]) >>> cupy.__version__ '7.0.0a1' >>> cupy.around([2.5]) array([3.]) ``` NumPy seems to round to even. </issue> <code> [start of cupy/math/rounding.py] 1 from cupy import core 2 from cupy.core import fusion 3 from cupy.math import ufunc 4 5 6 def around(a, decimals=0, out=None): 7 """Rounds to the given number of decimals. 8 9 Args: 10 a (cupy.ndarray): The source array. 11 decimals (int): umber of decimal places to round to (default: 0). 12 If decimals is negative, it specifies the number of positions to 13 the left of the decimal point. 14 out (cupy.ndarray): Output array. 15 16 Returns: 17 cupy.ndarray: Rounded array. 18 19 .. seealso:: :func:`numpy.around` 20 21 """ 22 if fusion._is_fusing(): 23 return fusion._call_ufunc(core.core._round_ufunc, a, decimals, out=out) 24 a = core.array(a, copy=False) 25 return a.round(decimals, out=out) 26 27 28 def round_(a, decimals=0, out=None): 29 return around(a, decimals, out=out) 30 31 32 rint = ufunc.create_math_ufunc( 33 'rint', 1, 'cupy_rint', 34 '''Rounds each element of an array to the nearest integer. 35 36 .. seealso:: :data:`numpy.rint` 37 38 ''') 39 40 41 floor = ufunc.create_math_ufunc( 42 'floor', 1, 'cupy_floor', 43 '''Rounds each element of an array to its floor integer. 44 45 .. seealso:: :data:`numpy.floor` 46 47 ''', support_complex=False) 48 49 50 ceil = ufunc.create_math_ufunc( 51 'ceil', 1, 'cupy_ceil', 52 '''Rounds each element of an array to its ceiling integer. 53 54 .. seealso:: :data:`numpy.ceil` 55 56 ''', support_complex=False) 57 58 59 trunc = ufunc.create_math_ufunc( 60 'trunc', 1, 'cupy_trunc', 61 '''Rounds each element of an array towards zero. 62 63 .. seealso:: :data:`numpy.trunc` 64 65 ''', support_complex=False) 66 67 68 fix = core.create_ufunc( 69 'cupy_fix', ('e->e', 'f->f', 'd->d'), 70 'out0 = (in0 >= 0.0) ? floor(in0): ceil(in0)', 71 doc='''If given value x is positive, it return floor(x). 72 Else, it return ceil(x). 73 74 .. seealso:: :func:`numpy.fix` 75 76 ''') 77 [end of cupy/math/rounding.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/cupy/math/rounding.py b/cupy/math/rounding.py --- a/cupy/math/rounding.py +++ b/cupy/math/rounding.py @@ -8,7 +8,7 @@ Args: a (cupy.ndarray): The source array. - decimals (int): umber of decimal places to round to (default: 0). + decimals (int): Number of decimal places to round to (default: 0). If decimals is negative, it specifies the number of positions to the left of the decimal point. out (cupy.ndarray): Output array.
{"golden_diff": "diff --git a/cupy/math/rounding.py b/cupy/math/rounding.py\n--- a/cupy/math/rounding.py\n+++ b/cupy/math/rounding.py\n@@ -8,7 +8,7 @@\n \n Args:\n a (cupy.ndarray): The source array.\n- decimals (int): umber of decimal places to round to (default: 0).\n+ decimals (int): Number of decimal places to round to (default: 0).\n If decimals is negative, it specifies the number of positions to\n the left of the decimal point.\n out (cupy.ndarray): Output array.\n", "issue": "`around` result is different compared to numpy\n```\r\n>>> np.__version__\r\n'1.16.4'\r\n>>> np.around([2.5])\r\narray([2.])\r\n>>> cupy.__version__\r\n'7.0.0a1'\r\n>>> cupy.around([2.5])\r\narray([3.])\r\n```\r\nNumPy seems to round to even.\n", "before_files": [{"content": "from cupy import core\nfrom cupy.core import fusion\nfrom cupy.math import ufunc\n\n\ndef around(a, decimals=0, out=None):\n \"\"\"Rounds to the given number of decimals.\n\n Args:\n a (cupy.ndarray): The source array.\n decimals (int): umber of decimal places to round to (default: 0).\n If decimals is negative, it specifies the number of positions to\n the left of the decimal point.\n out (cupy.ndarray): Output array.\n\n Returns:\n cupy.ndarray: Rounded array.\n\n .. seealso:: :func:`numpy.around`\n\n \"\"\"\n if fusion._is_fusing():\n return fusion._call_ufunc(core.core._round_ufunc, a, decimals, out=out)\n a = core.array(a, copy=False)\n return a.round(decimals, out=out)\n\n\ndef round_(a, decimals=0, out=None):\n return around(a, decimals, out=out)\n\n\nrint = ufunc.create_math_ufunc(\n 'rint', 1, 'cupy_rint',\n '''Rounds each element of an array to the nearest integer.\n\n .. seealso:: :data:`numpy.rint`\n\n ''')\n\n\nfloor = ufunc.create_math_ufunc(\n 'floor', 1, 'cupy_floor',\n '''Rounds each element of an array to its floor integer.\n\n .. seealso:: :data:`numpy.floor`\n\n ''', support_complex=False)\n\n\nceil = ufunc.create_math_ufunc(\n 'ceil', 1, 'cupy_ceil',\n '''Rounds each element of an array to its ceiling integer.\n\n .. seealso:: :data:`numpy.ceil`\n\n ''', support_complex=False)\n\n\ntrunc = ufunc.create_math_ufunc(\n 'trunc', 1, 'cupy_trunc',\n '''Rounds each element of an array towards zero.\n\n .. seealso:: :data:`numpy.trunc`\n\n ''', support_complex=False)\n\n\nfix = core.create_ufunc(\n 'cupy_fix', ('e->e', 'f->f', 'd->d'),\n 'out0 = (in0 >= 0.0) ? floor(in0): ceil(in0)',\n doc='''If given value x is positive, it return floor(x).\n Else, it return ceil(x).\n\n .. seealso:: :func:`numpy.fix`\n\n ''')\n", "path": "cupy/math/rounding.py"}]}
1,305
137
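For reference, the behavior the issue asks for is IEEE 754 round-half-to-even ("banker's rounding"): exact halves round to the nearest even integer, which both NumPy and Python 3's built-in `round` implement, whereas the old CuPy result matched round-half-away-from-zero:

```python
import numpy as np

print(np.around([0.5, 1.5, 2.5, 3.5]))           # [0. 2. 2. 4.]  ties go to the even side
print([round(x) for x in (0.5, 1.5, 2.5, 3.5)])  # [0, 2, 2, 4]   Python 3 agrees

# Round-half-away-from-zero, the behavior the bug report observed:
print(np.floor(np.asarray([0.5, 1.5, 2.5, 3.5]) + 0.5))  # [1. 2. 3. 4.]
```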
gh_patches_debug_27011
rasdani/github-patches
git_diff
dask__distributed-8347
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Bokeh 3.3.0 uses absolute URLs and breaks proxied dashboards I noticed when using `distributed==2023.10.1` with `bokeh==3.2.2` the URLs to the Bokeh JavaScript are relative. ```html <script type="text/javascript" src="static/js/bokeh.min.js?v=3ca6425586de5036dc01992dd69aa61e9196dd02619557cfaeb1b3d8b77adf724be49401b1168483d165494ce57a6daa16e6f6d3660fef117d45028221f86357"></script> ``` But when upgrading to `bokeh==3.3.0` they become absolute. ```html <script type="text/javascript" src="/static/js/bokeh.min.js?v=39ef57c3a83533e24f961e5c27f651a61045dbccefac4b5df86a7680b1edaff31886a7c0322250ffb0d758fa14ae156c9b640f60cca99f020096b050a4dbb571"></script> ``` This breaks dashboards that are being proxied at some sub-url. Setting `dask scheduler --dashboard-prefix ""` doesn't fix it. ### Reproducer ``` $ pip install dask distributed bokeh==3.3.0 $ dask scheduler & $ curl localhost:8787/status | grep bokeh.min.js ``` </issue> <code> [start of distributed/dashboard/core.py] 1 from __future__ import annotations 2 3 import functools 4 import warnings 5 6 from bokeh.application import Application 7 from bokeh.application.handlers.function import FunctionHandler 8 from bokeh.server.server import BokehTornado 9 from bokeh.server.util import create_hosts_allowlist 10 11 import dask 12 13 from distributed.dashboard.utils import BOKEH_VERSION 14 from distributed.versions import BOKEH_REQUIREMENT 15 16 # Set `prereleases=True` to allow for use with dev versions of `bokeh` 17 if not BOKEH_REQUIREMENT.specifier.contains(BOKEH_VERSION, prereleases=True): 18 warnings.warn( 19 f"\nDask needs {BOKEH_REQUIREMENT} for the dashboard." 20 f"\nYou have bokeh={BOKEH_VERSION}." 21 "\nContinuing without the dashboard." 22 ) 23 raise ImportError( 24 f"Dask needs {BOKEH_REQUIREMENT} for the dashboard, not bokeh={BOKEH_VERSION}" 25 ) 26 27 28 if BOKEH_VERSION.major < 3: 29 from bokeh.models import Panel as TabPanel # noqa: F401 30 else: 31 from bokeh.models import TabPanel # noqa: F401 32 33 34 def BokehApplication(applications, server, prefix="/", template_variables=None): 35 template_variables = template_variables or {} 36 prefix = "/" + prefix.strip("/") + "/" if prefix else "/" 37 38 extra = {"prefix": prefix, **template_variables} 39 40 funcs = {k: functools.partial(v, server, extra) for k, v in applications.items()} 41 apps = {k: Application(FunctionHandler(v)) for k, v in funcs.items()} 42 43 kwargs = dask.config.get("distributed.scheduler.dashboard.bokeh-application").copy() 44 extra_websocket_origins = create_hosts_allowlist( 45 kwargs.pop("allow_websocket_origin"), server.http_server.port 46 ) 47 48 return BokehTornado( 49 apps, 50 prefix=prefix, 51 use_index=False, 52 extra_websocket_origins=extra_websocket_origins, 53 **kwargs, 54 ) 55 [end of distributed/dashboard/core.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/distributed/dashboard/core.py b/distributed/dashboard/core.py --- a/distributed/dashboard/core.py +++ b/distributed/dashboard/core.py @@ -5,6 +5,7 @@ from bokeh.application import Application from bokeh.application.handlers.function import FunctionHandler +from bokeh.resources import Resources from bokeh.server.server import BokehTornado from bokeh.server.util import create_hosts_allowlist @@ -31,6 +32,11 @@ from bokeh.models import TabPanel # noqa: F401 +class DaskBokehTornado(BokehTornado): + def resources(self, absolute_url: str | bool | None = True) -> Resources: + return super().resources(absolute_url) + + def BokehApplication(applications, server, prefix="/", template_variables=None): template_variables = template_variables or {} prefix = "/" + prefix.strip("/") + "/" if prefix else "/" @@ -45,10 +51,11 @@ kwargs.pop("allow_websocket_origin"), server.http_server.port ) - return BokehTornado( + return DaskBokehTornado( apps, prefix=prefix, use_index=False, extra_websocket_origins=extra_websocket_origins, + absolute_url="", **kwargs, )
{"golden_diff": "diff --git a/distributed/dashboard/core.py b/distributed/dashboard/core.py\n--- a/distributed/dashboard/core.py\n+++ b/distributed/dashboard/core.py\n@@ -5,6 +5,7 @@\n \n from bokeh.application import Application\n from bokeh.application.handlers.function import FunctionHandler\n+from bokeh.resources import Resources\n from bokeh.server.server import BokehTornado\n from bokeh.server.util import create_hosts_allowlist\n \n@@ -31,6 +32,11 @@\n from bokeh.models import TabPanel # noqa: F401\n \n \n+class DaskBokehTornado(BokehTornado):\n+ def resources(self, absolute_url: str | bool | None = True) -> Resources:\n+ return super().resources(absolute_url)\n+\n+\n def BokehApplication(applications, server, prefix=\"/\", template_variables=None):\n template_variables = template_variables or {}\n prefix = \"/\" + prefix.strip(\"/\") + \"/\" if prefix else \"/\"\n@@ -45,10 +51,11 @@\n kwargs.pop(\"allow_websocket_origin\"), server.http_server.port\n )\n \n- return BokehTornado(\n+ return DaskBokehTornado(\n apps,\n prefix=prefix,\n use_index=False,\n extra_websocket_origins=extra_websocket_origins,\n+ absolute_url=\"\",\n **kwargs,\n )\n", "issue": "Bokeh 3.3.0 uses absolute URLs and breaks proxied dashboards\nI noticed when using `distributed==2023.10.1` with `bokeh==3.2.2` the URLs to the Bokeh JavaScript are relative.\r\n\r\n```html\r\n<script type=\"text/javascript\" src=\"static/js/bokeh.min.js?v=3ca6425586de5036dc01992dd69aa61e9196dd02619557cfaeb1b3d8b77adf724be49401b1168483d165494ce57a6daa16e6f6d3660fef117d45028221f86357\"></script>\r\n```\r\n\r\nBut when upgrading to `bokeh==3.3.0` they become absolute.\r\n\r\n```html\r\n<script type=\"text/javascript\" src=\"/static/js/bokeh.min.js?v=39ef57c3a83533e24f961e5c27f651a61045dbccefac4b5df86a7680b1edaff31886a7c0322250ffb0d758fa14ae156c9b640f60cca99f020096b050a4dbb571\"></script>\r\n```\r\n\r\nThis breaks dashboards that are being proxied at some sub-url.\r\n\r\nSetting `dask scheduler --dashboard-prefix \"\"` doesn't fix it.\r\n\r\n### Reproducer\r\n\r\n```\r\n$ pip install dask distributed bokeh==3.3.0\r\n$ dask scheduler &\r\n$ curl localhost:8787/status | grep bokeh.min.js\r\n```\n", "before_files": [{"content": "from __future__ import annotations\n\nimport functools\nimport warnings\n\nfrom bokeh.application import Application\nfrom bokeh.application.handlers.function import FunctionHandler\nfrom bokeh.server.server import BokehTornado\nfrom bokeh.server.util import create_hosts_allowlist\n\nimport dask\n\nfrom distributed.dashboard.utils import BOKEH_VERSION\nfrom distributed.versions import BOKEH_REQUIREMENT\n\n# Set `prereleases=True` to allow for use with dev versions of `bokeh`\nif not BOKEH_REQUIREMENT.specifier.contains(BOKEH_VERSION, prereleases=True):\n warnings.warn(\n f\"\\nDask needs {BOKEH_REQUIREMENT} for the dashboard.\"\n f\"\\nYou have bokeh={BOKEH_VERSION}.\"\n \"\\nContinuing without the dashboard.\"\n )\n raise ImportError(\n f\"Dask needs {BOKEH_REQUIREMENT} for the dashboard, not bokeh={BOKEH_VERSION}\"\n )\n\n\nif BOKEH_VERSION.major < 3:\n from bokeh.models import Panel as TabPanel # noqa: F401\nelse:\n from bokeh.models import TabPanel # noqa: F401\n\n\ndef BokehApplication(applications, server, prefix=\"/\", template_variables=None):\n template_variables = template_variables or {}\n prefix = \"/\" + prefix.strip(\"/\") + \"/\" if prefix else \"/\"\n\n extra = {\"prefix\": prefix, **template_variables}\n\n funcs = {k: functools.partial(v, server, extra) for k, v in applications.items()}\n apps = {k: 
Application(FunctionHandler(v)) for k, v in funcs.items()}\n\n kwargs = dask.config.get(\"distributed.scheduler.dashboard.bokeh-application\").copy()\n extra_websocket_origins = create_hosts_allowlist(\n kwargs.pop(\"allow_websocket_origin\"), server.http_server.port\n )\n\n return BokehTornado(\n apps,\n prefix=prefix,\n use_index=False,\n extra_websocket_origins=extra_websocket_origins,\n **kwargs,\n )\n", "path": "distributed/dashboard/core.py"}]}
1,490
293
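The patch works because Bokeh 3.3 began resolving static-resource URLs as absolute paths unless told otherwise; routing every `resources()` call through a fixed `absolute_url` (and passing `absolute_url=""` to the Tornado app) restores the relative `static/js/...` links that sub-path proxies need. The same idea in isolation — this mirrors the subclass from the patch and is a sketch, not a complete server setup:

```python
from __future__ import annotations

from bokeh.resources import Resources
from bokeh.server.server import BokehTornado


class RelativeResourcesBokehTornado(BokehTornado):
    """Compute resources against the app's configured absolute_url ("") so
    generated <script src="..."> tags stay relative behind a reverse proxy."""

    def resources(self, absolute_url: str | bool | None = True) -> Resources:
        return super().resources(absolute_url)
```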
gh_patches_debug_64681
rasdani/github-patches
git_diff
chainer__chainer-751
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> `cupy.array_split` doesn't accept empty indecies ``` >>> x=cupy.array([1]) >>> cupy.array_split(x, []) Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/home/unno/git/chainer/cupy/manipulation/split.py", line 32, in array_split ret.append(ary[skip + (slice(index, size),)]) UnboundLocalError: local variable 'index' referenced before assignment ``` </issue> <code> [start of cupy/manipulation/split.py] 1 import numpy 2 import six 3 4 5 def array_split(ary, indices_or_sections, axis=0): 6 """Splits an array into multiple sub arrays along a given axis. 7 8 This function is almost equivalent to :func:`cupy.split`. The only 9 difference is that this function allows an integer sections that does not 10 evenly divide the axis. 11 12 .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.array_split` 13 14 """ 15 if ary.ndim <= axis: 16 raise IndexError('Axis exceeds ndim') 17 size = ary.shape[axis] 18 19 if numpy.isscalar(indices_or_sections): 20 each_size = (size - 1) // indices_or_sections + 1 21 indices = [i * each_size 22 for i in six.moves.range(1, indices_or_sections)] 23 else: 24 indices = indices_or_sections 25 26 skip = (slice(None),) * axis 27 ret = [] 28 i = 0 29 for index in indices: 30 ret.append(ary[skip + (slice(i, index),)]) 31 i = index 32 ret.append(ary[skip + (slice(index, size),)]) 33 34 return ret 35 36 37 def dsplit(ary, indices_or_sections): 38 """Splits an array into multiple sub arrays along the third axis. 39 40 This is equivalent to ``split`` with ``axis=2``. 41 42 .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.dsplit` 43 44 """ 45 if ary.ndim <= 2: 46 raise ValueError('Cannot dsplit an array with less than 3 dimensions') 47 return split(ary, indices_or_sections, 2) 48 49 50 def hsplit(ary, indices_or_sections): 51 """Splits an array into multiple sub arrays horizontally. 52 53 This is equivalent to ``split`` with ``axis=0`` if ``ary`` has one 54 dimension, and otherwise that with ``axis=1``. 55 56 .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.hsplit` 57 58 """ 59 if ary.ndim == 0: 60 raise ValueError('Cannot hsplit a zero-dimensional array') 61 if ary.ndim == 1: 62 return split(ary, indices_or_sections, 0) 63 else: 64 return split(ary, indices_or_sections, 1) 65 66 67 def split(ary, indices_or_sections, axis=0): 68 """Splits an array into multiple sub arrays along a given axis. 69 70 Args: 71 ary (cupy.ndarray): Array to split. 72 indices_or_sections (int or sequence of ints): A value indicating how 73 to divide the axis. If it is an integer, then is treated as the 74 number of sections, and the axis is evenly divided. Otherwise, 75 the integers indicate indices to split at. Note that the sequence 76 on the device memory is not allowed. 77 axis (int): Axis along which the array is split. 78 79 Returns: 80 A list of sub arrays. Eacy array is a view of the corresponding input 81 array. 82 83 .. 
seealso:: :func:`numpy.split` 84 85 """ 86 if ary.ndim <= axis: 87 raise IndexError('Axis exceeds ndim') 88 size = ary.shape[axis] 89 90 if numpy.isscalar(indices_or_sections): 91 if size % indices_or_sections != 0: 92 raise ValueError( 93 'indices_or_sections must divide the size along the axes.\n' 94 'If you want to split the array into non-equally-sized ' 95 'arrays, use array_split instead.') 96 return array_split(ary, indices_or_sections, axis) 97 98 99 def vsplit(ary, indices_or_sections): 100 """Splits an array into multiple sub arrays along the first axis. 101 102 This is equivalent to ``split`` with ``axis=0``. 103 104 .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.dsplit` 105 106 """ 107 if ary.ndim <= 1: 108 raise ValueError('Cannot vsplit an array with less than 2 dimensions') 109 return split(ary, indices_or_sections, 0) 110 [end of cupy/manipulation/split.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/cupy/manipulation/split.py b/cupy/manipulation/split.py --- a/cupy/manipulation/split.py +++ b/cupy/manipulation/split.py @@ -23,6 +23,9 @@ else: indices = indices_or_sections + if len(indices) == 0: + return [ary] + skip = (slice(None),) * axis ret = [] i = 0
{"golden_diff": "diff --git a/cupy/manipulation/split.py b/cupy/manipulation/split.py\n--- a/cupy/manipulation/split.py\n+++ b/cupy/manipulation/split.py\n@@ -23,6 +23,9 @@\n else:\n indices = indices_or_sections\n \n+ if len(indices) == 0:\n+ return [ary]\n+\n skip = (slice(None),) * axis\n ret = []\n i = 0\n", "issue": "`cupy.array_split` doesn't accept empty indecies\n```\n>>> x=cupy.array([1])\n>>> cupy.array_split(x, [])\nTraceback (most recent call last):\n File \"<stdin>\", line 1, in <module>\n File \"/home/unno/git/chainer/cupy/manipulation/split.py\", line 32, in array_split\n ret.append(ary[skip + (slice(index, size),)])\nUnboundLocalError: local variable 'index' referenced before assignment\n```\n\n", "before_files": [{"content": "import numpy\nimport six\n\n\ndef array_split(ary, indices_or_sections, axis=0):\n \"\"\"Splits an array into multiple sub arrays along a given axis.\n\n This function is almost equivalent to :func:`cupy.split`. The only\n difference is that this function allows an integer sections that does not\n evenly divide the axis.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.array_split`\n\n \"\"\"\n if ary.ndim <= axis:\n raise IndexError('Axis exceeds ndim')\n size = ary.shape[axis]\n\n if numpy.isscalar(indices_or_sections):\n each_size = (size - 1) // indices_or_sections + 1\n indices = [i * each_size\n for i in six.moves.range(1, indices_or_sections)]\n else:\n indices = indices_or_sections\n\n skip = (slice(None),) * axis\n ret = []\n i = 0\n for index in indices:\n ret.append(ary[skip + (slice(i, index),)])\n i = index\n ret.append(ary[skip + (slice(index, size),)])\n\n return ret\n\n\ndef dsplit(ary, indices_or_sections):\n \"\"\"Splits an array into multiple sub arrays along the third axis.\n\n This is equivalent to ``split`` with ``axis=2``.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.dsplit`\n\n \"\"\"\n if ary.ndim <= 2:\n raise ValueError('Cannot dsplit an array with less than 3 dimensions')\n return split(ary, indices_or_sections, 2)\n\n\ndef hsplit(ary, indices_or_sections):\n \"\"\"Splits an array into multiple sub arrays horizontally.\n\n This is equivalent to ``split`` with ``axis=0`` if ``ary`` has one\n dimension, and otherwise that with ``axis=1``.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.hsplit`\n\n \"\"\"\n if ary.ndim == 0:\n raise ValueError('Cannot hsplit a zero-dimensional array')\n if ary.ndim == 1:\n return split(ary, indices_or_sections, 0)\n else:\n return split(ary, indices_or_sections, 1)\n\n\ndef split(ary, indices_or_sections, axis=0):\n \"\"\"Splits an array into multiple sub arrays along a given axis.\n\n Args:\n ary (cupy.ndarray): Array to split.\n indices_or_sections (int or sequence of ints): A value indicating how\n to divide the axis. If it is an integer, then is treated as the\n number of sections, and the axis is evenly divided. Otherwise,\n the integers indicate indices to split at. Note that the sequence\n on the device memory is not allowed.\n axis (int): Axis along which the array is split.\n\n Returns:\n A list of sub arrays. Eacy array is a view of the corresponding input\n array.\n\n .. 
seealso:: :func:`numpy.split`\n\n \"\"\"\n if ary.ndim <= axis:\n raise IndexError('Axis exceeds ndim')\n size = ary.shape[axis]\n\n if numpy.isscalar(indices_or_sections):\n if size % indices_or_sections != 0:\n raise ValueError(\n 'indices_or_sections must divide the size along the axes.\\n'\n 'If you want to split the array into non-equally-sized '\n 'arrays, use array_split instead.')\n return array_split(ary, indices_or_sections, axis)\n\n\ndef vsplit(ary, indices_or_sections):\n \"\"\"Splits an array into multiple sub arrays along the first axis.\n\n This is equivalent to ``split`` with ``axis=0``.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.dsplit`\n\n \"\"\"\n if ary.ndim <= 1:\n raise ValueError('Cannot vsplit an array with less than 2 dimensions')\n return split(ary, indices_or_sections, 0)\n", "path": "cupy/manipulation/split.py"}]}
1,756
105
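The traceback here is the classic empty-loop trap: `for index in indices` never binds `index` when `indices` is empty, so the trailing `ary[skip + (slice(index, size),)]` references an unbound local. The guard in the patch sidesteps the loop entirely; a minimal pure-Python reproduction of both the bug and the fix, using a hypothetical helper over plain lists instead of CuPy arrays:

```python
def split_after(values, indices):
    if len(indices) == 0:   # the patch's guard: nothing to split at
        return [values]
    parts, start = [], 0
    for index in indices:
        parts.append(values[start:index])
        start = index
    parts.append(values[index:])  # safe only because the loop ran at least once
    return parts


print(split_after([1], []))         # [[1]]   (previously: UnboundLocalError)
print(split_after([1, 2, 3], [1]))  # [[1], [2, 3]]
```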
gh_patches_debug_303
rasdani/github-patches
git_diff
pyinstaller__pyinstaller-2347
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> gi._gobject.option is not part of pygobject The [GObject hook](https://github.com/pyinstaller/pyinstaller/blob/develop/PyInstaller/hooks/hook-gi.repository.GObject.py) adds a `hiddenimport` for `gi._gobject.option` however `gi/_gobject/option.py` is not part of pygobject. This leads to the following warning when packaging a Gtk application: ``` 4813 INFO: Loading module hook "hook-gi.py"... 4818 INFO: Loading module hook "hook-gi.repository.GObject.py"... 4926 INFO: Processing pre-safe import module hook gi.repository.GLib 4963 WARNING: Hidden import "gi._gobject.option" not found! ``` Browsing through the [pygobject git history](https://git.gnome.org/browse/pygobject/), I find commit [8afd7e8](https://git.gnome.org/browse/pygobject/commit/gi/_option.py?id=8afd7e880a72a44e6ea46c763bab82146fd75c96) which moved `gi/_glib/option.py` into `gi/_option.py` Replacing the `hiddenimport` to `hiddenimports += ['gi._option', 'gi._gobject']` silences the issue. However, I do not yet understand enough about pygobject and pyinstaller to know if this is the right thing to do. </issue> <code> [start of PyInstaller/hooks/hook-gi.repository.GObject.py] 1 #----------------------------------------------------------------------------- 2 # Copyright (c) 2005-2016, PyInstaller Development Team. 3 # 4 # Distributed under the terms of the GNU General Public License with exception 5 # for distributing bootloader. 6 # 7 # The full license is in the file COPYING.txt, distributed with this software. 8 #----------------------------------------------------------------------------- 9 """ 10 Import hook for GObject https://developer.gnome.org/gobject/stable/ from the GLib 11 library https://wiki.gnome.org/Projects/GLib introspected through PyGobject https://wiki.gnome.org/PyGObject 12 via the GObject Introspection middleware layer https://wiki.gnome.org/Projects/GObjectIntrospection 13 14 Tested with GLib 2.44.1, PyGObject 3.16.2, and GObject Introspection 1.44.0 on Mac OS X 10.10 and 15 GLib 2.42.2, PyGObject 3.14.0, and GObject Introspection 1.42 on Windows 7 16 """ 17 18 from PyInstaller.utils.hooks import get_gi_typelibs 19 20 binaries, datas, hiddenimports = get_gi_typelibs('GObject', '2.0') 21 22 hiddenimports += ['gi._gobject.option', 'gi._gobject'] 23 [end of PyInstaller/hooks/hook-gi.repository.GObject.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/PyInstaller/hooks/hook-gi.repository.GObject.py b/PyInstaller/hooks/hook-gi.repository.GObject.py --- a/PyInstaller/hooks/hook-gi.repository.GObject.py +++ b/PyInstaller/hooks/hook-gi.repository.GObject.py @@ -19,4 +19,4 @@ binaries, datas, hiddenimports = get_gi_typelibs('GObject', '2.0') -hiddenimports += ['gi._gobject.option', 'gi._gobject'] +hiddenimports += ['gi._gobject']
{"golden_diff": "diff --git a/PyInstaller/hooks/hook-gi.repository.GObject.py b/PyInstaller/hooks/hook-gi.repository.GObject.py\n--- a/PyInstaller/hooks/hook-gi.repository.GObject.py\n+++ b/PyInstaller/hooks/hook-gi.repository.GObject.py\n@@ -19,4 +19,4 @@\n \n binaries, datas, hiddenimports = get_gi_typelibs('GObject', '2.0')\n \n-hiddenimports += ['gi._gobject.option', 'gi._gobject']\n+hiddenimports += ['gi._gobject']\n", "issue": "gi._gobject.option is not part of pygobject\nThe [GObject hook](https://github.com/pyinstaller/pyinstaller/blob/develop/PyInstaller/hooks/hook-gi.repository.GObject.py) adds a `hiddenimport` for `gi._gobject.option` however `gi/_gobject/option.py` is not part of pygobject.\r\n\r\nThis leads to the following warning when packaging a Gtk application:\r\n```\r\n4813 INFO: Loading module hook \"hook-gi.py\"...\r\n4818 INFO: Loading module hook \"hook-gi.repository.GObject.py\"...\r\n4926 INFO: Processing pre-safe import module hook gi.repository.GLib\r\n4963 WARNING: Hidden import \"gi._gobject.option\" not found!\r\n```\r\n\r\nBrowsing through the [pygobject git history](https://git.gnome.org/browse/pygobject/), I find commit [8afd7e8](https://git.gnome.org/browse/pygobject/commit/gi/_option.py?id=8afd7e880a72a44e6ea46c763bab82146fd75c96) which moved `gi/_glib/option.py` into `gi/_option.py`\r\n\r\nReplacing the `hiddenimport` to `hiddenimports += ['gi._option', 'gi._gobject']` silences the issue. However, I do not yet understand enough about pygobject and pyinstaller to know if this is the right thing to do.\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2005-2016, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\"\"\"\nImport hook for GObject https://developer.gnome.org/gobject/stable/ from the GLib\nlibrary https://wiki.gnome.org/Projects/GLib introspected through PyGobject https://wiki.gnome.org/PyGObject\nvia the GObject Introspection middleware layer https://wiki.gnome.org/Projects/GObjectIntrospection\n\nTested with GLib 2.44.1, PyGObject 3.16.2, and GObject Introspection 1.44.0 on Mac OS X 10.10 and\nGLib 2.42.2, PyGObject 3.14.0, and GObject Introspection 1.42 on Windows 7\n\"\"\"\n\nfrom PyInstaller.utils.hooks import get_gi_typelibs\n\nbinaries, datas, hiddenimports = get_gi_typelibs('GObject', '2.0')\n\nhiddenimports += ['gi._gobject.option', 'gi._gobject']\n", "path": "PyInstaller/hooks/hook-gi.repository.GObject.py"}]}
1182
123
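The record above drops `gi._gobject.option` because pygobject moved `gi/_glib/option.py` to `gi/_option.py`. As a hedged aside (not part of the PyInstaller hook itself, and assuming pygobject is importable in the build environment), the stdlib can confirm which hidden-import candidates actually resolve before they are listed:

```python
# Check which hidden-import candidates resolve; find_spec() returns None for a
# missing submodule of an importable package, so stale names stand out.
import importlib.util

candidates = ["gi._gobject", "gi._gobject.option", "gi._option"]
resolved = [name for name in candidates if importlib.util.find_spec(name) is not None]
print(resolved)  # on current pygobject this omits the removed gi._gobject.option
```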
gh_patches_debug_3782
rasdani/github-patches
git_diff
bookwyrm-social__bookwyrm-2314
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Normalize stored ISNI The `ISNI` field should always be in the same format (without spaces), but right now sometimes the field is stored with spaces between the digits. There should be validation when the author is saved that cleans this value, similar to how ISBNs are validated </issue> <code> [start of bookwyrm/models/author.py] 1 """ database schema for info about authors """ 2 import re 3 from django.contrib.postgres.indexes import GinIndex 4 from django.core.cache import cache 5 from django.core.cache.utils import make_template_fragment_key 6 from django.db import models 7 8 from bookwyrm import activitypub 9 from bookwyrm.settings import DOMAIN 10 11 from .book import BookDataModel 12 from . import fields 13 14 15 class Author(BookDataModel): 16 """basic biographic info""" 17 18 wikipedia_link = fields.CharField( 19 max_length=255, blank=True, null=True, deduplication_field=True 20 ) 21 isni = fields.CharField( 22 max_length=255, blank=True, null=True, deduplication_field=True 23 ) 24 gutenberg_id = fields.CharField( 25 max_length=255, blank=True, null=True, deduplication_field=True 26 ) 27 # idk probably other keys would be useful here? 28 born = fields.DateTimeField(blank=True, null=True) 29 died = fields.DateTimeField(blank=True, null=True) 30 name = fields.CharField(max_length=255) 31 aliases = fields.ArrayField( 32 models.CharField(max_length=255), blank=True, default=list 33 ) 34 bio = fields.HtmlField(null=True, blank=True) 35 36 def save(self, *args, **kwargs): 37 """clear related template caches""" 38 # clear template caches 39 if self.id: 40 cache_keys = [ 41 make_template_fragment_key("titleby", [book]) 42 for book in self.book_set.values_list("id", flat=True) 43 ] 44 cache.delete_many(cache_keys) 45 return super().save(*args, **kwargs) 46 47 @property 48 def isni_link(self): 49 """generate the url from the isni id""" 50 clean_isni = re.sub(r"\s", "", self.isni) 51 return f"https://isni.org/isni/{clean_isni}" 52 53 @property 54 def openlibrary_link(self): 55 """generate the url from the openlibrary id""" 56 return f"https://openlibrary.org/authors/{self.openlibrary_key}" 57 58 def get_remote_id(self): 59 """editions and works both use "book" instead of model_name""" 60 return f"https://{DOMAIN}/author/{self.id}" 61 62 activity_serializer = activitypub.Author 63 64 class Meta: 65 """sets up postgres GIN index field""" 66 67 indexes = (GinIndex(fields=["search_vector"]),) 68 [end of bookwyrm/models/author.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/bookwyrm/models/author.py b/bookwyrm/models/author.py --- a/bookwyrm/models/author.py +++ b/bookwyrm/models/author.py @@ -42,6 +42,11 @@ for book in self.book_set.values_list("id", flat=True) ] cache.delete_many(cache_keys) + + # normalize isni format + if self.isni: + self.isni = re.sub(r"\s", "", self.isni) + return super().save(*args, **kwargs) @property
{"golden_diff": "diff --git a/bookwyrm/models/author.py b/bookwyrm/models/author.py\n--- a/bookwyrm/models/author.py\n+++ b/bookwyrm/models/author.py\n@@ -42,6 +42,11 @@\n for book in self.book_set.values_list(\"id\", flat=True)\n ]\n cache.delete_many(cache_keys)\n+\n+ # normalize isni format\n+ if self.isni:\n+ self.isni = re.sub(r\"\\s\", \"\", self.isni)\n+\n return super().save(*args, **kwargs)\n \n @property\n", "issue": "Normalize stored ISNI\nThe `ISNI` field should always be in the same format (without spaces), but right now sometimes the field is stored with spaces between the digits. There should be validation when the author is saved that cleans this value, similar to how ISBNs are validated\n", "before_files": [{"content": "\"\"\" database schema for info about authors \"\"\"\nimport re\nfrom django.contrib.postgres.indexes import GinIndex\nfrom django.core.cache import cache\nfrom django.core.cache.utils import make_template_fragment_key\nfrom django.db import models\n\nfrom bookwyrm import activitypub\nfrom bookwyrm.settings import DOMAIN\n\nfrom .book import BookDataModel\nfrom . import fields\n\n\nclass Author(BookDataModel):\n \"\"\"basic biographic info\"\"\"\n\n wikipedia_link = fields.CharField(\n max_length=255, blank=True, null=True, deduplication_field=True\n )\n isni = fields.CharField(\n max_length=255, blank=True, null=True, deduplication_field=True\n )\n gutenberg_id = fields.CharField(\n max_length=255, blank=True, null=True, deduplication_field=True\n )\n # idk probably other keys would be useful here?\n born = fields.DateTimeField(blank=True, null=True)\n died = fields.DateTimeField(blank=True, null=True)\n name = fields.CharField(max_length=255)\n aliases = fields.ArrayField(\n models.CharField(max_length=255), blank=True, default=list\n )\n bio = fields.HtmlField(null=True, blank=True)\n\n def save(self, *args, **kwargs):\n \"\"\"clear related template caches\"\"\"\n # clear template caches\n if self.id:\n cache_keys = [\n make_template_fragment_key(\"titleby\", [book])\n for book in self.book_set.values_list(\"id\", flat=True)\n ]\n cache.delete_many(cache_keys)\n return super().save(*args, **kwargs)\n\n @property\n def isni_link(self):\n \"\"\"generate the url from the isni id\"\"\"\n clean_isni = re.sub(r\"\\s\", \"\", self.isni)\n return f\"https://isni.org/isni/{clean_isni}\"\n\n @property\n def openlibrary_link(self):\n \"\"\"generate the url from the openlibrary id\"\"\"\n return f\"https://openlibrary.org/authors/{self.openlibrary_key}\"\n\n def get_remote_id(self):\n \"\"\"editions and works both use \"book\" instead of model_name\"\"\"\n return f\"https://{DOMAIN}/author/{self.id}\"\n\n activity_serializer = activitypub.Author\n\n class Meta:\n \"\"\"sets up postgres GIN index field\"\"\"\n\n indexes = (GinIndex(fields=[\"search_vector\"]),)\n", "path": "bookwyrm/models/author.py"}]}
1238
128
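The fix in the record above normalizes the ISNI inside `Author.save()`. A minimal standalone sketch of just that step (the `Author` model itself belongs to BookWyrm; the value below is only a format example):

```python
# Strip all whitespace so every stored ISNI uses the same compact format,
# mirroring the regex the isni_link property already applies.
import re

def normalize_isni(isni):
    return re.sub(r"\s", "", isni) if isni else isni

assert normalize_isni("0000 0001 2146 438X") == "000000012146438X"
```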
gh_patches_debug_2665
rasdani/github-patches
git_diff
opsdroid__opsdroid-946
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> PyPI deployments are failing Looks like PyPI deployments are failing. `v0.15.1` and `v0.15.2` haven't gone out. ``` HTTPError: 400 Client Error: The description failed to render in the default format of reStructuredText. See https://pypi.org/help/#description-content-type for more information. for url: https://upload.pypi.org/legacy/ ``` PyPI deployments are failing Looks like PyPI deployments are failing. `v0.15.1` and `v0.15.2` haven't gone out. ``` HTTPError: 400 Client Error: The description failed to render in the default format of reStructuredText. See https://pypi.org/help/#description-content-type for more information. for url: https://upload.pypi.org/legacy/ ``` </issue> <code> [start of setup.py] 1 #!/usr/bin/env python3 2 import os 3 from setuptools import setup, find_packages 4 from setuptools.command.build_py import build_py 5 from setuptools.command.sdist import sdist 6 from setuptools.command.develop import develop 7 import versioneer 8 9 PACKAGE_NAME = 'opsdroid' 10 HERE = os.path.abspath(os.path.dirname(__file__)) 11 README = open(os.path.join(HERE, 'README.md'), encoding="utf8").read() 12 13 PACKAGES = find_packages(exclude=['tests', 'tests.*', 'modules', 14 'modules.*', 'docs', 'docs.*']) 15 16 17 # For now we simply define the install_requires based on the contents 18 # of requirements.txt. In the future, install_requires may become much 19 # looser than the (automatically) resolved requirements.txt. 20 with open(os.path.join(HERE, 'requirements.txt'), 'r') as fh: 21 REQUIRES = [line.strip() for line in fh] 22 23 24 class Develop(develop): 25 """Custom `develop` command to always build mo files on install -e.""" 26 27 def run(self): 28 self.run_command('compile_catalog') 29 develop.run(self) # old style class 30 31 32 class BuildPy(build_py): 33 """Custom `build_py` command to always build mo files for wheels.""" 34 35 def run(self): 36 self.run_command('compile_catalog') 37 build_py.run(self) # old style class 38 39 40 class Sdist(sdist): 41 """Custom `sdist` command to ensure that mo files are always created.""" 42 43 def run(self): 44 self.run_command('compile_catalog') 45 sdist.run(self) # old style class 46 47 48 setup( 49 name=PACKAGE_NAME, 50 version=versioneer.get_version(), 51 license='Apache License 2.0', 52 url='https://opsdroid.github.io/', 53 download_url='https://github.com/opsdroid/opsdroid/releases', 54 author='Jacob Tomlinson', 55 author_email='[email protected]', 56 description='An open source ChatOps bot framework.', 57 long_description=README, 58 packages=PACKAGES, 59 include_package_data=True, 60 zip_safe=False, 61 platforms='any', 62 classifiers=[ 63 'Development Status :: 4 - Beta', 64 'Environment :: Console', 65 'Framework :: AsyncIO', 66 'Intended Audience :: Developers', 67 'Intended Audience :: System Administrators', 68 'Intended Audience :: Information Technology', 69 'License :: OSI Approved :: Apache Software License', 70 'Programming Language :: Python', 71 'Programming Language :: Python :: 3', 72 'Programming Language :: Python :: 3 :: Only', 73 'Programming Language :: Python :: 3.5', 74 'Programming Language :: Python :: 3.6', 75 'Programming Language :: Python :: 3.7', 76 'Topic :: Communications :: Chat', 77 'Topic :: Scientific/Engineering :: Artificial Intelligence', 78 'Topic :: Software Development :: Libraries :: Python Modules' 79 ], 80 install_requires=REQUIRES, 81 test_suite='tests', 82 keywords=[ 83 'bot', 84 
'bot-framework', 85 'opsdroid', 86 'botkit', 87 'python3', 88 'asyncio', 89 'chatops', 90 'devops', 91 'nlu' 92 ], 93 setup_requires=['Babel'], 94 cmdclass=versioneer.get_cmdclass({'sdist': Sdist, 95 'build_py': BuildPy, 96 'develop': Develop}), 97 entry_points={ 98 'console_scripts': [ 99 'opsdroid = opsdroid.__main__:main' 100 ] 101 }, 102 ) 103 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -55,6 +55,7 @@ author_email='[email protected]', description='An open source ChatOps bot framework.', long_description=README, + long_description_content_type='text/markdown', packages=PACKAGES, include_package_data=True, zip_safe=False,
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -55,6 +55,7 @@\n author_email='[email protected]',\n description='An open source ChatOps bot framework.',\n long_description=README,\n+ long_description_content_type='text/markdown',\n packages=PACKAGES,\n include_package_data=True,\n zip_safe=False,\n", "issue": "PyPI deployments are failing\nLooks like PyPI deployments are failing. `v0.15.1` and `v0.15.2` haven't gone out.\r\n\r\n```\r\nHTTPError: 400 Client Error: The description failed to render in the default format of reStructuredText. See https://pypi.org/help/#description-content-type for more information. for url: https://upload.pypi.org/legacy/\r\n```\nPyPI deployments are failing\nLooks like PyPI deployments are failing. `v0.15.1` and `v0.15.2` haven't gone out.\r\n\r\n```\r\nHTTPError: 400 Client Error: The description failed to render in the default format of reStructuredText. See https://pypi.org/help/#description-content-type for more information. for url: https://upload.pypi.org/legacy/\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python3\nimport os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.build_py import build_py\nfrom setuptools.command.sdist import sdist\nfrom setuptools.command.develop import develop\nimport versioneer\n\nPACKAGE_NAME = 'opsdroid'\nHERE = os.path.abspath(os.path.dirname(__file__))\nREADME = open(os.path.join(HERE, 'README.md'), encoding=\"utf8\").read()\n\nPACKAGES = find_packages(exclude=['tests', 'tests.*', 'modules',\n 'modules.*', 'docs', 'docs.*'])\n\n\n# For now we simply define the install_requires based on the contents\n# of requirements.txt. In the future, install_requires may become much\n# looser than the (automatically) resolved requirements.txt.\nwith open(os.path.join(HERE, 'requirements.txt'), 'r') as fh:\n REQUIRES = [line.strip() for line in fh]\n\n\nclass Develop(develop):\n \"\"\"Custom `develop` command to always build mo files on install -e.\"\"\"\n\n def run(self):\n self.run_command('compile_catalog')\n develop.run(self) # old style class\n\n\nclass BuildPy(build_py):\n \"\"\"Custom `build_py` command to always build mo files for wheels.\"\"\"\n\n def run(self):\n self.run_command('compile_catalog')\n build_py.run(self) # old style class\n\n\nclass Sdist(sdist):\n \"\"\"Custom `sdist` command to ensure that mo files are always created.\"\"\"\n\n def run(self):\n self.run_command('compile_catalog')\n sdist.run(self) # old style class\n\n\nsetup(\n name=PACKAGE_NAME,\n version=versioneer.get_version(),\n license='Apache License 2.0',\n url='https://opsdroid.github.io/',\n download_url='https://github.com/opsdroid/opsdroid/releases',\n author='Jacob Tomlinson',\n author_email='[email protected]',\n description='An open source ChatOps bot framework.',\n long_description=README,\n packages=PACKAGES,\n include_package_data=True,\n zip_safe=False,\n platforms='any',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Environment :: Console',\n 'Framework :: AsyncIO',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Intended Audience :: Information Technology',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3 :: Only',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Communications :: Chat',\n 'Topic :: 
Scientific/Engineering :: Artificial Intelligence',\n 'Topic :: Software Development :: Libraries :: Python Modules'\n ],\n install_requires=REQUIRES,\n test_suite='tests',\n keywords=[\n 'bot',\n 'bot-framework',\n 'opsdroid',\n 'botkit',\n 'python3',\n 'asyncio',\n 'chatops',\n 'devops',\n 'nlu'\n ],\n setup_requires=['Babel'],\n cmdclass=versioneer.get_cmdclass({'sdist': Sdist,\n 'build_py': BuildPy,\n 'develop': Develop}),\n entry_points={\n 'console_scripts': [\n 'opsdroid = opsdroid.__main__:main'\n ]\n },\n)\n", "path": "setup.py"}]}
1668
90
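The one-line fix above works because PyPI defaults to reStructuredText when no description content type is declared, so a Markdown README fails to render and the upload is rejected. A minimal sketch of the relevant `setup()` fields (placeholder metadata, not opsdroid's):

```python
# Declaring the content type lets PyPI render README.md as Markdown instead
# of attempting (and failing) to parse it as reStructuredText.
from setuptools import setup

setup(
    name="example-package",
    version="0.1.0",
    long_description=open("README.md", encoding="utf8").read(),
    long_description_content_type="text/markdown",  # the field the fix adds
)
```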
gh_patches_debug_26907
rasdani/github-patches
git_diff
google__turbinia-696
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Generate version data from tags and commits Today we have a hard-coded value in `turbinia/__init__.py`, but it would be nice to generate the version number from the current TAG for releases and from the git commit id when there is no TAG (ie. when running from master or a different branch). </issue> <code> [start of setup.py] 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 # 4 # Copyright 2017 Google Inc. 5 # 6 # Licensed under the Apache License, Version 2.0 (the "License"); 7 # you may not use this file except in compliance with the License. 8 # You may obtain a copy of the License at 9 # 10 # http://www.apache.org/licenses/LICENSE-2.0 11 # 12 # Unless required by applicable law or agreed to in writing, software 13 # distributed under the License is distributed on an "AS IS" BASIS, 14 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 # See the License for the specific language governing permissions and 16 # limitations under the License. 17 """This is the setup file for the project.""" 18 19 # yapf: disable 20 21 from __future__ import unicode_literals 22 23 import sys 24 25 from setuptools import find_packages 26 from setuptools import setup 27 28 29 # make sure turbinia is in path 30 sys.path.insert(0, '.') 31 32 import turbinia # pylint: disable=wrong-import-position 33 34 turbinia_description = ( 35 'Turbinia is an open-source framework for deploying, managing, and running' 36 'forensic workloads on cloud platforms. It is intended to automate running ' 37 'of common forensic processing tools (i.e. Plaso, TSK, strings, etc) to ' 38 'help with processing evidence in the Cloud, scaling the processing of ' 39 'large amounts of evidence, and decreasing response time by parallelizing' 40 'processing where possible.') 41 42 requirements = [] 43 with open('requirements.txt','r') as f: 44 requirements = f.read().splitlines() 45 setup( 46 name='turbinia', 47 version=turbinia.__version__, 48 description='Automation and Scaling of Digital Forensics Tools', 49 long_description=turbinia_description, 50 license='Apache License, Version 2.0', 51 url='http://turbinia.plumbing/', 52 maintainer='Turbinia development team', 53 maintainer_email='[email protected]', 54 classifiers=[ 55 'Development Status :: 4 - Beta', 56 'Environment :: Console', 57 'Operating System :: OS Independent', 58 'Programming Language :: Python', 59 ], 60 packages=find_packages(), 61 include_package_data=True, 62 zip_safe=False, 63 entry_points={'console_scripts': ['turbiniactl=turbinia.turbiniactl:main']}, 64 install_requires=requirements, 65 extras_require={ 66 'dev': ['mock', 'nose', 'yapf', 'celery~=4.1', 'coverage'], 67 'local': ['celery~=4.1', 'kombu~=4.1', 'redis~=3.0'], 68 'worker': ['docker-explorer>=20191104', 'plaso>=20200430', 'pyhindsight>=20200607'] 69 } 70 ) 71 [end of setup.py] [start of turbinia/__init__.py] 1 # -*- coding: utf-8 -*- 2 # Copyright 2015 Google Inc. 3 # 4 # Licensed under the Apache License, Version 2.0 (the "License"); 5 # you may not use this file except in compliance with the License. 6 # You may obtain a copy of the License at 7 # 8 # http://www.apache.org/licenses/LICENSE-2.0 9 # 10 # Unless required by applicable law or agreed to in writing, software 11 # distributed under the License is distributed on an "AS IS" BASIS, 12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 # See the License for the specific language governing permissions and 14 # limitations under the License. 15 """Main Turbinia application.""" 16 17 __version__ = '20190819' 18 19 import logging 20 log = logging.getLogger('turbinia') 21 22 23 def log_and_report(message, trace): 24 """Log an error and if enabled, send to GCP Error Reporting API. 25 26 Args: 27 message(str): The user defined message to log. 28 trace(str): The error traceback message to log. 29 """ 30 from turbinia import config 31 32 log.error(message) 33 log.error(trace) 34 # If GCP Error Reporting is enabled. 35 config.LoadConfig() 36 if config.STACKDRIVER_TRACEBACK: 37 # Only load google_cloud if needed 38 from turbinia.lib import google_cloud 39 client = google_cloud.setup_stackdriver_traceback(config.TURBINIA_PROJECT) 40 client.report_exception() 41 42 43 class TurbiniaException(Exception): 44 """Turbinia Exception class.""" 45 pass 46 [end of turbinia/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -44,7 +44,6 @@ requirements = f.read().splitlines() setup( name='turbinia', - version=turbinia.__version__, description='Automation and Scaling of Digital Forensics Tools', long_description=turbinia_description, license='Apache License, Version 2.0', @@ -66,5 +65,7 @@ 'dev': ['mock', 'nose', 'yapf', 'celery~=4.1', 'coverage'], 'local': ['celery~=4.1', 'kombu~=4.1', 'redis~=3.0'], 'worker': ['docker-explorer>=20191104', 'plaso>=20200430', 'pyhindsight>=20200607'] - } + }, + use_scm_version=True, + setup_requires=['setuptools_scm'] ) diff --git a/turbinia/__init__.py b/turbinia/__init__.py --- a/turbinia/__init__.py +++ b/turbinia/__init__.py @@ -14,11 +14,15 @@ # limitations under the License. """Main Turbinia application.""" -__version__ = '20190819' - import logging log = logging.getLogger('turbinia') +from pkg_resources import get_distribution, DistributionNotFound +try: + __version__ = get_distribution(__name__).version +except DistributionNotFound: + __version__ = "unknown" + def log_and_report(message, trace): """Log an error and if enabled, send to GCP Error Reporting API.
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -44,7 +44,6 @@\n requirements = f.read().splitlines()\n setup(\n name='turbinia',\n- version=turbinia.__version__,\n description='Automation and Scaling of Digital Forensics Tools',\n long_description=turbinia_description,\n license='Apache License, Version 2.0',\n@@ -66,5 +65,7 @@\n 'dev': ['mock', 'nose', 'yapf', 'celery~=4.1', 'coverage'],\n 'local': ['celery~=4.1', 'kombu~=4.1', 'redis~=3.0'],\n 'worker': ['docker-explorer>=20191104', 'plaso>=20200430', 'pyhindsight>=20200607']\n- }\n+ },\n+ use_scm_version=True,\n+ setup_requires=['setuptools_scm']\n )\ndiff --git a/turbinia/__init__.py b/turbinia/__init__.py\n--- a/turbinia/__init__.py\n+++ b/turbinia/__init__.py\n@@ -14,11 +14,15 @@\n # limitations under the License.\n \"\"\"Main Turbinia application.\"\"\"\n \n-__version__ = '20190819'\n-\n import logging\n log = logging.getLogger('turbinia')\n \n+from pkg_resources import get_distribution, DistributionNotFound\n+try:\n+ __version__ = get_distribution(__name__).version\n+except DistributionNotFound:\n+ __version__ = \"unknown\"\n+\n \n def log_and_report(message, trace):\n \"\"\"Log an error and if enabled, send to GCP Error Reporting API.\n", "issue": "Generate version data from tags and commits\nToday we have a hard-coded value in `turbinia/__init__.py`, but it would be nice to generate the version number from the current TAG for releases and from the git commit id when there is no TAG (ie. when running from master or a different branch).\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright 2017 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"This is the setup file for the project.\"\"\"\n\n# yapf: disable\n\nfrom __future__ import unicode_literals\n\nimport sys\n\nfrom setuptools import find_packages\nfrom setuptools import setup\n\n\n# make sure turbinia is in path\nsys.path.insert(0, '.')\n\nimport turbinia # pylint: disable=wrong-import-position\n\nturbinia_description = (\n 'Turbinia is an open-source framework for deploying, managing, and running'\n 'forensic workloads on cloud platforms. It is intended to automate running '\n 'of common forensic processing tools (i.e. 
Plaso, TSK, strings, etc) to '\n 'help with processing evidence in the Cloud, scaling the processing of '\n 'large amounts of evidence, and decreasing response time by parallelizing'\n 'processing where possible.')\n\nrequirements = []\nwith open('requirements.txt','r') as f:\n requirements = f.read().splitlines()\nsetup(\n name='turbinia',\n version=turbinia.__version__,\n description='Automation and Scaling of Digital Forensics Tools',\n long_description=turbinia_description,\n license='Apache License, Version 2.0',\n url='http://turbinia.plumbing/',\n maintainer='Turbinia development team',\n maintainer_email='[email protected]',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Environment :: Console',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n ],\n packages=find_packages(),\n include_package_data=True,\n zip_safe=False,\n entry_points={'console_scripts': ['turbiniactl=turbinia.turbiniactl:main']},\n install_requires=requirements,\n extras_require={\n 'dev': ['mock', 'nose', 'yapf', 'celery~=4.1', 'coverage'],\n 'local': ['celery~=4.1', 'kombu~=4.1', 'redis~=3.0'],\n 'worker': ['docker-explorer>=20191104', 'plaso>=20200430', 'pyhindsight>=20200607']\n }\n)\n", "path": "setup.py"}, {"content": "# -*- coding: utf-8 -*-\n# Copyright 2015 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Main Turbinia application.\"\"\"\n\n__version__ = '20190819'\n\nimport logging\nlog = logging.getLogger('turbinia')\n\n\ndef log_and_report(message, trace):\n \"\"\"Log an error and if enabled, send to GCP Error Reporting API.\n\n Args:\n message(str): The user defined message to log.\n trace(str): The error traceback message to log.\n \"\"\"\n from turbinia import config\n\n log.error(message)\n log.error(trace)\n # If GCP Error Reporting is enabled.\n config.LoadConfig()\n if config.STACKDRIVER_TRACEBACK:\n # Only load google_cloud if needed\n from turbinia.lib import google_cloud\n client = google_cloud.setup_stackdriver_traceback(config.TURBINIA_PROJECT)\n client.report_exception()\n\n\nclass TurbiniaException(Exception):\n \"\"\"Turbinia Exception class.\"\"\"\n pass\n", "path": "turbinia/__init__.py"}]}
1790
401
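The golden diff above replaces the hard-coded `__version__` with setuptools_scm, which derives the version from the current git tag for releases and from the commit id otherwise. A condensed sketch of that pattern, with placeholder names rather than turbinia's:

```python
# setup.py -- the version is computed from git metadata at build time.
from setuptools import setup

setup(
    name="example",
    use_scm_version=True,              # tag for releases, commit id otherwise
    setup_requires=["setuptools_scm"],
)

# example/__init__.py -- read the installed version back at runtime.
from pkg_resources import get_distribution, DistributionNotFound

try:
    __version__ = get_distribution("example").version
except DistributionNotFound:           # e.g. running from an uninstalled checkout
    __version__ = "unknown"
```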
gh_patches_debug_2116
rasdani/github-patches
git_diff
comic__grand-challenge.org-3383
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Viewer configuration does not show linking options **Describe the bug** The view and edit pages for viewer configurations no longer show options to set the linking configuration. **To Reproduce** Steps to reproduce the behavior: 1. Go to https://grand-challenge.org/viewer-configurations/demo-rse/ 2. Scroll down to 'Linking Configuration' The options displayed are duplicates of the 'Plugin and Tools' section. **Expected behavior** A clear and concise description of what you expected to happen. **Screenshots** If applicable, add screenshots to help explain your problem. **Desktop (please complete the following information):** - OS: [e.g. iOS] - Browser [e.g. chrome, safari] - Version [e.g. 22] **Smartphone (please complete the following information):** - Device: [e.g. iPhone6] - OS: [e.g. iOS8.1] - Browser [e.g. stock browser, safari] - Version [e.g. 22] **Additional context** Add any other context about the problem here. </issue> <code> [start of app/grandchallenge/workstation_configs/forms.py] 1 from crispy_forms.helper import FormHelper 2 from crispy_forms.layout import Fieldset, Layout, Submit 3 from django.forms import ModelForm 4 from django_select2.forms import Select2MultipleWidget 5 6 from grandchallenge.core.forms import SaveFormInitMixin 7 from grandchallenge.core.widgets import ColorEditorWidget, JSONEditorWidget 8 from grandchallenge.workstation_configs.models import ( 9 KEY_BINDINGS_SCHEMA, 10 OVERLAY_SEGMENTS_SCHEMA, 11 WorkstationConfig, 12 ) 13 14 GENERAL_FIELDS = ( 15 "title", 16 "description", 17 "image_context", 18 "window_presets", 19 "default_window_preset", 20 "default_slab_thickness_mm", 21 "default_slab_render_method", 22 "default_orientation", 23 "default_image_interpolation", 24 "default_limit_view_area_to_image_volume", 25 "default_overlay_alpha", 26 "ghosting_slice_depth", 27 "overlay_luts", 28 "default_overlay_lut", 29 "default_overlay_interpolation", 30 "overlay_segments", 31 "key_bindings", 32 "default_zoom_scale", 33 "default_brush_size", 34 "default_annotation_color", 35 "default_annotation_line_width", 36 "auto_jump_center_of_gravity", 37 "point_bounding_box_size_mm", 38 ) 39 PLUGIN_FIELDS = ( 40 "show_image_info_plugin", 41 "show_display_plugin", 42 "show_image_switcher_plugin", 43 "show_algorithm_output_plugin", 44 "show_overlay_plugin", 45 "show_annotation_statistics_plugin", 46 "show_swivel_tool", 47 "show_invert_tool", 48 "show_flip_tool", 49 "show_window_level_tool", 50 "show_reset_tool", 51 "show_overlay_selection_tool", 52 "show_lut_selection_tool", 53 "show_annotation_counter_tool", 54 "enable_contrast_enhancement", 55 ) 56 LINKED_FIELDS = ( 57 "link_images", 58 "link_panning", 59 "link_zooming", 60 "link_slicing", 61 "link_orienting", 62 "link_windowing", 63 "link_inverting", 64 "link_flipping", 65 ) 66 67 68 class WorkstationConfigForm(SaveFormInitMixin, ModelForm): 69 def __init__(self, *args, read_only=False, **kwargs): 70 super().__init__(*args, **kwargs) 71 72 self.helper = FormHelper(self) 73 self.helper.layout = Layout( 74 Fieldset("", *GENERAL_FIELDS), 75 Fieldset( 76 "Plugins and Tools", 77 *PLUGIN_FIELDS, 78 css_class="border rounded px-2 my-4", 79 ), 80 Fieldset( 81 "Linking Configuration", 82 *PLUGIN_FIELDS, 83 css_class="border rounded px-2 my-4", 84 ), 85 ) 86 87 if read_only: 88 for field in self.fields: 89 self.fields[field].disabled = True 90 else: 91 self.helper.layout.append(Submit("save", "Save")) 92 93 class Meta: 94 
model = WorkstationConfig 95 fields = ( 96 *GENERAL_FIELDS, 97 *PLUGIN_FIELDS, 98 *LINKED_FIELDS, 99 ) 100 101 widgets = { 102 "overlay_segments": JSONEditorWidget( 103 schema=OVERLAY_SEGMENTS_SCHEMA 104 ), 105 "key_bindings": JSONEditorWidget(schema=KEY_BINDINGS_SCHEMA), 106 "default_annotation_color": ColorEditorWidget(format="hex"), 107 "window_presets": Select2MultipleWidget, 108 "overlay_luts": Select2MultipleWidget, 109 } 110 help_texts = { 111 "overlay_segments": ( 112 model._meta.get_field("overlay_segments").help_text 113 + ". If an categorical overlay is shown, it is possible to show toggles " 114 "to change the visibility of the different overlay categories. To do " 115 "so, configure the categories that should be displayed. Data from the" 116 " algorithm's output.json can be added as an extra label to each " 117 "toggle using jinja templating. " 118 'For example: [{ "voxel_value": 0, "name": "Level 0", "visible": ' 119 'false, "metric_template": "{{metrics.volumes[0]}} mm³"},]' 120 ), 121 "key_bindings": model._meta.get_field("key_bindings").help_text 122 + ". A copy and paste JSON can be obtained from the viewer.", 123 } 124 [end of app/grandchallenge/workstation_configs/forms.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/app/grandchallenge/workstation_configs/forms.py b/app/grandchallenge/workstation_configs/forms.py --- a/app/grandchallenge/workstation_configs/forms.py +++ b/app/grandchallenge/workstation_configs/forms.py @@ -79,7 +79,7 @@ ), Fieldset( "Linking Configuration", - *PLUGIN_FIELDS, + *LINKED_FIELDS, css_class="border rounded px-2 my-4", ), )
{"golden_diff": "diff --git a/app/grandchallenge/workstation_configs/forms.py b/app/grandchallenge/workstation_configs/forms.py\n--- a/app/grandchallenge/workstation_configs/forms.py\n+++ b/app/grandchallenge/workstation_configs/forms.py\n@@ -79,7 +79,7 @@\n ),\n Fieldset(\n \"Linking Configuration\",\n- *PLUGIN_FIELDS,\n+ *LINKED_FIELDS,\n css_class=\"border rounded px-2 my-4\",\n ),\n )\n", "issue": "Viewer configuration does not show linking options\n**Describe the bug**\r\nThe view and edit pages for viewer configurations no longer show options to set the linking configuration.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Go to https://grand-challenge.org/viewer-configurations/demo-rse/\r\n2. Scroll down to 'Linking Configuration'\r\nThe options displayed are duplicates of the 'Plugin and Tools' section.\r\n\r\n**Expected behavior**\r\nA clear and concise description of what you expected to happen.\r\n\r\n**Screenshots**\r\nIf applicable, add screenshots to help explain your problem.\r\n\r\n**Desktop (please complete the following information):**\r\n - OS: [e.g. iOS]\r\n - Browser [e.g. chrome, safari]\r\n - Version [e.g. 22]\r\n\r\n**Smartphone (please complete the following information):**\r\n - Device: [e.g. iPhone6]\r\n - OS: [e.g. iOS8.1]\r\n - Browser [e.g. stock browser, safari]\r\n - Version [e.g. 22]\r\n\r\n**Additional context**\r\nAdd any other context about the problem here.\r\n\n", "before_files": [{"content": "from crispy_forms.helper import FormHelper\nfrom crispy_forms.layout import Fieldset, Layout, Submit\nfrom django.forms import ModelForm\nfrom django_select2.forms import Select2MultipleWidget\n\nfrom grandchallenge.core.forms import SaveFormInitMixin\nfrom grandchallenge.core.widgets import ColorEditorWidget, JSONEditorWidget\nfrom grandchallenge.workstation_configs.models import (\n KEY_BINDINGS_SCHEMA,\n OVERLAY_SEGMENTS_SCHEMA,\n WorkstationConfig,\n)\n\nGENERAL_FIELDS = (\n \"title\",\n \"description\",\n \"image_context\",\n \"window_presets\",\n \"default_window_preset\",\n \"default_slab_thickness_mm\",\n \"default_slab_render_method\",\n \"default_orientation\",\n \"default_image_interpolation\",\n \"default_limit_view_area_to_image_volume\",\n \"default_overlay_alpha\",\n \"ghosting_slice_depth\",\n \"overlay_luts\",\n \"default_overlay_lut\",\n \"default_overlay_interpolation\",\n \"overlay_segments\",\n \"key_bindings\",\n \"default_zoom_scale\",\n \"default_brush_size\",\n \"default_annotation_color\",\n \"default_annotation_line_width\",\n \"auto_jump_center_of_gravity\",\n \"point_bounding_box_size_mm\",\n)\nPLUGIN_FIELDS = (\n \"show_image_info_plugin\",\n \"show_display_plugin\",\n \"show_image_switcher_plugin\",\n \"show_algorithm_output_plugin\",\n \"show_overlay_plugin\",\n \"show_annotation_statistics_plugin\",\n \"show_swivel_tool\",\n \"show_invert_tool\",\n \"show_flip_tool\",\n \"show_window_level_tool\",\n \"show_reset_tool\",\n \"show_overlay_selection_tool\",\n \"show_lut_selection_tool\",\n \"show_annotation_counter_tool\",\n \"enable_contrast_enhancement\",\n)\nLINKED_FIELDS = (\n \"link_images\",\n \"link_panning\",\n \"link_zooming\",\n \"link_slicing\",\n \"link_orienting\",\n \"link_windowing\",\n \"link_inverting\",\n \"link_flipping\",\n)\n\n\nclass WorkstationConfigForm(SaveFormInitMixin, ModelForm):\n def __init__(self, *args, read_only=False, **kwargs):\n super().__init__(*args, **kwargs)\n\n self.helper = FormHelper(self)\n self.helper.layout = Layout(\n Fieldset(\"\", *GENERAL_FIELDS),\n Fieldset(\n 
\"Plugins and Tools\",\n *PLUGIN_FIELDS,\n css_class=\"border rounded px-2 my-4\",\n ),\n Fieldset(\n \"Linking Configuration\",\n *PLUGIN_FIELDS,\n css_class=\"border rounded px-2 my-4\",\n ),\n )\n\n if read_only:\n for field in self.fields:\n self.fields[field].disabled = True\n else:\n self.helper.layout.append(Submit(\"save\", \"Save\"))\n\n class Meta:\n model = WorkstationConfig\n fields = (\n *GENERAL_FIELDS,\n *PLUGIN_FIELDS,\n *LINKED_FIELDS,\n )\n\n widgets = {\n \"overlay_segments\": JSONEditorWidget(\n schema=OVERLAY_SEGMENTS_SCHEMA\n ),\n \"key_bindings\": JSONEditorWidget(schema=KEY_BINDINGS_SCHEMA),\n \"default_annotation_color\": ColorEditorWidget(format=\"hex\"),\n \"window_presets\": Select2MultipleWidget,\n \"overlay_luts\": Select2MultipleWidget,\n }\n help_texts = {\n \"overlay_segments\": (\n model._meta.get_field(\"overlay_segments\").help_text\n + \". If an categorical overlay is shown, it is possible to show toggles \"\n \"to change the visibility of the different overlay categories. To do \"\n \"so, configure the categories that should be displayed. Data from the\"\n \" algorithm's output.json can be added as an extra label to each \"\n \"toggle using jinja templating. \"\n 'For example: [{ \"voxel_value\": 0, \"name\": \"Level 0\", \"visible\": '\n 'false, \"metric_template\": \"{{metrics.volumes[0]}} mm\u00b3\"},]'\n ),\n \"key_bindings\": model._meta.get_field(\"key_bindings\").help_text\n + \". A copy and paste JSON can be obtained from the viewer.\",\n }\n", "path": "app/grandchallenge/workstation_configs/forms.py"}]}
1902
103
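The bug above is a copy-paste: the second `Fieldset` reuses `*PLUGIN_FIELDS` where `*LINKED_FIELDS` was intended. One hedged way to catch that class of mistake is to assert the groups are disjoint before building the layout (abbreviated stand-in tuples below, not the real field lists):

```python
# Fail fast if a field name appears in more than one fieldset group.
GENERAL_FIELDS = ("title", "description")
PLUGIN_FIELDS = ("show_image_info_plugin", "show_display_plugin")
LINKED_FIELDS = ("link_images", "link_panning")

seen = {}
for group, fields in [("general", GENERAL_FIELDS),
                      ("plugins", PLUGIN_FIELDS),
                      ("linked", LINKED_FIELDS)]:
    for field in fields:
        assert field not in seen, f"{field!r} in both {seen[field]} and {group}"
        seen[field] = group
```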
gh_patches_debug_36194
rasdani/github-patches
git_diff
rlworkgroup__garage-625
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> FireReset is firing warnings in the CI from recent tests runs ```sh UserWarning: WARN: <class 'garage.envs.wrappers.fire_reset.FireReset'> doesn't implement 'step' method, which is required for wrappers derived directly from Wrapper. Deprecated default implementation is used. ``` </issue> <code> [start of garage/envs/wrappers/noop.py] 1 """Noop wrapper for gym.Env.""" 2 import gym 3 import numpy as np 4 5 6 class Noop(gym.Wrapper): 7 """ 8 Noop wrapper for gym.Env. 9 10 It samples initial states by taking random number of no-ops on reset. 11 No-op is assumed to be action 0. 12 13 Args: 14 env: The environment to be wrapped. 15 noop_max: Maximum number no-op to be performed on reset. 16 """ 17 18 def __init__(self, env, noop_max=30): 19 super().__init__(env) 20 self._noop_max = noop_max 21 self._noop_action = 0 22 assert noop_max > 0, "noop_max should be larger than 0!" 23 assert env.unwrapped.get_action_meanings()[0] == 'NOOP', ( 24 "No-op should be the 0-th action but it's not in {}!".format(env)) 25 26 def reset(self, **kwargs): 27 """gym.Env reset function.""" 28 obs = self.env.reset(**kwargs) 29 noops = np.random.randint(1, self._noop_max + 1) 30 for _ in range(noops): 31 obs, _, done, _ = self.env.step(self._noop_action) 32 if done: 33 obs = self.env.reset(**kwargs) 34 return obs 35 [end of garage/envs/wrappers/noop.py] [start of garage/envs/wrappers/fire_reset.py] 1 """Fire reset wrapper for gym.Env.""" 2 import gym 3 4 5 class FireReset(gym.Wrapper): 6 """ 7 Fire reset wrapper for gym.Env. 8 9 Take action "fire" on reset. 10 11 Args: 12 env: The environment to be wrapped. 13 """ 14 15 def __init__(self, env): 16 super().__init__(env) 17 assert env.unwrapped.get_action_meanings()[1] == 'FIRE', ( 18 "Only use fire reset wrapper for suitable environment!") 19 assert len(env.unwrapped.get_action_meanings()) >= 3, ( 20 "Only use fire reset wrapper for suitable environment!") 21 22 def reset(self, **kwargs): 23 """gym.Env reset function.""" 24 self.env.reset(**kwargs) 25 obs, _, done, _ = self.env.step(1) 26 if done: 27 obs = self.env.reset(**kwargs) 28 return obs 29 [end of garage/envs/wrappers/fire_reset.py] [start of garage/envs/wrappers/clip_reward.py] 1 """Clip reward for gym.Env.""" 2 import gym 3 import numpy as np 4 5 6 class ClipReward(gym.Wrapper): 7 """Clip the reward by its sign.""" 8 9 def step(self, ac): 10 """gym.Env step function.""" 11 obs, reward, done, info = self.env.step(ac) 12 return obs, np.sign(reward), done, info 13 [end of garage/envs/wrappers/clip_reward.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/garage/envs/wrappers/clip_reward.py b/garage/envs/wrappers/clip_reward.py --- a/garage/envs/wrappers/clip_reward.py +++ b/garage/envs/wrappers/clip_reward.py @@ -10,3 +10,7 @@ """gym.Env step function.""" obs, reward, done, info = self.env.step(ac) return obs, np.sign(reward), done, info + + def reset(self): + """gym.Env reset.""" + return self.env.reset() diff --git a/garage/envs/wrappers/fire_reset.py b/garage/envs/wrappers/fire_reset.py --- a/garage/envs/wrappers/fire_reset.py +++ b/garage/envs/wrappers/fire_reset.py @@ -15,9 +15,9 @@ def __init__(self, env): super().__init__(env) assert env.unwrapped.get_action_meanings()[1] == 'FIRE', ( - "Only use fire reset wrapper for suitable environment!") + 'Only use fire reset wrapper for suitable environment!') assert len(env.unwrapped.get_action_meanings()) >= 3, ( - "Only use fire reset wrapper for suitable environment!") + 'Only use fire reset wrapper for suitable environment!') def reset(self, **kwargs): """gym.Env reset function.""" @@ -26,3 +26,7 @@ if done: obs = self.env.reset(**kwargs) return obs + + def step(self, action): + """gym.Env step function.""" + return self.env.step(action) diff --git a/garage/envs/wrappers/noop.py b/garage/envs/wrappers/noop.py --- a/garage/envs/wrappers/noop.py +++ b/garage/envs/wrappers/noop.py @@ -19,7 +19,7 @@ super().__init__(env) self._noop_max = noop_max self._noop_action = 0 - assert noop_max > 0, "noop_max should be larger than 0!" + assert noop_max > 0, 'noop_max should be larger than 0!' assert env.unwrapped.get_action_meanings()[0] == 'NOOP', ( "No-op should be the 0-th action but it's not in {}!".format(env)) @@ -28,7 +28,11 @@ obs = self.env.reset(**kwargs) noops = np.random.randint(1, self._noop_max + 1) for _ in range(noops): - obs, _, done, _ = self.env.step(self._noop_action) + obs, _, done, _ = self.step(self._noop_action) if done: obs = self.env.reset(**kwargs) return obs + + def step(self, action): + """gym.Env step function.""" + return self.env.step(action)
{"golden_diff": "diff --git a/garage/envs/wrappers/clip_reward.py b/garage/envs/wrappers/clip_reward.py\n--- a/garage/envs/wrappers/clip_reward.py\n+++ b/garage/envs/wrappers/clip_reward.py\n@@ -10,3 +10,7 @@\n \"\"\"gym.Env step function.\"\"\"\n obs, reward, done, info = self.env.step(ac)\n return obs, np.sign(reward), done, info\n+\n+ def reset(self):\n+ \"\"\"gym.Env reset.\"\"\"\n+ return self.env.reset()\ndiff --git a/garage/envs/wrappers/fire_reset.py b/garage/envs/wrappers/fire_reset.py\n--- a/garage/envs/wrappers/fire_reset.py\n+++ b/garage/envs/wrappers/fire_reset.py\n@@ -15,9 +15,9 @@\n def __init__(self, env):\n super().__init__(env)\n assert env.unwrapped.get_action_meanings()[1] == 'FIRE', (\n- \"Only use fire reset wrapper for suitable environment!\")\n+ 'Only use fire reset wrapper for suitable environment!')\n assert len(env.unwrapped.get_action_meanings()) >= 3, (\n- \"Only use fire reset wrapper for suitable environment!\")\n+ 'Only use fire reset wrapper for suitable environment!')\n \n def reset(self, **kwargs):\n \"\"\"gym.Env reset function.\"\"\"\n@@ -26,3 +26,7 @@\n if done:\n obs = self.env.reset(**kwargs)\n return obs\n+\n+ def step(self, action):\n+ \"\"\"gym.Env step function.\"\"\"\n+ return self.env.step(action)\ndiff --git a/garage/envs/wrappers/noop.py b/garage/envs/wrappers/noop.py\n--- a/garage/envs/wrappers/noop.py\n+++ b/garage/envs/wrappers/noop.py\n@@ -19,7 +19,7 @@\n super().__init__(env)\n self._noop_max = noop_max\n self._noop_action = 0\n- assert noop_max > 0, \"noop_max should be larger than 0!\"\n+ assert noop_max > 0, 'noop_max should be larger than 0!'\n assert env.unwrapped.get_action_meanings()[0] == 'NOOP', (\n \"No-op should be the 0-th action but it's not in {}!\".format(env))\n \n@@ -28,7 +28,11 @@\n obs = self.env.reset(**kwargs)\n noops = np.random.randint(1, self._noop_max + 1)\n for _ in range(noops):\n- obs, _, done, _ = self.env.step(self._noop_action)\n+ obs, _, done, _ = self.step(self._noop_action)\n if done:\n obs = self.env.reset(**kwargs)\n return obs\n+\n+ def step(self, action):\n+ \"\"\"gym.Env step function.\"\"\"\n+ return self.env.step(action)\n", "issue": "FireReset is firing warnings in the CI\nfrom recent tests runs\r\n```sh\r\nUserWarning: WARN: <class 'garage.envs.wrappers.fire_reset.FireReset'> doesn't implement 'step' method, which is required for wrappers derived directly from Wrapper. 
Deprecated default implementation is used.\r\n```\n", "before_files": [{"content": "\"\"\"Noop wrapper for gym.Env.\"\"\"\nimport gym\nimport numpy as np\n\n\nclass Noop(gym.Wrapper):\n \"\"\"\n Noop wrapper for gym.Env.\n\n It samples initial states by taking random number of no-ops on reset.\n No-op is assumed to be action 0.\n\n Args:\n env: The environment to be wrapped.\n noop_max: Maximum number no-op to be performed on reset.\n \"\"\"\n\n def __init__(self, env, noop_max=30):\n super().__init__(env)\n self._noop_max = noop_max\n self._noop_action = 0\n assert noop_max > 0, \"noop_max should be larger than 0!\"\n assert env.unwrapped.get_action_meanings()[0] == 'NOOP', (\n \"No-op should be the 0-th action but it's not in {}!\".format(env))\n\n def reset(self, **kwargs):\n \"\"\"gym.Env reset function.\"\"\"\n obs = self.env.reset(**kwargs)\n noops = np.random.randint(1, self._noop_max + 1)\n for _ in range(noops):\n obs, _, done, _ = self.env.step(self._noop_action)\n if done:\n obs = self.env.reset(**kwargs)\n return obs\n", "path": "garage/envs/wrappers/noop.py"}, {"content": "\"\"\"Fire reset wrapper for gym.Env.\"\"\"\nimport gym\n\n\nclass FireReset(gym.Wrapper):\n \"\"\"\n Fire reset wrapper for gym.Env.\n\n Take action \"fire\" on reset.\n\n Args:\n env: The environment to be wrapped.\n \"\"\"\n\n def __init__(self, env):\n super().__init__(env)\n assert env.unwrapped.get_action_meanings()[1] == 'FIRE', (\n \"Only use fire reset wrapper for suitable environment!\")\n assert len(env.unwrapped.get_action_meanings()) >= 3, (\n \"Only use fire reset wrapper for suitable environment!\")\n\n def reset(self, **kwargs):\n \"\"\"gym.Env reset function.\"\"\"\n self.env.reset(**kwargs)\n obs, _, done, _ = self.env.step(1)\n if done:\n obs = self.env.reset(**kwargs)\n return obs\n", "path": "garage/envs/wrappers/fire_reset.py"}, {"content": "\"\"\"Clip reward for gym.Env.\"\"\"\nimport gym\nimport numpy as np\n\n\nclass ClipReward(gym.Wrapper):\n \"\"\"Clip the reward by its sign.\"\"\"\n\n def step(self, ac):\n \"\"\"gym.Env step function.\"\"\"\n obs, reward, done, info = self.env.step(ac)\n return obs, np.sign(reward), done, info\n", "path": "garage/envs/wrappers/clip_reward.py"}]}
1322
663
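The garage fix above silences gym's deprecation warning by giving each wrapper an explicit `step()` (and, for `ClipReward`, a `reset()`) instead of inheriting `Wrapper`'s defaults. The pattern in isolation, as an illustrative wrapper rather than one of garage's:

```python
# A gym.Wrapper that overrides reset() also delegates step() explicitly, so
# gym's check for the deprecated default implementation stays quiet.
import gym

class ExplicitPassthrough(gym.Wrapper):
    def reset(self, **kwargs):
        return self.env.reset(**kwargs)

    def step(self, action):
        return self.env.step(action)
```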
gh_patches_debug_660
rasdani/github-patches
git_diff
pex-tool__pex-2153
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Release 2.1.137 On the docket: + [x] A locked requirement with mixed artifact types fails to lock. #2150 </issue> <code> [start of pex/version.py] 1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). 2 # Licensed under the Apache License, Version 2.0 (see LICENSE). 3 4 __version__ = "2.1.136" 5 [end of pex/version.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pex/version.py b/pex/version.py --- a/pex/version.py +++ b/pex/version.py @@ -1,4 +1,4 @@ # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). -__version__ = "2.1.136" +__version__ = "2.1.137"
{"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.136\"\n+__version__ = \"2.1.137\"\n", "issue": "Release 2.1.137\nOn the docket:\r\n+ [x] A locked requirement with mixed artifact types fails to lock. #2150\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.136\"\n", "path": "pex/version.py"}]}
620
99
gh_patches_debug_19672
rasdani/github-patches
git_diff
NVIDIA__apex-620
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> _amp_state determines whether running in distributed at import It looks like the `_amp_state.py` module determines whether pytorch is running in distributed mode at the import level. The `distributed` only seems to be used in `maybe_print`. See code snippet: https://github.com/NVIDIA/apex/blob/37cdaf4ad57ab4e7dd9ef13dbed7b29aa939d061/apex/amp/_amp_state.py#L38-L52 This causes a couple issues: 1. It will only support the `env://` initialization of torch distributed 2. It will fail if amp is imported before launching the distributed training Neither of these is an issue for most, since most people launch via `torch.distributed.launch`. However, it can be an issue if you define your own distributed launch function or use `torch.multiprocessing.spawn`. I can't see a good reason to do it this way anyway, as it appears this variable is only used in the `maybe_print` function. I'll submit a pull request to fix this. Let me know if I'm missing something though. </issue> <code> [start of apex/amp/_amp_state.py] 1 # This is a "header object" that allows different amp modules to communicate. 2 # I'm a C++ guy, not a python guy. I decided this approach because it seemed most C++-like. 3 # But apparently it's ok: 4 # http://effbot.org/pyfaq/how-do-i-share-global-variables-across-modules.htm 5 import os 6 import torch 7 8 TORCH_MAJOR = int(torch.__version__.split('.')[0]) 9 TORCH_MINOR = int(torch.__version__.split('.')[1]) 10 11 if TORCH_MAJOR == 0: 12 import collections.abc as container_abcs 13 else: 14 from torch._six import container_abcs 15 16 17 class AmpState(object): 18 def __init__(self): 19 self.hard_override=False 20 self.allow_incoming_model_not_fp32 = False 21 self.verbosity=1 22 23 24 # Attribute stash. Could also just stash things as global module attributes. 25 _amp_state = AmpState() 26 27 28 def warn_or_err(msg): 29 if _amp_state.hard_override: 30 print("Warning: " + msg) 31 else: 32 raise RuntimeError(msg) 33 # I'm not sure if allowing hard_override is a good idea. 34 # + " If you're sure you know what you're doing, supply " + 35 # "hard_override=True to amp.initialize.") 36 37 38 distributed = False 39 if 'WORLD_SIZE' in os.environ: 40 distributed = int(os.environ['WORLD_SIZE']) > 1 41 42 43 def maybe_print(msg, rank0=False): 44 if _amp_state.verbosity > 0: 45 if rank0: 46 if distributed: 47 if torch.distributed.get_rank() == 0: 48 print(msg) 49 else: 50 print(msg) 51 else: 52 print(msg) 53 54 55 # def iter_params(param_groups): 56 # for group in param_groups: 57 # for p in group['params']: 58 # yield p 59 60 61 def master_params(optimizer): 62 """ 63 Generator expression that iterates over the params owned by ``optimizer``. 64 65 Args: 66 optimizer: An optimizer previously returned from ``amp.initialize``. 67 """ 68 for group in optimizer.param_groups: 69 for p in group['params']: 70 yield p 71 [end of apex/amp/_amp_state.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/apex/amp/_amp_state.py b/apex/amp/_amp_state.py --- a/apex/amp/_amp_state.py +++ b/apex/amp/_amp_state.py @@ -1,5 +1,5 @@ # This is a "header object" that allows different amp modules to communicate. -# I'm a C++ guy, not a python guy. I decided this approach because it seemed most C++-like. +# I'm a C++ guy, not a python guy. I decided this approach because it seemed most C++-like. # But apparently it's ok: # http://effbot.org/pyfaq/how-do-i-share-global-variables-across-modules.htm import os @@ -35,12 +35,9 @@ # "hard_override=True to amp.initialize.") -distributed = False -if 'WORLD_SIZE' in os.environ: - distributed = int(os.environ['WORLD_SIZE']) > 1 - - def maybe_print(msg, rank0=False): + distributed = torch.distributed.is_initialized() and \ + torch.distributed.get_world_size() > 1 if _amp_state.verbosity > 0: if rank0: if distributed:
{"golden_diff": "diff --git a/apex/amp/_amp_state.py b/apex/amp/_amp_state.py\n--- a/apex/amp/_amp_state.py\n+++ b/apex/amp/_amp_state.py\n@@ -1,5 +1,5 @@\n # This is a \"header object\" that allows different amp modules to communicate.\n-# I'm a C++ guy, not a python guy. I decided this approach because it seemed most C++-like. \n+# I'm a C++ guy, not a python guy. I decided this approach because it seemed most C++-like.\n # But apparently it's ok:\n # http://effbot.org/pyfaq/how-do-i-share-global-variables-across-modules.htm\n import os\n@@ -35,12 +35,9 @@\n # \"hard_override=True to amp.initialize.\")\n \n \n-distributed = False\n-if 'WORLD_SIZE' in os.environ:\n- distributed = int(os.environ['WORLD_SIZE']) > 1\n-\n-\n def maybe_print(msg, rank0=False):\n+ distributed = torch.distributed.is_initialized() and \\\n+ torch.distributed.get_world_size() > 1\n if _amp_state.verbosity > 0:\n if rank0:\n if distributed:\n", "issue": "_amp_state determines whether running in distributed at import\nIt looks like the `_amp_state.py` module determines whether pytorch is running in distributed mode at the import level. The `distributed` only seems to be used in `maybe_print`. See code snippet:\r\n\r\nhttps://github.com/NVIDIA/apex/blob/37cdaf4ad57ab4e7dd9ef13dbed7b29aa939d061/apex/amp/_amp_state.py#L38-L52\r\n\r\nThis causes a couple issues:\r\n\r\n1. It will only support the `env://` initialization of torch distributed\r\n2. It will fail if amp is imported before launching the distributed training\r\n\r\nNeither of these is an issue for most, since most people launch via `torch.distributed.launch`. However, it can be an issue if you define your own distributed launch function or use `torch.multiprocessing.spawn`. I can't see a good reason to do it this way anyway, as it appears this variable is only used in the `maybe_print` function. I'll submit a pull request to fix this. Let me know if I'm missing something though.\n", "before_files": [{"content": "# This is a \"header object\" that allows different amp modules to communicate.\n# I'm a C++ guy, not a python guy. I decided this approach because it seemed most C++-like. \n# But apparently it's ok:\n# http://effbot.org/pyfaq/how-do-i-share-global-variables-across-modules.htm\nimport os\nimport torch\n\nTORCH_MAJOR = int(torch.__version__.split('.')[0])\nTORCH_MINOR = int(torch.__version__.split('.')[1])\n\nif TORCH_MAJOR == 0:\n import collections.abc as container_abcs\nelse:\n from torch._six import container_abcs\n\n\nclass AmpState(object):\n def __init__(self):\n self.hard_override=False\n self.allow_incoming_model_not_fp32 = False\n self.verbosity=1\n\n\n# Attribute stash. 
Could also just stash things as global module attributes.\n_amp_state = AmpState()\n\n\ndef warn_or_err(msg):\n if _amp_state.hard_override:\n print(\"Warning: \" + msg)\n else:\n raise RuntimeError(msg)\n # I'm not sure if allowing hard_override is a good idea.\n # + \" If you're sure you know what you're doing, supply \" +\n # \"hard_override=True to amp.initialize.\")\n\n\ndistributed = False\nif 'WORLD_SIZE' in os.environ:\n distributed = int(os.environ['WORLD_SIZE']) > 1\n\n\ndef maybe_print(msg, rank0=False):\n if _amp_state.verbosity > 0:\n if rank0:\n if distributed:\n if torch.distributed.get_rank() == 0:\n print(msg)\n else:\n print(msg)\n else:\n print(msg)\n\n\n# def iter_params(param_groups):\n# for group in param_groups:\n# for p in group['params']:\n# yield p\n\n\ndef master_params(optimizer):\n \"\"\"\n Generator expression that iterates over the params owned by ``optimizer``.\n\n Args:\n optimizer: An optimizer previously returned from ``amp.initialize``.\n \"\"\"\n for group in optimizer.param_groups:\n for p in group['params']:\n yield p\n", "path": "apex/amp/_amp_state.py"}]}
1,379
269
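The apex fix above moves the "are we distributed?" decision from import time into the call site, so the answer no longer depends on a `WORLD_SIZE` environment variable that only `torch.distributed.launch` sets. A minimal standalone sketch of that pattern follows; the extra `torch.distributed.is_available()` guard is my addition, not part of the recorded patch, and the `verbosity` argument stands in for the module-level `_amp_state`.

```python
# Sketch of the lazy-detection pattern from the apex golden_diff above.
import torch

def maybe_print(msg, rank0=False, verbosity=1):
    # Resolved per call: processes launched via torch.multiprocessing.spawn
    # never see WORLD_SIZE, but are detected once init_process_group() ran.
    distributed = (
        torch.distributed.is_available()        # my addition, see lead-in
        and torch.distributed.is_initialized()
        and torch.distributed.get_world_size() > 1
    )
    if verbosity > 0:
        if rank0 and distributed:
            if torch.distributed.get_rank() == 0:
                print(msg)
        else:
            print(msg)
```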
gh_patches_debug_24424
rasdani/github-patches
git_diff
cisagov__manage.get.gov-2112
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Domain request table: Capture the "CISA region" a `domain request` is in ### Issue description _As an analyst I want to know which [CISA region](https://www.cisa.gov/about/regions) a request is in so that if my research doesn't determine a request is authentic/eligible, I can know which region I need to reach out to_ Each US state/territory is in a "CISA region". When we need additional assistance in verification, we can reach out to regional contacts. Having regional designations in-registrar means we don't have to look it up, saving analyst time. ### Acceptance criteria - [ ] For **non-federal requests**, the domain request detail page shows the CISA Region number based on the state abbreviation code of the organization address in the request. (See markup in "Additional Context") - [ ] For **federal requests**, the domain request detail page shows "N/A" for the CISA region number, and no table lookup is necessary. - [ ] The CISA region is not displayed on any user-facing views. ### Additional context Please make this reusable by domain information as well. consider domain helper or other utility that can then take in the org address state/territory abbreviation and returns the number for the region. Regions and state abbreviations haven't changed in a long time and aren't expected to change in the future, they do not need to be in a table and a simple dictionary lookup (while ugly) is probably the simplest solution. Based on the official two-letter state/territory abbreviation from the organization address, use the data in the following table to determine the region number: | Region | Locales | | ------- | ----- | | 1 |Connecticut, Maine, Massachusetts, New Hampshire, Rhode Island, Vermont| | 2 |New Jersey, New York, Puerto Rico, U.S. 
Virgin Islands| |3| Delaware, District of Columbia, Maryland, Pennsylvania, Virginia, West Virginia| |4| Alabama, Florida, Georgia, Kentucky, Mississippi, North Carolina, South Carolina, Tennessee| |5| Illinois, Indiana, Michigan, Minnesota, Ohio, Wisconsin| |6| Arkansas, Louisiana, New Mexico, Oklahoma, Texas| |7|Iowa, Kansas, Missouri, Nebraska| |8|Colorado, Montana, North Dakota, South Dakota, Utah, Wyoming| |9|Arizona, California, Hawaii, Nevada, Guam, American Samoa, Commonwealth of the Northern Mariana Islands| |10|Alaska, Idaho, Oregon, Washington| Click Image to open Miro: [![Image](https://github.com/cisagov/manage.get.gov/assets/62573986/bca81201-ce20-409b-bb31-10bca552ed23)](https://miro.com/app/board/uXjVKNvtde0=/?moveToWidget=3458764587423408611&cot=14) ### Links to other issues Blocks #2095 </issue> <code> [start of src/registrar/templatetags/custom_filters.py] 1 import logging 2 from django import template 3 import re 4 from registrar.models.domain_request import DomainRequest 5 6 register = template.Library() 7 logger = logging.getLogger(__name__) 8 9 10 @register.filter(name="extract_value") 11 def extract_value(html_input): 12 match = re.search(r'value="([^"]*)"', html_input) 13 if match: 14 return match.group(1) 15 return "" 16 17 18 @register.filter 19 def extract_a_text(value): 20 # Use regex to extract the text within the <a> tag 21 pattern = r"<a\b[^>]*>(.*?)</a>" 22 match = re.search(pattern, value) 23 if match: 24 extracted_text = match.group(1) 25 else: 26 extracted_text = "" 27 28 return extracted_text 29 30 31 @register.filter 32 def find_index(haystack, needle): 33 try: 34 return haystack.index(needle) 35 except ValueError: 36 return -1 37 38 39 @register.filter 40 def slice_after(value, substring): 41 index = value.find(substring) 42 if index != -1: 43 result = value[index + len(substring) :] 44 return result 45 return value 46 47 48 @register.filter 49 def contains_checkbox(html_list): 50 for html_string in html_list: 51 if re.search(r'<input[^>]*type="checkbox"', html_string): 52 return True 53 return False 54 55 56 @register.filter 57 def get_organization_long_name(generic_org_type): 58 organization_choices_dict = dict(DomainRequest.OrganizationChoicesVerbose.choices) 59 long_form_type = organization_choices_dict[generic_org_type] 60 if long_form_type is None: 61 logger.error("Organization type error, triggered by a template's custom filter") 62 return "Error" 63 64 return long_form_type 65 66 67 @register.filter(name="has_permission") 68 def has_permission(user, permission): 69 return user.has_perm(permission) 70 [end of src/registrar/templatetags/custom_filters.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/registrar/templatetags/custom_filters.py b/src/registrar/templatetags/custom_filters.py --- a/src/registrar/templatetags/custom_filters.py +++ b/src/registrar/templatetags/custom_filters.py @@ -67,3 +67,69 @@ @register.filter(name="has_permission") def has_permission(user, permission): return user.has_perm(permission) + + [email protected] +def get_region(state): + if state and isinstance(state, str): + regions = { + "CT": 1, + "ME": 1, + "MA": 1, + "NH": 1, + "RI": 1, + "VT": 1, + "NJ": 2, + "NY": 2, + "PR": 2, + "VI": 2, + "DE": 3, + "DC": 3, + "MD": 3, + "PA": 3, + "VA": 3, + "WV": 3, + "AL": 4, + "FL": 4, + "GA": 4, + "KY": 4, + "MS": 4, + "NC": 4, + "SC": 4, + "TN": 4, + "IL": 5, + "IN": 5, + "MI": 5, + "MN": 5, + "OH": 5, + "WI": 5, + "AR": 6, + "LA": 6, + "NM": 6, + "OK": 6, + "TX": 6, + "IA": 7, + "KS": 7, + "MO": 7, + "NE": 7, + "CO": 8, + "MT": 8, + "ND": 8, + "SD": 8, + "UT": 8, + "WY": 8, + "AZ": 9, + "CA": 9, + "HI": 9, + "NV": 9, + "GU": 9, + "AS": 9, + "MP": 9, + "AK": 10, + "ID": 10, + "OR": 10, + "WA": 10, + } + return regions.get(state.upper(), "N/A") + else: + return None
{"golden_diff": "diff --git a/src/registrar/templatetags/custom_filters.py b/src/registrar/templatetags/custom_filters.py\n--- a/src/registrar/templatetags/custom_filters.py\n+++ b/src/registrar/templatetags/custom_filters.py\n@@ -67,3 +67,69 @@\n @register.filter(name=\"has_permission\")\n def has_permission(user, permission):\n return user.has_perm(permission)\n+\n+\[email protected]\n+def get_region(state):\n+ if state and isinstance(state, str):\n+ regions = {\n+ \"CT\": 1,\n+ \"ME\": 1,\n+ \"MA\": 1,\n+ \"NH\": 1,\n+ \"RI\": 1,\n+ \"VT\": 1,\n+ \"NJ\": 2,\n+ \"NY\": 2,\n+ \"PR\": 2,\n+ \"VI\": 2,\n+ \"DE\": 3,\n+ \"DC\": 3,\n+ \"MD\": 3,\n+ \"PA\": 3,\n+ \"VA\": 3,\n+ \"WV\": 3,\n+ \"AL\": 4,\n+ \"FL\": 4,\n+ \"GA\": 4,\n+ \"KY\": 4,\n+ \"MS\": 4,\n+ \"NC\": 4,\n+ \"SC\": 4,\n+ \"TN\": 4,\n+ \"IL\": 5,\n+ \"IN\": 5,\n+ \"MI\": 5,\n+ \"MN\": 5,\n+ \"OH\": 5,\n+ \"WI\": 5,\n+ \"AR\": 6,\n+ \"LA\": 6,\n+ \"NM\": 6,\n+ \"OK\": 6,\n+ \"TX\": 6,\n+ \"IA\": 7,\n+ \"KS\": 7,\n+ \"MO\": 7,\n+ \"NE\": 7,\n+ \"CO\": 8,\n+ \"MT\": 8,\n+ \"ND\": 8,\n+ \"SD\": 8,\n+ \"UT\": 8,\n+ \"WY\": 8,\n+ \"AZ\": 9,\n+ \"CA\": 9,\n+ \"HI\": 9,\n+ \"NV\": 9,\n+ \"GU\": 9,\n+ \"AS\": 9,\n+ \"MP\": 9,\n+ \"AK\": 10,\n+ \"ID\": 10,\n+ \"OR\": 10,\n+ \"WA\": 10,\n+ }\n+ return regions.get(state.upper(), \"N/A\")\n+ else:\n+ return None\n", "issue": "Domain request table: Capture the \"CISA region\" a `domain request` is in\n### Issue description\n\n_As an analyst\nI want to know which [CISA region](https://www.cisa.gov/about/regions) a request is in \nso that if my research doesn't determine a request is authentic/eligible, I can know which region I need to reach out to_\n\nEach US state/territory is in a \"CISA region\". When we need additional assistance in verification, we can reach out to regional contacts. Having regional designations in-registrar means we don't have to look it up, saving analyst time.\n\n### Acceptance criteria\n\n- [ ] For **non-federal requests**, the domain request detail page shows the CISA Region number based on the state abbreviation code of the organization address in the request. (See markup in \"Additional Context\")\n- [ ] For **federal requests**, the domain request detail page shows \"N/A\" for the CISA region number, and no table lookup is necessary.\n- [ ] The CISA region is not displayed on any user-facing views.\n\n### Additional context\nPlease make this reusable by domain information as well. consider domain helper or other utility that can then take in the org address state/territory abbreviation and returns the number for the region. Regions and state abbreviations haven't changed in a long time and aren't expected to change in the future, they do not need to be in a table and a simple dictionary lookup (while ugly) is probably the simplest solution.\n\nBased on the official two-letter state/territory abbreviation from the organization address, use the data in the following table to determine the region number:\n\n| Region | Locales |\n| ------- | ----- |\n| 1 |Connecticut, Maine, Massachusetts, New Hampshire, Rhode Island, Vermont|\n| 2 |New Jersey, New York, Puerto Rico, U.S. 
Virgin Islands|\n|3| Delaware, District of Columbia, Maryland, Pennsylvania, Virginia, West Virginia|\n|4| Alabama, Florida, Georgia, Kentucky, Mississippi, North Carolina, South Carolina, Tennessee|\n|5| Illinois, Indiana, Michigan, Minnesota, Ohio, Wisconsin|\n|6| Arkansas, Louisiana, New Mexico, Oklahoma, Texas|\n|7|Iowa, Kansas, Missouri, Nebraska|\n|8|Colorado, Montana, North Dakota, South Dakota, Utah, Wyoming|\n|9|Arizona, California, Hawaii, Nevada, Guam, American Samoa, Commonwealth of the Northern Mariana Islands|\n|10|Alaska, Idaho, Oregon, Washington|\n\nClick Image to open Miro:\n[![Image](https://github.com/cisagov/manage.get.gov/assets/62573986/bca81201-ce20-409b-bb31-10bca552ed23)](https://miro.com/app/board/uXjVKNvtde0=/?moveToWidget=3458764587423408611&cot=14)\n\n### Links to other issues\n\nBlocks #2095 \n", "before_files": [{"content": "import logging\nfrom django import template\nimport re\nfrom registrar.models.domain_request import DomainRequest\n\nregister = template.Library()\nlogger = logging.getLogger(__name__)\n\n\[email protected](name=\"extract_value\")\ndef extract_value(html_input):\n match = re.search(r'value=\"([^\"]*)\"', html_input)\n if match:\n return match.group(1)\n return \"\"\n\n\[email protected]\ndef extract_a_text(value):\n # Use regex to extract the text within the <a> tag\n pattern = r\"<a\\b[^>]*>(.*?)</a>\"\n match = re.search(pattern, value)\n if match:\n extracted_text = match.group(1)\n else:\n extracted_text = \"\"\n\n return extracted_text\n\n\[email protected]\ndef find_index(haystack, needle):\n try:\n return haystack.index(needle)\n except ValueError:\n return -1\n\n\[email protected]\ndef slice_after(value, substring):\n index = value.find(substring)\n if index != -1:\n result = value[index + len(substring) :]\n return result\n return value\n\n\[email protected]\ndef contains_checkbox(html_list):\n for html_string in html_list:\n if re.search(r'<input[^>]*type=\"checkbox\"', html_string):\n return True\n return False\n\n\[email protected]\ndef get_organization_long_name(generic_org_type):\n organization_choices_dict = dict(DomainRequest.OrganizationChoicesVerbose.choices)\n long_form_type = organization_choices_dict[generic_org_type]\n if long_form_type is None:\n logger.error(\"Organization type error, triggered by a template's custom filter\")\n return \"Error\"\n\n return long_form_type\n\n\[email protected](name=\"has_permission\")\ndef has_permission(user, permission):\n return user.has_perm(permission)\n", "path": "src/registrar/templatetags/custom_filters.py"}]}
1,713
598
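The `get_region` filter added in the diff above is a flat 56-entry literal. As a sketch, the same map can be derived from a region-to-states table so each locale is typed only once; this inversion is my rearrangement rather than the committed code, but the behavior, including the `"N/A"` fallback that covers the federal case, is meant to match the patch.

```python
# Sketch: build the state -> CISA region lookup from the table in the issue.
REGIONS = {
    1: ["CT", "ME", "MA", "NH", "RI", "VT"],
    2: ["NJ", "NY", "PR", "VI"],
    3: ["DE", "DC", "MD", "PA", "VA", "WV"],
    4: ["AL", "FL", "GA", "KY", "MS", "NC", "SC", "TN"],
    5: ["IL", "IN", "MI", "MN", "OH", "WI"],
    6: ["AR", "LA", "NM", "OK", "TX"],
    7: ["IA", "KS", "MO", "NE"],
    8: ["CO", "MT", "ND", "SD", "UT", "WY"],
    9: ["AZ", "CA", "HI", "NV", "GU", "AS", "MP"],
    10: ["AK", "ID", "OR", "WA"],
}
STATE_TO_REGION = {
    state: region for region, states in REGIONS.items() for state in states
}

def get_region(state):
    if state and isinstance(state, str):
        return STATE_TO_REGION.get(state.upper(), "N/A")
    return None
```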
gh_patches_debug_2096
rasdani/github-patches
git_diff
liqd__a4-product-1097
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> archived projects accessible via activity feed At https://www.beteiligung.in/liqd/ all projects are private but I can see the content of the projects if I click on the activity feed. Even if not signed in. </issue> <code> [start of apps/organisations/views.py] 1 from django.contrib.messages.views import SuccessMessageMixin 2 from django.utils.translation import ugettext_lazy as _ 3 from django.views import generic 4 from django.views.generic import DetailView 5 6 from adhocracy4.actions.models import Action 7 from adhocracy4.projects.models import Project 8 from adhocracy4.rules import mixins as rules_mixins 9 from apps.projects import query 10 11 from . import forms 12 from .models import Organisation 13 14 15 class OrganisationView(DetailView): 16 template_name = 'organisation_landing_page.html' 17 model = Organisation 18 slug_url_kwarg = 'organisation_slug' 19 20 def get_context_data(self, **kwargs): 21 context = super().get_context_data(**kwargs) 22 23 project_list = Project.objects\ 24 .filter(organisation=self.object, 25 is_archived=False, 26 is_draft=False) 27 project_list = query.filter_viewable( 28 project_list, self.request.user 29 ) 30 context['project_list'] = project_list 31 32 context['action_list'] = Action.objects\ 33 .filter(project__organisation=self.object)\ 34 .filter_public()\ 35 .exclude_updates()[:4] 36 37 context['stats'] = { 38 'users': 1204, 39 'items': 3425, 40 'comments': 23234, 41 'ratings': 134234, 42 } 43 44 return context 45 46 47 class InformationView(DetailView): 48 template_name = 'organisation_information.html' 49 model = Organisation 50 slug_url_kwarg = 'organisation_slug' 51 52 53 class ImprintView(DetailView): 54 template_name = 'organisation_imprint.html' 55 model = Organisation 56 slug_url_kwarg = 'organisation_slug' 57 58 59 class OrganisationUpdateView(rules_mixins.PermissionRequiredMixin, 60 SuccessMessageMixin, 61 generic.UpdateView): 62 model = Organisation 63 form_class = forms.OrganisationForm 64 slug_url_kwarg = 'organisation_slug' 65 template_name = 'organisation_form.html' 66 success_message = _('Organisation successfully updated.') 67 permission_required = 'a4_candy_organisations.change_organisation' 68 menu_item = 'organisation' 69 70 def get_success_url(self): 71 return self.request.path 72 [end of apps/organisations/views.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/apps/organisations/views.py b/apps/organisations/views.py --- a/apps/organisations/views.py +++ b/apps/organisations/views.py @@ -31,6 +31,7 @@ context['action_list'] = Action.objects\ .filter(project__organisation=self.object)\ + .filter(project__is_archived=False) \ .filter_public()\ .exclude_updates()[:4]
{"golden_diff": "diff --git a/apps/organisations/views.py b/apps/organisations/views.py\n--- a/apps/organisations/views.py\n+++ b/apps/organisations/views.py\n@@ -31,6 +31,7 @@\n \n context['action_list'] = Action.objects\\\n .filter(project__organisation=self.object)\\\n+ .filter(project__is_archived=False) \\\n .filter_public()\\\n .exclude_updates()[:4]\n", "issue": "archived projects accessible via activity feed\n At https://www.beteiligung.in/liqd/ all projects are private but I can see the content of the projects if I click on the activity feed. Even if not signed in.\n", "before_files": [{"content": "from django.contrib.messages.views import SuccessMessageMixin\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.views import generic\nfrom django.views.generic import DetailView\n\nfrom adhocracy4.actions.models import Action\nfrom adhocracy4.projects.models import Project\nfrom adhocracy4.rules import mixins as rules_mixins\nfrom apps.projects import query\n\nfrom . import forms\nfrom .models import Organisation\n\n\nclass OrganisationView(DetailView):\n template_name = 'organisation_landing_page.html'\n model = Organisation\n slug_url_kwarg = 'organisation_slug'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n project_list = Project.objects\\\n .filter(organisation=self.object,\n is_archived=False,\n is_draft=False)\n project_list = query.filter_viewable(\n project_list, self.request.user\n )\n context['project_list'] = project_list\n\n context['action_list'] = Action.objects\\\n .filter(project__organisation=self.object)\\\n .filter_public()\\\n .exclude_updates()[:4]\n\n context['stats'] = {\n 'users': 1204,\n 'items': 3425,\n 'comments': 23234,\n 'ratings': 134234,\n }\n\n return context\n\n\nclass InformationView(DetailView):\n template_name = 'organisation_information.html'\n model = Organisation\n slug_url_kwarg = 'organisation_slug'\n\n\nclass ImprintView(DetailView):\n template_name = 'organisation_imprint.html'\n model = Organisation\n slug_url_kwarg = 'organisation_slug'\n\n\nclass OrganisationUpdateView(rules_mixins.PermissionRequiredMixin,\n SuccessMessageMixin,\n generic.UpdateView):\n model = Organisation\n form_class = forms.OrganisationForm\n slug_url_kwarg = 'organisation_slug'\n template_name = 'organisation_form.html'\n success_message = _('Organisation successfully updated.')\n permission_required = 'a4_candy_organisations.change_organisation'\n menu_item = 'organisation'\n\n def get_success_url(self):\n return self.request.path\n", "path": "apps/organisations/views.py"}]}
1,182
95
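The one-line fix above adds an `is_archived` filter to the activity-feed queryset so `Action` rows stop leaking content from archived projects; the project list on the same page already applied that filter. A self-contained sketch of the resulting query, where the helper name and the `limit` parameter are my framing rather than code from the repository:

```python
# Sketch of the queryset after the a4-product golden_diff above is applied.
from adhocracy4.actions.models import Action

def visible_actions(organisation, limit=4):
    # filter_public() and exclude_updates() are the custom manager methods
    # already used in the record's source snippet.
    return (
        Action.objects
        .filter(project__organisation=organisation)
        .filter(project__is_archived=False)   # the line the patch adds
        .filter_public()
        .exclude_updates()[:limit]
    )
```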
gh_patches_debug_33794
rasdani/github-patches
git_diff
alltheplaces__alltheplaces-2634
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Spider motel6 is broken During the global build at 2021-08-18-14-42-26, spider **motel6** failed with **0 features** and **2 errors**. Here's [the log](https://data.alltheplaces.xyz/runs/2021-08-18-14-42-26/logs/motel6.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-08-18-14-42-26/output/motel6.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-08-18-14-42-26/output/motel6.geojson)) </issue> <code> [start of locations/spiders/motel6.py] 1 # -*- coding: utf-8 -*- 2 import scrapy 3 import json 4 from locations.items import GeojsonPointItem 5 6 brand_lookup = { 7 "MS": "Motel 6", 8 "SS": "Studio 6", 9 "HS": "Hotel 6" 10 } 11 12 13 class Motel6Spider(scrapy.Spider): 14 name = "motel6" 15 allowed_domains = ["motel6.com"] 16 start_urls = ( 17 'https://www.motel6.com/var/g6/hotel-summary/ms.infinity.1.json', 18 ) 19 20 def parse(self, response): 21 idata = json.loads(response.body_as_unicode()) 22 storeids = idata.keys() 23 URL = 'https://www.motel6.com/var/g6/hotel-information/en/{}.json' 24 for storeid in storeids: 25 try: 26 int(storeid) 27 except ValueError: 28 continue 29 try: 30 yield scrapy.Request(URL.format(storeid), callback=self.parse_hotel) 31 except ValueError: 32 continue 33 34 def parse_hotel(self, response): 35 mdata = json.loads(response.body_as_unicode()) 36 37 properties = { 38 'ref': mdata["property_id"], 39 'name': mdata["name"], 40 'addr_full': mdata["address"], 41 'city': mdata["city"], 42 'postcode': mdata["zip"], 43 'lat': mdata["latitude"], 44 'lon': mdata["longitude"], 45 'phone': mdata["phone"], 46 'state': mdata["state"], 47 'website': mdata["microsite_url"], 48 'brand': brand_lookup[mdata["brand_id"]], 49 } 50 51 yield GeojsonPointItem(**properties) 52 [end of locations/spiders/motel6.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/locations/spiders/motel6.py b/locations/spiders/motel6.py --- a/locations/spiders/motel6.py +++ b/locations/spiders/motel6.py @@ -14,20 +14,21 @@ name = "motel6" allowed_domains = ["motel6.com"] start_urls = ( - 'https://www.motel6.com/var/g6/hotel-summary/ms.infinity.1.json', + 'https://www.motel6.com/content/g6-cache/property-summary.1.json', ) def parse(self, response): idata = json.loads(response.body_as_unicode()) - storeids = idata.keys() - URL = 'https://www.motel6.com/var/g6/hotel-information/en/{}.json' - for storeid in storeids: + url = 'https://www.motel6.com/bin/g6/propertydata.{}.json' + + for storeid in idata.keys(): try: int(storeid) except ValueError: continue + try: - yield scrapy.Request(URL.format(storeid), callback=self.parse_hotel) + yield scrapy.Request(url.format(storeid), callback=self.parse_hotel) except ValueError: continue @@ -35,17 +36,17 @@ mdata = json.loads(response.body_as_unicode()) properties = { - 'ref': mdata["property_id"], - 'name': mdata["name"], - 'addr_full': mdata["address"], - 'city': mdata["city"], - 'postcode': mdata["zip"], - 'lat': mdata["latitude"], - 'lon': mdata["longitude"], - 'phone': mdata["phone"], - 'state': mdata["state"], - 'website': mdata["microsite_url"], - 'brand': brand_lookup[mdata["brand_id"]], + 'ref': mdata["property_id"], + 'name': mdata["name"], + 'addr_full': mdata["address"], + 'city': mdata["city"], + 'postcode': mdata["zip"], + 'lat': mdata["latitude"], + 'lon': mdata["longitude"], + 'phone': mdata["phone"], + 'state': mdata["state"], + 'website': mdata["microsite_url"], + 'brand': brand_lookup[mdata["brand_id"]], } yield GeojsonPointItem(**properties)
{"golden_diff": "diff --git a/locations/spiders/motel6.py b/locations/spiders/motel6.py\n--- a/locations/spiders/motel6.py\n+++ b/locations/spiders/motel6.py\n@@ -14,20 +14,21 @@\n name = \"motel6\"\n allowed_domains = [\"motel6.com\"]\n start_urls = (\n- 'https://www.motel6.com/var/g6/hotel-summary/ms.infinity.1.json',\n+ 'https://www.motel6.com/content/g6-cache/property-summary.1.json',\n )\n \n def parse(self, response):\n idata = json.loads(response.body_as_unicode())\n- storeids = idata.keys()\n- URL = 'https://www.motel6.com/var/g6/hotel-information/en/{}.json'\n- for storeid in storeids:\n+ url = 'https://www.motel6.com/bin/g6/propertydata.{}.json'\n+\n+ for storeid in idata.keys():\n try:\n int(storeid)\n except ValueError:\n continue\n+\n try:\n- yield scrapy.Request(URL.format(storeid), callback=self.parse_hotel)\n+ yield scrapy.Request(url.format(storeid), callback=self.parse_hotel)\n except ValueError:\n continue\n \n@@ -35,17 +36,17 @@\n mdata = json.loads(response.body_as_unicode())\n \n properties = {\n- 'ref': mdata[\"property_id\"],\n- 'name': mdata[\"name\"],\n- 'addr_full': mdata[\"address\"],\n- 'city': mdata[\"city\"],\n- 'postcode': mdata[\"zip\"],\n- 'lat': mdata[\"latitude\"],\n- 'lon': mdata[\"longitude\"],\n- 'phone': mdata[\"phone\"],\n- 'state': mdata[\"state\"],\n- 'website': mdata[\"microsite_url\"],\n- 'brand': brand_lookup[mdata[\"brand_id\"]],\n+ 'ref': mdata[\"property_id\"],\n+ 'name': mdata[\"name\"],\n+ 'addr_full': mdata[\"address\"],\n+ 'city': mdata[\"city\"],\n+ 'postcode': mdata[\"zip\"],\n+ 'lat': mdata[\"latitude\"],\n+ 'lon': mdata[\"longitude\"],\n+ 'phone': mdata[\"phone\"],\n+ 'state': mdata[\"state\"],\n+ 'website': mdata[\"microsite_url\"],\n+ 'brand': brand_lookup[mdata[\"brand_id\"]],\n }\n \n yield GeojsonPointItem(**properties)\n", "issue": "Spider motel6 is broken\nDuring the global build at 2021-08-18-14-42-26, spider **motel6** failed with **0 features** and **2 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-08-18-14-42-26/logs/motel6.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-08-18-14-42-26/output/motel6.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-08-18-14-42-26/output/motel6.geojson))\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport scrapy\nimport json\nfrom locations.items import GeojsonPointItem\n\nbrand_lookup = {\n \"MS\": \"Motel 6\",\n \"SS\": \"Studio 6\",\n \"HS\": \"Hotel 6\"\n}\n\n\nclass Motel6Spider(scrapy.Spider):\n name = \"motel6\"\n allowed_domains = [\"motel6.com\"]\n start_urls = (\n 'https://www.motel6.com/var/g6/hotel-summary/ms.infinity.1.json',\n )\n\n def parse(self, response):\n idata = json.loads(response.body_as_unicode())\n storeids = idata.keys()\n URL = 'https://www.motel6.com/var/g6/hotel-information/en/{}.json'\n for storeid in storeids:\n try:\n int(storeid)\n except ValueError:\n continue\n try:\n yield scrapy.Request(URL.format(storeid), callback=self.parse_hotel)\n except ValueError:\n continue\n\n def parse_hotel(self, response):\n mdata = json.loads(response.body_as_unicode())\n\n properties = {\n 'ref': mdata[\"property_id\"],\n 'name': mdata[\"name\"],\n 'addr_full': mdata[\"address\"],\n 'city': mdata[\"city\"],\n 'postcode': mdata[\"zip\"],\n 'lat': mdata[\"latitude\"],\n 'lon': mdata[\"longitude\"],\n 'phone': mdata[\"phone\"],\n 'state': mdata[\"state\"],\n 'website': mdata[\"microsite_url\"],\n 'brand': brand_lookup[mdata[\"brand_id\"]],\n }\n\n yield 
GeojsonPointItem(**properties)\n", "path": "locations/spiders/motel6.py"}]}
1,175
554
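The motel6 patch above is an endpoint migration: the old `var/g6` JSON URLs stopped resolving, which is why the spider produced 0 features. Below is a standalone sketch of the new fetch flow; the two URLs are taken from the golden diff, while the use of `requests` instead of scrapy and the function name are my substitutions for illustration.

```python
# Sketch of the fixed motel6 fetch flow, using the endpoints from the diff.
import requests

SUMMARY_URL = "https://www.motel6.com/content/g6-cache/property-summary.1.json"
PROPERTY_URL = "https://www.motel6.com/bin/g6/propertydata.{}.json"

def iter_property_urls():
    summary = requests.get(SUMMARY_URL, timeout=30).json()
    for store_id in summary.keys():
        try:
            int(store_id)          # skip non-numeric bookkeeping keys, as the spider does
        except ValueError:
            continue
        yield PROPERTY_URL.format(store_id)
```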
gh_patches_debug_37989
rasdani/github-patches
git_diff
mathesar-foundation__mathesar-1270
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> API should validate input for number columns ## Description <!-- A clear and concise description of what the bug is. --> Currently, the API accepts strings for values input to number-typed columns. In some cases, these strings carry locale-sensitive information, i.e., using specific decimal points and negation styles. This is a problem since confusion will arise whenever the client, service, and database have different locale settings (it's likely the client and DB will have different locale settings by default). Even worse, the locale settings in the database (assuming PostgreSQL) may be applied differently in different contexts. ## Expected behavior <!-- A clear and concise description of what you expected to happen. --> Columns which use a number type for storage at the DB layer should only accept numbers in one of two formats: - an actual JSON number, or - A string conforming to the [JSON number spec](https://www.json.org/json-en.html), except wrapped in double-quotes. The validation of this should be locale-independent, and should happen in the Mathesar web service rather than the database. ## To Reproduce <!-- How can we recreate this bug? Please try to provide a Minimal, Complete, and Verifiable (http://stackoverflow.com/help/mcve) example if code-related. --> - Create a table with a number-typed column containing a decimal point (e.g., `FLOAT`). - Send an API request with input for that column as a string, with a comma for a decimal point. - You can do this easily from the browseable API, see `/api/db/v0/tables/<table_ID>/records/<record_ID>/` - Observe the database-layer error. </issue> <code> [start of mathesar/api/utils.py] 1 from rest_framework.exceptions import NotFound 2 3 from db.records.operations import group 4 from mathesar.models import Table 5 6 DATA_KEY = 'data' 7 METADATA_KEY = 'metadata' 8 9 10 def get_table_or_404(pk): 11 """ 12 Get table if it exists, otherwise throws a DRF NotFound error. 
13 Args: 14 pk: id of table 15 Returns: 16 table: return the table based on a specific id 17 """ 18 try: 19 table = Table.objects.get(id=pk) 20 except Table.DoesNotExist: 21 raise NotFound 22 return table 23 24 25 def process_annotated_records(record_list, column_name_id_map): 26 27 RESULT_IDX = 'result_indices' 28 29 def _get_record_dict(record): 30 return record._asdict() if not isinstance(record, dict) else record 31 32 split_records = ( 33 {DATA_KEY: record_dict} 34 for record_dict in (_get_record_dict(record) for record in record_list) 35 ) 36 37 combined_records, groups = group.extract_group_metadata( 38 split_records, data_key=DATA_KEY, metadata_key=METADATA_KEY 39 ) 40 41 processed_records, record_metadata = zip( 42 *tuple(tuple(d.values()) for d in combined_records) 43 ) 44 45 def _replace_column_names_with_ids(group_metadata_item): 46 try: 47 processed_group_metadata_item = { 48 column_name_id_map[k]: v for k, v in group_metadata_item.items() 49 } 50 except AttributeError: 51 processed_group_metadata_item = group_metadata_item 52 return processed_group_metadata_item 53 54 if groups is not None: 55 groups_by_id = { 56 grp[group.GroupMetadataField.GROUP_ID.value]: { 57 k: _replace_column_names_with_ids(v) for k, v in grp.items() 58 if k != group.GroupMetadataField.GROUP_ID.value 59 } | {RESULT_IDX: []} 60 for grp in groups 61 } 62 63 for i, meta in enumerate(record_metadata): 64 groups_by_id[meta[group.GroupMetadataField.GROUP_ID.value]][RESULT_IDX].append(i) 65 66 output_groups = sorted(list(groups_by_id.values()), key=lambda x: x[RESULT_IDX][0]) 67 else: 68 output_groups = None 69 70 return processed_records, output_groups 71 [end of mathesar/api/utils.py] [start of mathesar/api/serializers/records.py] 1 from psycopg2.errors import NotNullViolation 2 from rest_framework import serializers 3 from rest_framework import status 4 from sqlalchemy.exc import IntegrityError 5 6 import mathesar.api.exceptions.database_exceptions.exceptions as database_api_exceptions 7 from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin 8 9 10 class RecordListParameterSerializer(MathesarErrorMessageMixin, serializers.Serializer): 11 filter = serializers.JSONField(required=False, default=None) 12 order_by = serializers.JSONField(required=False, default=[]) 13 grouping = serializers.JSONField(required=False, default={}) 14 duplicate_only = serializers.JSONField(required=False, default=None) 15 16 17 class RecordSerializer(MathesarErrorMessageMixin, serializers.BaseSerializer): 18 def update(self, instance, validated_data): 19 table = self.context['table'] 20 record = table.update_record(instance['id'], validated_data) 21 return record 22 23 def create(self, validated_data): 24 table = self.context['table'] 25 try: 26 record = table.create_record_or_records(validated_data) 27 except IntegrityError as e: 28 if type(e.orig) == NotNullViolation: 29 raise database_api_exceptions.NotNullViolationAPIException( 30 e, 31 status_code=status.HTTP_400_BAD_REQUEST, 32 table=table 33 ) 34 else: 35 raise database_api_exceptions.MathesarAPIException(e, status_code=status.HTTP_400_BAD_REQUEST) 36 return record 37 38 def to_representation(self, instance): 39 records = instance._asdict() if not isinstance(instance, dict) else instance 40 columns_map = self.context['columns_map'] 41 records = {columns_map[column_name]: column_value for column_name, column_value in records.items()} 42 return records 43 44 def to_internal_value(self, data): 45 columns_map = self.context['columns_map'].inverse 46 data = 
{columns_map[int(column_id)]: value for column_id, value in data.items()} 47 return data 48 [end of mathesar/api/serializers/records.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/mathesar/api/serializers/records.py b/mathesar/api/serializers/records.py --- a/mathesar/api/serializers/records.py +++ b/mathesar/api/serializers/records.py @@ -5,6 +5,8 @@ import mathesar.api.exceptions.database_exceptions.exceptions as database_api_exceptions from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin +from mathesar.models import Column +from mathesar.api.utils import follows_json_number_spec, is_number class RecordListParameterSerializer(MathesarErrorMessageMixin, serializers.Serializer): @@ -44,4 +46,17 @@ def to_internal_value(self, data): columns_map = self.context['columns_map'].inverse data = {columns_map[int(column_id)]: value for column_id, value in data.items()} + # If the data type of the column is number then the value must be an integer + # or a string which follows JSON number spec. + for column_name in data.keys(): + column = Column.objects.get(id=columns_map.inverse[column_name]) + column_type = column.type + value = data[column_name] + if is_number(column_type) and type(data[column_name]) is str and not follows_json_number_spec(value): + raise database_api_exceptions.MathesarAPIException( + IntegrityError, + status_code=status.HTTP_400_BAD_REQUEST, + message="Number strings should follow JSON number spec", + field=column_name + ) return data diff --git a/mathesar/api/utils.py b/mathesar/api/utils.py --- a/mathesar/api/utils.py +++ b/mathesar/api/utils.py @@ -1,7 +1,9 @@ from rest_framework.exceptions import NotFound +import re from db.records.operations import group from mathesar.models import Table +from mathesar.database.types import _get_type_map DATA_KEY = 'data' METADATA_KEY = 'metadata' @@ -68,3 +70,39 @@ output_groups = None return processed_records, output_groups + + +def is_number(column_type): + """ + Check if a column data type is a number + Args: + column_type: data type of column + """ + for type in _get_type_map(): + if type['name'] == 'Number': + if str(column_type).lower() in type['sa_type_names']: + return True + else: + return False + + +def follows_json_number_spec(number): + """ + Check if a string follows JSON number spec + Args: + number: number as string + """ + patterns = [ + r"^-?0$", + r"^-?0[\.][0-9]+$", + r"^-?0[eE][+-]?[0-9]*$", + r"^-?0[\.][0-9]+[eE][+-]?[0-9]+$", + r"^-?[1-9][0-9]*$", + r"^-?[1-9][0-9]*[\.][0-9]+$", + r"^-?[1-9][0-9]*[eE][+-]?[0-9]+$", + r"^-?[1-9][0-9]*[\.][0-9]+[eE][+-]?[0-9]+$", + ] + for pattern in patterns: + if re.search(pattern, number) is not None: + return True + return False
{"golden_diff": "diff --git a/mathesar/api/serializers/records.py b/mathesar/api/serializers/records.py\n--- a/mathesar/api/serializers/records.py\n+++ b/mathesar/api/serializers/records.py\n@@ -5,6 +5,8 @@\n \n import mathesar.api.exceptions.database_exceptions.exceptions as database_api_exceptions\n from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin\n+from mathesar.models import Column\n+from mathesar.api.utils import follows_json_number_spec, is_number\n \n \n class RecordListParameterSerializer(MathesarErrorMessageMixin, serializers.Serializer):\n@@ -44,4 +46,17 @@\n def to_internal_value(self, data):\n columns_map = self.context['columns_map'].inverse\n data = {columns_map[int(column_id)]: value for column_id, value in data.items()}\n+ # If the data type of the column is number then the value must be an integer\n+ # or a string which follows JSON number spec.\n+ for column_name in data.keys():\n+ column = Column.objects.get(id=columns_map.inverse[column_name])\n+ column_type = column.type\n+ value = data[column_name]\n+ if is_number(column_type) and type(data[column_name]) is str and not follows_json_number_spec(value):\n+ raise database_api_exceptions.MathesarAPIException(\n+ IntegrityError,\n+ status_code=status.HTTP_400_BAD_REQUEST,\n+ message=\"Number strings should follow JSON number spec\",\n+ field=column_name\n+ )\n return data\ndiff --git a/mathesar/api/utils.py b/mathesar/api/utils.py\n--- a/mathesar/api/utils.py\n+++ b/mathesar/api/utils.py\n@@ -1,7 +1,9 @@\n from rest_framework.exceptions import NotFound\n+import re\n \n from db.records.operations import group\n from mathesar.models import Table\n+from mathesar.database.types import _get_type_map\n \n DATA_KEY = 'data'\n METADATA_KEY = 'metadata'\n@@ -68,3 +70,39 @@\n output_groups = None\n \n return processed_records, output_groups\n+\n+\n+def is_number(column_type):\n+ \"\"\"\n+ Check if a column data type is a number\n+ Args:\n+ column_type: data type of column\n+ \"\"\"\n+ for type in _get_type_map():\n+ if type['name'] == 'Number':\n+ if str(column_type).lower() in type['sa_type_names']:\n+ return True\n+ else:\n+ return False\n+\n+\n+def follows_json_number_spec(number):\n+ \"\"\"\n+ Check if a string follows JSON number spec\n+ Args:\n+ number: number as string\n+ \"\"\"\n+ patterns = [\n+ r\"^-?0$\",\n+ r\"^-?0[\\.][0-9]+$\",\n+ r\"^-?0[eE][+-]?[0-9]*$\",\n+ r\"^-?0[\\.][0-9]+[eE][+-]?[0-9]+$\",\n+ r\"^-?[1-9][0-9]*$\",\n+ r\"^-?[1-9][0-9]*[\\.][0-9]+$\",\n+ r\"^-?[1-9][0-9]*[eE][+-]?[0-9]+$\",\n+ r\"^-?[1-9][0-9]*[\\.][0-9]+[eE][+-]?[0-9]+$\",\n+ ]\n+ for pattern in patterns:\n+ if re.search(pattern, number) is not None:\n+ return True\n+ return False\n", "issue": "API should validate input for number columns\n## Description\r\n<!-- A clear and concise description of what the bug is. -->\r\n\r\nCurrently, the API accepts strings for values input to number-typed columns. In some cases, these strings carry locale-sensitive information, i.e., using specific decimal points and negation styles. This is a problem since confusion will arise whenever the client, service, and database have different locale settings (it's likely the client and DB will have different locale settings by default). Even worse, the locale settings in the database (assuming PostgreSQL) may be applied differently in different contexts.\r\n\r\n## Expected behavior\r\n<!-- A clear and concise description of what you expected to happen. 
-->\r\n\r\nColumns which use a number type for storage at the DB layer should only accept numbers in one of two formats:\r\n- an actual JSON number, or\r\n- A string conforming to the [JSON number spec](https://www.json.org/json-en.html), except wrapped in double-quotes.\r\n\r\nThe validation of this should be locale-independent, and should happen in the Mathesar web service rather than the database.\r\n\r\n## To Reproduce\r\n<!-- How can we recreate this bug? Please try to provide a Minimal, Complete, and Verifiable (http://stackoverflow.com/help/mcve) example if code-related. -->\r\n\r\n- Create a table with a number-typed column containing a decimal point (e.g., `FLOAT`).\r\n- Send an API request with input for that column as a string, with a comma for a decimal point. \r\n - You can do this easily from the browseable API, see `/api/db/v0/tables/<table_ID>/records/<record_ID>/`\r\n- Observe the database-layer error.\r\n\n", "before_files": [{"content": "from rest_framework.exceptions import NotFound\n\nfrom db.records.operations import group\nfrom mathesar.models import Table\n\nDATA_KEY = 'data'\nMETADATA_KEY = 'metadata'\n\n\ndef get_table_or_404(pk):\n \"\"\"\n Get table if it exists, otherwise throws a DRF NotFound error.\n Args:\n pk: id of table\n Returns:\n table: return the table based on a specific id\n \"\"\"\n try:\n table = Table.objects.get(id=pk)\n except Table.DoesNotExist:\n raise NotFound\n return table\n\n\ndef process_annotated_records(record_list, column_name_id_map):\n\n RESULT_IDX = 'result_indices'\n\n def _get_record_dict(record):\n return record._asdict() if not isinstance(record, dict) else record\n\n split_records = (\n {DATA_KEY: record_dict}\n for record_dict in (_get_record_dict(record) for record in record_list)\n )\n\n combined_records, groups = group.extract_group_metadata(\n split_records, data_key=DATA_KEY, metadata_key=METADATA_KEY\n )\n\n processed_records, record_metadata = zip(\n *tuple(tuple(d.values()) for d in combined_records)\n )\n\n def _replace_column_names_with_ids(group_metadata_item):\n try:\n processed_group_metadata_item = {\n column_name_id_map[k]: v for k, v in group_metadata_item.items()\n }\n except AttributeError:\n processed_group_metadata_item = group_metadata_item\n return processed_group_metadata_item\n\n if groups is not None:\n groups_by_id = {\n grp[group.GroupMetadataField.GROUP_ID.value]: {\n k: _replace_column_names_with_ids(v) for k, v in grp.items()\n if k != group.GroupMetadataField.GROUP_ID.value\n } | {RESULT_IDX: []}\n for grp in groups\n }\n\n for i, meta in enumerate(record_metadata):\n groups_by_id[meta[group.GroupMetadataField.GROUP_ID.value]][RESULT_IDX].append(i)\n\n output_groups = sorted(list(groups_by_id.values()), key=lambda x: x[RESULT_IDX][0])\n else:\n output_groups = None\n\n return processed_records, output_groups\n", "path": "mathesar/api/utils.py"}, {"content": "from psycopg2.errors import NotNullViolation\nfrom rest_framework import serializers\nfrom rest_framework import status\nfrom sqlalchemy.exc import IntegrityError\n\nimport mathesar.api.exceptions.database_exceptions.exceptions as database_api_exceptions\nfrom mathesar.api.exceptions.mixins import MathesarErrorMessageMixin\n\n\nclass RecordListParameterSerializer(MathesarErrorMessageMixin, serializers.Serializer):\n filter = serializers.JSONField(required=False, default=None)\n order_by = serializers.JSONField(required=False, default=[])\n grouping = serializers.JSONField(required=False, default={})\n duplicate_only = 
serializers.JSONField(required=False, default=None)\n\n\nclass RecordSerializer(MathesarErrorMessageMixin, serializers.BaseSerializer):\n def update(self, instance, validated_data):\n table = self.context['table']\n record = table.update_record(instance['id'], validated_data)\n return record\n\n def create(self, validated_data):\n table = self.context['table']\n try:\n record = table.create_record_or_records(validated_data)\n except IntegrityError as e:\n if type(e.orig) == NotNullViolation:\n raise database_api_exceptions.NotNullViolationAPIException(\n e,\n status_code=status.HTTP_400_BAD_REQUEST,\n table=table\n )\n else:\n raise database_api_exceptions.MathesarAPIException(e, status_code=status.HTTP_400_BAD_REQUEST)\n return record\n\n def to_representation(self, instance):\n records = instance._asdict() if not isinstance(instance, dict) else instance\n columns_map = self.context['columns_map']\n records = {columns_map[column_name]: column_value for column_name, column_value in records.items()}\n return records\n\n def to_internal_value(self, data):\n columns_map = self.context['columns_map'].inverse\n data = {columns_map[int(column_id)]: value for column_id, value in data.items()}\n return data\n", "path": "mathesar/api/serializers/records.py"}]}
1,984
783
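The mathesar patch above validates number strings against eight anchored regexes, one per branch of the JSON number grammar. As a sketch, the same grammar fits in a single pattern; this consolidation is mine, and it differs in one deliberate way: the exponent requires at least one digit, as the JSON spec mandates, whereas the patch's `^-?0[eE][+-]?[0-9]*$` would also accept `"0e"`.

```python
# Sketch: one regex for the JSON number grammar (json.org),
# consolidating the eight patterns in the mathesar golden_diff above.
#   -? (0 | [1-9][0-9]*) (. digits)? ([eE] [+-]? digits)?
import re

JSON_NUMBER_RE = re.compile(r"^-?(?:0|[1-9][0-9]*)(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?$")

def follows_json_number_spec(number: str) -> bool:
    return JSON_NUMBER_RE.match(number) is not None

assert follows_json_number_spec("-0.5e+10")
assert not follows_json_number_spec("1,5")   # locale-style decimal comma rejected
assert not follows_json_number_spec("01")    # leading zeros rejected
```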
gh_patches_debug_31432
rasdani/github-patches
git_diff
facebookresearch__CompilerGym-309
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> CompilerGym cache directory defaults don't doesn't play nicely with shared access machines ## 🐛 Bug The default locations of the CompilerGym caches are in shared folders. On multi-user machines this can cause permission errors as the directory may be created by one user without write permissions to other users. I propose switching to user-specific defaults like so: - [x] `COMPILER_GYM_CACHE` -> `/tmp/compiler_gym-$user` - [x] `COMPILER_YM_TRANSIENT_CACHE` -> `/dev/shm/compiler_gym-$user` </issue> <code> [start of compiler_gym/util/runfiles_path.py] 1 # Copyright (c) Facebook, Inc. and its affiliates. 2 # 3 # This source code is licensed under the MIT license found in the 4 # LICENSE file in the root directory of this source tree. 5 """Module for resolving a runfiles path.""" 6 import getpass 7 import os 8 from pathlib import Path 9 10 # NOTE(cummins): Moving this file may require updating this relative path. 11 _PACKAGE_ROOT = Path(os.path.join(os.path.dirname(__file__), "../../")).resolve( 12 strict=True 13 ) 14 15 16 def runfiles_path(relpath: str) -> Path: 17 """Resolve the path to a runfiles data path. 18 19 No checks are to made to ensure that the path, or the containing directory, 20 exist. 21 22 Use environment variable COMPILER_GYM_RUNFILES=/path/to/runfiles if running 23 outside of bazel. 24 25 :param relpath: The relative path within the runfiles tree. 26 27 :return: An absolute path. 28 """ 29 # There are three ways of determining a runfiles path: 30 # 1. Set the COMPILER_GYM_RUNFILES environment variable. 31 # 2. Using the rules_python library that is provided by bazel. This will 32 # fail if not being executed within a bazel sandbox. 33 # 3. Computing the path relative to the location of this file. This is the 34 # fallback approach that is used for when the code has been installed 35 # by setuptools. 36 runfiles_path = os.environ.get("COMPILER_GYM_RUNFILES") 37 if runfiles_path: 38 return Path(runfiles_path) / relpath 39 else: 40 try: 41 from rules_python.python.runfiles import runfiles 42 43 return Path( 44 runfiles.Create().Rlocation( 45 "CompilerGym" if relpath == "." else f"CompilerGym/{relpath}" 46 ) 47 ) 48 except (ModuleNotFoundError, TypeError): 49 return _PACKAGE_ROOT / relpath 50 51 52 def site_data_path(relpath: str) -> Path: 53 """Return a path within the site data directory. 54 55 CompilerGym uses a directory to store persistent site data files in, such as benchmark datasets. 56 The default location is :code:`~/.local/share/compiler_gym`. Set the environment variable 57 :code:`$COMPILER_GYM_SITE_DATA` to override this default location. 58 59 No checks are to made to ensure that the path, or the containing directory, 60 exist. 61 62 :param relpath: The relative path within the site data tree. 63 64 :return: An absolute path. 65 """ 66 # NOTE(cummins): This function has a matching implementation in the C++ 67 # sources, compiler_gym::service::getSiteDataPath(). Any change to behavior 68 # here must be reflected in the C++ version. 69 forced = os.environ.get("COMPILER_GYM_SITE_DATA") 70 if forced: 71 return Path(forced) / relpath 72 elif os.environ.get("HOME"): 73 return Path("~/.local/share/compiler_gym").expanduser() / relpath 74 else: 75 return Path("/tmp/compiler_gym/site_data") / relpath 76 77 78 def cache_path(relpath: str) -> Path: 79 """Return a path within the cache directory. 
80 81 CompilerGym uses a directory to cache files in, such as downloaded content. 82 The default location for this cache is :code:`~/.cache/compiler_gym`. Set 83 the environment variable :code:`$COMPILER_GYM_CACHE` to override this 84 default location. 85 86 No checks are to made to ensure that the path, or the containing directory, 87 exist. 88 89 :param relpath: The relative path within the cache tree. 90 91 :return: An absolute path. 92 """ 93 forced = os.environ.get("COMPILER_GYM_CACHE") 94 if forced: 95 return Path(forced) / relpath 96 elif os.environ.get("HOME"): 97 return Path("~/.cache/compiler_gym").expanduser() / relpath 98 else: 99 return Path("/tmp/compiler_gym/cache") / relpath 100 101 102 def transient_cache_path(relpath: str) -> Path: 103 """Return a path within the transient cache directory. 104 105 The transient cache is a directory used to store files that do not need to 106 persist beyond the lifetime of the current process. When available, the 107 temporary filesystem :code:`/dev/shm` will be used. Else, 108 :meth:`cache_path() <compiler_gym.cache_path>` is used as a fallback. Set 109 the environment variable :code:`$COMPILER_GYM_TRANSIENT_CACHE` to override 110 the default location. 111 112 No checks are to made to ensure that the path, or the containing directory, 113 exist. 114 115 :param relpath: The relative path within the cache tree. 116 117 :return: An absolute path. 118 """ 119 forced = os.environ.get("COMPILER_GYM_TRANSIENT_CACHE") 120 if forced: 121 return Path(forced) / relpath 122 elif Path("/dev/shm").is_dir(): 123 return Path(f"/dev/shm/compiler_gym_{getpass.getuser()}") / relpath 124 else: 125 # Fallback to using the regular cache. 126 return cache_path(relpath) 127 [end of compiler_gym/util/runfiles_path.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/compiler_gym/util/runfiles_path.py b/compiler_gym/util/runfiles_path.py --- a/compiler_gym/util/runfiles_path.py +++ b/compiler_gym/util/runfiles_path.py @@ -3,8 +3,8 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """Module for resolving a runfiles path.""" -import getpass import os +from getpass import getuser from pathlib import Path # NOTE(cummins): Moving this file may require updating this relative path. @@ -72,7 +72,7 @@ elif os.environ.get("HOME"): return Path("~/.local/share/compiler_gym").expanduser() / relpath else: - return Path("/tmp/compiler_gym/site_data") / relpath + return Path(f"/tmp/compiler_gym_{getuser()}/site_data") / relpath def cache_path(relpath: str) -> Path: @@ -96,7 +96,7 @@ elif os.environ.get("HOME"): return Path("~/.cache/compiler_gym").expanduser() / relpath else: - return Path("/tmp/compiler_gym/cache") / relpath + return Path(f"/tmp/compiler_gym_{getuser()}/cache") / relpath def transient_cache_path(relpath: str) -> Path: @@ -120,7 +120,7 @@ if forced: return Path(forced) / relpath elif Path("/dev/shm").is_dir(): - return Path(f"/dev/shm/compiler_gym_{getpass.getuser()}") / relpath + return Path(f"/dev/shm/compiler_gym_{getuser()}") / relpath else: # Fallback to using the regular cache. return cache_path(relpath)
{"golden_diff": "diff --git a/compiler_gym/util/runfiles_path.py b/compiler_gym/util/runfiles_path.py\n--- a/compiler_gym/util/runfiles_path.py\n+++ b/compiler_gym/util/runfiles_path.py\n@@ -3,8 +3,8 @@\n # This source code is licensed under the MIT license found in the\n # LICENSE file in the root directory of this source tree.\n \"\"\"Module for resolving a runfiles path.\"\"\"\n-import getpass\n import os\n+from getpass import getuser\n from pathlib import Path\n \n # NOTE(cummins): Moving this file may require updating this relative path.\n@@ -72,7 +72,7 @@\n elif os.environ.get(\"HOME\"):\n return Path(\"~/.local/share/compiler_gym\").expanduser() / relpath\n else:\n- return Path(\"/tmp/compiler_gym/site_data\") / relpath\n+ return Path(f\"/tmp/compiler_gym_{getuser()}/site_data\") / relpath\n \n \n def cache_path(relpath: str) -> Path:\n@@ -96,7 +96,7 @@\n elif os.environ.get(\"HOME\"):\n return Path(\"~/.cache/compiler_gym\").expanduser() / relpath\n else:\n- return Path(\"/tmp/compiler_gym/cache\") / relpath\n+ return Path(f\"/tmp/compiler_gym_{getuser()}/cache\") / relpath\n \n \n def transient_cache_path(relpath: str) -> Path:\n@@ -120,7 +120,7 @@\n if forced:\n return Path(forced) / relpath\n elif Path(\"/dev/shm\").is_dir():\n- return Path(f\"/dev/shm/compiler_gym_{getpass.getuser()}\") / relpath\n+ return Path(f\"/dev/shm/compiler_gym_{getuser()}\") / relpath\n else:\n # Fallback to using the regular cache.\n return cache_path(relpath)\n", "issue": "CompilerGym cache directory defaults don't doesn't play nicely with shared access machines\n## \ud83d\udc1b Bug\r\n\r\nThe default locations of the CompilerGym caches are in shared folders. On multi-user machines this can cause permission errors as the directory may be created by one user without write permissions to other users. I propose switching to user-specific defaults like so:\r\n\r\n- [x] `COMPILER_GYM_CACHE` -> `/tmp/compiler_gym-$user`\r\n- [x] `COMPILER_YM_TRANSIENT_CACHE` -> `/dev/shm/compiler_gym-$user`\r\n\n", "before_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\"\"\"Module for resolving a runfiles path.\"\"\"\nimport getpass\nimport os\nfrom pathlib import Path\n\n# NOTE(cummins): Moving this file may require updating this relative path.\n_PACKAGE_ROOT = Path(os.path.join(os.path.dirname(__file__), \"../../\")).resolve(\n strict=True\n)\n\n\ndef runfiles_path(relpath: str) -> Path:\n \"\"\"Resolve the path to a runfiles data path.\n\n No checks are to made to ensure that the path, or the containing directory,\n exist.\n\n Use environment variable COMPILER_GYM_RUNFILES=/path/to/runfiles if running\n outside of bazel.\n\n :param relpath: The relative path within the runfiles tree.\n\n :return: An absolute path.\n \"\"\"\n # There are three ways of determining a runfiles path:\n # 1. Set the COMPILER_GYM_RUNFILES environment variable.\n # 2. Using the rules_python library that is provided by bazel. This will\n # fail if not being executed within a bazel sandbox.\n # 3. Computing the path relative to the location of this file. 
This is the\n # fallback approach that is used for when the code has been installed\n # by setuptools.\n runfiles_path = os.environ.get(\"COMPILER_GYM_RUNFILES\")\n if runfiles_path:\n return Path(runfiles_path) / relpath\n else:\n try:\n from rules_python.python.runfiles import runfiles\n\n return Path(\n runfiles.Create().Rlocation(\n \"CompilerGym\" if relpath == \".\" else f\"CompilerGym/{relpath}\"\n )\n )\n except (ModuleNotFoundError, TypeError):\n return _PACKAGE_ROOT / relpath\n\n\ndef site_data_path(relpath: str) -> Path:\n \"\"\"Return a path within the site data directory.\n\n CompilerGym uses a directory to store persistent site data files in, such as benchmark datasets.\n The default location is :code:`~/.local/share/compiler_gym`. Set the environment variable\n :code:`$COMPILER_GYM_SITE_DATA` to override this default location.\n\n No checks are to made to ensure that the path, or the containing directory,\n exist.\n\n :param relpath: The relative path within the site data tree.\n\n :return: An absolute path.\n \"\"\"\n # NOTE(cummins): This function has a matching implementation in the C++\n # sources, compiler_gym::service::getSiteDataPath(). Any change to behavior\n # here must be reflected in the C++ version.\n forced = os.environ.get(\"COMPILER_GYM_SITE_DATA\")\n if forced:\n return Path(forced) / relpath\n elif os.environ.get(\"HOME\"):\n return Path(\"~/.local/share/compiler_gym\").expanduser() / relpath\n else:\n return Path(\"/tmp/compiler_gym/site_data\") / relpath\n\n\ndef cache_path(relpath: str) -> Path:\n \"\"\"Return a path within the cache directory.\n\n CompilerGym uses a directory to cache files in, such as downloaded content.\n The default location for this cache is :code:`~/.cache/compiler_gym`. Set\n the environment variable :code:`$COMPILER_GYM_CACHE` to override this\n default location.\n\n No checks are to made to ensure that the path, or the containing directory,\n exist.\n\n :param relpath: The relative path within the cache tree.\n\n :return: An absolute path.\n \"\"\"\n forced = os.environ.get(\"COMPILER_GYM_CACHE\")\n if forced:\n return Path(forced) / relpath\n elif os.environ.get(\"HOME\"):\n return Path(\"~/.cache/compiler_gym\").expanduser() / relpath\n else:\n return Path(\"/tmp/compiler_gym/cache\") / relpath\n\n\ndef transient_cache_path(relpath: str) -> Path:\n \"\"\"Return a path within the transient cache directory.\n\n The transient cache is a directory used to store files that do not need to\n persist beyond the lifetime of the current process. When available, the\n temporary filesystem :code:`/dev/shm` will be used. Else,\n :meth:`cache_path() <compiler_gym.cache_path>` is used as a fallback. Set\n the environment variable :code:`$COMPILER_GYM_TRANSIENT_CACHE` to override\n the default location.\n\n No checks are to made to ensure that the path, or the containing directory,\n exist.\n\n :param relpath: The relative path within the cache tree.\n\n :return: An absolute path.\n \"\"\"\n forced = os.environ.get(\"COMPILER_GYM_TRANSIENT_CACHE\")\n if forced:\n return Path(forced) / relpath\n elif Path(\"/dev/shm\").is_dir():\n return Path(f\"/dev/shm/compiler_gym_{getpass.getuser()}\") / relpath\n else:\n # Fallback to using the regular cache.\n return cache_path(relpath)\n", "path": "compiler_gym/util/runfiles_path.py"}]}
2,047
409
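Aside on the row above: the essence of the CompilerGym patch is deriving per-user fallback paths with `getpass.getuser()` instead of a shared `/tmp/compiler_gym` directory. A minimal standalone sketch of that pattern — the application name `myapp` and the environment variable `MYAPP_CACHE` are illustrative assumptions, not CompilerGym's actual names:

```python
import os
from getpass import getuser
from pathlib import Path

APP = "myapp"                # hypothetical application name, not CompilerGym's
CACHE_ENV = "MYAPP_CACHE"    # hypothetical override variable


def cache_path(relpath: str) -> Path:
    """Resolve a cache path: env override first, then $HOME, then a
    per-user /tmp directory so users on shared machines never collide."""
    forced = os.environ.get(CACHE_ENV)
    if forced:
        return Path(forced) / relpath
    if os.environ.get("HOME"):
        return Path(f"~/.cache/{APP}").expanduser() / relpath
    # getuser() keys the fallback to the current user, avoiding permission
    # errors on a directory first created by a different user.
    return Path(f"/tmp/{APP}_{getuser()}/cache") / relpath


if __name__ == "__main__":
    print(cache_path("downloads"))
```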
gh_patches_debug_1369
rasdani/github-patches
git_diff
Parsl__parsl-972
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Fix `ModuleNotFoundError: No module named 'monitoring'` Looks like this bug was introduced with the recent merge of monitoring back into the parsl repo. ``` Traceback (most recent call last): File "/Users/awoodard/software/miniconda3/bin/parsl-visualize", line 11, in <module> load_entry_point('parsl==0.7.2', 'console_scripts', 'parsl-visualize')() File "/Users/awoodard/software/miniconda3/lib/python3.7/site-packages/pkg_resources/__init__.py", line 484, in load_entry_point return get_distribution(dist).load_entry_point(group, name) File "/Users/awoodard/software/miniconda3/lib/python3.7/site-packages/pkg_resources/__init__.py", line 2707, in load_entry_point return ep.load() File "/Users/awoodard/software/miniconda3/lib/python3.7/site-packages/pkg_resources/__init__.py", line 2325, in load return self.resolve() File "/Users/awoodard/software/miniconda3/lib/python3.7/site-packages/pkg_resources/__init__.py", line 2331, in resolve module = __import__(self.module_name, fromlist=['__name__'], level=0) ModuleNotFoundError: No module named 'monitoring' ``` </issue> <code> [start of setup.py] 1 from setuptools import setup, find_packages 2 3 with open('parsl/version.py') as f: 4 exec(f.read()) 5 6 with open('requirements.txt') as f: 7 install_requires = f.readlines() 8 9 extras_require = { 10 'monitoring' : [ 11 'psutil', 12 'sqlalchemy', 13 'sqlalchemy_utils', 14 'pydot', 15 'networkx', 16 'Flask', 17 'flask_sqlalchemy', 18 'pandas', 19 'plotly', 20 'python-daemon' 21 ], 22 'aws' : ['boto3'], 23 'kubernetes' : ['kubernetes'], 24 'extreme_scale' : ['mpi4py'], 25 'docs' : ['nbsphinx', 'sphinx_rtd_theme'], 26 'google_cloud' : ['google-auth', 'google-api-python-client'], 27 'gssapi' : ['python-gssapi'], 28 } 29 extras_require['all'] = sum(extras_require.values(), []) 30 31 setup( 32 name='parsl', 33 version=VERSION, 34 description='Simple data dependent workflows in Python', 35 long_description='Simple parallel workflows system for Python', 36 url='https://github.com/Parsl/parsl', 37 author='The Parsl Team', 38 author_email='[email protected]', 39 license='Apache 2.0', 40 download_url='https://github.com/Parsl/parsl/archive/{}.tar.gz'.format(VERSION), 41 include_package_data=True, 42 packages=find_packages(), 43 install_requires=install_requires, 44 scripts = ['parsl/executors/high_throughput/process_worker_pool.py', 45 'parsl/executors/extreme_scale/mpi_worker_pool.py', 46 'parsl/executors/low_latency/lowlatency_worker.py', 47 ], 48 extras_require=extras_require, 49 classifiers=[ 50 # Maturity 51 'Development Status :: 3 - Alpha', 52 # Intended audience 53 'Intended Audience :: Developers', 54 # Licence, must match with licence above 55 'License :: OSI Approved :: Apache Software License', 56 # Python versions supported 57 'Programming Language :: Python :: 3.5', 58 'Programming Language :: Python :: 3.6', 59 ], 60 keywords=['Workflows', 'Scientific computing'], 61 entry_points={'console_scripts': 62 [ 63 'parsl-globus-auth=parsl.data_provider.globus:cli_run', 64 'parsl-visualize=monitoring.visualization.app:cli_run', 65 ]} 66 ) 67 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -61,6 +61,6 @@ entry_points={'console_scripts': [ 'parsl-globus-auth=parsl.data_provider.globus:cli_run', - 'parsl-visualize=monitoring.visualization.app:cli_run', + 'parsl-visualize=parsl.monitoring.visualization.app:cli_run', ]} )
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -61,6 +61,6 @@\n entry_points={'console_scripts':\n [\n 'parsl-globus-auth=parsl.data_provider.globus:cli_run',\n- 'parsl-visualize=monitoring.visualization.app:cli_run',\n+ 'parsl-visualize=parsl.monitoring.visualization.app:cli_run',\n ]}\n )\n", "issue": "Fix `ModuleNotFoundError: No module named 'monitoring'`\nLooks like this bug was introduced with the recent merge of monitoring back into the parsl repo.\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"/Users/awoodard/software/miniconda3/bin/parsl-visualize\", line 11, in <module>\r\n load_entry_point('parsl==0.7.2', 'console_scripts', 'parsl-visualize')()\r\n File \"/Users/awoodard/software/miniconda3/lib/python3.7/site-packages/pkg_resources/__init__.py\", line 484, in load_entry_point\r\n return get_distribution(dist).load_entry_point(group, name)\r\n File \"/Users/awoodard/software/miniconda3/lib/python3.7/site-packages/pkg_resources/__init__.py\", line 2707, in load_entry_point\r\n return ep.load()\r\n File \"/Users/awoodard/software/miniconda3/lib/python3.7/site-packages/pkg_resources/__init__.py\", line 2325, in load\r\n return self.resolve()\r\n File \"/Users/awoodard/software/miniconda3/lib/python3.7/site-packages/pkg_resources/__init__.py\", line 2331, in resolve\r\n module = __import__(self.module_name, fromlist=['__name__'], level=0)\r\nModuleNotFoundError: No module named 'monitoring'\r\n```\n", "before_files": [{"content": "from setuptools import setup, find_packages\n\nwith open('parsl/version.py') as f:\n exec(f.read())\n\nwith open('requirements.txt') as f:\n install_requires = f.readlines()\n\nextras_require = {\n 'monitoring' : [\n 'psutil',\n 'sqlalchemy',\n 'sqlalchemy_utils',\n 'pydot',\n 'networkx',\n 'Flask',\n 'flask_sqlalchemy',\n 'pandas',\n 'plotly',\n 'python-daemon'\n ],\n 'aws' : ['boto3'],\n 'kubernetes' : ['kubernetes'],\n 'extreme_scale' : ['mpi4py'],\n 'docs' : ['nbsphinx', 'sphinx_rtd_theme'],\n 'google_cloud' : ['google-auth', 'google-api-python-client'],\n 'gssapi' : ['python-gssapi'],\n}\nextras_require['all'] = sum(extras_require.values(), [])\n\nsetup(\n name='parsl',\n version=VERSION,\n description='Simple data dependent workflows in Python',\n long_description='Simple parallel workflows system for Python',\n url='https://github.com/Parsl/parsl',\n author='The Parsl Team',\n author_email='[email protected]',\n license='Apache 2.0',\n download_url='https://github.com/Parsl/parsl/archive/{}.tar.gz'.format(VERSION),\n include_package_data=True,\n packages=find_packages(),\n install_requires=install_requires,\n scripts = ['parsl/executors/high_throughput/process_worker_pool.py',\n 'parsl/executors/extreme_scale/mpi_worker_pool.py',\n 'parsl/executors/low_latency/lowlatency_worker.py',\n ],\n extras_require=extras_require,\n classifiers=[\n # Maturity\n 'Development Status :: 3 - Alpha',\n # Intended audience\n 'Intended Audience :: Developers',\n # Licence, must match with licence above\n 'License :: OSI Approved :: Apache Software License',\n # Python versions supported\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n ],\n keywords=['Workflows', 'Scientific computing'],\n entry_points={'console_scripts':\n [\n 'parsl-globus-auth=parsl.data_provider.globus:cli_run',\n 'parsl-visualize=monitoring.visualization.app:cli_run',\n ]}\n)\n", "path": "setup.py"}]}
1,481
100
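Aside on the row above: the one-word Parsl fix works because a pip-generated console script resolves the text left of the colon as an importable module path. A rough sketch of that resolution step (the demo target `json:dumps` is arbitrary, chosen only because it imports everywhere):

```python
import importlib


def resolve_entry_point(spec: str):
    """Mimic how a console_scripts launcher loads 'module.path:attr'."""
    module_path, attr = spec.split(":")
    module = importlib.import_module(module_path)  # ModuleNotFoundError here
    return getattr(module, attr)                   # if module_path is wrong


print(resolve_entry_point("json:dumps")({"ok": True}))
# "monitoring.visualization.app:cli_run" fails inside import_module() because
# no top-level 'monitoring' package exists after installation; prefixing
# "parsl." makes the path importable, which is exactly what the golden diff does.
```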
gh_patches_debug_13031
rasdani/github-patches
git_diff
inventree__InvenTree-6284
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Platform UI refuses to log out ### Please verify that this bug has NOT been raised before. - [X] I checked and didn't find a similar issue ### Describe the bug* Discovered when I was setting up Platorm UI for dev, trying to log out simply sends you to the Home page and tells you that you were already logged in ![image](https://github.com/inventree/InvenTree/assets/7506688/d4a21313-2076-470b-a449-64e530f54ce8) ### Steps to Reproduce Not sure about the exact trigger here. It's still occuring to me as it did yesterday. ### Expected behaviour Being able to log out ### Deployment Method - [ ] Docker - [ ] Bare metal ### Version Information InvenTree - inventree.org The Open-Source Inventory Management System Installation paths: Base /workspaces/InvenTree Config /workspaces/InvenTree/dev/config.yaml Media /workspaces/InvenTree/dev/media Static /workspaces/InvenTree/dev/static Versions: Python 3.10.10 Django 3.2.23 InvenTree 0.13.0 dev API 152 Node v20.9.0 Yarn 1.22.19 Commit hash:dabd95d Commit date:2023-11-21 ### Please verify if you can reproduce this bug on the demo site. - [ ] I can reproduce this bug on the demo site. ### Relevant log output ```shell Created new API token for user 'admin' (name='inventree-web-app') [22/Nov/2023 17:23:56] "GET /api/user/token/?name=inventree-web-app HTTP/1.1" 200 114 [22/Nov/2023 17:23:56] "GET /api/user/me/ HTTP/1.1" 200 134 [22/Nov/2023 17:23:56] "GET /api/notifications/?read=false&limit=1 HTTP/1.1" 200 52 [22/Nov/2023 17:23:57] "GET /api/user/roles/ HTTP/1.1" 200 527 [22/Nov/2023 17:23:57] "GET /api/settings/global/ HTTP/1.1" 200 27344 Created new API token for user 'admin' (name='inventree-web-app') [22/Nov/2023 17:23:57] "GET /api/user/token/?name=inventree-web-app HTTP/1.1" 200 114 Background worker check failed Email backend not configured InvenTree system health checks failed [22/Nov/2023 17:23:57] "GET /api/ HTTP/1.1" 200 1145 [22/Nov/2023 17:23:57] "GET /api/user/me/ HTTP/1.1" 200 134 [22/Nov/2023 17:23:57] "GET /api/generic/status/ HTTP/1.1" 200 5851 [22/Nov/2023 17:23:57] "GET /api/user/roles/ HTTP/1.1" 200 527 Background worker check failed Email backend not configured InvenTree system health checks failed [22/Nov/2023 17:23:58] "GET /api/settings/global/ HTTP/1.1" 200 27344 [22/Nov/2023 17:23:58] "GET /api/ HTTP/1.1" 200 1145 [22/Nov/2023 17:23:58] "GET /api/settings/user/ HTTP/1.1" 200 13878 [22/Nov/2023 17:23:58] "GET /api/generic/status/ HTTP/1.1" 200 5851 [22/Nov/2023 17:23:58] "GET /api/settings/user/ HTTP/1.1" 200 13878 ``` </issue> <code> [start of InvenTree/web/urls.py] 1 """URLs for web app.""" 2 3 from django.conf import settings 4 from django.shortcuts import redirect 5 from django.urls import include, path 6 from django.views.decorators.csrf import ensure_csrf_cookie 7 from django.views.generic import TemplateView 8 9 10 class RedirectAssetView(TemplateView): 11 """View to redirect to static asset.""" 12 13 def get(self, request, *args, **kwargs): 14 """Redirect to static asset.""" 15 return redirect( 16 f"{settings.STATIC_URL}web/assets/{kwargs['path']}", permanent=True 17 ) 18 19 20 spa_view = ensure_csrf_cookie(TemplateView.as_view(template_name='web/index.html')) 21 assets_path = path('assets/<path:path>', RedirectAssetView.as_view()) 22 23 24 urlpatterns = [ 25 path( 26 f'{settings.FRONTEND_URL_BASE}/', 27 include([ 28 assets_path, 29 path( 30 'set-password?uid=<uid>&token=<token>', 31 spa_view, 32 
name='password_reset_confirm', 33 ), 34 path('', spa_view), 35 ]), 36 ), 37 assets_path, 38 path(settings.FRONTEND_URL_BASE, spa_view, name='platform'), 39 ] 40 [end of InvenTree/web/urls.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/InvenTree/web/urls.py b/InvenTree/web/urls.py --- a/InvenTree/web/urls.py +++ b/InvenTree/web/urls.py @@ -2,7 +2,7 @@ from django.conf import settings from django.shortcuts import redirect -from django.urls import include, path +from django.urls import include, path, re_path from django.views.decorators.csrf import ensure_csrf_cookie from django.views.generic import TemplateView @@ -31,7 +31,7 @@ spa_view, name='password_reset_confirm', ), - path('', spa_view), + re_path('.*', spa_view), ]), ), assets_path,
{"golden_diff": "diff --git a/InvenTree/web/urls.py b/InvenTree/web/urls.py\n--- a/InvenTree/web/urls.py\n+++ b/InvenTree/web/urls.py\n@@ -2,7 +2,7 @@\n \n from django.conf import settings\n from django.shortcuts import redirect\n-from django.urls import include, path\n+from django.urls import include, path, re_path\n from django.views.decorators.csrf import ensure_csrf_cookie\n from django.views.generic import TemplateView\n \n@@ -31,7 +31,7 @@\n spa_view,\n name='password_reset_confirm',\n ),\n- path('', spa_view),\n+ re_path('.*', spa_view),\n ]),\n ),\n assets_path,\n", "issue": "Platform UI refuses to log out\n### Please verify that this bug has NOT been raised before.\n\n- [X] I checked and didn't find a similar issue\n\n### Describe the bug*\n\nDiscovered when I was setting up Platorm UI for dev, trying to log out simply sends you to the Home page and tells you that you were already logged in\r\n![image](https://github.com/inventree/InvenTree/assets/7506688/d4a21313-2076-470b-a449-64e530f54ce8)\r\n\n\n### Steps to Reproduce\n\nNot sure about the exact trigger here. It's still occuring to me as it did yesterday.\r\n\n\n### Expected behaviour\n\nBeing able to log out\n\n### Deployment Method\n\n- [ ] Docker\n- [ ] Bare metal\n\n### Version Information\n\nInvenTree - inventree.org\r\nThe Open-Source Inventory Management System\r\n\r\n\r\nInstallation paths:\r\nBase /workspaces/InvenTree\r\nConfig /workspaces/InvenTree/dev/config.yaml\r\nMedia /workspaces/InvenTree/dev/media\r\nStatic /workspaces/InvenTree/dev/static\r\n\r\nVersions:\r\nPython 3.10.10\r\nDjango 3.2.23\r\nInvenTree 0.13.0 dev\r\nAPI 152\r\nNode v20.9.0\r\nYarn 1.22.19\r\n\r\nCommit hash:dabd95d\r\nCommit date:2023-11-21\n\n### Please verify if you can reproduce this bug on the demo site.\n\n- [ ] I can reproduce this bug on the demo site.\n\n### Relevant log output\n\n```shell\nCreated new API token for user 'admin' (name='inventree-web-app')\r\n[22/Nov/2023 17:23:56] \"GET /api/user/token/?name=inventree-web-app HTTP/1.1\" 200 114\r\n[22/Nov/2023 17:23:56] \"GET /api/user/me/ HTTP/1.1\" 200 134\r\n[22/Nov/2023 17:23:56] \"GET /api/notifications/?read=false&limit=1 HTTP/1.1\" 200 52\r\n[22/Nov/2023 17:23:57] \"GET /api/user/roles/ HTTP/1.1\" 200 527\r\n[22/Nov/2023 17:23:57] \"GET /api/settings/global/ HTTP/1.1\" 200 27344\r\nCreated new API token for user 'admin' (name='inventree-web-app')\r\n[22/Nov/2023 17:23:57] \"GET /api/user/token/?name=inventree-web-app HTTP/1.1\" 200 114\r\nBackground worker check failed\r\nEmail backend not configured\r\nInvenTree system health checks failed\r\n[22/Nov/2023 17:23:57] \"GET /api/ HTTP/1.1\" 200 1145\r\n[22/Nov/2023 17:23:57] \"GET /api/user/me/ HTTP/1.1\" 200 134\r\n[22/Nov/2023 17:23:57] \"GET /api/generic/status/ HTTP/1.1\" 200 5851\r\n[22/Nov/2023 17:23:57] \"GET /api/user/roles/ HTTP/1.1\" 200 527\r\nBackground worker check failed\r\nEmail backend not configured\r\nInvenTree system health checks failed\r\n[22/Nov/2023 17:23:58] \"GET /api/settings/global/ HTTP/1.1\" 200 27344\r\n[22/Nov/2023 17:23:58] \"GET /api/ HTTP/1.1\" 200 1145\r\n[22/Nov/2023 17:23:58] \"GET /api/settings/user/ HTTP/1.1\" 200 13878\r\n[22/Nov/2023 17:23:58] \"GET /api/generic/status/ HTTP/1.1\" 200 5851\r\n[22/Nov/2023 17:23:58] \"GET /api/settings/user/ HTTP/1.1\" 200 13878\n```\n\n", "before_files": [{"content": "\"\"\"URLs for web app.\"\"\"\n\nfrom django.conf import settings\nfrom django.shortcuts import redirect\nfrom django.urls import include, path\nfrom django.views.decorators.csrf import 
ensure_csrf_cookie\nfrom django.views.generic import TemplateView\n\n\nclass RedirectAssetView(TemplateView):\n \"\"\"View to redirect to static asset.\"\"\"\n\n def get(self, request, *args, **kwargs):\n \"\"\"Redirect to static asset.\"\"\"\n return redirect(\n f\"{settings.STATIC_URL}web/assets/{kwargs['path']}\", permanent=True\n )\n\n\nspa_view = ensure_csrf_cookie(TemplateView.as_view(template_name='web/index.html'))\nassets_path = path('assets/<path:path>', RedirectAssetView.as_view())\n\n\nurlpatterns = [\n path(\n f'{settings.FRONTEND_URL_BASE}/',\n include([\n assets_path,\n path(\n 'set-password?uid=<uid>&token=<token>',\n spa_view,\n name='password_reset_confirm',\n ),\n path('', spa_view),\n ]),\n ),\n assets_path,\n path(settings.FRONTEND_URL_BASE, spa_view, name='platform'),\n]\n", "path": "InvenTree/web/urls.py"}]}
1,957
156
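Aside on the row above: the behavioural difference behind swapping `path('', spa_view)` for `re_path('.*', spa_view)` is just regex matching of the remaining URL fragment. A Django-free illustration of the two patterns (the sample URLs are made up):

```python
import re


def matches_empty(remainder: str) -> bool:
    return remainder == ""                          # what path('') accepts


def matches_catch_all(remainder: str) -> bool:
    return re.match(r".*", remainder) is not None   # what re_path('.*') accepts


for url in ("", "logout", "part/42/"):
    print(f"{url!r:12} path('')={matches_empty(url)!s:5} "
          f"re_path('.*')={matches_catch_all(url)}")
```

With the catch-all, client-side routes such as a logout page are handed to the single-page app instead of falling through, which is the behaviour the issue describes.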
gh_patches_debug_24482
rasdani/github-patches
git_diff
sunpy__sunpy-3515
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Error in documentation for "Finding bright regions with ndimage" example. <!-- This comments are hidden when you submit the issue so you do not need to remove them! Please be sure to check out our contributing guidelines: https://github.com/sunpy/sunpy/blob/master/CONTRIBUTING.rst Please be sure to check out our code of conduct: https://github.com/sunpy/sunpy/blob/master/CODE_OF_CONDUCT.rst --> <!-- Please have a search on our GitHub repository to see if a similar issue has already been posted. If a similar issue is closed, have a quick look to see if you are satisfied by the resolution. If not please go ahead and open an issue! --> ### Description <!-- Provide a general description of the bug. --> There seems to be an error in the documentation for the "Finding bright regions with ndimage" example. In the part where a mask is made, the surrounding text states: " We choose the criterion that the data should be at least 5% of the maximum value. " However, if you look at the code immediately below, the threshold is based off 10% the max value: `mask = aiamap.data < aiamap.max() * 0.10` ### Expected behavior <!-- What did you expect to happen. --> Documentation needs to be modified to reflect that the threshold is based off a 10% threshold. </issue> <code> [start of examples/map/image_bright_regions_gallery_example.py] 1 # coding: utf-8 2 """ 3 =================================== 4 Finding bright regions with ndimage 5 =================================== 6 7 How you can to find the brightest regions in an AIA image and 8 count the approximate number of regions of interest using ndimage. 9 """ 10 # sphinx_gallery_thumbnail_number = 2 11 12 from scipy import ndimage 13 import matplotlib.pyplot as plt 14 15 import sunpy.map 16 from sunpy.data.sample import AIA_193_IMAGE 17 18 ############################################################################### 19 # We start with the sample data 20 aiamap_mask = sunpy.map.Map(AIA_193_IMAGE) 21 aiamap = sunpy.map.Map(AIA_193_IMAGE) 22 23 ############################################################################## 24 # First we make a mask, which tells us which regions are bright. We 25 # choose the criterion that the data should be at least 5% of the maximum 26 # value. Pixels with intensity values greater than this are included in the 27 # mask, while all other pixels are excluded. 28 mask = aiamap.data < aiamap.max() * 0.10 29 30 ############################################################################## 31 # Mask is a `boolean` array. It can be used to modify the original map object 32 # without modifying the data. Once this mask attribute is set, we can plot the 33 # image again. 34 aiamap_mask.mask = mask 35 plt.figure() 36 aiamap.plot() 37 plt.colorbar() 38 plt.show() 39 40 ############################################################################## 41 # Only the brightest pixels remain in the image. 42 # However, these areas are artificially broken up into small regions. 43 # We can solve this by applying some smoothing to the image data. 44 # Here we apply a 2D Gaussian smoothing function to the data. 
45 data2 = ndimage.gaussian_filter(aiamap.data * ~mask, 14) 46 47 ############################################################################## 48 # The issue with the filtering is that it create pixels where the values are 49 # small (<100), so when we go on later to label this array, 50 # we get one large region which encompasses the entire array. 51 # If you want to see, just remove this line. 52 data2[data2 < 100] = 0 53 54 ############################################################################## 55 # Now we will make a second SunPy map with this smoothed data. 56 aiamap2 = sunpy.map.Map(data2, aiamap.meta) 57 58 ############################################################################## 59 # The function `label` from the `scipy.ndimage` module, counts the number of 60 # contiguous regions in an image. 61 labels, n = ndimage.label(aiamap2.data) 62 63 ############################################################################## 64 # Finally, we plot the smoothed bright image data, along with the estimate of 65 # the number of distinct regions. We can see that approximately 6 distinct hot 66 # regions are present above the 5% of the maximum level. 67 plt.figure() 68 ax = plt.subplot(projection=aiamap) 69 aiamap.plot() 70 plt.contour(labels) 71 plt.figtext(0.3, 0.2, f'Number of regions = {n}', color='white') 72 plt.show() 73 [end of examples/map/image_bright_regions_gallery_example.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/examples/map/image_bright_regions_gallery_example.py b/examples/map/image_bright_regions_gallery_example.py --- a/examples/map/image_bright_regions_gallery_example.py +++ b/examples/map/image_bright_regions_gallery_example.py @@ -22,7 +22,7 @@ ############################################################################## # First we make a mask, which tells us which regions are bright. We -# choose the criterion that the data should be at least 5% of the maximum +# choose the criterion that the data should be at least 10% of the maximum # value. Pixels with intensity values greater than this are included in the # mask, while all other pixels are excluded. mask = aiamap.data < aiamap.max() * 0.10 @@ -63,7 +63,7 @@ ############################################################################## # Finally, we plot the smoothed bright image data, along with the estimate of # the number of distinct regions. We can see that approximately 6 distinct hot -# regions are present above the 5% of the maximum level. +# regions are present above the 10% of the maximum level. plt.figure() ax = plt.subplot(projection=aiamap) aiamap.plot()
{"golden_diff": "diff --git a/examples/map/image_bright_regions_gallery_example.py b/examples/map/image_bright_regions_gallery_example.py\n--- a/examples/map/image_bright_regions_gallery_example.py\n+++ b/examples/map/image_bright_regions_gallery_example.py\n@@ -22,7 +22,7 @@\n \n ##############################################################################\n # First we make a mask, which tells us which regions are bright. We\n-# choose the criterion that the data should be at least 5% of the maximum\n+# choose the criterion that the data should be at least 10% of the maximum\n # value. Pixels with intensity values greater than this are included in the\n # mask, while all other pixels are excluded.\n mask = aiamap.data < aiamap.max() * 0.10\n@@ -63,7 +63,7 @@\n ##############################################################################\n # Finally, we plot the smoothed bright image data, along with the estimate of\n # the number of distinct regions. We can see that approximately 6 distinct hot\n-# regions are present above the 5% of the maximum level.\n+# regions are present above the 10% of the maximum level.\n plt.figure()\n ax = plt.subplot(projection=aiamap)\n aiamap.plot()\n", "issue": "Error in documentation for \"Finding bright regions with ndimage\" example.\n<!-- This comments are hidden when you submit the issue so you do not need to remove them!\r\nPlease be sure to check out our contributing guidelines: https://github.com/sunpy/sunpy/blob/master/CONTRIBUTING.rst\r\nPlease be sure to check out our code of conduct:\r\nhttps://github.com/sunpy/sunpy/blob/master/CODE_OF_CONDUCT.rst -->\r\n\r\n<!-- Please have a search on our GitHub repository to see if a similar issue has already been posted.\r\nIf a similar issue is closed, have a quick look to see if you are satisfied by the resolution.\r\nIf not please go ahead and open an issue! -->\r\n\r\n### Description\r\n<!-- Provide a general description of the bug. -->\r\nThere seems to be an error in the documentation for the \"Finding bright regions with ndimage\" example.\r\n\r\nIn the part where a mask is made, the surrounding text states: \" We choose the criterion that the data should be at least 5% of the maximum value. \" However, if you look at the code immediately below, the threshold is based off 10% the max value:\r\n`mask = aiamap.data < aiamap.max() * 0.10`\r\n\r\n### Expected behavior\r\n<!-- What did you expect to happen. -->\r\n\r\nDocumentation needs to be modified to reflect that the threshold is based off a 10% threshold. \n", "before_files": [{"content": "# coding: utf-8\n\"\"\"\n===================================\nFinding bright regions with ndimage\n===================================\n\nHow you can to find the brightest regions in an AIA image and\ncount the approximate number of regions of interest using ndimage.\n\"\"\"\n# sphinx_gallery_thumbnail_number = 2\n\nfrom scipy import ndimage\nimport matplotlib.pyplot as plt\n\nimport sunpy.map\nfrom sunpy.data.sample import AIA_193_IMAGE\n\n###############################################################################\n# We start with the sample data\naiamap_mask = sunpy.map.Map(AIA_193_IMAGE)\naiamap = sunpy.map.Map(AIA_193_IMAGE)\n\n##############################################################################\n# First we make a mask, which tells us which regions are bright. We\n# choose the criterion that the data should be at least 5% of the maximum\n# value. 
Pixels with intensity values greater than this are included in the\n# mask, while all other pixels are excluded.\nmask = aiamap.data < aiamap.max() * 0.10\n\n##############################################################################\n# Mask is a `boolean` array. It can be used to modify the original map object\n# without modifying the data. Once this mask attribute is set, we can plot the\n# image again.\naiamap_mask.mask = mask\nplt.figure()\naiamap.plot()\nplt.colorbar()\nplt.show()\n\n##############################################################################\n# Only the brightest pixels remain in the image.\n# However, these areas are artificially broken up into small regions.\n# We can solve this by applying some smoothing to the image data.\n# Here we apply a 2D Gaussian smoothing function to the data.\ndata2 = ndimage.gaussian_filter(aiamap.data * ~mask, 14)\n\n##############################################################################\n# The issue with the filtering is that it create pixels where the values are\n# small (<100), so when we go on later to label this array,\n# we get one large region which encompasses the entire array.\n# If you want to see, just remove this line.\ndata2[data2 < 100] = 0\n\n##############################################################################\n# Now we will make a second SunPy map with this smoothed data.\naiamap2 = sunpy.map.Map(data2, aiamap.meta)\n\n##############################################################################\n# The function `label` from the `scipy.ndimage` module, counts the number of\n# contiguous regions in an image.\nlabels, n = ndimage.label(aiamap2.data)\n\n##############################################################################\n# Finally, we plot the smoothed bright image data, along with the estimate of\n# the number of distinct regions. We can see that approximately 6 distinct hot\n# regions are present above the 5% of the maximum level.\nplt.figure()\nax = plt.subplot(projection=aiamap)\naiamap.plot()\nplt.contour(labels)\nplt.figtext(0.3, 0.2, f'Number of regions = {n}', color='white')\nplt.show()\n", "path": "examples/map/image_bright_regions_gallery_example.py"}]}
1,592
260
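Aside on the row above: the sunpy fix only corrects the stated percentage in the prose; the masking-and-labelling recipe itself is unchanged. A self-contained rerun of that recipe on synthetic data (a faint random background plus two injected bright patches in place of the AIA sample):

```python
import numpy as np
from scipy import ndimage

rng = np.random.default_rng(0)
data = rng.random((128, 128)) * 0.5      # faint background
data[30:40, 30:40] += 10.0               # two artificial bright regions
data[80:90, 90:100] += 10.0

mask = data < data.max() * 0.10          # the 10% criterion the docs now state
smoothed = ndimage.gaussian_filter(data * ~mask, 4)
smoothed[smoothed < 1] = 0               # drop filter tails, like the example
labels, n = ndimage.label(smoothed)
print(f"distinct bright regions: {n}")   # expect 2 for this synthetic field
```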
gh_patches_debug_7546
rasdani/github-patches
git_diff
akvo__akvo-rsr-1594
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Add content_owner to organisation REST API filters </issue> <code> [start of akvo/rest/views/organisation.py] 1 # -*- coding: utf-8 -*- 2 3 # Akvo RSR is covered by the GNU Affero General Public License. 4 # See more details in the license.txt file located at the root folder of the Akvo RSR module. 5 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >. 6 7 from django.conf import settings 8 9 from rest_framework.compat import etree, six 10 from rest_framework.exceptions import ParseError 11 from rest_framework.parsers import XMLParser, JSONParser 12 13 from akvo.rsr.models import Organisation, Country 14 15 from ..serializers import OrganisationSerializer 16 from ..viewsets import BaseRSRViewSet 17 18 19 class AkvoOrganisationParser(XMLParser): 20 def parse(self, stream, media_type=None, parser_context=None): 21 assert etree, 'XMLParser requires defusedxml to be installed' 22 23 parser_context = parser_context or {} 24 encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) 25 parser = etree.DefusedXMLParser(encoding=encoding) 26 try: 27 tree = etree.parse(stream, parser=parser, forbid_dtd=True) 28 except (etree.ParseError, ValueError) as exc: 29 raise ParseError('XML parse error - %s' % six.text_type(exc)) 30 return self.organisation_data_from_etree(tree.getroot()) 31 32 def organisation_data_from_etree(self, tree): 33 def find_text(tree, str): 34 element = tree.find(str) 35 if element is None: 36 return '' 37 return element.text.strip() if element.text else "" 38 39 def location_data(location_tree): 40 if location_tree is None: 41 return [] 42 iso_code = find_text(location_tree, 'iso_code').lower() 43 country, created = Country.objects.get_or_create(**Country.fields_from_iso_code(iso_code)) 44 country = country.id 45 latitude = find_text(location_tree, 'latitude') or 0 46 longitude = find_text(location_tree, 'longitude') or 0 47 primary = True 48 return [dict(latitude=latitude, longitude=longitude, country=country, primary=primary)] 49 50 #id = find_text(tree, 'org_id') 51 long_name = find_text(tree, 'name') 52 name = long_name[:25] 53 description = find_text(tree, 'description') 54 url = find_text(tree, 'url') 55 iati_type = find_text(tree, 'iati_organisation_type') 56 new_organisation_type = int(iati_type) if iati_type else 22 57 organisation_type = Organisation.org_type_from_iati_type(new_organisation_type) 58 locations = location_data(tree.find('location/object')) 59 return dict( 60 name=name, long_name=long_name, description=description, url=url, 61 organisation_type=organisation_type, new_organisation_type=new_organisation_type, 62 locations=locations 63 ) 64 65 66 class OrganisationViewSet(BaseRSRViewSet): 67 """ 68 API endpoint that allows organisations to be viewed or edited. 
69 """ 70 queryset = Organisation.objects.all() 71 serializer_class = OrganisationSerializer 72 parser_classes = (AkvoOrganisationParser, JSONParser,) 73 filter_fields = ('name', 'long_name', 'iati_org_id', ) 74 75 def get_queryset(self): 76 """ Enable filtering of Organisations on iati_org_id or name 77 """ 78 queryset = super(OrganisationViewSet, self).get_queryset() 79 pk = self.request.QUERY_PARAMS.get('id', None) 80 if pk is not None: 81 try: 82 queryset = queryset.filter(pk=pk) 83 except ValueError: 84 pass 85 iati_org_id = self.request.QUERY_PARAMS.get('iati_org_id', None) 86 if iati_org_id is not None: 87 queryset = queryset.filter(iati_org_id=iati_org_id) 88 name = self.request.QUERY_PARAMS.get('name', None) 89 if name is not None: 90 queryset = queryset.filter(name=name) 91 long_name = self.request.QUERY_PARAMS.get('long_name', None) 92 if long_name is not None: 93 queryset = queryset.filter(long_name=long_name) 94 return queryset 95 [end of akvo/rest/views/organisation.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/akvo/rest/views/organisation.py b/akvo/rest/views/organisation.py --- a/akvo/rest/views/organisation.py +++ b/akvo/rest/views/organisation.py @@ -70,7 +70,7 @@ queryset = Organisation.objects.all() serializer_class = OrganisationSerializer parser_classes = (AkvoOrganisationParser, JSONParser,) - filter_fields = ('name', 'long_name', 'iati_org_id', ) + filter_fields = ('name', 'long_name', 'iati_org_id', 'content_owner') def get_queryset(self): """ Enable filtering of Organisations on iati_org_id or name
{"golden_diff": "diff --git a/akvo/rest/views/organisation.py b/akvo/rest/views/organisation.py\n--- a/akvo/rest/views/organisation.py\n+++ b/akvo/rest/views/organisation.py\n@@ -70,7 +70,7 @@\n queryset = Organisation.objects.all()\n serializer_class = OrganisationSerializer\n parser_classes = (AkvoOrganisationParser, JSONParser,)\n- filter_fields = ('name', 'long_name', 'iati_org_id', )\n+ filter_fields = ('name', 'long_name', 'iati_org_id', 'content_owner')\n \n def get_queryset(self):\n \"\"\" Enable filtering of Organisations on iati_org_id or name\n", "issue": "Add content_owner to organisation REST API filters\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\nfrom django.conf import settings\n\nfrom rest_framework.compat import etree, six\nfrom rest_framework.exceptions import ParseError\nfrom rest_framework.parsers import XMLParser, JSONParser\n\nfrom akvo.rsr.models import Organisation, Country\n\nfrom ..serializers import OrganisationSerializer\nfrom ..viewsets import BaseRSRViewSet\n\n\nclass AkvoOrganisationParser(XMLParser):\n def parse(self, stream, media_type=None, parser_context=None):\n assert etree, 'XMLParser requires defusedxml to be installed'\n\n parser_context = parser_context or {}\n encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)\n parser = etree.DefusedXMLParser(encoding=encoding)\n try:\n tree = etree.parse(stream, parser=parser, forbid_dtd=True)\n except (etree.ParseError, ValueError) as exc:\n raise ParseError('XML parse error - %s' % six.text_type(exc))\n return self.organisation_data_from_etree(tree.getroot())\n\n def organisation_data_from_etree(self, tree):\n def find_text(tree, str):\n element = tree.find(str)\n if element is None:\n return ''\n return element.text.strip() if element.text else \"\"\n\n def location_data(location_tree):\n if location_tree is None:\n return []\n iso_code = find_text(location_tree, 'iso_code').lower()\n country, created = Country.objects.get_or_create(**Country.fields_from_iso_code(iso_code))\n country = country.id\n latitude = find_text(location_tree, 'latitude') or 0\n longitude = find_text(location_tree, 'longitude') or 0\n primary = True\n return [dict(latitude=latitude, longitude=longitude, country=country, primary=primary)]\n\n #id = find_text(tree, 'org_id')\n long_name = find_text(tree, 'name')\n name = long_name[:25]\n description = find_text(tree, 'description')\n url = find_text(tree, 'url')\n iati_type = find_text(tree, 'iati_organisation_type')\n new_organisation_type = int(iati_type) if iati_type else 22\n organisation_type = Organisation.org_type_from_iati_type(new_organisation_type)\n locations = location_data(tree.find('location/object'))\n return dict(\n name=name, long_name=long_name, description=description, url=url,\n organisation_type=organisation_type, new_organisation_type=new_organisation_type,\n locations=locations\n )\n\n\nclass OrganisationViewSet(BaseRSRViewSet):\n \"\"\"\n API endpoint that allows organisations to be viewed or edited.\n \"\"\"\n queryset = Organisation.objects.all()\n serializer_class = OrganisationSerializer\n parser_classes = (AkvoOrganisationParser, JSONParser,)\n filter_fields = ('name', 'long_name', 'iati_org_id', )\n\n def get_queryset(self):\n \"\"\" Enable filtering of Organisations 
on iati_org_id or name\n \"\"\"\n queryset = super(OrganisationViewSet, self).get_queryset()\n pk = self.request.QUERY_PARAMS.get('id', None)\n if pk is not None:\n try:\n queryset = queryset.filter(pk=pk)\n except ValueError:\n pass\n iati_org_id = self.request.QUERY_PARAMS.get('iati_org_id', None)\n if iati_org_id is not None:\n queryset = queryset.filter(iati_org_id=iati_org_id)\n name = self.request.QUERY_PARAMS.get('name', None)\n if name is not None:\n queryset = queryset.filter(name=name)\n long_name = self.request.QUERY_PARAMS.get('long_name', None)\n if long_name is not None:\n queryset = queryset.filter(long_name=long_name)\n return queryset\n", "path": "akvo/rest/views/organisation.py"}]}
1,585
148
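Aside on the row above: `filter_fields` in this (older) Django REST Framework setup is essentially a whitelist of query parameters that may become queryset `.filter()` kwargs. A plain-Python sketch of that whitelisting idea, with dicts standing in for Organisation rows and made-up values throughout:

```python
FILTER_FIELDS = ("name", "long_name", "iati_org_id", "content_owner")

ORGS = [
    {"name": "Akvo", "long_name": "Akvo Foundation",
     "iati_org_id": "NL-1", "content_owner": 1},
    {"name": "Example", "long_name": "Example Org",
     "iati_org_id": "NL-2", "content_owner": 2},
]


def filter_orgs(query_params: dict) -> list:
    """Apply only whitelisted params; others are ignored, as DRF does."""
    allowed = {k: v for k, v in query_params.items() if k in FILTER_FIELDS}
    return [o for o in ORGS
            if all(str(o.get(k)) == str(v) for k, v in allowed.items())]


print(filter_orgs({"content_owner": "1"}))   # enabled by the one-line patch
print(filter_orgs({"unknown": "x"}))         # ignored -> returns every org
```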
gh_patches_debug_13060
rasdani/github-patches
git_diff
ytdl-org__youtube-dl-3089
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> AttributeError in ard module With the command given below, I get the error message below. I'm using version 2014.06.09. `youtube-dl http://www.ardmediathek.de/tv/Klassiker-der-Weltliteratur/Max-Frisch/BR-alpha/Video\?documentId\=19067308\&bcastId\=14913194` ``` [ARD] 19067308: Downloading webpage [ARD] 19067308: Downloading JSON metadata Traceback (most recent call last): File "/usr/bin/youtube-dl", line 9, in <module> load_entry_point('youtube-dl==2014.06.09', 'console_scripts', 'youtube-dl')() File "/usr/lib/python3.4/site-packages/youtube_dl/__init__.py", line 853, in main _real_main(argv) File "/usr/lib/python3.4/site-packages/youtube_dl/__init__.py", line 843, in _real_main retcode = ydl.download(all_urls) File "/usr/lib/python3.4/site-packages/youtube_dl/YoutubeDL.py", line 1050, in download self.extract_info(url) File "/usr/lib/python3.4/site-packages/youtube_dl/YoutubeDL.py", line 516, in extract_info ie_result = ie.extract(url) File "/usr/lib/python3.4/site-packages/youtube_dl/extractor/common.py", line 168, in extract return self._real_extract(url) File "/usr/lib/python3.4/site-packages/youtube_dl/extractor/ard.py", line 66, in _real_extract determine_ext(format['url']), format['quality']) File "/usr/lib/python3.4/site-packages/youtube_dl/utils.py", line 845, in determine_ext guess = url.partition(u'?')[0].rpartition(u'.')[2] AttributeError: 'list' object has no attribute 'partition' ``` </issue> <code> [start of youtube_dl/extractor/ard.py] 1 # coding: utf-8 2 from __future__ import unicode_literals 3 4 import re 5 6 from .common import InfoExtractor 7 from ..utils import ( 8 determine_ext, 9 ExtractorError, 10 ) 11 12 13 class ARDIE(InfoExtractor): 14 _VALID_URL = r'^https?://(?:(?:www\.)?ardmediathek\.de|mediathek\.daserste\.de)/(?:.*/)(?P<video_id>[^/\?]+)(?:\?.*)?' 15 16 _TEST = { 17 'url': 'http://www.ardmediathek.de/das-erste/guenther-jauch/edward-snowden-im-interview-held-oder-verraeter?documentId=19288786', 18 'file': '19288786.mp4', 19 'md5': '515bf47ce209fb3f5a61b7aad364634c', 20 'info_dict': { 21 'title': 'Edward Snowden im Interview - Held oder Verräter?', 22 'description': 'Edward Snowden hat alles aufs Spiel gesetzt, um die weltweite \xdcberwachung durch die Geheimdienste zu enttarnen. Nun stellt sich der ehemalige NSA-Mitarbeiter erstmals weltweit in einem TV-Interview den Fragen eines NDR-Journalisten. 
Die Sendung vom Sonntagabend.', 23 'thumbnail': 'http://www.ardmediathek.de/ard/servlet/contentblob/19/28/87/90/19288790/bild/2250037', 24 }, 25 'skip': 'Blocked outside of Germany', 26 } 27 28 def _real_extract(self, url): 29 # determine video id from url 30 m = re.match(self._VALID_URL, url) 31 32 numid = re.search(r'documentId=([0-9]+)', url) 33 if numid: 34 video_id = numid.group(1) 35 else: 36 video_id = m.group('video_id') 37 38 webpage = self._download_webpage(url, video_id) 39 40 title = self._html_search_regex( 41 [r'<h1(?:\s+class="boxTopHeadline")?>(.*?)</h1>', 42 r'<meta name="dcterms.title" content="(.*?)"/>', 43 r'<h4 class="headline">(.*?)</h4>'], 44 webpage, 'title') 45 description = self._html_search_meta( 46 'dcterms.abstract', webpage, 'description') 47 thumbnail = self._og_search_thumbnail(webpage) 48 49 50 media_info = self._download_json( 51 'http://www.ardmediathek.de/play/media/%s' % video_id, video_id) 52 # The second element of the _mediaArray contains the standard http urls 53 streams = media_info['_mediaArray'][1]['_mediaStreamArray'] 54 if not streams: 55 if '"fsk"' in webpage: 56 raise ExtractorError('This video is only available after 20:00') 57 58 formats = [] 59 for s in streams: 60 format = { 61 'quality': s['_quality'], 62 'url': s['_stream'], 63 } 64 65 format['format_id'] = '%s-%s' % ( 66 determine_ext(format['url']), format['quality']) 67 68 formats.append(format) 69 70 self._sort_formats(formats) 71 72 return { 73 'id': video_id, 74 'title': title, 75 'description': description, 76 'formats': formats, 77 'thumbnail': thumbnail, 78 } 79 [end of youtube_dl/extractor/ard.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/youtube_dl/extractor/ard.py b/youtube_dl/extractor/ard.py --- a/youtube_dl/extractor/ard.py +++ b/youtube_dl/extractor/ard.py @@ -56,7 +56,18 @@ raise ExtractorError('This video is only available after 20:00') formats = [] + for s in streams: + if type(s['_stream']) == list: + for index, url in enumerate(s['_stream'][::-1]): + quality = s['_quality'] + index + formats.append({ + 'quality': quality, + 'url': url, + 'format_id': '%s-%s' % (determine_ext(url), quality) + }) + continue + format = { 'quality': s['_quality'], 'url': s['_stream'],
{"golden_diff": "diff --git a/youtube_dl/extractor/ard.py b/youtube_dl/extractor/ard.py\n--- a/youtube_dl/extractor/ard.py\n+++ b/youtube_dl/extractor/ard.py\n@@ -56,7 +56,18 @@\n raise ExtractorError('This video is only available after 20:00')\n \n formats = []\n+\n for s in streams:\n+ if type(s['_stream']) == list:\n+ for index, url in enumerate(s['_stream'][::-1]):\n+ quality = s['_quality'] + index\n+ formats.append({\n+ 'quality': quality,\n+ 'url': url,\n+ 'format_id': '%s-%s' % (determine_ext(url), quality)\n+ })\n+ continue\n+\n format = {\n 'quality': s['_quality'],\n 'url': s['_stream'],\n", "issue": "AttributeError in ard module\nWith the command given below, I get the error message below. I'm using version 2014.06.09.\n\n`youtube-dl http://www.ardmediathek.de/tv/Klassiker-der-Weltliteratur/Max-Frisch/BR-alpha/Video\\?documentId\\=19067308\\&bcastId\\=14913194`\n\n```\n[ARD] 19067308: Downloading webpage\n[ARD] 19067308: Downloading JSON metadata\nTraceback (most recent call last):\n File \"/usr/bin/youtube-dl\", line 9, in <module>\n load_entry_point('youtube-dl==2014.06.09', 'console_scripts', 'youtube-dl')()\n File \"/usr/lib/python3.4/site-packages/youtube_dl/__init__.py\", line 853, in main\n _real_main(argv)\n File \"/usr/lib/python3.4/site-packages/youtube_dl/__init__.py\", line 843, in _real_main\n retcode = ydl.download(all_urls)\n File \"/usr/lib/python3.4/site-packages/youtube_dl/YoutubeDL.py\", line 1050, in download\n self.extract_info(url)\n File \"/usr/lib/python3.4/site-packages/youtube_dl/YoutubeDL.py\", line 516, in extract_info\n ie_result = ie.extract(url)\n File \"/usr/lib/python3.4/site-packages/youtube_dl/extractor/common.py\", line 168, in extract\n return self._real_extract(url)\n File \"/usr/lib/python3.4/site-packages/youtube_dl/extractor/ard.py\", line 66, in _real_extract\n determine_ext(format['url']), format['quality'])\n File \"/usr/lib/python3.4/site-packages/youtube_dl/utils.py\", line 845, in determine_ext\n guess = url.partition(u'?')[0].rpartition(u'.')[2]\nAttributeError: 'list' object has no attribute 'partition'\n```\n\n", "before_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nimport re\n\nfrom .common import InfoExtractor\nfrom ..utils import (\n determine_ext,\n ExtractorError,\n)\n\n\nclass ARDIE(InfoExtractor):\n _VALID_URL = r'^https?://(?:(?:www\\.)?ardmediathek\\.de|mediathek\\.daserste\\.de)/(?:.*/)(?P<video_id>[^/\\?]+)(?:\\?.*)?'\n\n _TEST = {\n 'url': 'http://www.ardmediathek.de/das-erste/guenther-jauch/edward-snowden-im-interview-held-oder-verraeter?documentId=19288786',\n 'file': '19288786.mp4',\n 'md5': '515bf47ce209fb3f5a61b7aad364634c',\n 'info_dict': {\n 'title': 'Edward Snowden im Interview - Held oder Verr\u00e4ter?',\n 'description': 'Edward Snowden hat alles aufs Spiel gesetzt, um die weltweite \\xdcberwachung durch die Geheimdienste zu enttarnen. Nun stellt sich der ehemalige NSA-Mitarbeiter erstmals weltweit in einem TV-Interview den Fragen eines NDR-Journalisten. 
Die Sendung vom Sonntagabend.',\n 'thumbnail': 'http://www.ardmediathek.de/ard/servlet/contentblob/19/28/87/90/19288790/bild/2250037',\n },\n 'skip': 'Blocked outside of Germany',\n }\n\n def _real_extract(self, url):\n # determine video id from url\n m = re.match(self._VALID_URL, url)\n\n numid = re.search(r'documentId=([0-9]+)', url)\n if numid:\n video_id = numid.group(1)\n else:\n video_id = m.group('video_id')\n\n webpage = self._download_webpage(url, video_id)\n\n title = self._html_search_regex(\n [r'<h1(?:\\s+class=\"boxTopHeadline\")?>(.*?)</h1>',\n r'<meta name=\"dcterms.title\" content=\"(.*?)\"/>',\n r'<h4 class=\"headline\">(.*?)</h4>'],\n webpage, 'title')\n description = self._html_search_meta(\n 'dcterms.abstract', webpage, 'description')\n thumbnail = self._og_search_thumbnail(webpage)\n\n\n media_info = self._download_json(\n 'http://www.ardmediathek.de/play/media/%s' % video_id, video_id)\n # The second element of the _mediaArray contains the standard http urls\n streams = media_info['_mediaArray'][1]['_mediaStreamArray']\n if not streams:\n if '\"fsk\"' in webpage:\n raise ExtractorError('This video is only available after 20:00')\n\n formats = []\n for s in streams:\n format = {\n 'quality': s['_quality'],\n 'url': s['_stream'],\n }\n\n format['format_id'] = '%s-%s' % (\n determine_ext(format['url']), format['quality'])\n\n formats.append(format)\n\n self._sort_formats(formats)\n\n return {\n 'id': video_id,\n 'title': title,\n 'description': description,\n 'formats': formats,\n 'thumbnail': thumbnail,\n }\n", "path": "youtube_dl/extractor/ard.py"}]}
1,955
197
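
The record above centers on one Python pattern: a field that may hold either a single URL string or a list of URLs. A minimal standalone sketch of that normalization follows; the stream dicts are simplified, assumed stand-ins for the ARD API payload, not the extractor's real data.

```python
# Hedged sketch of the list-vs-string handling from the ard.py fix above.
def build_formats(streams):
    formats = []
    for s in streams:
        stream = s["_stream"]
        if isinstance(stream, list):
            # Reversed so the best variant receives the highest quality index.
            for index, url in enumerate(stream[::-1]):
                formats.append({"quality": s["_quality"] + index, "url": url})
            continue
        formats.append({"quality": s["_quality"], "url": stream})
    return formats


print(build_formats([{"_quality": 1,
                      "_stream": ["http://example.invalid/lo.mp4",
                                  "http://example.invalid/hi.mp4"]}]))
```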
gh_patches_debug_59532
rasdani/github-patches
git_diff
mit-ll-responsible-ai__hydra-zen-97
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> PEP 561 compatibility Hi, Would it be possible to make hydra-zen compliant with [PEP 561](https://www.python.org/dev/peps/pep-0561) by distributing a `py.typed` file with the package? Currently I'm getting `Skipping analyzing "hydra_zen": found module but no type hints or library stubs` when I run mypy on a test file. Here are steps to reproduce this error: ```text $ pip install hydra-zen mypy ... Successfully installed PyYAML-5.4.1 antlr4-python3-runtime-4.8 hydra-core-1.1.1 hydra-zen-0.2.0 mypy-0.910 mypy-extensions-0.4.3 omegaconf-2.1.1 toml-0.10.2 typing-extensions-3.10.0.2 ... $ echo "from hydra_zen import builds" > tmp.py $ mypy tmp.py tmp.py:1: error: Skipping analyzing "hydra_zen": found module but no type hints or library stubs tmp.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports Found 1 error in 1 file (checked 1 source file) ``` I believe that adding an empty `py.typed` file to the `src/hydra_zen` directory (and modifying `setup.py` so that the `py.typed` file is distributed with the `hydra-zen` package) would make it possible for type checkers following PEP 561 to discover the type hints in `src`. (I'd be happy to submit a PR to this effect.) </issue> <code> [start of setup.py] 1 # Copyright (c) 2021 Massachusetts Institute of Technology 2 # SPDX-License-Identifier: MIT 3 4 from setuptools import find_packages, setup 5 6 import versioneer 7 8 DISTNAME = "hydra_zen" 9 LICENSE = "MIT" 10 AUTHOR = "Justin Goodwin, Ryan Soklaski" 11 AUTHOR_EMAIL = "[email protected]" 12 URL = "https://github.com/mit-ll-responsible-ai/hydra_zen" 13 CLASSIFIERS = [ 14 "Development Status :: 4 - Beta", 15 "License :: OSI Approved :: MIT License", 16 "Operating System :: OS Independent", 17 "Intended Audience :: Science/Research", 18 "Programming Language :: Python :: 3.6", 19 "Programming Language :: Python :: 3.7", 20 "Programming Language :: Python :: 3.8", 21 "Programming Language :: Python :: 3.9", 22 "Topic :: Scientific/Engineering", 23 ] 24 KEYWORDS = "machine learning research configuration scalable reproducible" 25 INSTALL_REQUIRES = [ 26 "hydra-core >= 1.1.0", 27 "typing-extensions >= 3.7.4.1", 28 ] 29 TESTS_REQUIRE = [ 30 "pytest >= 3.8", 31 "hypothesis >= 5.32.0", 32 ] 33 34 DESCRIPTION = "Utilities for making hydra scale to ML workflows" 35 LONG_DESCRIPTION = """ 36 hydra-zen helps you configure your project using the power of Hydra, while enjoying the Zen of Python! 37 38 hydra-zen eliminates the boilerplate code that you write to configure, orchestrate, and organize the results of large-scale projects, such as machine learning experiments. It does so by providing Hydra-compatible tools that dynamically generate "structured configurations" of your code, and enables Python-centric workflows for running configured instances of your code. 39 40 hydra-zen offers: 41 42 - Functions for automatically and dynamically generating structured configs that can be used to fully or partially instantiate objects in your application. 43 - The ability to launch Hydra jobs, complete with parameter sweeps and multi-run configurations, from within a notebook or any other Python environment. 44 - Incisive type annotations that provide enriched context about your project's configurations to IDEs, type checkers, and other tooling. 45 - Runtime validation of configurations to catch mistakes before your application launches. 
46 - Equal support for both object-oriented libraries (e.g., torch.nn) and functional ones (e.g., jax and numpy). 47 48 These functions and capabilities can be used to great effect alongside PyTorch Lightning to design boilerplate-free machine learning projects! 49 """ 50 51 52 setup( 53 name=DISTNAME, 54 version=versioneer.get_version(), 55 cmdclass=versioneer.get_cmdclass(), 56 license=LICENSE, 57 author=AUTHOR, 58 author_email=AUTHOR_EMAIL, 59 classifiers=CLASSIFIERS, 60 keywords=KEYWORDS, 61 description=DESCRIPTION, 62 long_description=LONG_DESCRIPTION, 63 install_requires=INSTALL_REQUIRES, 64 tests_require=TESTS_REQUIRE, 65 url=URL, 66 download_url="https://github.com/mit-ll-responsible-ai/hydra-zen/tarball/" 67 + versioneer.get_version(), 68 python_requires=">=3.6", 69 packages=find_packages(where="src", exclude=["tests", "tests.*"]), 70 package_dir={"": "src"}, 71 ) 72 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -68,4 +68,5 @@ python_requires=">=3.6", packages=find_packages(where="src", exclude=["tests", "tests.*"]), package_dir={"": "src"}, + package_data={"hydra_zen": ["py.typed"]} )
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -68,4 +68,5 @@\n python_requires=\">=3.6\",\n packages=find_packages(where=\"src\", exclude=[\"tests\", \"tests.*\"]),\n package_dir={\"\": \"src\"},\n+ package_data={\"hydra_zen\": [\"py.typed\"]}\n )\n", "issue": "PEP 561 compatibility\nHi,\r\n\r\nWould it be possible to make hydra-zen compliant with [PEP 561](https://www.python.org/dev/peps/pep-0561) by distributing a `py.typed` file with the package?\r\n\r\nCurrently I'm getting `Skipping analyzing \"hydra_zen\": found module but no type hints or library stubs` when I run mypy on a test file. Here are steps to reproduce this error:\r\n```text\r\n$ pip install hydra-zen mypy\r\n...\r\nSuccessfully installed PyYAML-5.4.1 antlr4-python3-runtime-4.8 hydra-core-1.1.1 hydra-zen-0.2.0 mypy-0.910 mypy-extensions-0.4.3 omegaconf-2.1.1 toml-0.10.2 typing-extensions-3.10.0.2\r\n...\r\n$ echo \"from hydra_zen import builds\" > tmp.py\r\n$ mypy tmp.py\r\ntmp.py:1: error: Skipping analyzing \"hydra_zen\": found module but no type hints or library stubs\r\ntmp.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports\r\nFound 1 error in 1 file (checked 1 source file)\r\n```\r\n\r\nI believe that adding an empty `py.typed` file to the `src/hydra_zen` directory (and modifying `setup.py` so that the `py.typed` file is distributed with the `hydra-zen` package) would make it possible for type checkers following PEP 561 to discover the type hints in `src`.\r\n(I'd be happy to submit a PR to this effect.)\n", "before_files": [{"content": "# Copyright (c) 2021 Massachusetts Institute of Technology\n# SPDX-License-Identifier: MIT\n\nfrom setuptools import find_packages, setup\n\nimport versioneer\n\nDISTNAME = \"hydra_zen\"\nLICENSE = \"MIT\"\nAUTHOR = \"Justin Goodwin, Ryan Soklaski\"\nAUTHOR_EMAIL = \"[email protected]\"\nURL = \"https://github.com/mit-ll-responsible-ai/hydra_zen\"\nCLASSIFIERS = [\n \"Development Status :: 4 - Beta\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Intended Audience :: Science/Research\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Topic :: Scientific/Engineering\",\n]\nKEYWORDS = \"machine learning research configuration scalable reproducible\"\nINSTALL_REQUIRES = [\n \"hydra-core >= 1.1.0\",\n \"typing-extensions >= 3.7.4.1\",\n]\nTESTS_REQUIRE = [\n \"pytest >= 3.8\",\n \"hypothesis >= 5.32.0\",\n]\n\nDESCRIPTION = \"Utilities for making hydra scale to ML workflows\"\nLONG_DESCRIPTION = \"\"\"\nhydra-zen helps you configure your project using the power of Hydra, while enjoying the Zen of Python!\n\nhydra-zen eliminates the boilerplate code that you write to configure, orchestrate, and organize the results of large-scale projects, such as machine learning experiments. 
It does so by providing Hydra-compatible tools that dynamically generate \"structured configurations\" of your code, and enables Python-centric workflows for running configured instances of your code.\n\nhydra-zen offers:\n\n - Functions for automatically and dynamically generating structured configs that can be used to fully or partially instantiate objects in your application.\n - The ability to launch Hydra jobs, complete with parameter sweeps and multi-run configurations, from within a notebook or any other Python environment.\n - Incisive type annotations that provide enriched context about your project's configurations to IDEs, type checkers, and other tooling.\n - Runtime validation of configurations to catch mistakes before your application launches.\n - Equal support for both object-oriented libraries (e.g., torch.nn) and functional ones (e.g., jax and numpy).\n\nThese functions and capabilities can be used to great effect alongside PyTorch Lightning to design boilerplate-free machine learning projects!\n\"\"\"\n\n\nsetup(\n name=DISTNAME,\n version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(),\n license=LICENSE,\n author=AUTHOR,\n author_email=AUTHOR_EMAIL,\n classifiers=CLASSIFIERS,\n keywords=KEYWORDS,\n description=DESCRIPTION,\n long_description=LONG_DESCRIPTION,\n install_requires=INSTALL_REQUIRES,\n tests_require=TESTS_REQUIRE,\n url=URL,\n download_url=\"https://github.com/mit-ll-responsible-ai/hydra-zen/tarball/\"\n + versioneer.get_version(),\n python_requires=\">=3.6\",\n packages=find_packages(where=\"src\", exclude=[\"tests\", \"tests.*\"]),\n package_dir={\"\": \"src\"},\n)\n", "path": "setup.py"}]}
1,747
82
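
The golden diff above boils down to shipping a PEP 561 marker. A minimal sketch of the same idea for a hypothetical src-layout package; the names are placeholders, not hydra-zen's real metadata.

```python
# Minimal PEP 561 sketch: an empty src/example_pkg/py.typed marker file plus
# package_data so setuptools installs it alongside the code.
from setuptools import find_packages, setup

setup(
    name="example-pkg",  # hypothetical distribution name
    packages=find_packages(where="src"),
    package_dir={"": "src"},
    package_data={"example_pkg": ["py.typed"]},  # ship the marker
    zip_safe=False,  # keeps installed files readable by type checkers
)
```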
gh_patches_debug_29551
rasdani/github-patches
git_diff
doccano__doccano-1770
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Wrong progress in collaborative annotation ('Share annotations across all users') How to reproduce the behaviour --------- Progress is shown as individual progress instead of total progress when 'Share annotations across all users' is ticked in project setting. Your Environment --------- <!-- Include details of your environment.--> * Operating System: wsl2+ubuntu20.04 * Python Version Used: 3.8 * When you install doccano: 20220403 * How did you install doccano (Heroku button etc): source </issue> <code> [start of backend/metrics/views.py] 1 import abc 2 3 from rest_framework import status 4 from rest_framework.permissions import IsAuthenticated 5 from rest_framework.response import Response 6 from rest_framework.views import APIView 7 8 from examples.models import Example, ExampleState 9 from label_types.models import CategoryType, LabelType, RelationType, SpanType 10 from labels.models import Category, Label, Relation, Span 11 from projects.models import Member 12 from projects.permissions import IsProjectAdmin, IsProjectStaffAndReadOnly 13 14 15 class ProgressAPI(APIView): 16 permission_classes = [IsAuthenticated & (IsProjectAdmin | IsProjectStaffAndReadOnly)] 17 18 def get(self, request, *args, **kwargs): 19 examples = Example.objects.filter(project=self.kwargs["project_id"]).values("id") 20 total = examples.count() 21 complete = ExampleState.objects.count_done(examples, user=self.request.user) 22 data = {"total": total, "remaining": total - complete, "complete": complete} 23 return Response(data=data, status=status.HTTP_200_OK) 24 25 26 class MemberProgressAPI(APIView): 27 permission_classes = [IsAuthenticated & (IsProjectAdmin | IsProjectStaffAndReadOnly)] 28 29 def get(self, request, *args, **kwargs): 30 examples = Example.objects.filter(project=self.kwargs["project_id"]).values("id") 31 members = Member.objects.filter(project=self.kwargs["project_id"]) 32 data = ExampleState.objects.measure_member_progress(examples, members) 33 return Response(data=data, status=status.HTTP_200_OK) 34 35 36 class LabelDistribution(abc.ABC, APIView): 37 permission_classes = [IsAuthenticated & (IsProjectAdmin | IsProjectStaffAndReadOnly)] 38 model = Label 39 label_type = LabelType 40 41 def get(self, request, *args, **kwargs): 42 labels = self.label_type.objects.filter(project=self.kwargs["project_id"]) 43 examples = Example.objects.filter(project=self.kwargs["project_id"]).values("id") 44 members = Member.objects.filter(project=self.kwargs["project_id"]) 45 data = self.model.objects.calc_label_distribution(examples, members, labels) 46 return Response(data=data, status=status.HTTP_200_OK) 47 48 49 class CategoryTypeDistribution(LabelDistribution): 50 model = Category 51 label_type = CategoryType 52 53 54 class SpanTypeDistribution(LabelDistribution): 55 model = Span 56 label_type = SpanType 57 58 59 class RelationTypeDistribution(LabelDistribution): 60 model = Relation 61 label_type = RelationType 62 [end of backend/metrics/views.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/backend/metrics/views.py b/backend/metrics/views.py --- a/backend/metrics/views.py +++ b/backend/metrics/views.py @@ -1,5 +1,6 @@ import abc +from django.shortcuts import get_object_or_404 from rest_framework import status from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response @@ -8,7 +9,7 @@ from examples.models import Example, ExampleState from label_types.models import CategoryType, LabelType, RelationType, SpanType from labels.models import Category, Label, Relation, Span -from projects.models import Member +from projects.models import Member, Project from projects.permissions import IsProjectAdmin, IsProjectStaffAndReadOnly @@ -18,7 +19,11 @@ def get(self, request, *args, **kwargs): examples = Example.objects.filter(project=self.kwargs["project_id"]).values("id") total = examples.count() - complete = ExampleState.objects.count_done(examples, user=self.request.user) + project = get_object_or_404(Project, pk=self.kwargs["project_id"]) + if project.collaborative_annotation: + complete = ExampleState.objects.count_done(examples) + else: + complete = ExampleState.objects.count_done(examples, user=self.request.user) data = {"total": total, "remaining": total - complete, "complete": complete} return Response(data=data, status=status.HTTP_200_OK)
{"golden_diff": "diff --git a/backend/metrics/views.py b/backend/metrics/views.py\n--- a/backend/metrics/views.py\n+++ b/backend/metrics/views.py\n@@ -1,5 +1,6 @@\n import abc\n \n+from django.shortcuts import get_object_or_404\n from rest_framework import status\n from rest_framework.permissions import IsAuthenticated\n from rest_framework.response import Response\n@@ -8,7 +9,7 @@\n from examples.models import Example, ExampleState\n from label_types.models import CategoryType, LabelType, RelationType, SpanType\n from labels.models import Category, Label, Relation, Span\n-from projects.models import Member\n+from projects.models import Member, Project\n from projects.permissions import IsProjectAdmin, IsProjectStaffAndReadOnly\n \n \n@@ -18,7 +19,11 @@\n def get(self, request, *args, **kwargs):\n examples = Example.objects.filter(project=self.kwargs[\"project_id\"]).values(\"id\")\n total = examples.count()\n- complete = ExampleState.objects.count_done(examples, user=self.request.user)\n+ project = get_object_or_404(Project, pk=self.kwargs[\"project_id\"])\n+ if project.collaborative_annotation:\n+ complete = ExampleState.objects.count_done(examples)\n+ else:\n+ complete = ExampleState.objects.count_done(examples, user=self.request.user)\n data = {\"total\": total, \"remaining\": total - complete, \"complete\": complete}\n return Response(data=data, status=status.HTTP_200_OK)\n", "issue": "Wrong progress in collaborative annotation ('Share annotations across all users')\nHow to reproduce the behaviour\r\n---------\r\nProgress is shown as individual progress instead of total progress when 'Share annotations across all users' is ticked in project setting.\r\n\r\nYour Environment\r\n---------\r\n<!-- Include details of your environment.-->\r\n* Operating System: wsl2+ubuntu20.04\r\n* Python Version Used: 3.8\r\n* When you install doccano: 20220403\r\n* How did you install doccano (Heroku button etc): source\r\n\n", "before_files": [{"content": "import abc\n\nfrom rest_framework import status\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom examples.models import Example, ExampleState\nfrom label_types.models import CategoryType, LabelType, RelationType, SpanType\nfrom labels.models import Category, Label, Relation, Span\nfrom projects.models import Member\nfrom projects.permissions import IsProjectAdmin, IsProjectStaffAndReadOnly\n\n\nclass ProgressAPI(APIView):\n permission_classes = [IsAuthenticated & (IsProjectAdmin | IsProjectStaffAndReadOnly)]\n\n def get(self, request, *args, **kwargs):\n examples = Example.objects.filter(project=self.kwargs[\"project_id\"]).values(\"id\")\n total = examples.count()\n complete = ExampleState.objects.count_done(examples, user=self.request.user)\n data = {\"total\": total, \"remaining\": total - complete, \"complete\": complete}\n return Response(data=data, status=status.HTTP_200_OK)\n\n\nclass MemberProgressAPI(APIView):\n permission_classes = [IsAuthenticated & (IsProjectAdmin | IsProjectStaffAndReadOnly)]\n\n def get(self, request, *args, **kwargs):\n examples = Example.objects.filter(project=self.kwargs[\"project_id\"]).values(\"id\")\n members = Member.objects.filter(project=self.kwargs[\"project_id\"])\n data = ExampleState.objects.measure_member_progress(examples, members)\n return Response(data=data, status=status.HTTP_200_OK)\n\n\nclass LabelDistribution(abc.ABC, APIView):\n permission_classes = [IsAuthenticated & (IsProjectAdmin | 
IsProjectStaffAndReadOnly)]\n model = Label\n label_type = LabelType\n\n def get(self, request, *args, **kwargs):\n labels = self.label_type.objects.filter(project=self.kwargs[\"project_id\"])\n examples = Example.objects.filter(project=self.kwargs[\"project_id\"]).values(\"id\")\n members = Member.objects.filter(project=self.kwargs[\"project_id\"])\n data = self.model.objects.calc_label_distribution(examples, members, labels)\n return Response(data=data, status=status.HTTP_200_OK)\n\n\nclass CategoryTypeDistribution(LabelDistribution):\n model = Category\n label_type = CategoryType\n\n\nclass SpanTypeDistribution(LabelDistribution):\n model = Span\n label_type = SpanType\n\n\nclass RelationTypeDistribution(LabelDistribution):\n model = Relation\n label_type = RelationType\n", "path": "backend/metrics/views.py"}]}
1,276
321
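
Stripped of the Django specifics, the doccano fix is a single branch on the project's collaboration flag. A framework-free sketch, with a plain callable standing in for the ORM manager and invented counts:

```python
# count_done stands in for ExampleState.objects.count_done; values are assumed.
def progress(total, count_done, user, collaborative):
    complete = count_done() if collaborative else count_done(user=user)
    return {"total": total, "remaining": total - complete, "complete": complete}


counts = {"alice": 3, "bob": 5}


def count_done(user=None):
    # All users when no user is given, mirroring the collaborative branch.
    return counts[user] if user else sum(counts.values())


print(progress(10, count_done, "alice", collaborative=True))   # complete == 8
print(progress(10, count_done, "alice", collaborative=False))  # complete == 3
```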
gh_patches_debug_807
rasdani/github-patches
git_diff
bokeh__bokeh-10106
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [BUG] `cd sphinx; make serve` doesn't work #### ALL software version info (bokeh, python, notebook, OS, browser, any other relevant packages) Bokeh 2.0.2-76-ga417746c9 #### Description of expected behavior and the observed behavior The page at https://docs.bokeh.org/en/latest/docs/dev_guide/documentation.html mentions that it's possible to run `make serve` to serve the documentation locally. But running it results in: ``` Exception in thread Thread-2: Traceback (most recent call last): File "/home/p-himik/soft/miniconda3/envs/bokeh-dev/lib/python3.7/threading.py", line 917, in _bootstrap_inner self.run() File "/home/p-himik/soft/miniconda3/envs/bokeh-dev/lib/python3.7/threading.py", line 865, in run self._target(*self._args, **self._kwargs) File "docserver.py", line 43, in open_browser webbrowser.open("http://localhost:%d/en/latest/index.html" % PORT, new="tab") File "/home/p-himik/soft/miniconda3/envs/bokeh-dev/lib/python3.7/webbrowser.py", line 78, in open if browser.open(url, new, autoraise): File "/home/p-himik/soft/miniconda3/envs/bokeh-dev/lib/python3.7/webbrowser.py", line 251, in open "expected 0, 1, or 2, got %s" % new) webbrowser.Error: Bad 'new' parameter to open(); expected 0, 1, or 2, got tab ``` Not sure where `"tab"` has come from, but it has been there forever. </issue> <code> [start of sphinx/docserver.py] 1 import os 2 import sys 3 import threading 4 import time 5 import webbrowser 6 7 import flask 8 import tornado 9 from tornado.httpserver import HTTPServer 10 from tornado.ioloop import IOLoop 11 from tornado.wsgi import WSGIContainer 12 13 _basedir = os.path.join("..", os.path.dirname(__file__)) 14 15 app = flask.Flask(__name__, static_folder="/unused") 16 PORT=5009 17 http_server = HTTPServer(WSGIContainer(app)) 18 19 @app.route('/') 20 def welcome(): 21 return """ 22 <h1>Welcome to the Bokeh documentation server</h1> 23 You probably want to go to <a href="/en/latest/index.html"> Index</a> 24 """ 25 26 @app.route('/versions.json') 27 def send_versions(): 28 return flask.send_from_directory( 29 os.path.join(_basedir, "sphinx"), "test_versions.json") 30 31 @app.route('/alert.html') 32 def send_alert(): 33 return os.environ.get("BOKEH_DOCS_ALERT", "") 34 35 @app.route('/en/latest/<path:filename>') 36 def send_docs(filename): 37 return flask.send_from_directory( 38 os.path.join(_basedir, "sphinx/build/html/"), filename) 39 40 def open_browser(): 41 # Child process 42 time.sleep(0.5) 43 webbrowser.open("http://localhost:%d/en/latest/index.html" % PORT, new="tab") 44 45 data = {} 46 47 def serve_http(): 48 data['ioloop'] = IOLoop() 49 http_server.listen(PORT) 50 IOLoop.current().start() 51 52 def shutdown_server(): 53 ioloop = data['ioloop'] 54 ioloop.add_callback(ioloop.stop) 55 print("Asked Server to shut down.") 56 57 def ui(): 58 try: 59 time.sleep(0.5) 60 input("Press <ENTER> to exit...\n") # lgtm [py/use-of-input] 61 except KeyboardInterrupt: 62 pass 63 64 if __name__ == "__main__": 65 66 if tornado.version_info[0] == 4: 67 print('docserver.py script requires tornado 5 or higher') 68 sys.exit(1) 69 70 print("\nStarting Bokeh plot server on port %d..." % PORT) 71 print("Visit http://localhost:%d/en/latest/index.html to see plots\n" % PORT) 72 73 t_server = threading.Thread(target=serve_http) 74 t_server.start() 75 t_browser = threading.Thread(target=open_browser) 76 t_browser.start() 77 78 ui() 79 80 shutdown_server() 81 t_server.join() 82 t_browser.join() 83 print("Server shut down.") 84 [end of sphinx/docserver.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/sphinx/docserver.py b/sphinx/docserver.py --- a/sphinx/docserver.py +++ b/sphinx/docserver.py @@ -40,7 +40,7 @@ def open_browser(): # Child process time.sleep(0.5) - webbrowser.open("http://localhost:%d/en/latest/index.html" % PORT, new="tab") + webbrowser.open("http://localhost:%d/en/latest/index.html" % PORT, new=2) data = {}
{"golden_diff": "diff --git a/sphinx/docserver.py b/sphinx/docserver.py\n--- a/sphinx/docserver.py\n+++ b/sphinx/docserver.py\n@@ -40,7 +40,7 @@\n def open_browser():\n # Child process\n time.sleep(0.5)\n- webbrowser.open(\"http://localhost:%d/en/latest/index.html\" % PORT, new=\"tab\")\n+ webbrowser.open(\"http://localhost:%d/en/latest/index.html\" % PORT, new=2)\n \n data = {}\n", "issue": "[BUG] `cd sphinx; make serve` doesn't work\n#### ALL software version info (bokeh, python, notebook, OS, browser, any other relevant packages)\r\nBokeh 2.0.2-76-ga417746c9\r\n\r\n#### Description of expected behavior and the observed behavior\r\nThe page at https://docs.bokeh.org/en/latest/docs/dev_guide/documentation.html mentions that it's possible to run `make serve` to serve the documentation locally. But running it results in:\r\n```\r\nException in thread Thread-2:\r\nTraceback (most recent call last):\r\n File \"/home/p-himik/soft/miniconda3/envs/bokeh-dev/lib/python3.7/threading.py\", line 917, in _bootstrap_inner\r\n self.run()\r\n File \"/home/p-himik/soft/miniconda3/envs/bokeh-dev/lib/python3.7/threading.py\", line 865, in run\r\n self._target(*self._args, **self._kwargs)\r\n File \"docserver.py\", line 43, in open_browser\r\n webbrowser.open(\"http://localhost:%d/en/latest/index.html\" % PORT, new=\"tab\")\r\n File \"/home/p-himik/soft/miniconda3/envs/bokeh-dev/lib/python3.7/webbrowser.py\", line 78, in open\r\n if browser.open(url, new, autoraise):\r\n File \"/home/p-himik/soft/miniconda3/envs/bokeh-dev/lib/python3.7/webbrowser.py\", line 251, in open\r\n \"expected 0, 1, or 2, got %s\" % new)\r\nwebbrowser.Error: Bad 'new' parameter to open(); expected 0, 1, or 2, got tab\r\n```\r\nNot sure where `\"tab\"` has come from, but it has been there forever.\n", "before_files": [{"content": "import os\nimport sys\nimport threading\nimport time\nimport webbrowser\n\nimport flask\nimport tornado\nfrom tornado.httpserver import HTTPServer\nfrom tornado.ioloop import IOLoop\nfrom tornado.wsgi import WSGIContainer\n\n_basedir = os.path.join(\"..\", os.path.dirname(__file__))\n\napp = flask.Flask(__name__, static_folder=\"/unused\")\nPORT=5009\nhttp_server = HTTPServer(WSGIContainer(app))\n\[email protected]('/')\ndef welcome():\n return \"\"\"\n <h1>Welcome to the Bokeh documentation server</h1>\n You probably want to go to <a href=\"/en/latest/index.html\"> Index</a>\n \"\"\"\n\[email protected]('/versions.json')\ndef send_versions():\n return flask.send_from_directory(\n os.path.join(_basedir, \"sphinx\"), \"test_versions.json\")\n\[email protected]('/alert.html')\ndef send_alert():\n return os.environ.get(\"BOKEH_DOCS_ALERT\", \"\")\n\[email protected]('/en/latest/<path:filename>')\ndef send_docs(filename):\n return flask.send_from_directory(\n os.path.join(_basedir, \"sphinx/build/html/\"), filename)\n\ndef open_browser():\n # Child process\n time.sleep(0.5)\n webbrowser.open(\"http://localhost:%d/en/latest/index.html\" % PORT, new=\"tab\")\n\ndata = {}\n\ndef serve_http():\n data['ioloop'] = IOLoop()\n http_server.listen(PORT)\n IOLoop.current().start()\n\ndef shutdown_server():\n ioloop = data['ioloop']\n ioloop.add_callback(ioloop.stop)\n print(\"Asked Server to shut down.\")\n\ndef ui():\n try:\n time.sleep(0.5)\n input(\"Press <ENTER> to exit...\\n\") # lgtm [py/use-of-input]\n except KeyboardInterrupt:\n pass\n\nif __name__ == \"__main__\":\n\n if tornado.version_info[0] == 4:\n print('docserver.py script requires tornado 5 or higher')\n sys.exit(1)\n\n print(\"\\nStarting Bokeh plot 
server on port %d...\" % PORT)\n print(\"Visit http://localhost:%d/en/latest/index.html to see plots\\n\" % PORT)\n\n t_server = threading.Thread(target=serve_http)\n t_server.start()\n t_browser = threading.Thread(target=open_browser)\n t_browser.start()\n\n ui()\n\n shutdown_server()\n t_server.join()\n t_browser.join()\n print(\"Server shut down.\")\n", "path": "sphinx/docserver.py"}]}
1,650
112
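
The crash in this record follows directly from the standard library contract: `webbrowser.open()` accepts `new` as 0 (same window), 1 (new window), or 2 (new tab), and any other value raises `webbrowser.Error`. A short illustration of the corrected call:

```python
import webbrowser

PORT = 5009  # matches the docserver.py above
# new=2 requests a new browser tab; the string "tab" is what used to crash.
webbrowser.open("http://localhost:%d/en/latest/index.html" % PORT, new=2)
```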
gh_patches_debug_3215
rasdani/github-patches
git_diff
python-discord__bot-733
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Write unit tests for `bot/rules/newlines.py` Write unit tests for [`bot/rules/newlines.py`](../blob/master/bot/rules/newlines.py). ## Implementation details Please make sure to read the general information in the [meta issue](553) and the [testing README](../blob/master/tests/README.md). We are aiming for a 100% [branch coverage](https://coverage.readthedocs.io/en/stable/branch.html) for this file, but if you think that is not possible, please discuss that in this issue. ## Additional information If you want to work on this issue, **please make sure that you get assigned to it** by one of the core devs before starting to work on it. We would like to prevent the situation that multiple people are working on the same issue. To get assigned, leave a comment showing your interesting in tackling this issue. </issue> <code> [start of bot/rules/attachments.py] 1 from typing import Dict, Iterable, List, Optional, Tuple 2 3 from discord import Member, Message 4 5 6 async def apply( 7 last_message: Message, recent_messages: List[Message], config: Dict[str, int] 8 ) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: 9 """Detects total attachments exceeding the limit sent by a single user.""" 10 relevant_messages = tuple( 11 msg 12 for msg in recent_messages 13 if ( 14 msg.author == last_message.author 15 and len(msg.attachments) > 0 16 ) 17 ) 18 total_recent_attachments = sum(len(msg.attachments) for msg in relevant_messages) 19 20 if total_recent_attachments > config['max']: 21 return ( 22 f"sent {total_recent_attachments} attachments in {config['max']}s", 23 (last_message.author,), 24 relevant_messages 25 ) 26 return None 27 [end of bot/rules/attachments.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/bot/rules/attachments.py b/bot/rules/attachments.py --- a/bot/rules/attachments.py +++ b/bot/rules/attachments.py @@ -19,7 +19,7 @@ if total_recent_attachments > config['max']: return ( - f"sent {total_recent_attachments} attachments in {config['max']}s", + f"sent {total_recent_attachments} attachments in {config['interval']}s", (last_message.author,), relevant_messages )
{"golden_diff": "diff --git a/bot/rules/attachments.py b/bot/rules/attachments.py\n--- a/bot/rules/attachments.py\n+++ b/bot/rules/attachments.py\n@@ -19,7 +19,7 @@\n \n if total_recent_attachments > config['max']:\n return (\n- f\"sent {total_recent_attachments} attachments in {config['max']}s\",\n+ f\"sent {total_recent_attachments} attachments in {config['interval']}s\",\n (last_message.author,),\n relevant_messages\n )\n", "issue": "Write unit tests for `bot/rules/newlines.py`\nWrite unit tests for [`bot/rules/newlines.py`](../blob/master/bot/rules/newlines.py).\n\n## Implementation details\nPlease make sure to read the general information in the [meta issue](553) and the [testing README](../blob/master/tests/README.md). We are aiming for a 100% [branch coverage](https://coverage.readthedocs.io/en/stable/branch.html) for this file, but if you think that is not possible, please discuss that in this issue.\n\n## Additional information\nIf you want to work on this issue, **please make sure that you get assigned to it** by one of the core devs before starting to work on it. We would like to prevent the situation that multiple people are working on the same issue. To get assigned, leave a comment showing your interesting in tackling this issue.\n\n", "before_files": [{"content": "from typing import Dict, Iterable, List, Optional, Tuple\n\nfrom discord import Member, Message\n\n\nasync def apply(\n last_message: Message, recent_messages: List[Message], config: Dict[str, int]\n) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:\n \"\"\"Detects total attachments exceeding the limit sent by a single user.\"\"\"\n relevant_messages = tuple(\n msg\n for msg in recent_messages\n if (\n msg.author == last_message.author\n and len(msg.attachments) > 0\n )\n )\n total_recent_attachments = sum(len(msg.attachments) for msg in relevant_messages)\n\n if total_recent_attachments > config['max']:\n return (\n f\"sent {total_recent_attachments} attachments in {config['max']}s\",\n (last_message.author,),\n relevant_messages\n )\n return None\n", "path": "bot/rules/attachments.py"}]}
951
113
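
The one-line diff above fixes a units mix-up: `config["max"]` is the attachment threshold, while `config["interval"]` is the window length in seconds. A tiny reproduction with assumed config values:

```python
config = {"max": 9, "interval": 10}  # assumed values for illustration
total_recent_attachments = 12

if total_recent_attachments > config["max"]:
    # Interpolating config["max"] here would wrongly report "in 9s".
    print(f"sent {total_recent_attachments} attachments in {config['interval']}s")
```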
gh_patches_debug_26022
rasdani/github-patches
git_diff
mindee__doctr-173
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [docs] Add a visualization of the example script in the README While the readme specifies how you can use the example script, it does not show any visualization examples. We could easily add one to help users. </issue> <code> [start of doctr/utils/visualization.py] 1 # Copyright (C) 2021, Mindee. 2 3 # This program is licensed under the Apache License version 2. 4 # See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details. 5 6 import matplotlib.pyplot as plt 7 import matplotlib.patches as patches 8 import mplcursors 9 import numpy as np 10 from typing import Tuple, List, Dict, Any 11 12 from .common_types import BoundingBox 13 14 __all__ = ['visualize_page'] 15 16 17 def create_patch( 18 geometry: BoundingBox, 19 label: str, 20 page_dimensions: Tuple[int, int], 21 color: Tuple[int, int, int], 22 alpha: float = 0.3, 23 linewidth: int = 2, 24 ) -> patches.Patch: 25 """Create a matplotlib patch (rectangle) bounding the element 26 27 Args: 28 geometry: bounding box of the element 29 label: label to display when hovered 30 page_dimensions: dimensions of the Page 31 color: color to draw box 32 alpha: opacity parameter to fill the boxes, 0 = transparent 33 linewidth: line width 34 35 Returns: 36 a rectangular Patch 37 """ 38 h, w = page_dimensions 39 (xmin, ymin), (xmax, ymax) = geometry 40 xmin, xmax = xmin * w, xmax * w 41 ymin, ymax = ymin * h, ymax * h 42 rect = patches.Rectangle( 43 (xmin, ymin), 44 xmax - xmin, 45 ymax - ymin, 46 fill=True, 47 linewidth=linewidth, 48 edgecolor=(*color, alpha), 49 facecolor=(*color, alpha), 50 label=label 51 ) 52 return rect 53 54 55 def visualize_page( 56 page: Dict[str, Any], 57 image: np.ndarray, 58 words_only: bool = True, 59 ) -> None: 60 """Visualize a full page with predicted blocks, lines and words 61 62 Example:: 63 >>> import numpy as np 64 >>> import matplotlib.pyplot as plt 65 >>> from doctr.utils.visualization import visualize_page 66 >>> from doctr.models import ocr_db_crnn 67 >>> model = ocr_db_crnn(pretrained=True) 68 >>> input_page = (255 * np.random.rand(600, 800, 3)).astype(np.uint8) 69 >>> out = model([[input_page]]) 70 >>> visualize_page(out[0].pages[0].export(), input_page) 71 >>> plt.show() 72 73 Args: 74 page: the exported Page of a Document 75 image: np array of the page, needs to have the same shape than page['dimensions'] 76 words_only: whether only words should be displayed 77 """ 78 # Display the image 79 _, ax = plt.subplots() 80 ax.imshow(image) 81 # hide both axis 82 ax.axis('off') 83 84 artists: List[patches.Patch] = [] # instantiate an empty list of patches (to be drawn on the page) 85 86 for block in page['blocks']: 87 if not words_only: 88 rect = create_patch(block['geometry'], 'block', page['dimensions'], (0, 1, 0), linewidth=1) 89 # add patch on figure 90 ax.add_patch(rect) 91 # add patch to cursor's artists 92 artists.append(rect) 93 94 for line in block['lines']: 95 if not words_only: 96 rect = create_patch(line['geometry'], 'line', page['dimensions'], (1, 0, 0), linewidth=1) 97 ax.add_patch(rect) 98 artists.append(rect) 99 100 for word in line['words']: 101 rect = create_patch(word['geometry'], f"{word['value']} (confidence: {word['confidence']:.2%})", 102 page['dimensions'], (0, 0, 1)) 103 ax.add_patch(rect) 104 artists.append(rect) 105 106 if not words_only: 107 for artefact in block['artefacts']: 108 rect = create_patch(artefact['geometry'], 'artefact', page['dimensions'], (0.5, 0.5, 0.5), linewidth=1) 109 ax.add_patch(rect) 110 artists.append(rect) 111 112 # Create mlp Cursor to hover patches in artists 113 mplcursors.Cursor(artists, hover=2).connect("add", lambda sel: sel.annotation.set_text(sel.artist.get_label())) 114 [end of doctr/utils/visualization.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/doctr/utils/visualization.py b/doctr/utils/visualization.py --- a/doctr/utils/visualization.py +++ b/doctr/utils/visualization.py @@ -56,6 +56,7 @@ page: Dict[str, Any], image: np.ndarray, words_only: bool = True, + scale: float = 10, ) -> None: """Visualize a full page with predicted blocks, lines and words @@ -74,9 +75,13 @@ page: the exported Page of a Document image: np array of the page, needs to have the same shape than page['dimensions'] words_only: whether only words should be displayed + scale: figsize of the largest windows side """ + # Get proper scale and aspect ratio + h, w = image.shape[:2] + size = (scale * w / h, scale) if h > w else (scale, h / w * scale) + fig, ax = plt.subplots(figsize=size) # Display the image - _, ax = plt.subplots() ax.imshow(image) # hide both axis ax.axis('off') @@ -111,3 +116,4 @@ # Create mlp Cursor to hover patches in artists mplcursors.Cursor(artists, hover=2).connect("add", lambda sel: sel.annotation.set_text(sel.artist.get_label())) + fig.tight_layout()
{"golden_diff": "diff --git a/doctr/utils/visualization.py b/doctr/utils/visualization.py\n--- a/doctr/utils/visualization.py\n+++ b/doctr/utils/visualization.py\n@@ -56,6 +56,7 @@\n page: Dict[str, Any],\n image: np.ndarray,\n words_only: bool = True,\n+ scale: float = 10,\n ) -> None:\n \"\"\"Visualize a full page with predicted blocks, lines and words\n \n@@ -74,9 +75,13 @@\n page: the exported Page of a Document\n image: np array of the page, needs to have the same shape than page['dimensions']\n words_only: whether only words should be displayed\n+ scale: figsize of the largest windows side\n \"\"\"\n+ # Get proper scale and aspect ratio\n+ h, w = image.shape[:2]\n+ size = (scale * w / h, scale) if h > w else (scale, h / w * scale)\n+ fig, ax = plt.subplots(figsize=size)\n # Display the image\n- _, ax = plt.subplots()\n ax.imshow(image)\n # hide both axis\n ax.axis('off')\n@@ -111,3 +116,4 @@\n \n # Create mlp Cursor to hover patches in artists\n mplcursors.Cursor(artists, hover=2).connect(\"add\", lambda sel: sel.annotation.set_text(sel.artist.get_label()))\n+ fig.tight_layout()\n", "issue": "[docs] Add a visualization of the example script in the README\nWhile the readme specifies how you can use the example script, it does not show any visualization examples. We could easily add one to help users.\n", "before_files": [{"content": "# Copyright (C) 2021, Mindee.\n\n# This program is licensed under the Apache License version 2.\n# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.\n\nimport matplotlib.pyplot as plt\nimport matplotlib.patches as patches\nimport mplcursors\nimport numpy as np\nfrom typing import Tuple, List, Dict, Any\n\nfrom .common_types import BoundingBox\n\n__all__ = ['visualize_page']\n\n\ndef create_patch(\n geometry: BoundingBox,\n label: str,\n page_dimensions: Tuple[int, int],\n color: Tuple[int, int, int],\n alpha: float = 0.3,\n linewidth: int = 2,\n) -> patches.Patch:\n \"\"\"Create a matplotlib patch (rectangle) bounding the element\n\n Args:\n geometry: bounding box of the element\n label: label to display when hovered\n page_dimensions: dimensions of the Page\n color: color to draw box\n alpha: opacity parameter to fill the boxes, 0 = transparent\n linewidth: line width\n\n Returns:\n a rectangular Patch\n \"\"\"\n h, w = page_dimensions\n (xmin, ymin), (xmax, ymax) = geometry\n xmin, xmax = xmin * w, xmax * w\n ymin, ymax = ymin * h, ymax * h\n rect = patches.Rectangle(\n (xmin, ymin),\n xmax - xmin,\n ymax - ymin,\n fill=True,\n linewidth=linewidth,\n edgecolor=(*color, alpha),\n facecolor=(*color, alpha),\n label=label\n )\n return rect\n\n\ndef visualize_page(\n page: Dict[str, Any],\n image: np.ndarray,\n words_only: bool = True,\n) -> None:\n \"\"\"Visualize a full page with predicted blocks, lines and words\n\n Example::\n >>> import numpy as np\n >>> import matplotlib.pyplot as plt\n >>> from doctr.utils.visualization import visualize_page\n >>> from doctr.models import ocr_db_crnn\n >>> model = ocr_db_crnn(pretrained=True)\n >>> input_page = (255 * np.random.rand(600, 800, 3)).astype(np.uint8)\n >>> out = model([[input_page]])\n >>> visualize_page(out[0].pages[0].export(), input_page)\n >>> plt.show()\n\n Args:\n page: the exported Page of a Document\n image: np array of the page, needs to have the same shape than page['dimensions']\n words_only: whether only words should be displayed\n \"\"\"\n # Display the image\n _, ax = plt.subplots()\n ax.imshow(image)\n # hide both axis\n ax.axis('off')\n\n 
artists: List[patches.Patch] = [] # instantiate an empty list of patches (to be drawn on the page)\n\n for block in page['blocks']:\n if not words_only:\n rect = create_patch(block['geometry'], 'block', page['dimensions'], (0, 1, 0), linewidth=1)\n # add patch on figure\n ax.add_patch(rect)\n # add patch to cursor's artists\n artists.append(rect)\n\n for line in block['lines']:\n if not words_only:\n rect = create_patch(line['geometry'], 'line', page['dimensions'], (1, 0, 0), linewidth=1)\n ax.add_patch(rect)\n artists.append(rect)\n\n for word in line['words']:\n rect = create_patch(word['geometry'], f\"{word['value']} (confidence: {word['confidence']:.2%})\",\n page['dimensions'], (0, 0, 1))\n ax.add_patch(rect)\n artists.append(rect)\n\n if not words_only:\n for artefact in block['artefacts']:\n rect = create_patch(artefact['geometry'], 'artefact', page['dimensions'], (0.5, 0.5, 0.5), linewidth=1)\n ax.add_patch(rect)\n artists.append(rect)\n\n # Create mlp Cursor to hover patches in artists\n mplcursors.Cursor(artists, hover=2).connect(\"add\", lambda sel: sel.annotation.set_text(sel.artist.get_label()))\n", "path": "doctr/utils/visualization.py"}]}
1,737
318
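
The core of the doctr diff is the figsize arithmetic: map the longer image side to `scale` inches and keep the aspect ratio. A standalone sketch with a dummy page in place of real OCR output:

```python
import matplotlib.pyplot as plt
import numpy as np

image = np.zeros((600, 800, 3), dtype=np.uint8)  # dummy page, h=600, w=800
scale = 10
h, w = image.shape[:2]
# The longer side becomes `scale` inches; the other side preserves the ratio.
size = (scale * w / h, scale) if h > w else (scale, h / w * scale)
fig, ax = plt.subplots(figsize=size)
ax.imshow(image)
ax.axis("off")
fig.tight_layout()
```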
gh_patches_debug_36520
rasdani/github-patches
git_diff
vacanza__python-holidays-1555
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Update Denmark holidays I've received an email with a link to https://www.norden.org/en/info-norden/public-holidays-denmark The author complained about absence of June 5th in the list of holiday dates: > The calendar for Denmark does not include 5 June. Denmark holidays need to be extended using categories approach. </issue> <code> [start of holidays/countries/denmark.py] 1 # python-holidays 2 # --------------- 3 # A fast, efficient Python library for generating country, province and state 4 # specific sets of holidays on the fly. It aims to make determining whether a 5 # specific date is a holiday as fast and flexible as possible. 6 # 7 # Authors: dr-prodigy <[email protected]> (c) 2017-2023 8 # ryanss <[email protected]> (c) 2014-2017 9 # Website: https://github.com/dr-prodigy/python-holidays 10 # License: MIT (see LICENSE file) 11 12 from datetime import timedelta as td 13 from gettext import gettext as tr 14 15 from holidays.groups import ChristianHolidays, InternationalHolidays 16 from holidays.holiday_base import HolidayBase 17 18 19 class Denmark(HolidayBase, ChristianHolidays, InternationalHolidays): 20 """ 21 Denmark holidays. 22 23 References: 24 - https://en.wikipedia.org/wiki/Public_holidays_in_Denmark 25 - https://www.ft.dk/samling/20222/lovforslag/l13/index.htm 26 """ 27 28 country = "DK" 29 default_language = "da" 30 supported_languages = ("da", "en_US", "uk") 31 32 def __init__(self, *args, **kwargs): 33 ChristianHolidays.__init__(self) 34 InternationalHolidays.__init__(self) 35 super().__init__(*args, **kwargs) 36 37 def _populate(self, year): 38 super()._populate(year) 39 40 # New Year's Day. 41 self._add_new_years_day(tr("Nytårsdag")) 42 43 # Holy Thursday. 44 self._add_holy_thursday(tr("Skærtorsdag")) 45 46 # Good Friday. 47 self._add_good_friday(tr("Langfredag")) 48 49 # Easter Sunday. 50 self._add_easter_sunday(tr("Påskedag")) 51 52 # Easter Monday. 53 self._add_easter_monday(tr("Anden påskedag")) 54 55 # See https://www.ft.dk/samling/20222/lovforslag/l13/index.htm 56 if year <= 2023: 57 # Great Day of Prayers. 58 self._add_holiday(tr("Store bededag"), self._easter_sunday + td(days=+26)) 59 60 # Ascension Day. 61 self._add_ascension_thursday(tr("Kristi himmelfartsdag")) 62 63 # Whit Sunday. 64 self._add_whit_sunday(tr("Pinsedag")) 65 66 # Whit Monday. 67 self._add_whit_monday(tr("Anden pinsedag")) 68 69 # Christmas Day. 70 self._add_christmas_day(tr("Juledag")) 71 72 # Second Day of Christmas. 73 self._add_christmas_day_two(tr("Anden juledag")) 74 75 76 class DK(Denmark): 77 pass 78 79 80 class DNK(Denmark): 81 pass 82 [end of holidays/countries/denmark.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/holidays/countries/denmark.py b/holidays/countries/denmark.py --- a/holidays/countries/denmark.py +++ b/holidays/countries/denmark.py @@ -12,6 +12,7 @@ from datetime import timedelta as td from gettext import gettext as tr +from holidays.constants import OPTIONAL, PUBLIC from holidays.groups import ChristianHolidays, InternationalHolidays from holidays.holiday_base import HolidayBase @@ -22,11 +23,13 @@ References: - https://en.wikipedia.org/wiki/Public_holidays_in_Denmark + - https://www.norden.org/en/info-norden/public-holidays-denmark - https://www.ft.dk/samling/20222/lovforslag/l13/index.htm """ country = "DK" default_language = "da" + supported_categories = {OPTIONAL, PUBLIC} supported_languages = ("da", "en_US", "uk") def __init__(self, *args, **kwargs): @@ -34,9 +37,7 @@ InternationalHolidays.__init__(self) super().__init__(*args, **kwargs) - def _populate(self, year): - super()._populate(year) - + def _populate_public_holidays(self): # New Year's Day. self._add_new_years_day(tr("Nytårsdag")) @@ -53,7 +54,7 @@ self._add_easter_monday(tr("Anden påskedag")) # See https://www.ft.dk/samling/20222/lovforslag/l13/index.htm - if year <= 2023: + if self._year <= 2023: # Great Day of Prayers. self._add_holiday(tr("Store bededag"), self._easter_sunday + td(days=+26)) @@ -72,6 +73,19 @@ # Second Day of Christmas. self._add_christmas_day_two(tr("Anden juledag")) + def _populate_optional_holidays(self): + # International Workers' Day. + self._add_labor_day(tr("Arbejdernes kampdag")) + + # Constitution Day. + self._add_holiday_jun_5(tr("Grundlovsdag")) + + # Christmas Eve. + self._add_christmas_eve(tr("Juleaftensdag")) + + # New Year's Eve. + self._add_new_years_eve(tr("Nytårsaften")) + class DK(Denmark): pass
{"golden_diff": "diff --git a/holidays/countries/denmark.py b/holidays/countries/denmark.py\n--- a/holidays/countries/denmark.py\n+++ b/holidays/countries/denmark.py\n@@ -12,6 +12,7 @@\n from datetime import timedelta as td\n from gettext import gettext as tr\n \n+from holidays.constants import OPTIONAL, PUBLIC\n from holidays.groups import ChristianHolidays, InternationalHolidays\n from holidays.holiday_base import HolidayBase\n \n@@ -22,11 +23,13 @@\n \n References:\n - https://en.wikipedia.org/wiki/Public_holidays_in_Denmark\n+ - https://www.norden.org/en/info-norden/public-holidays-denmark\n - https://www.ft.dk/samling/20222/lovforslag/l13/index.htm\n \"\"\"\n \n country = \"DK\"\n default_language = \"da\"\n+ supported_categories = {OPTIONAL, PUBLIC}\n supported_languages = (\"da\", \"en_US\", \"uk\")\n \n def __init__(self, *args, **kwargs):\n@@ -34,9 +37,7 @@\n InternationalHolidays.__init__(self)\n super().__init__(*args, **kwargs)\n \n- def _populate(self, year):\n- super()._populate(year)\n-\n+ def _populate_public_holidays(self):\n # New Year's Day.\n self._add_new_years_day(tr(\"Nyt\u00e5rsdag\"))\n \n@@ -53,7 +54,7 @@\n self._add_easter_monday(tr(\"Anden p\u00e5skedag\"))\n \n # See https://www.ft.dk/samling/20222/lovforslag/l13/index.htm\n- if year <= 2023:\n+ if self._year <= 2023:\n # Great Day of Prayers.\n self._add_holiday(tr(\"Store bededag\"), self._easter_sunday + td(days=+26))\n \n@@ -72,6 +73,19 @@\n # Second Day of Christmas.\n self._add_christmas_day_two(tr(\"Anden juledag\"))\n \n+ def _populate_optional_holidays(self):\n+ # International Workers' Day.\n+ self._add_labor_day(tr(\"Arbejdernes kampdag\"))\n+\n+ # Constitution Day.\n+ self._add_holiday_jun_5(tr(\"Grundlovsdag\"))\n+\n+ # Christmas Eve.\n+ self._add_christmas_eve(tr(\"Juleaftensdag\"))\n+\n+ # New Year's Eve.\n+ self._add_new_years_eve(tr(\"Nyt\u00e5rsaften\"))\n+\n \n class DK(Denmark):\n pass\n", "issue": "Update Denmark holidays\nI've received an email with a link to https://www.norden.org/en/info-norden/public-holidays-denmark\r\n\r\nThe author complained about absence of June 5th in the list of holiday dates:\r\n\r\n> The calendar for Denmark does not include 5 June.\r\n\r\nDenmark holidays need to be extended using categories approach.\n", "before_files": [{"content": "# python-holidays\n# ---------------\n# A fast, efficient Python library for generating country, province and state\n# specific sets of holidays on the fly. 
It aims to make determining whether a\n# specific date is a holiday as fast and flexible as possible.\n#\n# Authors: dr-prodigy <[email protected]> (c) 2017-2023\n# ryanss <[email protected]> (c) 2014-2017\n# Website: https://github.com/dr-prodigy/python-holidays\n# License: MIT (see LICENSE file)\n\nfrom datetime import timedelta as td\nfrom gettext import gettext as tr\n\nfrom holidays.groups import ChristianHolidays, InternationalHolidays\nfrom holidays.holiday_base import HolidayBase\n\n\nclass Denmark(HolidayBase, ChristianHolidays, InternationalHolidays):\n \"\"\"\n Denmark holidays.\n\n References:\n - https://en.wikipedia.org/wiki/Public_holidays_in_Denmark\n - https://www.ft.dk/samling/20222/lovforslag/l13/index.htm\n \"\"\"\n\n country = \"DK\"\n default_language = \"da\"\n supported_languages = (\"da\", \"en_US\", \"uk\")\n\n def __init__(self, *args, **kwargs):\n ChristianHolidays.__init__(self)\n InternationalHolidays.__init__(self)\n super().__init__(*args, **kwargs)\n\n def _populate(self, year):\n super()._populate(year)\n\n # New Year's Day.\n self._add_new_years_day(tr(\"Nyt\u00e5rsdag\"))\n\n # Holy Thursday.\n self._add_holy_thursday(tr(\"Sk\u00e6rtorsdag\"))\n\n # Good Friday.\n self._add_good_friday(tr(\"Langfredag\"))\n\n # Easter Sunday.\n self._add_easter_sunday(tr(\"P\u00e5skedag\"))\n\n # Easter Monday.\n self._add_easter_monday(tr(\"Anden p\u00e5skedag\"))\n\n # See https://www.ft.dk/samling/20222/lovforslag/l13/index.htm\n if year <= 2023:\n # Great Day of Prayers.\n self._add_holiday(tr(\"Store bededag\"), self._easter_sunday + td(days=+26))\n\n # Ascension Day.\n self._add_ascension_thursday(tr(\"Kristi himmelfartsdag\"))\n\n # Whit Sunday.\n self._add_whit_sunday(tr(\"Pinsedag\"))\n\n # Whit Monday.\n self._add_whit_monday(tr(\"Anden pinsedag\"))\n\n # Christmas Day.\n self._add_christmas_day(tr(\"Juledag\"))\n\n # Second Day of Christmas.\n self._add_christmas_day_two(tr(\"Anden juledag\"))\n\n\nclass DK(Denmark):\n pass\n\n\nclass DNK(Denmark):\n pass\n", "path": "holidays/countries/denmark.py"}]}
1,421
599
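The fix in the record above hinges on the categories dispatch: `HolidayBase` routes population through one `_populate_<category>_holidays` method per requested category in `supported_categories`, which is how June 5 (Grundlovsdag) lands in the OPTIONAL set without widening the public one. Below is a minimal, library-independent sketch of that dispatch; the class and method names only loosely mirror the diff, and just the fixed-date holidays are kept for brevity.

```python
from datetime import date


class DenmarkSketch:
    supported_categories = {"public", "optional"}

    def __init__(self, year, categories=("public",)):
        self.year = year
        self.holidays = {}
        for cat in categories:
            if cat not in self.supported_categories:
                raise ValueError(f"unsupported category: {cat}")
            # Same dispatch idea as HolidayBase: one populate method per category.
            getattr(self, f"_populate_{cat}_holidays")()

    def _populate_public_holidays(self):
        self.holidays[date(self.year, 1, 1)] = "Nytårsdag"  # New Year's Day

    def _populate_optional_holidays(self):
        self.holidays[date(self.year, 6, 5)] = "Grundlovsdag"     # Constitution Day
        self.holidays[date(self.year, 12, 24)] = "Juleaftensdag"  # Christmas Eve


dk = DenmarkSketch(2024, categories=("public", "optional"))
assert date(2024, 6, 5) in dk.holidays  # June 5 now appears, as the issue requested
```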
gh_patches_debug_21675
rasdani/github-patches
git_diff
liqd__a4-meinberlin-2385
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> counting contributions to polls on module tile as discussed please count the comments AND all answers on poll module tiles. </issue> <code> [start of meinberlin/apps/projects/templatetags/meinberlin_project_tags.py] 1 from django import template 2 3 from adhocracy4.comments.models import Comment 4 from meinberlin.apps.budgeting.models import Proposal as budget_proposal 5 from meinberlin.apps.ideas.models import Idea 6 from meinberlin.apps.kiezkasse.models import Proposal as kiezkasse_proposal 7 from meinberlin.apps.mapideas.models import MapIdea 8 from meinberlin.apps.projects import get_project_type 9 10 register = template.Library() 11 12 13 @register.filter 14 def project_url(project): 15 if get_project_type(project) in ('external', 'bplan'): 16 return project.externalproject.url 17 return project.get_absolute_url() 18 19 20 @register.filter 21 def project_type(project): 22 return get_project_type(project) 23 24 25 @register.filter 26 def is_external(project): 27 return get_project_type(project) in ('external', 'bplan') 28 29 30 @register.filter 31 def is_container(project): 32 return get_project_type(project) == 'container' 33 34 35 @register.simple_tag 36 def to_class_name(value): 37 return value.__class__.__name__ 38 39 40 @register.simple_tag 41 def get_num_entries(module): 42 """Count all user-generated items.""" 43 item_count = \ 44 Idea.objects.filter(module=module).count() \ 45 + MapIdea.objects.filter(module=module).count() \ 46 + budget_proposal.objects.filter(module=module).count() \ 47 + kiezkasse_proposal.objects.filter(module=module).count() \ 48 + Comment.objects.filter(idea__module=module).count() \ 49 + Comment.objects.filter(mapidea__module=module).count() \ 50 + Comment.objects.filter(budget_proposal__module=module).count() \ 51 + Comment.objects.filter(kiezkasse_proposal__module=module).count() \ 52 + Comment.objects.filter(topic__module=module).count() \ 53 + Comment.objects.filter(maptopic__module=module).count() \ 54 + Comment.objects.filter(paragraph__chapter__module=module).count() \ 55 + Comment.objects.filter(chapter__module=module).count() \ 56 + Comment.objects.filter(poll__module=module).count() 57 return item_count 58 [end of meinberlin/apps/projects/templatetags/meinberlin_project_tags.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/meinberlin/apps/projects/templatetags/meinberlin_project_tags.py b/meinberlin/apps/projects/templatetags/meinberlin_project_tags.py --- a/meinberlin/apps/projects/templatetags/meinberlin_project_tags.py +++ b/meinberlin/apps/projects/templatetags/meinberlin_project_tags.py @@ -5,6 +5,7 @@ from meinberlin.apps.ideas.models import Idea from meinberlin.apps.kiezkasse.models import Proposal as kiezkasse_proposal from meinberlin.apps.mapideas.models import MapIdea +from meinberlin.apps.polls.models import Vote from meinberlin.apps.projects import get_project_type register = template.Library() @@ -53,5 +54,6 @@ + Comment.objects.filter(maptopic__module=module).count() \ + Comment.objects.filter(paragraph__chapter__module=module).count() \ + Comment.objects.filter(chapter__module=module).count() \ - + Comment.objects.filter(poll__module=module).count() + + Comment.objects.filter(poll__module=module).count() \ + + Vote.objects.filter(choice__question__poll__module=module).count() return item_count
{"golden_diff": "diff --git a/meinberlin/apps/projects/templatetags/meinberlin_project_tags.py b/meinberlin/apps/projects/templatetags/meinberlin_project_tags.py\n--- a/meinberlin/apps/projects/templatetags/meinberlin_project_tags.py\n+++ b/meinberlin/apps/projects/templatetags/meinberlin_project_tags.py\n@@ -5,6 +5,7 @@\n from meinberlin.apps.ideas.models import Idea\n from meinberlin.apps.kiezkasse.models import Proposal as kiezkasse_proposal\n from meinberlin.apps.mapideas.models import MapIdea\n+from meinberlin.apps.polls.models import Vote\n from meinberlin.apps.projects import get_project_type\n \n register = template.Library()\n@@ -53,5 +54,6 @@\n + Comment.objects.filter(maptopic__module=module).count() \\\n + Comment.objects.filter(paragraph__chapter__module=module).count() \\\n + Comment.objects.filter(chapter__module=module).count() \\\n- + Comment.objects.filter(poll__module=module).count()\n+ + Comment.objects.filter(poll__module=module).count() \\\n+ + Vote.objects.filter(choice__question__poll__module=module).count()\n return item_count\n", "issue": "counting contributions to polls on module tile\nas discussed please count the comments AND all answers on poll module tiles.\n", "before_files": [{"content": "from django import template\n\nfrom adhocracy4.comments.models import Comment\nfrom meinberlin.apps.budgeting.models import Proposal as budget_proposal\nfrom meinberlin.apps.ideas.models import Idea\nfrom meinberlin.apps.kiezkasse.models import Proposal as kiezkasse_proposal\nfrom meinberlin.apps.mapideas.models import MapIdea\nfrom meinberlin.apps.projects import get_project_type\n\nregister = template.Library()\n\n\[email protected]\ndef project_url(project):\n if get_project_type(project) in ('external', 'bplan'):\n return project.externalproject.url\n return project.get_absolute_url()\n\n\[email protected]\ndef project_type(project):\n return get_project_type(project)\n\n\[email protected]\ndef is_external(project):\n return get_project_type(project) in ('external', 'bplan')\n\n\[email protected]\ndef is_container(project):\n return get_project_type(project) == 'container'\n\n\[email protected]_tag\ndef to_class_name(value):\n return value.__class__.__name__\n\n\[email protected]_tag\ndef get_num_entries(module):\n \"\"\"Count all user-generated items.\"\"\"\n item_count = \\\n Idea.objects.filter(module=module).count() \\\n + MapIdea.objects.filter(module=module).count() \\\n + budget_proposal.objects.filter(module=module).count() \\\n + kiezkasse_proposal.objects.filter(module=module).count() \\\n + Comment.objects.filter(idea__module=module).count() \\\n + Comment.objects.filter(mapidea__module=module).count() \\\n + Comment.objects.filter(budget_proposal__module=module).count() \\\n + Comment.objects.filter(kiezkasse_proposal__module=module).count() \\\n + Comment.objects.filter(topic__module=module).count() \\\n + Comment.objects.filter(maptopic__module=module).count() \\\n + Comment.objects.filter(paragraph__chapter__module=module).count() \\\n + Comment.objects.filter(chapter__module=module).count() \\\n + Comment.objects.filter(poll__module=module).count()\n return item_count\n", "path": "meinberlin/apps/projects/templatetags/meinberlin_project_tags.py"}]}
1,139
281
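The patch above adds a single extra ORM aggregate, `Vote.objects.filter(choice__question__poll__module=module).count()`, so poll answers count toward a module tile alongside comments. A plain-Python analogue of that counting logic; the dataclass below is illustrative and not a meinberlin model:

```python
from dataclasses import dataclass, field


@dataclass
class PollSketch:
    comments: list = field(default_factory=list)
    votes: list = field(default_factory=list)  # one entry per answered choice


def get_num_entries(polls):
    # Pre-fix behaviour counted only comments; votes were invisible on the tile.
    return sum(len(p.comments) + len(p.votes) for p in polls)


polls = [PollSketch(comments=["nice idea"], votes=["yes", "no", "abstain"])]
assert get_num_entries(polls) == 4
```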
gh_patches_debug_4165
rasdani/github-patches
git_diff
ivy-llc__ivy-14979
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> extract </issue> <code> [start of ivy/functional/frontends/numpy/sorting_searching_counting/searching.py] 1 # local 2 3 import ivy 4 5 from ivy.functional.frontends.numpy import promote_types_of_numpy_inputs 6 7 from ivy.functional.frontends.numpy.func_wrapper import ( 8 to_ivy_arrays_and_back, 9 from_zero_dim_arrays_to_scalar, 10 handle_numpy_out, 11 ) 12 13 14 @to_ivy_arrays_and_back 15 def where(cond, x1=None, x2=None, /): 16 if x1 is None and x2 is None: 17 # numpy where behaves as np.asarray(condition).nonzero() when x and y 18 # not included 19 return ivy.asarray(cond).nonzero() 20 elif x1 is not None and x2 is not None: 21 x1, x2 = promote_types_of_numpy_inputs(x1, x2) 22 return ivy.where(cond, x1, x2) 23 else: 24 raise ivy.utils.exceptions.IvyException("where takes either 1 or 3 arguments") 25 26 27 @to_ivy_arrays_and_back 28 def nonzero(a): 29 return ivy.nonzero(a) 30 31 32 @handle_numpy_out 33 @to_ivy_arrays_and_back 34 @from_zero_dim_arrays_to_scalar 35 def argmin(a, /, *, axis=None, keepdims=False, out=None): 36 return ivy.argmin(a, axis=axis, out=out, keepdims=keepdims) 37 38 39 @handle_numpy_out 40 @to_ivy_arrays_and_back 41 @from_zero_dim_arrays_to_scalar 42 def argmax( 43 a, 44 /, 45 *, 46 axis=None, 47 out=None, 48 keepdims=False, 49 ): 50 return ivy.argmax(a, axis=axis, out=out, keepdims=keepdims) 51 52 53 @to_ivy_arrays_and_back 54 def flatnonzero(a): 55 return ivy.nonzero(ivy.reshape(a, (-1,))) 56 57 58 @to_ivy_arrays_and_back 59 def searchsorted(a, v, side="left", sorter=None): 60 return ivy.searchsorted(a, v, side=side, sorter=sorter) 61 62 63 @to_ivy_arrays_and_back 64 def argwhere(a): 65 return ivy.argwhere(a) 66 67 68 # nanargmin and nanargmax composition helper 69 def _nanargminmax(a, axis=None): 70 # check nans 71 nans = ivy.isnan(a).astype(ivy.bool) 72 # replace nans with inf 73 a = ivy.where(nans, ivy.inf, a) 74 if nans is not None: 75 nans = ivy.all(nans, axis=axis) 76 if ivy.any(nans): 77 raise ivy.utils.exceptions.IvyError("All-NaN slice encountered") 78 return a 79 80 81 @handle_numpy_out 82 @to_ivy_arrays_and_back 83 @from_zero_dim_arrays_to_scalar 84 def nanargmax(a, /, *, axis=None, out=None, keepdims=False): 85 a = _nanargminmax(a, axis=axis) 86 return ivy.argmax(a, axis=axis, keepdims=keepdims, out=out) 87 88 89 @handle_numpy_out 90 @to_ivy_arrays_and_back 91 @from_zero_dim_arrays_to_scalar 92 def nanargmin(a, /, *, axis=None, out=None, keepdims=False): 93 a = _nanargminmax(a, axis=axis) 94 return ivy.argmin(a, axis=axis, keepdims=keepdims, out=out) 95 [end of ivy/functional/frontends/numpy/sorting_searching_counting/searching.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ivy/functional/frontends/numpy/sorting_searching_counting/searching.py b/ivy/functional/frontends/numpy/sorting_searching_counting/searching.py --- a/ivy/functional/frontends/numpy/sorting_searching_counting/searching.py +++ b/ivy/functional/frontends/numpy/sorting_searching_counting/searching.py @@ -92,3 +92,11 @@ def nanargmin(a, /, *, axis=None, out=None, keepdims=False): a = _nanargminmax(a, axis=axis) return ivy.argmin(a, axis=axis, keepdims=keepdims, out=out) + + +@to_ivy_arrays_and_back +def extract(cond, arr, /): + if cond.dtype == 'bool': + return arr[cond] + else: + return arr[cond !=0] \ No newline at end of file
{"golden_diff": "diff --git a/ivy/functional/frontends/numpy/sorting_searching_counting/searching.py b/ivy/functional/frontends/numpy/sorting_searching_counting/searching.py\n--- a/ivy/functional/frontends/numpy/sorting_searching_counting/searching.py\n+++ b/ivy/functional/frontends/numpy/sorting_searching_counting/searching.py\n@@ -92,3 +92,11 @@\n def nanargmin(a, /, *, axis=None, out=None, keepdims=False):\n a = _nanargminmax(a, axis=axis)\n return ivy.argmin(a, axis=axis, keepdims=keepdims, out=out)\n+\n+\n+@to_ivy_arrays_and_back\n+def extract(cond, arr, /):\n+ if cond.dtype == 'bool':\n+ return arr[cond]\n+ else:\n+ return arr[cond !=0]\n\\ No newline at end of file\n", "issue": "extract\n\n", "before_files": [{"content": "# local\n\nimport ivy\n\nfrom ivy.functional.frontends.numpy import promote_types_of_numpy_inputs\n\nfrom ivy.functional.frontends.numpy.func_wrapper import (\n to_ivy_arrays_and_back,\n from_zero_dim_arrays_to_scalar,\n handle_numpy_out,\n)\n\n\n@to_ivy_arrays_and_back\ndef where(cond, x1=None, x2=None, /):\n if x1 is None and x2 is None:\n # numpy where behaves as np.asarray(condition).nonzero() when x and y\n # not included\n return ivy.asarray(cond).nonzero()\n elif x1 is not None and x2 is not None:\n x1, x2 = promote_types_of_numpy_inputs(x1, x2)\n return ivy.where(cond, x1, x2)\n else:\n raise ivy.utils.exceptions.IvyException(\"where takes either 1 or 3 arguments\")\n\n\n@to_ivy_arrays_and_back\ndef nonzero(a):\n return ivy.nonzero(a)\n\n\n@handle_numpy_out\n@to_ivy_arrays_and_back\n@from_zero_dim_arrays_to_scalar\ndef argmin(a, /, *, axis=None, keepdims=False, out=None):\n return ivy.argmin(a, axis=axis, out=out, keepdims=keepdims)\n\n\n@handle_numpy_out\n@to_ivy_arrays_and_back\n@from_zero_dim_arrays_to_scalar\ndef argmax(\n a,\n /,\n *,\n axis=None,\n out=None,\n keepdims=False,\n):\n return ivy.argmax(a, axis=axis, out=out, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef flatnonzero(a):\n return ivy.nonzero(ivy.reshape(a, (-1,)))\n\n\n@to_ivy_arrays_and_back\ndef searchsorted(a, v, side=\"left\", sorter=None):\n return ivy.searchsorted(a, v, side=side, sorter=sorter)\n\n\n@to_ivy_arrays_and_back\ndef argwhere(a):\n return ivy.argwhere(a)\n\n\n# nanargmin and nanargmax composition helper\ndef _nanargminmax(a, axis=None):\n # check nans\n nans = ivy.isnan(a).astype(ivy.bool)\n # replace nans with inf\n a = ivy.where(nans, ivy.inf, a)\n if nans is not None:\n nans = ivy.all(nans, axis=axis)\n if ivy.any(nans):\n raise ivy.utils.exceptions.IvyError(\"All-NaN slice encountered\")\n return a\n\n\n@handle_numpy_out\n@to_ivy_arrays_and_back\n@from_zero_dim_arrays_to_scalar\ndef nanargmax(a, /, *, axis=None, out=None, keepdims=False):\n a = _nanargminmax(a, axis=axis)\n return ivy.argmax(a, axis=axis, keepdims=keepdims, out=out)\n\n\n@handle_numpy_out\n@to_ivy_arrays_and_back\n@from_zero_dim_arrays_to_scalar\ndef nanargmin(a, /, *, axis=None, out=None, keepdims=False):\n a = _nanargminmax(a, axis=axis)\n return ivy.argmin(a, axis=axis, keepdims=keepdims, out=out)\n", "path": "ivy/functional/frontends/numpy/sorting_searching_counting/searching.py"}]}
1,457
205
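The new `extract` frontend mirrors NumPy semantics: a boolean condition selects directly, anything else is first compared against zero. A quick check of both branches against `np.extract` itself, runnable with NumPy alone:

```python
import numpy as np

arr = np.arange(12).reshape(3, 4)

# Boolean condition: behaves like flattened boolean indexing.
cond = arr % 3 == 0
assert np.array_equal(np.extract(cond, arr), np.array([0, 3, 6, 9]))

# Non-boolean condition: nonzero entries select, matching the `cond != 0` branch.
assert np.array_equal(np.extract(arr % 2, arr), arr[arr % 2 != 0])
```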
gh_patches_debug_40331
rasdani/github-patches
git_diff
searxng__searxng-3418
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Wikimedia Commons **Working URL to the engine** https://commons.wikimedia.org **Why do you want to add this engine?** Out of all of the Wikimedia projects, Wikimedia Commons is one of only two to not appear in any engine category in SearXNG, with the other being Wikispecies. **Features of this engine** It has a collection of [82,886,704](https://commons.wikimedia.org/wiki/Special:Statistics) [freely usable](https://commons.wikimedia.org/wiki/Commons:Reusing_content_outside_Wikimedia) media files. **How can SearXNG fetch the information from this engine?** `https://commons.wikimedia.org/w/index.php?search=%s` with `%s` being what you want to search. **Applicable category of this engine** General, files, images, music, videos. </issue> <code> [start of searx/engines/wikicommons.py] 1 # SPDX-License-Identifier: AGPL-3.0-or-later 2 """Wikimedia Commons (images) 3 4 """ 5 6 from urllib.parse import urlencode 7 8 # about 9 about = { 10 "website": 'https://commons.wikimedia.org/', 11 "wikidata_id": 'Q565', 12 "official_api_documentation": 'https://commons.wikimedia.org/w/api.php', 13 "use_official_api": True, 14 "require_api_key": False, 15 "results": 'JSON', 16 } 17 18 base_url = "https://commons.wikimedia.org" 19 search_prefix = ( 20 '?action=query' 21 '&format=json' 22 '&generator=search' 23 '&gsrnamespace=6' 24 '&gsrprop=snippet' 25 '&prop=info|imageinfo' 26 '&iiprop=url|size|mime' 27 '&iiurlheight=180' # needed for the thumb url 28 ) 29 paging = True 30 number_of_results = 10 31 32 33 def request(query, params): 34 language = 'en' 35 if params['language'] != 'all': 36 language = params['language'].split('-')[0] 37 38 args = { 39 'uselang': language, 40 'gsrlimit': number_of_results, 41 'gsroffset': number_of_results * (params["pageno"] - 1), 42 'gsrsearch': "filetype:bitmap|drawing " + query, 43 } 44 45 params["url"] = f"{base_url}/w/api.php{search_prefix}&{urlencode(args, safe=':|')}" 46 return params 47 48 49 def response(resp): 50 results = [] 51 json = resp.json() 52 53 if not json.get("query", {}).get("pages"): 54 return results 55 56 for item in json["query"]["pages"].values(): 57 imageinfo = item["imageinfo"][0] 58 title = item["title"].replace("File:", "").rsplit('.', 1)[0] 59 result = { 60 'url': imageinfo["descriptionurl"], 61 'title': title, 62 'content': item["snippet"], 63 'img_src': imageinfo["url"], 64 'resolution': f'{imageinfo["width"]} x {imageinfo["height"]}', 65 'thumbnail_src': imageinfo["thumburl"], 66 'template': 'images.html', 67 } 68 results.append(result) 69 70 return results 71 [end of searx/engines/wikicommons.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/searx/engines/wikicommons.py b/searx/engines/wikicommons.py --- a/searx/engines/wikicommons.py +++ b/searx/engines/wikicommons.py @@ -3,6 +3,8 @@ """ +import datetime + from urllib.parse import urlencode # about @@ -14,6 +16,8 @@ "require_api_key": False, "results": 'JSON', } +categories = ['images'] +search_type = 'images' base_url = "https://commons.wikimedia.org" search_prefix = ( @@ -29,17 +33,29 @@ paging = True number_of_results = 10 +search_types = { + 'images': 'bitmap|drawing', + 'videos': 'video', + 'audio': 'audio', + 'files': 'multimedia|office|archive|3d', +} + def request(query, params): language = 'en' if params['language'] != 'all': language = params['language'].split('-')[0] + if search_type not in search_types: + raise ValueError(f"Unsupported search type: {search_type}") + + filetype = search_types[search_type] + args = { 'uselang': language, 'gsrlimit': number_of_results, 'gsroffset': number_of_results * (params["pageno"] - 1), - 'gsrsearch': "filetype:bitmap|drawing " + query, + 'gsrsearch': f"filetype:{filetype} {query}", } params["url"] = f"{base_url}/w/api.php{search_prefix}&{urlencode(args, safe=':|')}" @@ -52,7 +68,6 @@ if not json.get("query", {}).get("pages"): return results - for item in json["query"]["pages"].values(): imageinfo = item["imageinfo"][0] title = item["title"].replace("File:", "").rsplit('.', 1)[0] @@ -60,11 +75,28 @@ 'url': imageinfo["descriptionurl"], 'title': title, 'content': item["snippet"], - 'img_src': imageinfo["url"], - 'resolution': f'{imageinfo["width"]} x {imageinfo["height"]}', - 'thumbnail_src': imageinfo["thumburl"], - 'template': 'images.html', } + + if search_type == "images": + result['template'] = 'images.html' + result['img_src'] = imageinfo["url"] + result['thumbnail_src'] = imageinfo["thumburl"] + result['resolution'] = f'{imageinfo["width"]} x {imageinfo["height"]}' + else: + result['thumbnail'] = imageinfo["thumburl"] + + if search_type == "videos": + result['template'] = 'videos.html' + if imageinfo.get('duration'): + result['length'] = datetime.timedelta(seconds=int(imageinfo['duration'])) + result['iframe_src'] = imageinfo['url'] + elif search_type == "files": + result['template'] = 'files.html' + result['metadata'] = imageinfo['mime'] + result['size'] = imageinfo['size'] + elif search_type == "audio": + result['iframe_src'] = imageinfo['url'] + results.append(result) return results
{"golden_diff": "diff --git a/searx/engines/wikicommons.py b/searx/engines/wikicommons.py\n--- a/searx/engines/wikicommons.py\n+++ b/searx/engines/wikicommons.py\n@@ -3,6 +3,8 @@\n \n \"\"\"\n \n+import datetime\n+\n from urllib.parse import urlencode\n \n # about\n@@ -14,6 +16,8 @@\n \"require_api_key\": False,\n \"results\": 'JSON',\n }\n+categories = ['images']\n+search_type = 'images'\n \n base_url = \"https://commons.wikimedia.org\"\n search_prefix = (\n@@ -29,17 +33,29 @@\n paging = True\n number_of_results = 10\n \n+search_types = {\n+ 'images': 'bitmap|drawing',\n+ 'videos': 'video',\n+ 'audio': 'audio',\n+ 'files': 'multimedia|office|archive|3d',\n+}\n+\n \n def request(query, params):\n language = 'en'\n if params['language'] != 'all':\n language = params['language'].split('-')[0]\n \n+ if search_type not in search_types:\n+ raise ValueError(f\"Unsupported search type: {search_type}\")\n+\n+ filetype = search_types[search_type]\n+\n args = {\n 'uselang': language,\n 'gsrlimit': number_of_results,\n 'gsroffset': number_of_results * (params[\"pageno\"] - 1),\n- 'gsrsearch': \"filetype:bitmap|drawing \" + query,\n+ 'gsrsearch': f\"filetype:{filetype} {query}\",\n }\n \n params[\"url\"] = f\"{base_url}/w/api.php{search_prefix}&{urlencode(args, safe=':|')}\"\n@@ -52,7 +68,6 @@\n \n if not json.get(\"query\", {}).get(\"pages\"):\n return results\n-\n for item in json[\"query\"][\"pages\"].values():\n imageinfo = item[\"imageinfo\"][0]\n title = item[\"title\"].replace(\"File:\", \"\").rsplit('.', 1)[0]\n@@ -60,11 +75,28 @@\n 'url': imageinfo[\"descriptionurl\"],\n 'title': title,\n 'content': item[\"snippet\"],\n- 'img_src': imageinfo[\"url\"],\n- 'resolution': f'{imageinfo[\"width\"]} x {imageinfo[\"height\"]}',\n- 'thumbnail_src': imageinfo[\"thumburl\"],\n- 'template': 'images.html',\n }\n+\n+ if search_type == \"images\":\n+ result['template'] = 'images.html'\n+ result['img_src'] = imageinfo[\"url\"]\n+ result['thumbnail_src'] = imageinfo[\"thumburl\"]\n+ result['resolution'] = f'{imageinfo[\"width\"]} x {imageinfo[\"height\"]}'\n+ else:\n+ result['thumbnail'] = imageinfo[\"thumburl\"]\n+\n+ if search_type == \"videos\":\n+ result['template'] = 'videos.html'\n+ if imageinfo.get('duration'):\n+ result['length'] = datetime.timedelta(seconds=int(imageinfo['duration']))\n+ result['iframe_src'] = imageinfo['url']\n+ elif search_type == \"files\":\n+ result['template'] = 'files.html'\n+ result['metadata'] = imageinfo['mime']\n+ result['size'] = imageinfo['size']\n+ elif search_type == \"audio\":\n+ result['iframe_src'] = imageinfo['url']\n+\n results.append(result)\n \n return results\n", "issue": "Wikimedia Commons\n**Working URL to the engine**\r\nhttps://commons.wikimedia.org\r\n\r\n**Why do you want to add this engine?**\r\nOut of all of the Wikimedia projects, Wikimedia Commons is one of only two to not appear in any engine category in SearXNG, with the other being Wikispecies.\r\n\r\n**Features of this engine**\r\nIt has a collection of [82,886,704](https://commons.wikimedia.org/wiki/Special:Statistics) [freely usable](https://commons.wikimedia.org/wiki/Commons:Reusing_content_outside_Wikimedia) media files.\r\n\r\n**How can SearXNG fetch the information from this engine?**\r\n`https://commons.wikimedia.org/w/index.php?search=%s` with `%s` being what you want to search.\r\n\r\n**Applicable category of this engine**\r\nGeneral, files, images, music, videos.\n", "before_files": [{"content": "# SPDX-License-Identifier: AGPL-3.0-or-later\n\"\"\"Wikimedia Commons 
(images)\n\n\"\"\"\n\nfrom urllib.parse import urlencode\n\n# about\nabout = {\n \"website\": 'https://commons.wikimedia.org/',\n \"wikidata_id\": 'Q565',\n \"official_api_documentation\": 'https://commons.wikimedia.org/w/api.php',\n \"use_official_api\": True,\n \"require_api_key\": False,\n \"results\": 'JSON',\n}\n\nbase_url = \"https://commons.wikimedia.org\"\nsearch_prefix = (\n '?action=query'\n '&format=json'\n '&generator=search'\n '&gsrnamespace=6'\n '&gsrprop=snippet'\n '&prop=info|imageinfo'\n '&iiprop=url|size|mime'\n '&iiurlheight=180' # needed for the thumb url\n)\npaging = True\nnumber_of_results = 10\n\n\ndef request(query, params):\n language = 'en'\n if params['language'] != 'all':\n language = params['language'].split('-')[0]\n\n args = {\n 'uselang': language,\n 'gsrlimit': number_of_results,\n 'gsroffset': number_of_results * (params[\"pageno\"] - 1),\n 'gsrsearch': \"filetype:bitmap|drawing \" + query,\n }\n\n params[\"url\"] = f\"{base_url}/w/api.php{search_prefix}&{urlencode(args, safe=':|')}\"\n return params\n\n\ndef response(resp):\n results = []\n json = resp.json()\n\n if not json.get(\"query\", {}).get(\"pages\"):\n return results\n\n for item in json[\"query\"][\"pages\"].values():\n imageinfo = item[\"imageinfo\"][0]\n title = item[\"title\"].replace(\"File:\", \"\").rsplit('.', 1)[0]\n result = {\n 'url': imageinfo[\"descriptionurl\"],\n 'title': title,\n 'content': item[\"snippet\"],\n 'img_src': imageinfo[\"url\"],\n 'resolution': f'{imageinfo[\"width\"]} x {imageinfo[\"height\"]}',\n 'thumbnail_src': imageinfo[\"thumburl\"],\n 'template': 'images.html',\n }\n results.append(result)\n\n return results\n", "path": "searx/engines/wikicommons.py"}]}
1,367
787
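After the patch, one Commons API request shape covers all four media categories, and only the `filetype:` prefix inside `gsrsearch` changes. The sketch below assembles the same URL offline; the parameters are copied from the engine and no network access is needed.

```python
from urllib.parse import urlencode

search_types = {
    "images": "bitmap|drawing",
    "videos": "video",
    "audio": "audio",
    "files": "multimedia|office|archive|3d",
}


def build_url(query, search_type="images", pageno=1, per_page=10):
    args = {
        "action": "query", "format": "json", "generator": "search",
        "gsrnamespace": 6, "gsrprop": "snippet",
        "prop": "info|imageinfo", "iiprop": "url|size|mime",
        "iiurlheight": 180,  # needed for the thumb url
        "gsrlimit": per_page,
        "gsroffset": per_page * (pageno - 1),
        "gsrsearch": f"filetype:{search_types[search_type]} {query}",
    }
    return "https://commons.wikimedia.org/w/api.php?" + urlencode(args, safe=":|")


print(build_url("sunflower", search_type="videos"))
```

Swapping `search_type` is all the real engine does per category, which keeps the response parser shared.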
gh_patches_debug_60750
rasdani/github-patches
git_diff
larq__larq-80
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Add docs on how to define your own quantizer </issue> <code> [start of larq/quantizers.py] 1 """A Quantizer defines the way of transforming a full precision input to a 2 quantized output and the pseudo-gradient method used for the backwards pass.""" 3 4 import tensorflow as tf 5 from larq import utils 6 7 8 def sign(x): 9 """A sign function that will never be zero""" 10 return tf.sign(tf.sign(x) + 0.1) 11 12 13 @tf.custom_gradient 14 def _binarize_with_identity_grad(x): 15 def grad(dy): 16 return dy 17 18 return sign(x), grad 19 20 21 @tf.custom_gradient 22 def _binarize_with_weighted_grad(x): 23 def grad(dy): 24 return (1 - tf.abs(x)) * 2 * dy 25 26 return sign(x), grad 27 28 29 @utils.register_keras_custom_object 30 def ste_sign(x): 31 r""" 32 Sign binarization function. 33 \\[ 34 q(x) = \begin{cases} 35 -1 & x < 0 \\\ 36 1 & x \geq 0 37 \end{cases} 38 \\] 39 40 The gradient is estimated using the Straight-Through Estimator 41 (essentially the binarization is replaced by a clipped identity on the 42 backward pass). 43 \\[\frac{\partial q(x)}{\partial x} = \begin{cases} 44 1 & \left|x\right| \leq 1 \\\ 45 0 & \left|x\right| > 1 46 \end{cases}\\] 47 48 # Arguments 49 x: Input tensor. 50 51 # Returns 52 Binarized tensor. 53 54 # References 55 - [Binarized Neural Networks: Training Deep Neural Networks with Weights and 56 Activations Constrained to +1 or -1](http://arxiv.org/abs/1602.02830) 57 """ 58 59 x = tf.clip_by_value(x, -1, 1) 60 61 return _binarize_with_identity_grad(x) 62 63 64 @utils.register_keras_custom_object 65 def magnitude_aware_sign(x): 66 r""" 67 Magnitude-aware sign for birealnet. 68 69 70 # Arguments 71 x: Input tensor 72 73 # Returns 74 Scaled binarized tensor (with values in $\{-a, a\}$, where $a$ is a float). 75 76 # References 77 - [Bi-Real Net: Enhancing the Performance of 1-bit CNNs With Improved 78 Representational Capability and Advanced Training 79 Algorithm](https://arxiv.org/abs/1808.00278) 80 81 """ 82 scale_factor = tf.stop_gradient( 83 tf.reduce_mean(tf.abs(x), axis=list(range(len(x.shape) - 1))) 84 ) 85 return scale_factor * ste_sign(x) 86 87 88 @utils.register_keras_custom_object 89 def approx_sign(x): 90 r""" 91 Sign binarization function. 92 \\[ 93 q(x) = \begin{cases} 94 -1 & x < 0 \\\ 95 1 & x \geq 0 96 \end{cases} 97 \\] 98 99 The gradient is estimated using the ApproxSign method. 100 \\[\frac{\partial q(x)}{\partial x} = \begin{cases} 101 (2 - 2 \left|x\right|) & \left|x\right| \leq 1 \\\ 102 0 & \left|x\right| > 1 103 \end{cases} 104 \\] 105 106 # Arguments 107 x: Input tensor. 108 109 # Returns 110 Binarized tensor. 
111 112 # References 113 - [Bi-Real Net: Enhancing the Performance of 1-bit CNNs With Improved 114 Representational Capability and Advanced 115 Training Algorithm](http://arxiv.org/abs/1808.00278) 116 """ 117 118 x = tf.clip_by_value(x, -1, 1) 119 120 return _binarize_with_weighted_grad(x) 121 122 123 def serialize(initializer): 124 return tf.keras.utils.serialize_keras_object(initializer) 125 126 127 def deserialize(name, custom_objects=None): 128 return tf.keras.utils.deserialize_keras_object( 129 name, 130 module_objects=globals(), 131 custom_objects=custom_objects, 132 printable_module_name="quantization function", 133 ) 134 135 136 def get(identifier): 137 if identifier is None: 138 return None 139 if isinstance(identifier, str): 140 return deserialize(str(identifier)) 141 if callable(identifier): 142 return identifier 143 raise ValueError( 144 f"Could not interpret quantization function identifier: {identifier}" 145 ) 146 [end of larq/quantizers.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/larq/quantizers.py b/larq/quantizers.py --- a/larq/quantizers.py +++ b/larq/quantizers.py @@ -64,7 +64,7 @@ @utils.register_keras_custom_object def magnitude_aware_sign(x): r""" - Magnitude-aware sign for birealnet. + Magnitude-aware sign for Bi-Real Net. # Arguments
{"golden_diff": "diff --git a/larq/quantizers.py b/larq/quantizers.py\n--- a/larq/quantizers.py\n+++ b/larq/quantizers.py\n@@ -64,7 +64,7 @@\n @utils.register_keras_custom_object\n def magnitude_aware_sign(x):\n r\"\"\"\n- Magnitude-aware sign for birealnet.\n+ Magnitude-aware sign for Bi-Real Net.\n \n \n # Arguments\n", "issue": "Add docs on how to define your own quantizer\n\n", "before_files": [{"content": "\"\"\"A Quantizer defines the way of transforming a full precision input to a\nquantized output and the pseudo-gradient method used for the backwards pass.\"\"\"\n\nimport tensorflow as tf\nfrom larq import utils\n\n\ndef sign(x):\n \"\"\"A sign function that will never be zero\"\"\"\n return tf.sign(tf.sign(x) + 0.1)\n\n\[email protected]_gradient\ndef _binarize_with_identity_grad(x):\n def grad(dy):\n return dy\n\n return sign(x), grad\n\n\[email protected]_gradient\ndef _binarize_with_weighted_grad(x):\n def grad(dy):\n return (1 - tf.abs(x)) * 2 * dy\n\n return sign(x), grad\n\n\[email protected]_keras_custom_object\ndef ste_sign(x):\n r\"\"\"\n Sign binarization function.\n \\\\[\n q(x) = \\begin{cases}\n -1 & x < 0 \\\\\\\n 1 & x \\geq 0\n \\end{cases}\n \\\\]\n\n The gradient is estimated using the Straight-Through Estimator\n (essentially the binarization is replaced by a clipped identity on the\n backward pass).\n \\\\[\\frac{\\partial q(x)}{\\partial x} = \\begin{cases}\n 1 & \\left|x\\right| \\leq 1 \\\\\\\n 0 & \\left|x\\right| > 1\n \\end{cases}\\\\]\n\n # Arguments\n x: Input tensor.\n\n # Returns\n Binarized tensor.\n\n # References\n - [Binarized Neural Networks: Training Deep Neural Networks with Weights and\n Activations Constrained to +1 or -1](http://arxiv.org/abs/1602.02830)\n \"\"\"\n\n x = tf.clip_by_value(x, -1, 1)\n\n return _binarize_with_identity_grad(x)\n\n\[email protected]_keras_custom_object\ndef magnitude_aware_sign(x):\n r\"\"\"\n Magnitude-aware sign for birealnet.\n\n\n # Arguments\n x: Input tensor\n\n # Returns\n Scaled binarized tensor (with values in $\\{-a, a\\}$, where $a$ is a float).\n\n # References\n - [Bi-Real Net: Enhancing the Performance of 1-bit CNNs With Improved\n Representational Capability and Advanced Training\n Algorithm](https://arxiv.org/abs/1808.00278)\n\n \"\"\"\n scale_factor = tf.stop_gradient(\n tf.reduce_mean(tf.abs(x), axis=list(range(len(x.shape) - 1)))\n )\n return scale_factor * ste_sign(x)\n\n\[email protected]_keras_custom_object\ndef approx_sign(x):\n r\"\"\"\n Sign binarization function.\n \\\\[\n q(x) = \\begin{cases}\n -1 & x < 0 \\\\\\\n 1 & x \\geq 0\n \\end{cases}\n \\\\]\n\n The gradient is estimated using the ApproxSign method.\n \\\\[\\frac{\\partial q(x)}{\\partial x} = \\begin{cases}\n (2 - 2 \\left|x\\right|) & \\left|x\\right| \\leq 1 \\\\\\\n 0 & \\left|x\\right| > 1\n \\end{cases}\n \\\\]\n\n # Arguments\n x: Input tensor.\n\n # Returns\n Binarized tensor.\n\n # References\n - [Bi-Real Net: Enhancing the Performance of 1-bit CNNs With Improved\n Representational Capability and Advanced\n Training Algorithm](http://arxiv.org/abs/1808.00278)\n \"\"\"\n\n x = tf.clip_by_value(x, -1, 1)\n\n return _binarize_with_weighted_grad(x)\n\n\ndef serialize(initializer):\n return tf.keras.utils.serialize_keras_object(initializer)\n\n\ndef deserialize(name, custom_objects=None):\n return tf.keras.utils.deserialize_keras_object(\n name,\n module_objects=globals(),\n custom_objects=custom_objects,\n printable_module_name=\"quantization function\",\n )\n\n\ndef get(identifier):\n if identifier is None:\n return 
None\n if isinstance(identifier, str):\n return deserialize(str(identifier))\n if callable(identifier):\n return identifier\n raise ValueError(\n f\"Could not interpret quantization function identifier: {identifier}\"\n )\n", "path": "larq/quantizers.py"}]}
1,856
100
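The issue asks for documentation on defining your own quantizer, and the `ste_sign`/`approx_sign` pair in the listing is the template: quantize on the forward pass, then supply a pseudo-gradient through `tf.custom_gradient`. A sketch of a user-defined rounding quantizer with a straight-through estimator; it assumes TensorFlow 2.x is installed and is not larq API:

```python
import tensorflow as tf


@tf.custom_gradient
def my_ste_round(x):
    def grad(dy):
        # Straight-through: treat rounding as the identity on the backward pass.
        return dy
    return tf.round(x), grad


def my_quantizer(x):
    # Clip first so the pseudo-gradient is zero outside the active range.
    return my_ste_round(tf.clip_by_value(x, -1.0, 1.0))


x = tf.Variable([0.2, -0.7, 1.4])
with tf.GradientTape() as tape:
    y = tf.reduce_sum(my_quantizer(x))
print(y.numpy(), tape.gradient(y, x).numpy())  # gradients flow only for |x| <= 1
```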
gh_patches_debug_33924
rasdani/github-patches
git_diff
PrefectHQ__prefect-710
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Context docs are broken For some reason the actual `context` class signature is not being documented. </issue> <code> [start of src/prefect/utilities/context.py] 1 # Licensed under LICENSE.md; also available at https://www.prefect.io/licenses/beta-eula 2 3 """ 4 This module implements the Prefect context that is available when tasks run. 5 6 Tasks can import prefect.context and access attributes that will be overwritten 7 when the task is run. 8 9 Example: 10 11 ```python 12 import prefect.context 13 with prefect.context(a=1, b=2): 14 print(prefect.context.a) # 1 15 print (prefect.context.a) # undefined 16 ``` 17 18 Prefect provides various key / value pairs in context that are always available during task runs: 19 20 | Variable | Description | 21 | :--- | --- | 22 | `scheduled_start_time` | an actual datetime object representing the scheduled start time for the Flow run; falls back to `now` for unscheduled runs | 23 | `date` | an actual datetime object representing the current time | 24 | `today` | the current date formatted as `YYYY-MM-DD`| 25 | `today_nodash` | the current date formatted as `YYYYMMDD`| 26 | `yesterday` | yesterday's date formatted as `YYYY-MM-DD`| 27 | `yesterday_nodash` | yesterday's date formatted as `YYYYMMDD`| 28 | `tomorrow` | tomorrow's date formatted as `YYYY-MM-DD`| 29 | `tomorrow_nodash` | tomorrow's date formatted as `YYYYMMDD`| 30 | `task_name` | the name of the current task | 31 """ 32 33 import contextlib 34 import threading 35 from typing import Any, Iterator, MutableMapping 36 37 from prefect.configuration import config 38 from prefect.utilities.collections import DotDict 39 40 41 class Context(DotDict, threading.local): 42 """ 43 A thread safe context store for Prefect data. 44 45 The `Context` is a `DotDict` subclass, and can be instantiated the same way. 46 47 Args: 48 - *args (Any): arguments to provide to the `DotDict` constructor (e.g., 49 an initial dictionary) 50 - *kwargs (Any): any key / value pairs to initialize this context with 51 """ 52 53 def __init__(self, *args, **kwargs) -> None: 54 super().__init__(*args, **kwargs) 55 if "context" in config: 56 self.update(config.context) 57 58 def __repr__(self) -> str: 59 return "<Context>" 60 61 @contextlib.contextmanager 62 def __call__(self, *args: MutableMapping, **kwargs: Any) -> Iterator["Context"]: 63 """ 64 A context manager for setting / resetting the Prefect context 65 66 Example: 67 import prefect.context 68 with prefect.context(dict(a=1, b=2), c=3): 69 print(prefect.context.a) # 1 70 """ 71 previous_context = self.copy() 72 try: 73 self.update(*args, **kwargs) 74 yield self 75 finally: 76 self.clear() 77 self.update(previous_context) 78 79 80 context = Context() 81 [end of src/prefect/utilities/context.py] [start of src/prefect/tasks/templates/jinja2.py] 1 # Licensed under LICENSE.md; also available at https://www.prefect.io/licenses/beta-eula 2 3 from typing import Any 4 5 from jinja2 import Template 6 7 import prefect 8 from prefect import Task 9 10 11 class JinjaTemplateTask(Task): 12 """ 13 This task contains a Jinja template which is formatted with the results of any 14 upstream tasks and returned. 15 16 Variables from `prefect.context` will also be used for rendering. 17 18 Args: 19 - template (str, optional): the optional _default_ template string to render at runtime; 20 can also be provided as a keyword to `run`, which takes precendence over this default. 
21 - **kwargs (optional): additional keyword arguments to pass to the 22 standard Task constructor 23 """ 24 25 def __init__(self, template: str = None, **kwargs: Any): 26 self.template = Template(template or "") 27 super().__init__(**kwargs) 28 29 def run(self, template: str = None, **format_kwargs: Any) -> str: # type: ignore 30 """ 31 Formats the Jinja Template with the provided kwargs. 32 33 Args: 34 - template (str, optional): the template string to render; if not 35 provided, `self.template` will be used 36 - **format_kwargs (optional): keyword arguments to use for 37 rendering; note that variables from `prefect.context` will also be used 38 39 Returns: 40 - str: the rendered string 41 """ 42 template = self.template if template is None else Template(template) 43 with prefect.context(**format_kwargs) as data: 44 return template.render(**data) 45 [end of src/prefect/tasks/templates/jinja2.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/prefect/tasks/templates/jinja2.py b/src/prefect/tasks/templates/jinja2.py --- a/src/prefect/tasks/templates/jinja2.py +++ b/src/prefect/tasks/templates/jinja2.py @@ -6,6 +6,7 @@ import prefect from prefect import Task +from prefect.utilities.tasks import defaults_from_attrs class JinjaTemplateTask(Task): @@ -23,9 +24,10 @@ """ def __init__(self, template: str = None, **kwargs: Any): - self.template = Template(template or "") + self.template = template or "" super().__init__(**kwargs) + @defaults_from_attrs("template") def run(self, template: str = None, **format_kwargs: Any) -> str: # type: ignore """ Formats the Jinja Template with the provided kwargs. @@ -39,6 +41,6 @@ Returns: - str: the rendered string """ - template = self.template if template is None else Template(template) + template = Template(template) with prefect.context(**format_kwargs) as data: return template.render(**data) diff --git a/src/prefect/utilities/context.py b/src/prefect/utilities/context.py --- a/src/prefect/utilities/context.py +++ b/src/prefect/utilities/context.py @@ -10,9 +10,11 @@ ```python import prefect.context + with prefect.context(a=1, b=2): print(prefect.context.a) # 1 -print (prefect.context.a) # undefined + +print(prefect.context.a) # undefined ``` Prefect provides various key / value pairs in context that are always available during task runs: @@ -28,6 +30,8 @@ | `tomorrow` | tomorrow's date formatted as `YYYY-MM-DD`| | `tomorrow_nodash` | tomorrow's date formatted as `YYYYMMDD`| | `task_name` | the name of the current task | + +Users can also provide values to context at runtime. """ import contextlib
{"golden_diff": "diff --git a/src/prefect/tasks/templates/jinja2.py b/src/prefect/tasks/templates/jinja2.py\n--- a/src/prefect/tasks/templates/jinja2.py\n+++ b/src/prefect/tasks/templates/jinja2.py\n@@ -6,6 +6,7 @@\n \n import prefect\n from prefect import Task\n+from prefect.utilities.tasks import defaults_from_attrs\n \n \n class JinjaTemplateTask(Task):\n@@ -23,9 +24,10 @@\n \"\"\"\n \n def __init__(self, template: str = None, **kwargs: Any):\n- self.template = Template(template or \"\")\n+ self.template = template or \"\"\n super().__init__(**kwargs)\n \n+ @defaults_from_attrs(\"template\")\n def run(self, template: str = None, **format_kwargs: Any) -> str: # type: ignore\n \"\"\"\n Formats the Jinja Template with the provided kwargs.\n@@ -39,6 +41,6 @@\n Returns:\n - str: the rendered string\n \"\"\"\n- template = self.template if template is None else Template(template)\n+ template = Template(template)\n with prefect.context(**format_kwargs) as data:\n return template.render(**data)\ndiff --git a/src/prefect/utilities/context.py b/src/prefect/utilities/context.py\n--- a/src/prefect/utilities/context.py\n+++ b/src/prefect/utilities/context.py\n@@ -10,9 +10,11 @@\n \n ```python\n import prefect.context\n+\n with prefect.context(a=1, b=2):\n print(prefect.context.a) # 1\n-print (prefect.context.a) # undefined\n+\n+print(prefect.context.a) # undefined\n ```\n \n Prefect provides various key / value pairs in context that are always available during task runs:\n@@ -28,6 +30,8 @@\n | `tomorrow` | tomorrow's date formatted as `YYYY-MM-DD`|\n | `tomorrow_nodash` | tomorrow's date formatted as `YYYYMMDD`|\n | `task_name` | the name of the current task |\n+\n+Users can also provide values to context at runtime.\n \"\"\"\n \n import contextlib\n", "issue": "Context docs are broken\nFor some reason the actual `context` class signature is not being documented.\n", "before_files": [{"content": "# Licensed under LICENSE.md; also available at https://www.prefect.io/licenses/beta-eula\n\n\"\"\"\nThis module implements the Prefect context that is available when tasks run.\n\nTasks can import prefect.context and access attributes that will be overwritten\nwhen the task is run.\n\nExample:\n\n```python\nimport prefect.context\nwith prefect.context(a=1, b=2):\n print(prefect.context.a) # 1\nprint (prefect.context.a) # undefined\n```\n\nPrefect provides various key / value pairs in context that are always available during task runs:\n\n| Variable | Description |\n| :--- | --- |\n| `scheduled_start_time` | an actual datetime object representing the scheduled start time for the Flow run; falls back to `now` for unscheduled runs |\n| `date` | an actual datetime object representing the current time |\n| `today` | the current date formatted as `YYYY-MM-DD`|\n| `today_nodash` | the current date formatted as `YYYYMMDD`|\n| `yesterday` | yesterday's date formatted as `YYYY-MM-DD`|\n| `yesterday_nodash` | yesterday's date formatted as `YYYYMMDD`|\n| `tomorrow` | tomorrow's date formatted as `YYYY-MM-DD`|\n| `tomorrow_nodash` | tomorrow's date formatted as `YYYYMMDD`|\n| `task_name` | the name of the current task |\n\"\"\"\n\nimport contextlib\nimport threading\nfrom typing import Any, Iterator, MutableMapping\n\nfrom prefect.configuration import config\nfrom prefect.utilities.collections import DotDict\n\n\nclass Context(DotDict, threading.local):\n \"\"\"\n A thread safe context store for Prefect data.\n\n The `Context` is a `DotDict` subclass, and can be instantiated the same way.\n\n Args:\n - *args (Any): 
arguments to provide to the `DotDict` constructor (e.g.,\n an initial dictionary)\n - *kwargs (Any): any key / value pairs to initialize this context with\n \"\"\"\n\n def __init__(self, *args, **kwargs) -> None:\n super().__init__(*args, **kwargs)\n if \"context\" in config:\n self.update(config.context)\n\n def __repr__(self) -> str:\n return \"<Context>\"\n\n @contextlib.contextmanager\n def __call__(self, *args: MutableMapping, **kwargs: Any) -> Iterator[\"Context\"]:\n \"\"\"\n A context manager for setting / resetting the Prefect context\n\n Example:\n import prefect.context\n with prefect.context(dict(a=1, b=2), c=3):\n print(prefect.context.a) # 1\n \"\"\"\n previous_context = self.copy()\n try:\n self.update(*args, **kwargs)\n yield self\n finally:\n self.clear()\n self.update(previous_context)\n\n\ncontext = Context()\n", "path": "src/prefect/utilities/context.py"}, {"content": "# Licensed under LICENSE.md; also available at https://www.prefect.io/licenses/beta-eula\n\nfrom typing import Any\n\nfrom jinja2 import Template\n\nimport prefect\nfrom prefect import Task\n\n\nclass JinjaTemplateTask(Task):\n \"\"\"\n This task contains a Jinja template which is formatted with the results of any\n upstream tasks and returned.\n\n Variables from `prefect.context` will also be used for rendering.\n\n Args:\n - template (str, optional): the optional _default_ template string to render at runtime;\n can also be provided as a keyword to `run`, which takes precendence over this default.\n - **kwargs (optional): additional keyword arguments to pass to the\n standard Task constructor\n \"\"\"\n\n def __init__(self, template: str = None, **kwargs: Any):\n self.template = Template(template or \"\")\n super().__init__(**kwargs)\n\n def run(self, template: str = None, **format_kwargs: Any) -> str: # type: ignore\n \"\"\"\n Formats the Jinja Template with the provided kwargs.\n\n Args:\n - template (str, optional): the template string to render; if not\n provided, `self.template` will be used\n - **format_kwargs (optional): keyword arguments to use for\n rendering; note that variables from `prefect.context` will also be used\n\n Returns:\n - str: the rendered string\n \"\"\"\n template = self.template if template is None else Template(template)\n with prefect.context(**format_kwargs) as data:\n return template.render(**data)\n", "path": "src/prefect/tasks/templates/jinja2.py"}]}
1,778
470
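The `Context.__call__` pattern in the listing (snapshot, update, yield, restore in `finally`) is what makes the docstring's nested-context example behave. A stripped-down reimplementation that keeps just that mechanism, with a plain dict standing in for `DotDict`:

```python
import contextlib
import threading


class Ctx(threading.local):
    def __init__(self):
        self.data = {}

    @contextlib.contextmanager
    def __call__(self, **kwargs):
        previous = dict(self.data)  # snapshot the current state
        try:
            self.data.update(kwargs)
            yield self.data
        finally:
            self.data = previous  # restore even if the body raised


ctx = Ctx()
with ctx(a=1):
    assert ctx.data["a"] == 1
assert "a" not in ctx.data  # restored on exit, as the docstring promises
```

The `finally` clause is the load-bearing piece: without it, an exception inside the block would leak the temporary keys into every later task run.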
gh_patches_debug_122
rasdani/github-patches
git_diff
XanaduAI__strawberryfields-581
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Dependency versions error #### Issue description I made a fork of this project and tried to setup a new virtual environment. ``` python -m venv sf-venv source sf-venv/bin/active.fish pip install -r requirements.txt ``` However, I got the following error ``` ERROR: Cannot install -r requirements.txt (line 4) and numpy>=1.20 because these package versions have conflicting dependencies. The conflict is caused by: The user requested numpy>=1.20 tensorflow 2.5.0 depends on numpy~=1.19.2 To fix this you could try to: 1. loosen the range of package versions you've specified 2. remove package versions to allow pip attempt to solve the dependency conflict ERROR: ResolutionImpossible: for help visit https://pip.pypa.io/en/latest/user_guide/#fixing-conflicting-dependencies ``` #### Additional information If it helps, I am using Python 3.9.4 and pip 21.1.1. A quick fix would be to downgrade the version of numpy in requirements.txt and solve the issue, but I am not sure it is the best way to go. </issue> <code> [start of setup.py] 1 # Copyright 2019 Xanadu Quantum Technologies Inc. 2 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 7 # http://www.apache.org/licenses/LICENSE-2.0 8 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 import os 15 import sys 16 17 from setuptools import setup, find_packages 18 19 20 with open("strawberryfields/_version.py") as f: 21 version = f.readlines()[-1].split()[-1].strip("\"'") 22 23 24 requirements = [ 25 "numpy>=1.17.4", 26 "scipy>=1.0.0", 27 "sympy>=1.5", 28 "networkx>=2.0", 29 "quantum-blackbird>=0.3.0", 30 "python-dateutil>=2.8.0", 31 "thewalrus>=0.15.0", 32 "numba", 33 "toml", 34 "appdirs", 35 "requests>=2.22.0", 36 "urllib3>=1.25.3", 37 ] 38 39 info = { 40 "name": "StrawberryFields", 41 "version": version, 42 "maintainer": "Xanadu Inc.", 43 "maintainer_email": "[email protected]", 44 "url": "https://github.com/XanaduAI/StrawberryFields", 45 "license": "Apache License 2.0", 46 "packages": find_packages(where="."), 47 "package_data": {"strawberryfields": ["backends/data/*", "apps/data/feature_data/*", 48 "apps/data/sample_data/*"]}, 49 "include_package_data": True, 50 "entry_points" : { 51 'console_scripts': [ 52 'sf=strawberryfields.cli:main' 53 ] 54 }, 55 "description": "Open source library for continuous-variable quantum computation", 56 "long_description": open("README.rst", encoding="utf-8").read(), 57 "long_description_content_type": "text/x-rst", 58 "provides": ["strawberryfields"], 59 "install_requires": requirements, 60 # 'extras_require': extra_requirements, 61 "command_options": { 62 "build_sphinx": {"version": ("setup.py", version), "release": ("setup.py", version)} 63 }, 64 } 65 66 classifiers = [ 67 "Development Status :: 4 - Beta", 68 "Environment :: Console", 69 "Intended Audience :: Science/Research", 70 "License :: OSI Approved :: Apache Software License", 71 "Natural Language :: English", 72 "Operating System :: POSIX", 73 "Operating System :: MacOS :: MacOS X", 74 "Operating System :: POSIX :: Linux", 75 "Operating System :: 
Microsoft :: Windows", 76 "Programming Language :: Python", 77 "Programming Language :: Python :: 3", 78 "Programming Language :: Python :: 3.7", 79 "Programming Language :: Python :: 3.8", 80 "Programming Language :: Python :: 3.9", 81 "Programming Language :: Python :: 3 :: Only", 82 "Topic :: Scientific/Engineering :: Physics", 83 ] 84 85 setup(classifiers=classifiers, **(info)) 86 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ requirements = [ - "numpy>=1.17.4", + "numpy>=1.19.2", "scipy>=1.0.0", "sympy>=1.5", "networkx>=2.0",
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -22,7 +22,7 @@\n \n \n requirements = [\n- \"numpy>=1.17.4\",\n+ \"numpy>=1.19.2\",\n \"scipy>=1.0.0\",\n \"sympy>=1.5\",\n \"networkx>=2.0\",\n", "issue": "Dependency versions error\n#### Issue description\r\nI made a fork of this project and tried to setup a new virtual environment.\r\n\r\n```\r\npython -m venv sf-venv\r\nsource sf-venv/bin/active.fish\r\npip install -r requirements.txt\r\n```\r\n\r\nHowever, I got the following error\r\n``` \r\nERROR: Cannot install -r requirements.txt (line 4) and numpy>=1.20 because these package versions have conflicting dependencies.\r\n\r\nThe conflict is caused by:\r\n The user requested numpy>=1.20\r\n tensorflow 2.5.0 depends on numpy~=1.19.2\r\n\r\nTo fix this you could try to:\r\n1. loosen the range of package versions you've specified\r\n2. remove package versions to allow pip attempt to solve the dependency conflict\r\n\r\nERROR: ResolutionImpossible: for help visit https://pip.pypa.io/en/latest/user_guide/#fixing-conflicting-dependencies\r\n```\r\n\r\n#### Additional information\r\n\r\nIf it helps, I am using Python 3.9.4 and pip 21.1.1. \r\n\r\nA quick fix would be to downgrade the version of numpy in requirements.txt and solve the issue, but I am not sure it is the best way to go.\r\n\n", "before_files": [{"content": "# Copyright 2019 Xanadu Quantum Technologies Inc.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\nwith open(\"strawberryfields/_version.py\") as f:\n version = f.readlines()[-1].split()[-1].strip(\"\\\"'\")\n\n\nrequirements = [\n \"numpy>=1.17.4\",\n \"scipy>=1.0.0\",\n \"sympy>=1.5\",\n \"networkx>=2.0\",\n \"quantum-blackbird>=0.3.0\",\n \"python-dateutil>=2.8.0\",\n \"thewalrus>=0.15.0\",\n \"numba\",\n \"toml\",\n \"appdirs\",\n \"requests>=2.22.0\",\n \"urllib3>=1.25.3\",\n]\n\ninfo = {\n \"name\": \"StrawberryFields\",\n \"version\": version,\n \"maintainer\": \"Xanadu Inc.\",\n \"maintainer_email\": \"[email protected]\",\n \"url\": \"https://github.com/XanaduAI/StrawberryFields\",\n \"license\": \"Apache License 2.0\",\n \"packages\": find_packages(where=\".\"),\n \"package_data\": {\"strawberryfields\": [\"backends/data/*\", \"apps/data/feature_data/*\",\n \"apps/data/sample_data/*\"]},\n \"include_package_data\": True,\n \"entry_points\" : {\n 'console_scripts': [\n 'sf=strawberryfields.cli:main'\n ]\n },\n \"description\": \"Open source library for continuous-variable quantum computation\",\n \"long_description\": open(\"README.rst\", encoding=\"utf-8\").read(),\n \"long_description_content_type\": \"text/x-rst\",\n \"provides\": [\"strawberryfields\"],\n \"install_requires\": requirements,\n # 'extras_require': extra_requirements,\n \"command_options\": {\n \"build_sphinx\": {\"version\": (\"setup.py\", version), \"release\": (\"setup.py\", version)}\n },\n}\n\nclassifiers = [\n \"Development Status :: 4 - Beta\",\n \"Environment :: Console\",\n \"Intended 
Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Natural Language :: English\",\n \"Operating System :: POSIX\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: Microsoft :: Windows\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"Topic :: Scientific/Engineering :: Physics\",\n]\n\nsetup(classifiers=classifiers, **(info))\n", "path": "setup.py"}]}
1,692
91
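The conflict in the record above is mechanical: pip cannot satisfy `numpy>=1.20` and tensorflow 2.5.0's `numpy~=1.19.2` at the same time, so the golden diff relaxes the project pin to `>=1.19.2`. A minimal sketch of verifying that resolution with the `packaging` library — an assumption on my part, since the record itself only edits `setup.py`; the version strings come straight from the error message:

```python
from packaging.specifiers import SpecifierSet

tensorflow = SpecifierSet("~=1.19.2")          # pin quoted in the error message
broken = SpecifierSet(">=1.20") & tensorflow   # the original request: unsatisfiable
fixed = SpecifierSet(">=1.19.2") & tensorflow  # the diff's choice

# No version can live in the broken intersection; 1.19.x satisfies the fixed one.
print(any(v in broken for v in ("1.19.2", "1.19.5", "1.20.0")))  # False
print("1.19.5" in fixed)                                         # True
```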
gh_patches_debug_19530
rasdani/github-patches
git_diff
mozmeao__snippets-service-995
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Fix NR reporting See https://github.com/mozmeao/infra/issues/1106 </issue> <code> [start of snippets/wsgi/app.py] 1 """ 2 WSGI config for snippets project. 3 4 It exposes the WSGI callable as a module-level variable named ``application``. 5 6 For more information on this file, see 7 https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/ 8 """ 9 import os 10 os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'snippets.settings') # NOQA 11 12 from django.core.wsgi import get_wsgi_application 13 14 import newrelic.agent 15 from decouple import config 16 from raven.contrib.django.raven_compat.middleware.wsgi import Sentry 17 18 application = get_wsgi_application() 19 20 application = Sentry(application) 21 22 # Add NewRelic 23 newrelic_ini = config('NEW_RELIC_CONFIG_FILE', default='newrelic.ini') 24 newrelic_license_key = config('NEW_RELIC_LICENSE_KEY', default=None) 25 if newrelic_ini and newrelic_license_key: 26 newrelic.agent.initialize(newrelic_ini) 27 application = newrelic.agent.wsgi_application()(application) 28 [end of snippets/wsgi/app.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
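<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@ def euclidean(a, b):
-    while b:
-        a, b = b, a % b
-    return a
+    if b == 0:
+        return a
+    return euclidean(b, a % b)


def bresenham(x0, y0, x1, y1):
    points = []
    dx = abs(x1 - x0)
    dy = abs(y1 - y0)
-    sx = 1 if x0 < x1 else -1
-    sy = 1 if y0 < y1 else -1
-    err = dx - dy
+    x, y = x0, y0
+    sx = -1 if x0 > x1 else 1
+    sy = -1 if y0 > y1 else 1

-    while True:
-        points.append((x0, y0))
-        if x0 == x1 and y0 == y1:
-            break
-        e2 = 2 * err
-        if e2 > -dy:
-            err -= dy
-            x0 += sx
-        if e2 < dx:
-            err += dx
-            y0 += sy
+    if dx > dy:
+        err = dx / 2.0
+        while x != x1:
+            points.append((x, y))
+            err -= dy
+            if err < 0:
+                y += sy
+                err += dx
+            x += sx
+    else:
+        err = dy / 2.0
+        while y != y1:
+            points.append((x, y))
+            err -= dx
+            if err < 0:
+                x += sx
+                err += dy
+            y += sy
+
+    points.append((x, y))
    return points
</patch>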
diff --git a/snippets/wsgi/app.py b/snippets/wsgi/app.py --- a/snippets/wsgi/app.py +++ b/snippets/wsgi/app.py @@ -6,22 +6,14 @@ For more information on this file, see https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/ """ -import os -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'snippets.settings') # NOQA - -from django.core.wsgi import get_wsgi_application - import newrelic.agent -from decouple import config -from raven.contrib.django.raven_compat.middleware.wsgi import Sentry +newrelic.agent.initialize('newrelic.ini') +import os # NOQA +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'snippets.settings') # NOQA + +from django.core.wsgi import get_wsgi_application # NOQA application = get_wsgi_application() +from raven.contrib.django.raven_compat.middleware.wsgi import Sentry # NOQA application = Sentry(application) - -# Add NewRelic -newrelic_ini = config('NEW_RELIC_CONFIG_FILE', default='newrelic.ini') -newrelic_license_key = config('NEW_RELIC_LICENSE_KEY', default=None) -if newrelic_ini and newrelic_license_key: - newrelic.agent.initialize(newrelic_ini) - application = newrelic.agent.wsgi_application()(application)
{"golden_diff": "diff --git a/snippets/wsgi/app.py b/snippets/wsgi/app.py\n--- a/snippets/wsgi/app.py\n+++ b/snippets/wsgi/app.py\n@@ -6,22 +6,14 @@\n For more information on this file, see\n https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/\n \"\"\"\n-import os\n-os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'snippets.settings') # NOQA\n-\n-from django.core.wsgi import get_wsgi_application\n-\n import newrelic.agent\n-from decouple import config\n-from raven.contrib.django.raven_compat.middleware.wsgi import Sentry\n+newrelic.agent.initialize('newrelic.ini')\n \n+import os # NOQA\n+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'snippets.settings') # NOQA\n+\n+from django.core.wsgi import get_wsgi_application # NOQA\n application = get_wsgi_application()\n \n+from raven.contrib.django.raven_compat.middleware.wsgi import Sentry # NOQA\n application = Sentry(application)\n-\n-# Add NewRelic\n-newrelic_ini = config('NEW_RELIC_CONFIG_FILE', default='newrelic.ini')\n-newrelic_license_key = config('NEW_RELIC_LICENSE_KEY', default=None)\n-if newrelic_ini and newrelic_license_key:\n- newrelic.agent.initialize(newrelic_ini)\n- application = newrelic.agent.wsgi_application()(application)\n", "issue": "Fix NR reporting\nSee https://github.com/mozmeao/infra/issues/1106\n", "before_files": [{"content": "\"\"\"\nWSGI config for snippets project.\n\nIt exposes the WSGI callable as a module-level variable named ``application``.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/\n\"\"\"\nimport os\nos.environ.setdefault('DJANGO_SETTINGS_MODULE', 'snippets.settings') # NOQA\n\nfrom django.core.wsgi import get_wsgi_application\n\nimport newrelic.agent\nfrom decouple import config\nfrom raven.contrib.django.raven_compat.middleware.wsgi import Sentry\n\napplication = get_wsgi_application()\n\napplication = Sentry(application)\n\n# Add NewRelic\nnewrelic_ini = config('NEW_RELIC_CONFIG_FILE', default='newrelic.ini')\nnewrelic_license_key = config('NEW_RELIC_LICENSE_KEY', default=None)\nif newrelic_ini and newrelic_license_key:\n newrelic.agent.initialize(newrelic_ini)\n application = newrelic.agent.wsgi_application()(application)\n", "path": "snippets/wsgi/app.py"}]}
816
312
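The reordering in this record's diff matters because the New Relic agent instruments libraries at import time: calling `newrelic.agent.initialize()` after Django has already been imported means the hooks never attach, which is what broke reporting. A sketch of the fixed module layout — it assumes `newrelic` and Django are installed and a `newrelic.ini` sits next to the app:

```python
# wsgi.py -- agent first, everything it should instrument second.
import newrelic.agent
newrelic.agent.initialize('newrelic.ini')  # must run before Django is imported

import os  # NOQA: E402  (deliberately below the agent init)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'snippets.settings')

from django.core.wsgi import get_wsgi_application  # NOQA: E402
application = get_wsgi_application()
```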
gh_patches_debug_11166
rasdani/github-patches
git_diff
DataDog__dd-agent-2443
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [marathon] Marathon plugin slows down agent when marathon has many apps running We are monitoring a marathon framework using datadog which has over 150 apps, and the marathon check seems to be slowing down the entire datadog process. After investigating what the plugin actually does, the problem seems to be this loop: https://github.com/DataDog/dd-agent/blob/5.4.4/checks.d/marathon.py#L46. It appears that the agent is sequentially hitting the API 150 times, which is enough to stop the agent from reporting metrics long enough to trigger some of our other alerts. </issue> <code> [start of checks.d/marathon.py] 1 # (C) Datadog, Inc. 2014-2016 2 # (C) graemej <[email protected]> 2014 3 # All rights reserved 4 # Licensed under Simplified BSD License (see LICENSE) 5 6 7 # stdlib 8 from urlparse import urljoin 9 10 # 3rd party 11 import requests 12 13 # project 14 from checks import AgentCheck 15 16 17 class Marathon(AgentCheck): 18 19 DEFAULT_TIMEOUT = 5 20 SERVICE_CHECK_NAME = 'marathon.can_connect' 21 22 APP_METRICS = [ 23 'backoffFactor', 24 'backoffSeconds', 25 'cpus', 26 'disk', 27 'instances', 28 'mem', 29 'taskRateLimit', 30 'tasksRunning', 31 'tasksStaged' 32 ] 33 34 def check(self, instance): 35 if 'url' not in instance: 36 raise Exception('Marathon instance missing "url" value.') 37 38 # Load values from the instance config 39 url = instance['url'] 40 user = instance.get('user') 41 password = instance.get('password') 42 if user is not None and password is not None: 43 auth = (user,password) 44 else: 45 auth = None 46 instance_tags = instance.get('tags', []) 47 default_timeout = self.init_config.get('default_timeout', self.DEFAULT_TIMEOUT) 48 timeout = float(instance.get('timeout', default_timeout)) 49 50 response = self.get_json(urljoin(url, "/v2/apps"), timeout, auth) 51 if response is not None: 52 self.gauge('marathon.apps', len(response['apps']), tags=instance_tags) 53 for app in response['apps']: 54 tags = ['app_id:' + app['id'], 'version:' + app['version']] + instance_tags 55 for attr in self.APP_METRICS: 56 if attr in app: 57 self.gauge('marathon.' + attr, app[attr], tags=tags) 58 59 query_url = urljoin(url, "/v2/apps/{0}/versions".format(app['id'])) 60 versions_reply = self.get_json(query_url, timeout, auth) 61 62 if versions_reply is not None: 63 self.gauge('marathon.versions', len(versions_reply['versions']), tags=tags) 64 65 def get_json(self, url, timeout, auth): 66 try: 67 r = requests.get(url, timeout=timeout, auth=auth) 68 r.raise_for_status() 69 except requests.exceptions.Timeout: 70 # If there's a timeout 71 self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL, 72 message='%s timed out after %s seconds.' % (url, timeout), 73 tags = ["url:{0}".format(url)]) 74 raise Exception("Timeout when hitting %s" % url) 75 76 except requests.exceptions.HTTPError: 77 self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL, 78 message='%s returned a status of %s' % (url, r.status_code), 79 tags = ["url:{0}".format(url)]) 80 raise Exception("Got %s when hitting %s" % (r.status_code, url)) 81 82 else: 83 self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK, 84 tags = ["url:{0}".format(url)] 85 ) 86 87 return r.json() 88 [end of checks.d/marathon.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/checks.d/marathon.py b/checks.d/marathon.py --- a/checks.d/marathon.py +++ b/checks.d/marathon.py @@ -56,12 +56,6 @@ if attr in app: self.gauge('marathon.' + attr, app[attr], tags=tags) - query_url = urljoin(url, "/v2/apps/{0}/versions".format(app['id'])) - versions_reply = self.get_json(query_url, timeout, auth) - - if versions_reply is not None: - self.gauge('marathon.versions', len(versions_reply['versions']), tags=tags) - def get_json(self, url, timeout, auth): try: r = requests.get(url, timeout=timeout, auth=auth)
{"golden_diff": "diff --git a/checks.d/marathon.py b/checks.d/marathon.py\n--- a/checks.d/marathon.py\n+++ b/checks.d/marathon.py\n@@ -56,12 +56,6 @@\n if attr in app:\n self.gauge('marathon.' + attr, app[attr], tags=tags)\n \n- query_url = urljoin(url, \"/v2/apps/{0}/versions\".format(app['id']))\n- versions_reply = self.get_json(query_url, timeout, auth)\n-\n- if versions_reply is not None:\n- self.gauge('marathon.versions', len(versions_reply['versions']), tags=tags)\n-\n def get_json(self, url, timeout, auth):\n try:\n r = requests.get(url, timeout=timeout, auth=auth)\n", "issue": "[marathon] Marathon plugin slows down agent when marathon has many apps running\nWe are monitoring a marathon framework using datadog which has over 150 apps, and the marathon check seems to be slowing down the entire datadog process.\n\nAfter investigating what the plugin actually does, the problem seems to be this loop: https://github.com/DataDog/dd-agent/blob/5.4.4/checks.d/marathon.py#L46. It appears that the agent is sequentially hitting the API 150 times, which is enough to stop the agent from reporting metrics long enough to trigger some of our other alerts.\n\n", "before_files": [{"content": "# (C) Datadog, Inc. 2014-2016\n# (C) graemej <[email protected]> 2014\n# All rights reserved\n# Licensed under Simplified BSD License (see LICENSE)\n\n\n# stdlib\nfrom urlparse import urljoin\n\n# 3rd party\nimport requests\n\n# project\nfrom checks import AgentCheck\n\n\nclass Marathon(AgentCheck):\n\n DEFAULT_TIMEOUT = 5\n SERVICE_CHECK_NAME = 'marathon.can_connect'\n\n APP_METRICS = [\n 'backoffFactor',\n 'backoffSeconds',\n 'cpus',\n 'disk',\n 'instances',\n 'mem',\n 'taskRateLimit',\n 'tasksRunning',\n 'tasksStaged'\n ]\n\n def check(self, instance):\n if 'url' not in instance:\n raise Exception('Marathon instance missing \"url\" value.')\n\n # Load values from the instance config\n url = instance['url']\n user = instance.get('user')\n password = instance.get('password')\n if user is not None and password is not None:\n auth = (user,password)\n else:\n auth = None\n instance_tags = instance.get('tags', [])\n default_timeout = self.init_config.get('default_timeout', self.DEFAULT_TIMEOUT)\n timeout = float(instance.get('timeout', default_timeout))\n\n response = self.get_json(urljoin(url, \"/v2/apps\"), timeout, auth)\n if response is not None:\n self.gauge('marathon.apps', len(response['apps']), tags=instance_tags)\n for app in response['apps']:\n tags = ['app_id:' + app['id'], 'version:' + app['version']] + instance_tags\n for attr in self.APP_METRICS:\n if attr in app:\n self.gauge('marathon.' + attr, app[attr], tags=tags)\n\n query_url = urljoin(url, \"/v2/apps/{0}/versions\".format(app['id']))\n versions_reply = self.get_json(query_url, timeout, auth)\n\n if versions_reply is not None:\n self.gauge('marathon.versions', len(versions_reply['versions']), tags=tags)\n\n def get_json(self, url, timeout, auth):\n try:\n r = requests.get(url, timeout=timeout, auth=auth)\n r.raise_for_status()\n except requests.exceptions.Timeout:\n # If there's a timeout\n self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,\n message='%s timed out after %s seconds.' 
% (url, timeout),\n tags = [\"url:{0}\".format(url)])\n raise Exception(\"Timeout when hitting %s\" % url)\n\n except requests.exceptions.HTTPError:\n self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,\n message='%s returned a status of %s' % (url, r.status_code),\n tags = [\"url:{0}\".format(url)])\n raise Exception(\"Got %s when hitting %s\" % (r.status_code, url))\n\n else:\n self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK,\n tags = [\"url:{0}\".format(url)]\n )\n\n return r.json()\n", "path": "checks.d/marathon.py"}]}
1,536
180
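The fix above is a classic N+1 removal: the check already receives every app from a single `/v2/apps` response, so the per-app `/versions` round trips — 150+ sequential HTTP calls on the reporter's cluster — are simply dropped. A hypothetical standalone sketch of the one-request shape, with the endpoint and field names taken from the record:

```python
import requests

def collect_app_metrics(base_url, timeout=5.0):
    # One request for the whole app list -- no follow-up call per app.
    resp = requests.get(f"{base_url}/v2/apps", timeout=timeout)
    resp.raise_for_status()
    apps = resp.json()["apps"]
    return {app["id"]: app.get("tasksRunning", 0) for app in apps}
```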
gh_patches_debug_17970
rasdani/github-patches
git_diff
bookwyrm-social__bookwyrm-604
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> link formatting not working properly I made a markdown link, but the "<a href" part was trimmed and garbled html remained </issue> <code> [start of bookwyrm/views/status.py] 1 ''' what are we here for if not for posting ''' 2 import re 3 from django.contrib.auth.decorators import login_required 4 from django.http import HttpResponseBadRequest 5 from django.shortcuts import get_object_or_404, redirect 6 from django.utils.decorators import method_decorator 7 from django.views import View 8 from markdown import markdown 9 10 from bookwyrm import forms, models 11 from bookwyrm.sanitize_html import InputHtmlParser 12 from bookwyrm.settings import DOMAIN 13 from bookwyrm.status import create_notification, delete_status 14 from bookwyrm.utils import regex 15 from .helpers import handle_remote_webfinger 16 17 18 # pylint: disable= no-self-use 19 @method_decorator(login_required, name='dispatch') 20 class CreateStatus(View): 21 ''' the view for *posting* ''' 22 def post(self, request, status_type): 23 ''' create status of whatever type ''' 24 status_type = status_type[0].upper() + status_type[1:] 25 26 try: 27 form = getattr(forms, '%sForm' % status_type)(request.POST) 28 except AttributeError: 29 return HttpResponseBadRequest() 30 if not form.is_valid(): 31 return redirect(request.headers.get('Referer', '/')) 32 33 status = form.save(commit=False) 34 if not status.sensitive and status.content_warning: 35 # the cw text field remains populated when you click "remove" 36 status.content_warning = None 37 status.save(broadcast=False) 38 39 # inspect the text for user tags 40 content = status.content 41 for (mention_text, mention_user) in find_mentions(content): 42 # add them to status mentions fk 43 status.mention_users.add(mention_user) 44 45 # turn the mention into a link 46 content = re.sub( 47 r'%s([^@]|$)' % mention_text, 48 r'<a href="%s">%s</a>\g<1>' % \ 49 (mention_user.remote_id, mention_text), 50 content) 51 52 # add reply parent to mentions and notify 53 if status.reply_parent: 54 status.mention_users.add(status.reply_parent.user) 55 56 if status.reply_parent.user.local: 57 create_notification( 58 status.reply_parent.user, 59 'REPLY', 60 related_user=request.user, 61 related_status=status 62 ) 63 64 # deduplicate mentions 65 status.mention_users.set(set(status.mention_users.all())) 66 # create mention notifications 67 for mention_user in status.mention_users.all(): 68 if status.reply_parent and mention_user == status.reply_parent.user: 69 continue 70 if mention_user.local: 71 create_notification( 72 mention_user, 73 'MENTION', 74 related_user=request.user, 75 related_status=status 76 ) 77 78 # don't apply formatting to generated notes 79 if not isinstance(status, models.GeneratedNote): 80 status.content = to_markdown(content) 81 # do apply formatting to quotes 82 if hasattr(status, 'quote'): 83 status.quote = to_markdown(status.quote) 84 85 status.save(created=True) 86 return redirect(request.headers.get('Referer', '/')) 87 88 89 class DeleteStatus(View): 90 ''' tombstone that bad boy ''' 91 def post(self, request, status_id): 92 ''' delete and tombstone a status ''' 93 status = get_object_or_404(models.Status, id=status_id) 94 95 # don't let people delete other people's statuses 96 if status.user != request.user: 97 return HttpResponseBadRequest() 98 99 # perform deletion 100 delete_status(status) 101 return redirect(request.headers.get('Referer', '/')) 102 103 def 
find_mentions(content): 104 ''' detect @mentions in raw status content ''' 105 for match in re.finditer(regex.strict_username, content): 106 username = match.group().strip().split('@')[1:] 107 if len(username) == 1: 108 # this looks like a local user (@user), fill in the domain 109 username.append(DOMAIN) 110 username = '@'.join(username) 111 112 mention_user = handle_remote_webfinger(username) 113 if not mention_user: 114 # we can ignore users we don't know about 115 continue 116 yield (match.group(), mention_user) 117 118 119 def format_links(content): 120 ''' detect and format links ''' 121 return re.sub( 122 r'([^(href=")]|^|\()(https?:\/\/(%s([\w\.\-_\/+&\?=:;,])*))' % \ 123 regex.domain, 124 r'\g<1><a href="\g<2>">\g<3></a>', 125 content) 126 127 def to_markdown(content): 128 ''' catch links and convert to markdown ''' 129 content = format_links(content) 130 content = markdown(content) 131 # sanitize resulting html 132 sanitizer = InputHtmlParser() 133 sanitizer.feed(content) 134 return sanitizer.get_output() 135 [end of bookwyrm/views/status.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/bookwyrm/views/status.py b/bookwyrm/views/status.py --- a/bookwyrm/views/status.py +++ b/bookwyrm/views/status.py @@ -48,7 +48,6 @@ r'<a href="%s">%s</a>\g<1>' % \ (mention_user.remote_id, mention_text), content) - # add reply parent to mentions and notify if status.reply_parent: status.mention_users.add(status.reply_parent.user) @@ -126,8 +125,8 @@ def to_markdown(content): ''' catch links and convert to markdown ''' - content = format_links(content) content = markdown(content) + content = format_links(content) # sanitize resulting html sanitizer = InputHtmlParser() sanitizer.feed(content)
{"golden_diff": "diff --git a/bookwyrm/views/status.py b/bookwyrm/views/status.py\n--- a/bookwyrm/views/status.py\n+++ b/bookwyrm/views/status.py\n@@ -48,7 +48,6 @@\n r'<a href=\"%s\">%s</a>\\g<1>' % \\\n (mention_user.remote_id, mention_text),\n content)\n-\n # add reply parent to mentions and notify\n if status.reply_parent:\n status.mention_users.add(status.reply_parent.user)\n@@ -126,8 +125,8 @@\n \n def to_markdown(content):\n ''' catch links and convert to markdown '''\n- content = format_links(content)\n content = markdown(content)\n+ content = format_links(content)\n # sanitize resulting html\n sanitizer = InputHtmlParser()\n sanitizer.feed(content)\n", "issue": "link formatting not working properly\nI made a markdown link, but the \"<a href\" part was trimmed and garbled html remained\n", "before_files": [{"content": "''' what are we here for if not for posting '''\nimport re\nfrom django.contrib.auth.decorators import login_required\nfrom django.http import HttpResponseBadRequest\nfrom django.shortcuts import get_object_or_404, redirect\nfrom django.utils.decorators import method_decorator\nfrom django.views import View\nfrom markdown import markdown\n\nfrom bookwyrm import forms, models\nfrom bookwyrm.sanitize_html import InputHtmlParser\nfrom bookwyrm.settings import DOMAIN\nfrom bookwyrm.status import create_notification, delete_status\nfrom bookwyrm.utils import regex\nfrom .helpers import handle_remote_webfinger\n\n\n# pylint: disable= no-self-use\n@method_decorator(login_required, name='dispatch')\nclass CreateStatus(View):\n ''' the view for *posting* '''\n def post(self, request, status_type):\n ''' create status of whatever type '''\n status_type = status_type[0].upper() + status_type[1:]\n\n try:\n form = getattr(forms, '%sForm' % status_type)(request.POST)\n except AttributeError:\n return HttpResponseBadRequest()\n if not form.is_valid():\n return redirect(request.headers.get('Referer', '/'))\n\n status = form.save(commit=False)\n if not status.sensitive and status.content_warning:\n # the cw text field remains populated when you click \"remove\"\n status.content_warning = None\n status.save(broadcast=False)\n\n # inspect the text for user tags\n content = status.content\n for (mention_text, mention_user) in find_mentions(content):\n # add them to status mentions fk\n status.mention_users.add(mention_user)\n\n # turn the mention into a link\n content = re.sub(\n r'%s([^@]|$)' % mention_text,\n r'<a href=\"%s\">%s</a>\\g<1>' % \\\n (mention_user.remote_id, mention_text),\n content)\n\n # add reply parent to mentions and notify\n if status.reply_parent:\n status.mention_users.add(status.reply_parent.user)\n\n if status.reply_parent.user.local:\n create_notification(\n status.reply_parent.user,\n 'REPLY',\n related_user=request.user,\n related_status=status\n )\n\n # deduplicate mentions\n status.mention_users.set(set(status.mention_users.all()))\n # create mention notifications\n for mention_user in status.mention_users.all():\n if status.reply_parent and mention_user == status.reply_parent.user:\n continue\n if mention_user.local:\n create_notification(\n mention_user,\n 'MENTION',\n related_user=request.user,\n related_status=status\n )\n\n # don't apply formatting to generated notes\n if not isinstance(status, models.GeneratedNote):\n status.content = to_markdown(content)\n # do apply formatting to quotes\n if hasattr(status, 'quote'):\n status.quote = to_markdown(status.quote)\n\n status.save(created=True)\n return redirect(request.headers.get('Referer', 
'/'))\n\n\nclass DeleteStatus(View):\n ''' tombstone that bad boy '''\n def post(self, request, status_id):\n ''' delete and tombstone a status '''\n status = get_object_or_404(models.Status, id=status_id)\n\n # don't let people delete other people's statuses\n if status.user != request.user:\n return HttpResponseBadRequest()\n\n # perform deletion\n delete_status(status)\n return redirect(request.headers.get('Referer', '/'))\n\ndef find_mentions(content):\n ''' detect @mentions in raw status content '''\n for match in re.finditer(regex.strict_username, content):\n username = match.group().strip().split('@')[1:]\n if len(username) == 1:\n # this looks like a local user (@user), fill in the domain\n username.append(DOMAIN)\n username = '@'.join(username)\n\n mention_user = handle_remote_webfinger(username)\n if not mention_user:\n # we can ignore users we don't know about\n continue\n yield (match.group(), mention_user)\n\n\ndef format_links(content):\n ''' detect and format links '''\n return re.sub(\n r'([^(href=\")]|^|\\()(https?:\\/\\/(%s([\\w\\.\\-_\\/+&\\?=:;,])*))' % \\\n regex.domain,\n r'\\g<1><a href=\"\\g<2>\">\\g<3></a>',\n content)\n\ndef to_markdown(content):\n ''' catch links and convert to markdown '''\n content = format_links(content)\n content = markdown(content)\n # sanitize resulting html\n sanitizer = InputHtmlParser()\n sanitizer.feed(content)\n return sanitizer.get_output()\n", "path": "bookwyrm/views/status.py"}]}
1,843
178
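The one-line reorder in this record's diff is the whole fix: running `format_links()` before `markdown()` let the URL regex chew on the inside of a markdown link, trimming the generated `<a href`. A reduced sketch of the corrected ordering — the regex here is a deliberately simplified stand-in for bookwyrm's, not the real one:

```python
import re
from markdown import markdown  # pip install markdown

def format_links(content):
    # Simplified linkifier: wrap bare URLs, but skip ones already inside an
    # href="..." attribute (the leading negated class guards against that).
    return re.sub(
        r'([^(href=")]|^|\()(https?://[\w./\-]+)',
        r'\g<1><a href="\g<2>">\g<2></a>',
        content,
    )

def to_markdown(content):
    content = markdown(content)   # build the <a> tags first...
    return format_links(content)  # ...then linkify whatever is still bare

print(to_markdown("[docs](https://example.com) and https://example.org"))
```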
gh_patches_debug_15384
rasdani/github-patches
git_diff
bridgecrewio__checkov-2303
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> CKV_GIT_3 should not be triggered on archived repositories **Describe the issue** _CKV_GIT_3_ currently gets triggered also on archived GitHub repositories. When archiving a repository the configuration `vulnerability_alerts` will get changed to `false` automatically. It's also not possible to turn it on again on an archived repository. _CKV_GIT_3_ should be changed to ignore archived repositories. **Examples** ```terraform resource "github_repository" "test" { name = "test" visibility = "private" archived = true vulnerability_alerts = false } ``` **Version (please complete the following information):** - Starting with Checkov Version 2.0.764 **Additional context** See the [GitHub documentation](https://docs.github.com/en/code-security/supply-chain-security/managing-vulnerabilities-in-your-projects-dependencies/configuring-dependabot-security-updates#supported-repositories) that Dependabot is only supported on non-archived repositories. </issue> <code> [start of checkov/terraform/checks/resource/github/RepositoryEnableVulnerabilityAlerts.py] 1 from typing import Any 2 3 from checkov.common.models.enums import CheckCategories, CheckResult 4 from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceCheck 5 6 7 class GithubRepositoryVulnerabilityAlerts(BaseResourceCheck): 8 def __init__(self) -> None: 9 name = "Ensure GitHub repository has vulnerability alerts enabled" 10 id = "CKV_GIT_3" 11 supported_resources = ["github_repository"] 12 categories = [CheckCategories.GENERAL_SECURITY] 13 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) 14 15 def scan_resource_conf(self, conf) -> CheckResult: 16 # GitHub enables the alerts on public repos but disables them on private repos by default. 17 # is private repo 18 if conf.get("private") == [True] or conf.get("visibility") in [["private"], ["internal"]]: 19 if conf.get("vulnerability_alerts"): 20 return CheckResult.PASSED 21 return CheckResult.FAILED 22 # is public repo 23 if conf.get("vulnerability_alerts") == [False]: 24 return CheckResult.FAILED 25 return CheckResult.PASSED 26 27 28 check = GithubRepositoryVulnerabilityAlerts() 29 [end of checkov/terraform/checks/resource/github/RepositoryEnableVulnerabilityAlerts.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/checkov/terraform/checks/resource/github/RepositoryEnableVulnerabilityAlerts.py b/checkov/terraform/checks/resource/github/RepositoryEnableVulnerabilityAlerts.py --- a/checkov/terraform/checks/resource/github/RepositoryEnableVulnerabilityAlerts.py +++ b/checkov/terraform/checks/resource/github/RepositoryEnableVulnerabilityAlerts.py @@ -13,6 +13,9 @@ super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) def scan_resource_conf(self, conf) -> CheckResult: + # GitHub disables the alerts when archiving the repository without an option to turn them on again. + if conf.get("archived") == [True]: + return CheckResult.PASSED # GitHub enables the alerts on public repos but disables them on private repos by default. # is private repo if conf.get("private") == [True] or conf.get("visibility") in [["private"], ["internal"]]:
{"golden_diff": "diff --git a/checkov/terraform/checks/resource/github/RepositoryEnableVulnerabilityAlerts.py b/checkov/terraform/checks/resource/github/RepositoryEnableVulnerabilityAlerts.py\n--- a/checkov/terraform/checks/resource/github/RepositoryEnableVulnerabilityAlerts.py\n+++ b/checkov/terraform/checks/resource/github/RepositoryEnableVulnerabilityAlerts.py\n@@ -13,6 +13,9 @@\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n \n def scan_resource_conf(self, conf) -> CheckResult:\n+ # GitHub disables the alerts when archiving the repository without an option to turn them on again.\n+ if conf.get(\"archived\") == [True]:\n+ return CheckResult.PASSED\n # GitHub enables the alerts on public repos but disables them on private repos by default.\n # is private repo\n if conf.get(\"private\") == [True] or conf.get(\"visibility\") in [[\"private\"], [\"internal\"]]:\n", "issue": "CKV_GIT_3 should not be triggered on archived repositories\n**Describe the issue**\r\n_CKV_GIT_3_ currently gets triggered also on archived GitHub repositories. When archiving a repository the configuration `vulnerability_alerts` will get changed to `false` automatically. It's also not possible to turn it on again on an archived repository. _CKV_GIT_3_ should be changed to ignore archived repositories.\r\n\r\n**Examples**\r\n\r\n```terraform\r\nresource \"github_repository\" \"test\" {\r\n name = \"test\"\r\n visibility = \"private\"\r\n archived = true\r\n vulnerability_alerts = false\r\n}\r\n```\r\n\r\n**Version (please complete the following information):**\r\n - Starting with Checkov Version 2.0.764\r\n\r\n**Additional context**\r\nSee the [GitHub documentation](https://docs.github.com/en/code-security/supply-chain-security/managing-vulnerabilities-in-your-projects-dependencies/configuring-dependabot-security-updates#supported-repositories) that Dependabot is only supported on non-archived repositories.\r\n\n", "before_files": [{"content": "from typing import Any\n\nfrom checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_value_check import BaseResourceCheck\n\n\nclass GithubRepositoryVulnerabilityAlerts(BaseResourceCheck):\n def __init__(self) -> None:\n name = \"Ensure GitHub repository has vulnerability alerts enabled\"\n id = \"CKV_GIT_3\"\n supported_resources = [\"github_repository\"]\n categories = [CheckCategories.GENERAL_SECURITY]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf) -> CheckResult:\n # GitHub enables the alerts on public repos but disables them on private repos by default.\n # is private repo\n if conf.get(\"private\") == [True] or conf.get(\"visibility\") in [[\"private\"], [\"internal\"]]:\n if conf.get(\"vulnerability_alerts\"):\n return CheckResult.PASSED\n return CheckResult.FAILED\n # is public repo\n if conf.get(\"vulnerability_alerts\") == [False]:\n return CheckResult.FAILED\n return CheckResult.PASSED\n\n\ncheck = GithubRepositoryVulnerabilityAlerts()\n", "path": "checkov/terraform/checks/resource/github/RepositoryEnableVulnerabilityAlerts.py"}]}
1,093
220
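The three added lines short-circuit the check before any of the private/public logic runs, because archiving a GitHub repository force-disables vulnerability alerts with no way to re-enable them. A standalone restatement of the fixed decision table — `conf` mimics checkov's parsed-HCL shape shown in the record, where every attribute value arrives wrapped in a list:

```python
def scan_resource_conf(conf):
    if conf.get("archived") == [True]:
        return "PASSED"  # alerts are off by force; nothing to enforce
    if conf.get("private") == [True] or conf.get("visibility") in [["private"], ["internal"]]:
        return "PASSED" if conf.get("vulnerability_alerts") else "FAILED"
    return "FAILED" if conf.get("vulnerability_alerts") == [False] else "PASSED"

print(scan_resource_conf({"archived": [True], "vulnerability_alerts": [False]}))  # PASSED
print(scan_resource_conf({"private": [True]}))  # FAILED -- private repo, no alerts set
```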
gh_patches_debug_17636
rasdani/github-patches
git_diff
svthalia__concrexit-3528
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Albums pagination doesn't maintain search terms ### Describe the bug If you open https://thalia.nu/members/photos/?keywords=borrel#photos-albums, then go to the second page using the pagination buttons, the search term is dropped. ### Expected behaviour <!-- A clear and concise description of what you expected to happen. --> The search term remains ### Additional context <!-- Add any other context about the problem here. --> Could be since we introduced the shared paginated view template? So it's quite likely this occurs for other paginated filterable/searchable views as well. </issue> <code> [start of website/thaliawebsite/views.py] 1 """General views for the website.""" 2 3 from django.contrib.admin.views.decorators import staff_member_required 4 from django.contrib.auth.views import LoginView, PasswordResetView 5 from django.core.exceptions import PermissionDenied 6 from django.http import HttpResponse, HttpResponseForbidden 7 from django.shortcuts import redirect 8 from django.utils.decorators import method_decorator 9 from django.views.generic import ListView, TemplateView 10 from django.views.generic.base import View 11 12 from django_ratelimit.decorators import ratelimit 13 14 15 class IndexView(TemplateView): 16 template_name = "index.html" 17 18 19 @method_decorator(staff_member_required, "dispatch") 20 class TestCrashView(View): 21 """Test view to intentionally crash to test the error handling.""" 22 23 def dispatch(self, request, *args, **kwargs) -> HttpResponse: 24 if not request.user.is_superuser: 25 return HttpResponseForbidden("This is not for you") 26 raise Exception("Test exception") 27 28 29 class PagedView(ListView): 30 """A ListView with automatic pagination.""" 31 32 def get_context_data(self, **kwargs) -> dict: 33 context = super().get_context_data(**kwargs) 34 page = context["page_obj"].number 35 paginator = context["paginator"] 36 37 # Show the two pages before and after the current page 38 page_range_start = max(1, page - 2) 39 page_range_stop = min(page + 3, paginator.num_pages + 1) 40 41 # Add extra pages if we show less than 5 pages 42 page_range_start = min(page_range_start, page_range_stop - 5) 43 page_range_start = max(1, page_range_start) 44 45 # Add extra pages if we still show less than 5 pages 46 page_range_stop = max(page_range_stop, page_range_start + 5) 47 page_range_stop = min(page_range_stop, paginator.num_pages + 1) 48 49 page_range = range(page_range_start, page_range_stop) 50 51 context.update( 52 { 53 "page_range": page_range, 54 } 55 ) 56 57 return context 58 59 60 class RateLimitedPasswordResetView(PasswordResetView): 61 @method_decorator(ratelimit(key="ip", rate="5/h")) 62 def post(self, request, *args, **kwargs): 63 return super().post(request, *args, **kwargs) 64 65 66 class RateLimitedLoginView(LoginView): 67 @method_decorator(ratelimit(key="ip", rate="30/h")) 68 @method_decorator(ratelimit(key="post:username", rate="30/h")) 69 def post(self, request, *args, **kwargs): 70 return super().post(request, *args, **kwargs) 71 72 73 def rate_limited_view(request, *args, **kwargs): 74 return HttpResponse("You are rate limited", status=429) 75 76 77 def admin_unauthorized_view(request): 78 if not request.member: 79 url = "/user/login" 80 args = request.META.get("QUERY_STRING", "") 81 if args: 82 url = f"{url}?{args}" 83 return redirect(url) 84 elif not request.member.is_staff and not request.member.is_superuser: 85 raise 
PermissionDenied("You are not allowed to access the administration page.") 86 else: 87 return redirect(request.GET.get("next", "/")) 88 [end of website/thaliawebsite/views.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/website/thaliawebsite/views.py b/website/thaliawebsite/views.py --- a/website/thaliawebsite/views.py +++ b/website/thaliawebsite/views.py @@ -31,6 +31,7 @@ def get_context_data(self, **kwargs) -> dict: context = super().get_context_data(**kwargs) + print(kwargs) page = context["page_obj"].number paginator = context["paginator"] @@ -48,9 +49,17 @@ page_range = range(page_range_start, page_range_stop) + querydict = self.request.GET.copy() + + if "page" in querydict: + del querydict["page"] + context.update( { "page_range": page_range, + "base_url": f"{self.request.path}?{querydict.urlencode()}&" + if querydict + else f"{self.request.path}?", } )
{"golden_diff": "diff --git a/website/thaliawebsite/views.py b/website/thaliawebsite/views.py\n--- a/website/thaliawebsite/views.py\n+++ b/website/thaliawebsite/views.py\n@@ -31,6 +31,7 @@\n \n def get_context_data(self, **kwargs) -> dict:\n context = super().get_context_data(**kwargs)\n+ print(kwargs)\n page = context[\"page_obj\"].number\n paginator = context[\"paginator\"]\n \n@@ -48,9 +49,17 @@\n \n page_range = range(page_range_start, page_range_stop)\n \n+ querydict = self.request.GET.copy()\n+\n+ if \"page\" in querydict:\n+ del querydict[\"page\"]\n+\n context.update(\n {\n \"page_range\": page_range,\n+ \"base_url\": f\"{self.request.path}?{querydict.urlencode()}&\"\n+ if querydict\n+ else f\"{self.request.path}?\",\n }\n )\n", "issue": "Albums pagination doesn't maintain search terms\n### Describe the bug\r\nIf you open https://thalia.nu/members/photos/?keywords=borrel#photos-albums, then go to the second page using the pagination buttons, the search term is dropped.\r\n\r\n### Expected behaviour\r\n<!-- A clear and concise description of what you expected to happen. -->\r\nThe search term remains\r\n\r\n### Additional context\r\n<!-- Add any other context about the problem here. -->\r\nCould be since we introduced the shared paginated view template? So it's quite likely this occurs for other paginated filterable/searchable views as well.\n", "before_files": [{"content": "\"\"\"General views for the website.\"\"\"\n\nfrom django.contrib.admin.views.decorators import staff_member_required\nfrom django.contrib.auth.views import LoginView, PasswordResetView\nfrom django.core.exceptions import PermissionDenied\nfrom django.http import HttpResponse, HttpResponseForbidden\nfrom django.shortcuts import redirect\nfrom django.utils.decorators import method_decorator\nfrom django.views.generic import ListView, TemplateView\nfrom django.views.generic.base import View\n\nfrom django_ratelimit.decorators import ratelimit\n\n\nclass IndexView(TemplateView):\n template_name = \"index.html\"\n\n\n@method_decorator(staff_member_required, \"dispatch\")\nclass TestCrashView(View):\n \"\"\"Test view to intentionally crash to test the error handling.\"\"\"\n\n def dispatch(self, request, *args, **kwargs) -> HttpResponse:\n if not request.user.is_superuser:\n return HttpResponseForbidden(\"This is not for you\")\n raise Exception(\"Test exception\")\n\n\nclass PagedView(ListView):\n \"\"\"A ListView with automatic pagination.\"\"\"\n\n def get_context_data(self, **kwargs) -> dict:\n context = super().get_context_data(**kwargs)\n page = context[\"page_obj\"].number\n paginator = context[\"paginator\"]\n\n # Show the two pages before and after the current page\n page_range_start = max(1, page - 2)\n page_range_stop = min(page + 3, paginator.num_pages + 1)\n\n # Add extra pages if we show less than 5 pages\n page_range_start = min(page_range_start, page_range_stop - 5)\n page_range_start = max(1, page_range_start)\n\n # Add extra pages if we still show less than 5 pages\n page_range_stop = max(page_range_stop, page_range_start + 5)\n page_range_stop = min(page_range_stop, paginator.num_pages + 1)\n\n page_range = range(page_range_start, page_range_stop)\n\n context.update(\n {\n \"page_range\": page_range,\n }\n )\n\n return context\n\n\nclass RateLimitedPasswordResetView(PasswordResetView):\n @method_decorator(ratelimit(key=\"ip\", rate=\"5/h\"))\n def post(self, request, *args, **kwargs):\n return super().post(request, *args, **kwargs)\n\n\nclass RateLimitedLoginView(LoginView):\n 
@method_decorator(ratelimit(key=\"ip\", rate=\"30/h\"))\n @method_decorator(ratelimit(key=\"post:username\", rate=\"30/h\"))\n def post(self, request, *args, **kwargs):\n return super().post(request, *args, **kwargs)\n\n\ndef rate_limited_view(request, *args, **kwargs):\n return HttpResponse(\"You are rate limited\", status=429)\n\n\ndef admin_unauthorized_view(request):\n if not request.member:\n url = \"/user/login\"\n args = request.META.get(\"QUERY_STRING\", \"\")\n if args:\n url = f\"{url}?{args}\"\n return redirect(url)\n elif not request.member.is_staff and not request.member.is_superuser:\n raise PermissionDenied(\"You are not allowed to access the administration page.\")\n else:\n return redirect(request.GET.get(\"next\", \"/\"))\n", "path": "website/thaliawebsite/views.py"}]}
1,507
217
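The fix builds pagination links from a copy of the current query string with only `page` removed, so `?keywords=borrel` survives a page change. A framework-agnostic sketch of the same idea — a plain dict stands in for Django's `QueryDict`, and the trailing `&`/`?` mirrors the diff's `base_url`, which templates complete with `page=N`:

```python
from urllib.parse import urlencode

def page_base_url(path, query_params):
    # Keep every current parameter except the page number itself.
    params = {k: v for k, v in query_params.items() if k != "page"}
    return f"{path}?{urlencode(params)}&" if params else f"{path}?"

print(page_base_url("/members/photos/", {"keywords": "borrel", "page": "2"}))
# -> /members/photos/?keywords=borrel&
```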
gh_patches_debug_23410
rasdani/github-patches
git_diff
OCA__bank-payment-630
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [12.0][BUG] account_payment_sale Hi I have found a bug in module account_payment_sale, but I am not sure how to fix it nicely. The payment_mode_id does not propagate from the sale order to the invoice. I guess the tests are a bit to naive, that is why they pass anyway. Here we try to propagate the payment mode : https://github.com/OCA/bank-payment/blob/12.0/account_payment_sale/models/sale_order.py#L35 Here, the invoice is created with the right value (coming from the SO) : https://github.com/OCA/OCB/blob/12.0/addons/sale/models/sale.py#L521 And it is overriden here https://github.com/OCA/OCB/blob/12.0/addons/sale/models/sale.py#L570 I really don't get why they have refactored it this way, they create the invoice and then they override a lot of values... And I do not really see a clean solution to solve this. Any idea? </issue> <code> [start of account_payment_sale/models/sale_order.py] 1 # Copyright 2014-2016 Akretion - Alexis de Lattre 2 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 3 4 from odoo import models, fields, api 5 6 7 class SaleOrder(models.Model): 8 _inherit = "sale.order" 9 10 payment_mode_id = fields.Many2one( 11 'account.payment.mode', string='Payment Mode', 12 domain=[('payment_type', '=', 'inbound')]) 13 14 def _get_payment_mode_vals(self, vals): 15 if self.payment_mode_id: 16 vals['payment_mode_id'] = self.payment_mode_id.id 17 if self.payment_mode_id.bank_account_link == 'fixed': 18 vals['partner_bank_id'] =\ 19 self.payment_mode_id.fixed_journal_id.bank_account_id.id 20 return vals 21 22 @api.onchange('partner_id') 23 def onchange_partner_id(self): 24 res = super().onchange_partner_id() 25 if self.partner_id: 26 self.payment_mode_id = self.partner_id.customer_payment_mode_id 27 else: 28 self.payment_mode_id = False 29 return res 30 31 @api.multi 32 def _prepare_invoice(self): 33 """Copy bank partner from sale order to invoice""" 34 vals = super()._prepare_invoice() 35 return self._get_payment_mode_vals(vals) 36 [end of account_payment_sale/models/sale_order.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/account_payment_sale/models/sale_order.py b/account_payment_sale/models/sale_order.py --- a/account_payment_sale/models/sale_order.py +++ b/account_payment_sale/models/sale_order.py @@ -33,3 +33,31 @@ """Copy bank partner from sale order to invoice""" vals = super()._prepare_invoice() return self._get_payment_mode_vals(vals) + + def _finalize_invoices(self, invoices, references): + """ + Invoked after creating invoices at the end of action_invoice_create. + + We must override this method since the onchange on partner is called by + the base method and therefore will change the specific payment_mode set + on the SO if one is defined on the partner.. + + :param invoices: {group_key: invoice} + :param references: {invoice: order} + """ + payment_vals_by_invoice = {} + for invoice in invoices.values(): + payment_vals_by_invoice[invoice] = { + 'payment_mode_id': invoice.payment_mode_id.id, + 'partner_bank_id': invoice.partner_bank_id.id + } + res = super()._finalize_invoices(invoices, references) + for invoice in invoices.values(): + payment_vals = payment_vals_by_invoice[invoice] + if invoice.payment_mode_id.id == payment_vals['payment_mode_id']: + payment_vals.pop("payment_mode_id") + if invoice.partner_bank_id.id == payment_vals["partner_bank_id"]: + payment_vals.pop("partner_bank_id") + if payment_vals: + invoice.write(payment_vals) + return res
{"golden_diff": "diff --git a/account_payment_sale/models/sale_order.py b/account_payment_sale/models/sale_order.py\n--- a/account_payment_sale/models/sale_order.py\n+++ b/account_payment_sale/models/sale_order.py\n@@ -33,3 +33,31 @@\n \"\"\"Copy bank partner from sale order to invoice\"\"\"\n vals = super()._prepare_invoice()\n return self._get_payment_mode_vals(vals)\n+\n+ def _finalize_invoices(self, invoices, references):\n+ \"\"\"\n+ Invoked after creating invoices at the end of action_invoice_create.\n+\n+ We must override this method since the onchange on partner is called by\n+ the base method and therefore will change the specific payment_mode set\n+ on the SO if one is defined on the partner..\n+\n+ :param invoices: {group_key: invoice}\n+ :param references: {invoice: order}\n+ \"\"\"\n+ payment_vals_by_invoice = {}\n+ for invoice in invoices.values():\n+ payment_vals_by_invoice[invoice] = {\n+ 'payment_mode_id': invoice.payment_mode_id.id,\n+ 'partner_bank_id': invoice.partner_bank_id.id\n+ }\n+ res = super()._finalize_invoices(invoices, references)\n+ for invoice in invoices.values():\n+ payment_vals = payment_vals_by_invoice[invoice]\n+ if invoice.payment_mode_id.id == payment_vals['payment_mode_id']:\n+ payment_vals.pop(\"payment_mode_id\")\n+ if invoice.partner_bank_id.id == payment_vals[\"partner_bank_id\"]:\n+ payment_vals.pop(\"partner_bank_id\")\n+ if payment_vals:\n+ invoice.write(payment_vals)\n+ return res\n", "issue": "[12.0][BUG] account_payment_sale\nHi\r\nI have found a bug in module account_payment_sale, but I am not sure how to fix it nicely.\r\nThe payment_mode_id does not propagate from the sale order to the invoice. \r\nI guess the tests are a bit to naive, that is why they pass anyway.\r\nHere we try to propagate the payment mode : https://github.com/OCA/bank-payment/blob/12.0/account_payment_sale/models/sale_order.py#L35\r\nHere, the invoice is created with the right value (coming from the SO) : https://github.com/OCA/OCB/blob/12.0/addons/sale/models/sale.py#L521\r\nAnd it is overriden here https://github.com/OCA/OCB/blob/12.0/addons/sale/models/sale.py#L570\r\n\r\nI really don't get why they have refactored it this way, they create the invoice and then they override a lot of values...\r\nAnd I do not really see a clean solution to solve this.\r\nAny idea?\n", "before_files": [{"content": "# Copyright 2014-2016 Akretion - Alexis de Lattre\n# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).\n\nfrom odoo import models, fields, api\n\n\nclass SaleOrder(models.Model):\n _inherit = \"sale.order\"\n\n payment_mode_id = fields.Many2one(\n 'account.payment.mode', string='Payment Mode',\n domain=[('payment_type', '=', 'inbound')])\n\n def _get_payment_mode_vals(self, vals):\n if self.payment_mode_id:\n vals['payment_mode_id'] = self.payment_mode_id.id\n if self.payment_mode_id.bank_account_link == 'fixed':\n vals['partner_bank_id'] =\\\n self.payment_mode_id.fixed_journal_id.bank_account_id.id\n return vals\n\n @api.onchange('partner_id')\n def onchange_partner_id(self):\n res = super().onchange_partner_id()\n if self.partner_id:\n self.payment_mode_id = self.partner_id.customer_payment_mode_id\n else:\n self.payment_mode_id = False\n return res\n\n @api.multi\n def _prepare_invoice(self):\n \"\"\"Copy bank partner from sale order to invoice\"\"\"\n vals = super()._prepare_invoice()\n return self._get_payment_mode_vals(vals)\n", "path": "account_payment_sale/models/sale_order.py"}]}
1,116
351
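Odoo's `action_invoice_create` replays partner onchanges after `_prepare_invoice`, which is why the sale order's `payment_mode_id` kept getting clobbered; the override in the diff snapshots the values first and writes back only what the hook changed. A toy, framework-free version of that snapshot-and-restore pattern — `SimpleNamespace` stands in for an Odoo recordset, and the field names follow the record:

```python
from types import SimpleNamespace

def finalize_invoices(invoices, run_framework_hook):
    # Snapshot before the hook can replay onchanges over these fields.
    snapshot = {id(inv): (inv.payment_mode_id, inv.partner_bank_id)
                for inv in invoices}
    run_framework_hook(invoices)
    for inv in invoices:
        mode, bank = snapshot[id(inv)]
        if inv.payment_mode_id != mode:
            inv.payment_mode_id = mode  # restore only what was clobbered
        if inv.partner_bank_id != bank:
            inv.partner_bank_id = bank

inv = SimpleNamespace(payment_mode_id=7, partner_bank_id=3)
def clobber(invs):  # stands in for the partner onchange
    for i in invs:
        i.payment_mode_id = None

finalize_invoices([inv], clobber)
print(inv.payment_mode_id)  # 7 -- restored
```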
gh_patches_debug_15077
rasdani/github-patches
git_diff
microsoft__botbuilder-python-1748
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> botbuilder-core library is missing the botframework-streaming dependency ## Version 4.14.0.20210616.dev252366 ## Describe the bug The botbuilder-core library is missing the botframework-streaming dependency. When running a python bot with the botbuilder-core library installed, it won't run because it is missing the botframework-streaming dependency. The dependency reference is missing from the requirements.txt file, and this new library is not published in any of the regular packages indexes ([test.pypi](https://test.pypi.org/), [pypi](https://pypi.org/) and [azure artifacts](https://dev.azure.com/ConversationalAI/BotFramework/_packaging?_a=feed&feed=SDK%40Local)), so it can't be installed manually. When running the bots locally it is possible to install the dependency from a local folder with the code cloned from the repo. ## To Reproduce 1. Open a bot that uses the botbuilder-core library. 2. Install a preview version (4.14.x). 3. Run the bot. ## Expected behavior The dependencies being installed should install all the required sub-dependencies or have them available for manual installation. ## Screenshots ![image](https://user-images.githubusercontent.com/38112957/122459875-d79f2d80-cf87-11eb-93dd-d6aaf5128fbd.png) ## Additional context This issue is blocking the pipelines from the [BotFramework-FunctionalTests](https://github.com/microsoft/BotFramework-FunctionalTests/) repository from testing preview versions of the BotBuilder Python libraries. </issue> <code> [start of libraries/botframework-streaming/setup.py] 1 # Copyright (c) Microsoft Corporation. All rights reserved. 2 # Licensed under the MIT License. 3 4 import os 5 from setuptools import setup 6 7 VERSION = os.environ["packageVersion"] if "packageVersion" in os.environ else "4.12.0" 8 REQUIRES = [ 9 "botbuilder-schema>=4.12.0", 10 "botframework-connector>=4.12.0", 11 "botbuilder-core>=4.12.0", 12 ] 13 14 root = os.path.abspath(os.path.dirname(__file__)) 15 16 with open(os.path.join(root, "botframework", "streaming", "about.py")) as f: 17 package_info = {} 18 info = f.read() 19 exec(info, package_info) 20 21 with open(os.path.join(root, "README.rst"), encoding="utf-8") as f: 22 long_description = f.read() 23 24 setup( 25 name=package_info["__title__"], 26 version=package_info["__version__"], 27 url=package_info["__uri__"], 28 author=package_info["__author__"], 29 description=package_info["__description__"], 30 keywords=["BotFrameworkStreaming", "bots", "ai", "botframework", "botframework",], 31 long_description=long_description, 32 long_description_content_type="text/x-rst", 33 license=package_info["__license__"], 34 packages=[ 35 "botframework.streaming", 36 "botframework.streaming.payloads", 37 "botframework.streaming.payloads.models", 38 "botframework.streaming.payload_transport", 39 "botframework.streaming.transport", 40 "botframework.streaming.transport.web_socket", 41 ], 42 install_requires=REQUIRES, 43 classifiers=[ 44 "Programming Language :: Python :: 3.7", 45 "Intended Audience :: Developers", 46 "License :: OSI Approved :: MIT License", 47 "Operating System :: OS Independent", 48 "Development Status :: 5 - Production/Stable", 49 "Topic :: Scientific/Engineering :: Artificial Intelligence", 50 ], 51 ) 52 [end of libraries/botframework-streaming/setup.py] [start of libraries/botbuilder-core/setup.py] 1 # Copyright (c) Microsoft Corporation. All rights reserved. 2 # Licensed under the MIT License. 
3 4 import os 5 from setuptools import setup 6 7 VERSION = os.environ["packageVersion"] if "packageVersion" in os.environ else "4.14.0" 8 REQUIRES = [ 9 "botbuilder-schema==4.14.0", 10 "botframework-connector==4.14.0", 11 "jsonpickle>=1.2,<1.5", 12 ] 13 14 root = os.path.abspath(os.path.dirname(__file__)) 15 16 with open(os.path.join(root, "botbuilder", "core", "about.py")) as f: 17 package_info = {} 18 info = f.read() 19 exec(info, package_info) 20 21 with open(os.path.join(root, "README.rst"), encoding="utf-8") as f: 22 long_description = f.read() 23 24 setup( 25 name=package_info["__title__"], 26 version=package_info["__version__"], 27 url=package_info["__uri__"], 28 author=package_info["__author__"], 29 description=package_info["__description__"], 30 keywords=["BotBuilderCore", "bots", "ai", "botframework", "botbuilder"], 31 long_description=long_description, 32 long_description_content_type="text/x-rst", 33 license=package_info["__license__"], 34 packages=[ 35 "botbuilder.core", 36 "botbuilder.core.adapters", 37 "botbuilder.core.inspection", 38 "botbuilder.core.integration", 39 "botbuilder.core.skills", 40 "botbuilder.core.streaming", 41 "botbuilder.core.teams", 42 "botbuilder.core.oauth", 43 ], 44 install_requires=REQUIRES, 45 classifiers=[ 46 "Programming Language :: Python :: 3.7", 47 "Intended Audience :: Developers", 48 "License :: OSI Approved :: MIT License", 49 "Operating System :: OS Independent", 50 "Development Status :: 5 - Production/Stable", 51 "Topic :: Scientific/Engineering :: Artificial Intelligence", 52 ], 53 ) 54 [end of libraries/botbuilder-core/setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/libraries/botbuilder-core/setup.py b/libraries/botbuilder-core/setup.py --- a/libraries/botbuilder-core/setup.py +++ b/libraries/botbuilder-core/setup.py @@ -8,6 +8,7 @@ REQUIRES = [ "botbuilder-schema==4.14.0", "botframework-connector==4.14.0", + "botframework-streaming==4.14.0", "jsonpickle>=1.2,<1.5", ] diff --git a/libraries/botframework-streaming/setup.py b/libraries/botframework-streaming/setup.py --- a/libraries/botframework-streaming/setup.py +++ b/libraries/botframework-streaming/setup.py @@ -4,11 +4,10 @@ import os from setuptools import setup -VERSION = os.environ["packageVersion"] if "packageVersion" in os.environ else "4.12.0" +VERSION = os.environ["packageVersion"] if "packageVersion" in os.environ else "4.14.0" REQUIRES = [ "botbuilder-schema>=4.12.0", "botframework-connector>=4.12.0", - "botbuilder-core>=4.12.0", ] root = os.path.abspath(os.path.dirname(__file__))
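The patch is two-sided: `botbuilder-core` gains an explicit `botframework-streaming==4.14.0` requirement, while `botframework-streaming` drops its `botbuilder-core` requirement (which would otherwise form a dependency cycle) and bumps its fallback version to 4.14.0. As a hedged sanity check — assuming Python 3.8+ and that both patched distributions are installed — the resulting graph can be verified from package metadata:

```python
from importlib.metadata import requires  # stdlib since Python 3.8

core_reqs = requires("botbuilder-core") or []
streaming_reqs = requires("botframework-streaming") or []

# botbuilder-core must now pull in the streaming package...
assert any(r.startswith("botframework-streaming") for r in core_reqs)
# ...and the streaming package must no longer point back at botbuilder-core.
assert not any(r.startswith("botbuilder-core") for r in streaming_reqs)
print("dependency edge present, no cycle back")
```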
{"golden_diff": "diff --git a/libraries/botbuilder-core/setup.py b/libraries/botbuilder-core/setup.py\n--- a/libraries/botbuilder-core/setup.py\n+++ b/libraries/botbuilder-core/setup.py\n@@ -8,6 +8,7 @@\n REQUIRES = [\n \"botbuilder-schema==4.14.0\",\n \"botframework-connector==4.14.0\",\n+ \"botframework-streaming==4.14.0\",\n \"jsonpickle>=1.2,<1.5\",\n ]\n \ndiff --git a/libraries/botframework-streaming/setup.py b/libraries/botframework-streaming/setup.py\n--- a/libraries/botframework-streaming/setup.py\n+++ b/libraries/botframework-streaming/setup.py\n@@ -4,11 +4,10 @@\n import os\n from setuptools import setup\n \n-VERSION = os.environ[\"packageVersion\"] if \"packageVersion\" in os.environ else \"4.12.0\"\n+VERSION = os.environ[\"packageVersion\"] if \"packageVersion\" in os.environ else \"4.14.0\"\n REQUIRES = [\n \"botbuilder-schema>=4.12.0\",\n \"botframework-connector>=4.12.0\",\n- \"botbuilder-core>=4.12.0\",\n ]\n \n root = os.path.abspath(os.path.dirname(__file__))\n", "issue": "botbuilder-core library is missing the botframework-streaming dependency\n## Version\r\n4.14.0.20210616.dev252366\r\n\r\n## Describe the bug\r\nThe botbuilder-core library is missing the botframework-streaming dependency.\r\nWhen running a python bot with the botbuilder-core library installed, it won't run because it is missing the botframework-streaming dependency.\r\nThe dependency reference is missing from the requirements.txt file, and this new library is not published in any of the regular packages indexes ([test.pypi](https://test.pypi.org/), [pypi](https://pypi.org/) and [azure artifacts](https://dev.azure.com/ConversationalAI/BotFramework/_packaging?_a=feed&feed=SDK%40Local)), so it can't be installed manually.\r\nWhen running the bots locally it is possible to install the dependency from a local folder with the code cloned from the repo.\r\n\r\n## To Reproduce\r\n1. Open a bot that uses the botbuilder-core library.\r\n2. Install a preview version (4.14.x).\r\n3. Run the bot.\r\n\r\n## Expected behavior\r\nThe dependencies being installed should install all the required sub-dependencies or have them available for manual installation.\r\n\r\n## Screenshots\r\n![image](https://user-images.githubusercontent.com/38112957/122459875-d79f2d80-cf87-11eb-93dd-d6aaf5128fbd.png)\r\n\r\n## Additional context\r\nThis issue is blocking the pipelines from the [BotFramework-FunctionalTests](https://github.com/microsoft/BotFramework-FunctionalTests/) repository from testing preview versions of the BotBuilder Python libraries.\r\n\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. 
All rights reserved.\n# Licensed under the MIT License.\n\nimport os\nfrom setuptools import setup\n\nVERSION = os.environ[\"packageVersion\"] if \"packageVersion\" in os.environ else \"4.12.0\"\nREQUIRES = [\n \"botbuilder-schema>=4.12.0\",\n \"botframework-connector>=4.12.0\",\n \"botbuilder-core>=4.12.0\",\n]\n\nroot = os.path.abspath(os.path.dirname(__file__))\n\nwith open(os.path.join(root, \"botframework\", \"streaming\", \"about.py\")) as f:\n package_info = {}\n info = f.read()\n exec(info, package_info)\n\nwith open(os.path.join(root, \"README.rst\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nsetup(\n name=package_info[\"__title__\"],\n version=package_info[\"__version__\"],\n url=package_info[\"__uri__\"],\n author=package_info[\"__author__\"],\n description=package_info[\"__description__\"],\n keywords=[\"BotFrameworkStreaming\", \"bots\", \"ai\", \"botframework\", \"botframework\",],\n long_description=long_description,\n long_description_content_type=\"text/x-rst\",\n license=package_info[\"__license__\"],\n packages=[\n \"botframework.streaming\",\n \"botframework.streaming.payloads\",\n \"botframework.streaming.payloads.models\",\n \"botframework.streaming.payload_transport\",\n \"botframework.streaming.transport\",\n \"botframework.streaming.transport.web_socket\",\n ],\n install_requires=REQUIRES,\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 5 - Production/Stable\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n)\n", "path": "libraries/botframework-streaming/setup.py"}, {"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\n\nimport os\nfrom setuptools import setup\n\nVERSION = os.environ[\"packageVersion\"] if \"packageVersion\" in os.environ else \"4.14.0\"\nREQUIRES = [\n \"botbuilder-schema==4.14.0\",\n \"botframework-connector==4.14.0\",\n \"jsonpickle>=1.2,<1.5\",\n]\n\nroot = os.path.abspath(os.path.dirname(__file__))\n\nwith open(os.path.join(root, \"botbuilder\", \"core\", \"about.py\")) as f:\n package_info = {}\n info = f.read()\n exec(info, package_info)\n\nwith open(os.path.join(root, \"README.rst\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nsetup(\n name=package_info[\"__title__\"],\n version=package_info[\"__version__\"],\n url=package_info[\"__uri__\"],\n author=package_info[\"__author__\"],\n description=package_info[\"__description__\"],\n keywords=[\"BotBuilderCore\", \"bots\", \"ai\", \"botframework\", \"botbuilder\"],\n long_description=long_description,\n long_description_content_type=\"text/x-rst\",\n license=package_info[\"__license__\"],\n packages=[\n \"botbuilder.core\",\n \"botbuilder.core.adapters\",\n \"botbuilder.core.inspection\",\n \"botbuilder.core.integration\",\n \"botbuilder.core.skills\",\n \"botbuilder.core.streaming\",\n \"botbuilder.core.teams\",\n \"botbuilder.core.oauth\",\n ],\n install_requires=REQUIRES,\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 5 - Production/Stable\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n)\n", "path": "libraries/botbuilder-core/setup.py"}]}
num_tokens_prompt: 1957
num_tokens_diff: 294
problem_id: gh_patches_debug_18689
source: rasdani/github-patches
task_type: git_diff
in_source_id: sanic-org__sanic-1553
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Unable to start server -- Running run_async.py failed **Describe the bug** [2019-04-14 19:22:02 +0800] [21512] [INFO] Goin' Fast @ http://0.0.0.0:8000 [2019-04-14 19:22:02 +0800] [21512] [ERROR] Unable to start server Traceback (most recent call last): File "C:\ProgramData\Anaconda3\envs\venom\lib\site-packages\sanic\server.py", line 745, in serve http_server = loop.run_until_complete(server_coroutine) File "C:\ProgramData\Anaconda3\envs\venom\lib\asyncio\base_events.py", line 571, in run_until_complete self.run_forever() File "C:\ProgramData\Anaconda3\envs\venom\lib\asyncio\base_events.py", line 529, in run_forever 'Cannot run the event loop while another loop is running') RuntimeError: Cannot run the event loop while another loop is running **Code snippet** Relevant source code, make sure to remove what is not necessary. https://github.com/huge-success/sanic/blob/master/examples/run_async.py **Expected behavior** A clear and concise description of what you expected to happen. **Environment (please complete the following information):** - OS: [e.g. iOS] - Version [e.g. 0.8.3] Window and Linux, Python 3.6 or 3.7 don't work **Additional context** Add any other context about the problem here. Is this example still work ? </issue> <code> [start of examples/log_request_id.py] 1 ''' 2 Based on example from https://github.com/Skyscanner/aiotask-context 3 and `examples/{override_logging,run_async}.py`. 4 5 Needs https://github.com/Skyscanner/aiotask-context/tree/52efbc21e2e1def2d52abb9a8e951f3ce5e6f690 or newer 6 7 $ pip install git+https://github.com/Skyscanner/aiotask-context.git 8 ''' 9 10 import asyncio 11 import uuid 12 import logging 13 from signal import signal, SIGINT 14 15 from sanic import Sanic 16 from sanic import response 17 18 import uvloop 19 import aiotask_context as context 20 21 log = logging.getLogger(__name__) 22 23 24 class RequestIdFilter(logging.Filter): 25 def filter(self, record): 26 record.request_id = context.get('X-Request-ID') 27 return True 28 29 30 LOG_SETTINGS = { 31 'version': 1, 32 'disable_existing_loggers': False, 33 'handlers': { 34 'console': { 35 'class': 'logging.StreamHandler', 36 'level': 'DEBUG', 37 'formatter': 'default', 38 'filters': ['requestid'], 39 }, 40 }, 41 'filters': { 42 'requestid': { 43 '()': RequestIdFilter, 44 }, 45 }, 46 'formatters': { 47 'default': { 48 'format': '%(asctime)s %(levelname)s %(name)s:%(lineno)d %(request_id)s | %(message)s', 49 }, 50 }, 51 'loggers': { 52 '': { 53 'level': 'DEBUG', 54 'handlers': ['console'], 55 'propagate': True 56 }, 57 } 58 } 59 60 61 app = Sanic(__name__, log_config=LOG_SETTINGS) 62 63 64 @app.middleware('request') 65 async def set_request_id(request): 66 request_id = request.headers.get('X-Request-ID') or str(uuid.uuid4()) 67 context.set("X-Request-ID", request_id) 68 69 70 @app.route("/") 71 async def test(request): 72 log.debug('X-Request-ID: %s', context.get('X-Request-ID')) 73 log.info('Hello from test!') 74 return response.json({"test": True}) 75 76 77 if __name__ == '__main__': 78 asyncio.set_event_loop(uvloop.new_event_loop()) 79 server = app.create_server(host="0.0.0.0", port=8000) 80 loop = asyncio.get_event_loop() 81 loop.set_task_factory(context.task_factory) 82 task = asyncio.ensure_future(server) 83 try: 84 loop.run_forever() 85 except: 86 loop.stop() 87 [end of examples/log_request_id.py] [start of examples/run_async.py] 1 from sanic import Sanic 2 from sanic import response 3 
from signal import signal, SIGINT 4 import asyncio 5 import uvloop 6 7 app = Sanic(__name__) 8 9 10 @app.route("/") 11 async def test(request): 12 return response.json({"answer": "42"}) 13 14 asyncio.set_event_loop(uvloop.new_event_loop()) 15 server = app.create_server(host="0.0.0.0", port=8000) 16 loop = asyncio.get_event_loop() 17 task = asyncio.ensure_future(server) 18 signal(SIGINT, lambda s, f: loop.stop()) 19 try: 20 loop.run_forever() 21 except: 22 loop.stop() 23 [end of examples/run_async.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/examples/log_request_id.py b/examples/log_request_id.py --- a/examples/log_request_id.py +++ b/examples/log_request_id.py @@ -76,7 +76,7 @@ if __name__ == '__main__': asyncio.set_event_loop(uvloop.new_event_loop()) - server = app.create_server(host="0.0.0.0", port=8000) + server = app.create_server(host="0.0.0.0", port=8000, return_asyncio_server=True) loop = asyncio.get_event_loop() loop.set_task_factory(context.task_factory) task = asyncio.ensure_future(server) diff --git a/examples/run_async.py b/examples/run_async.py --- a/examples/run_async.py +++ b/examples/run_async.py @@ -12,7 +12,7 @@ return response.json({"answer": "42"}) asyncio.set_event_loop(uvloop.new_event_loop()) -server = app.create_server(host="0.0.0.0", port=8000) +server = app.create_server(host="0.0.0.0", port=8000, return_asyncio_server=True) loop = asyncio.get_event_loop() task = asyncio.ensure_future(server) signal(SIGINT, lambda s, f: loop.stop())
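Both examples fail for the same reason: without `return_asyncio_server=True`, `create_server()` tries to run its own event loop inside one the caller is already driving. With the flag it returns a coroutine for the caller's loop to schedule, so the patched `run_async.py` reduces to the following (a direct restatement of the diff above, lightly tidied):

```python
import asyncio
from signal import SIGINT, signal

import uvloop
from sanic import Sanic, response

app = Sanic(__name__)

@app.route("/")
async def test(request):
    return response.json({"answer": "42"})

asyncio.set_event_loop(uvloop.new_event_loop())
# return_asyncio_server=True is the crux: without it, create_server() spins up
# its own event loop, which collides with the loop driven below.
server = app.create_server(host="0.0.0.0", port=8000, return_asyncio_server=True)
loop = asyncio.get_event_loop()
task = asyncio.ensure_future(server)
signal(SIGINT, lambda s, f: loop.stop())
try:
    loop.run_forever()
finally:
    loop.stop()
```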
{"golden_diff": "diff --git a/examples/log_request_id.py b/examples/log_request_id.py\n--- a/examples/log_request_id.py\n+++ b/examples/log_request_id.py\n@@ -76,7 +76,7 @@\n \n if __name__ == '__main__':\n asyncio.set_event_loop(uvloop.new_event_loop())\n- server = app.create_server(host=\"0.0.0.0\", port=8000)\n+ server = app.create_server(host=\"0.0.0.0\", port=8000, return_asyncio_server=True)\n loop = asyncio.get_event_loop()\n loop.set_task_factory(context.task_factory)\n task = asyncio.ensure_future(server)\ndiff --git a/examples/run_async.py b/examples/run_async.py\n--- a/examples/run_async.py\n+++ b/examples/run_async.py\n@@ -12,7 +12,7 @@\n return response.json({\"answer\": \"42\"})\n \n asyncio.set_event_loop(uvloop.new_event_loop())\n-server = app.create_server(host=\"0.0.0.0\", port=8000)\n+server = app.create_server(host=\"0.0.0.0\", port=8000, return_asyncio_server=True)\n loop = asyncio.get_event_loop()\n task = asyncio.ensure_future(server)\n signal(SIGINT, lambda s, f: loop.stop())\n", "issue": "Unable to start server -- Running run_async.py failed\n**Describe the bug**\r\n[2019-04-14 19:22:02 +0800] [21512] [INFO] Goin' Fast @ http://0.0.0.0:8000\r\n[2019-04-14 19:22:02 +0800] [21512] [ERROR] Unable to start server\r\nTraceback (most recent call last):\r\n File \"C:\\ProgramData\\Anaconda3\\envs\\venom\\lib\\site-packages\\sanic\\server.py\", line 745, in serve\r\n http_server = loop.run_until_complete(server_coroutine)\r\n File \"C:\\ProgramData\\Anaconda3\\envs\\venom\\lib\\asyncio\\base_events.py\", line 571, in run_until_complete\r\n self.run_forever()\r\n File \"C:\\ProgramData\\Anaconda3\\envs\\venom\\lib\\asyncio\\base_events.py\", line 529, in run_forever\r\n 'Cannot run the event loop while another loop is running')\r\nRuntimeError: Cannot run the event loop while another loop is running\r\n\r\n**Code snippet**\r\nRelevant source code, make sure to remove what is not necessary.\r\n\r\nhttps://github.com/huge-success/sanic/blob/master/examples/run_async.py\r\n\r\n**Expected behavior**\r\nA clear and concise description of what you expected to happen.\r\n\r\n\r\n**Environment (please complete the following information):**\r\n - OS: [e.g. iOS]\r\n - Version [e.g. 
0.8.3]\r\nWindow and Linux, Python 3.6 or 3.7 don't work\r\n\r\n**Additional context**\r\nAdd any other context about the problem here.\r\n\r\n\r\nIs this example still work ?\n", "before_files": [{"content": "'''\nBased on example from https://github.com/Skyscanner/aiotask-context\nand `examples/{override_logging,run_async}.py`.\n\nNeeds https://github.com/Skyscanner/aiotask-context/tree/52efbc21e2e1def2d52abb9a8e951f3ce5e6f690 or newer\n\n$ pip install git+https://github.com/Skyscanner/aiotask-context.git\n'''\n\nimport asyncio\nimport uuid\nimport logging\nfrom signal import signal, SIGINT\n\nfrom sanic import Sanic\nfrom sanic import response\n\nimport uvloop\nimport aiotask_context as context\n\nlog = logging.getLogger(__name__)\n\n\nclass RequestIdFilter(logging.Filter):\n def filter(self, record):\n record.request_id = context.get('X-Request-ID')\n return True\n\n\nLOG_SETTINGS = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'handlers': {\n 'console': {\n 'class': 'logging.StreamHandler',\n 'level': 'DEBUG',\n 'formatter': 'default',\n 'filters': ['requestid'],\n },\n },\n 'filters': {\n 'requestid': {\n '()': RequestIdFilter,\n },\n },\n 'formatters': {\n 'default': {\n 'format': '%(asctime)s %(levelname)s %(name)s:%(lineno)d %(request_id)s | %(message)s',\n },\n },\n 'loggers': {\n '': {\n 'level': 'DEBUG',\n 'handlers': ['console'],\n 'propagate': True\n },\n }\n}\n\n\napp = Sanic(__name__, log_config=LOG_SETTINGS)\n\n\[email protected]('request')\nasync def set_request_id(request):\n request_id = request.headers.get('X-Request-ID') or str(uuid.uuid4())\n context.set(\"X-Request-ID\", request_id)\n\n\[email protected](\"/\")\nasync def test(request):\n log.debug('X-Request-ID: %s', context.get('X-Request-ID'))\n log.info('Hello from test!')\n return response.json({\"test\": True})\n\n\nif __name__ == '__main__':\n asyncio.set_event_loop(uvloop.new_event_loop())\n server = app.create_server(host=\"0.0.0.0\", port=8000)\n loop = asyncio.get_event_loop()\n loop.set_task_factory(context.task_factory)\n task = asyncio.ensure_future(server)\n try:\n loop.run_forever()\n except:\n loop.stop()\n", "path": "examples/log_request_id.py"}, {"content": "from sanic import Sanic\nfrom sanic import response\nfrom signal import signal, SIGINT\nimport asyncio\nimport uvloop\n\napp = Sanic(__name__)\n\n\[email protected](\"/\")\nasync def test(request):\n return response.json({\"answer\": \"42\"})\n\nasyncio.set_event_loop(uvloop.new_event_loop())\nserver = app.create_server(host=\"0.0.0.0\", port=8000)\nloop = asyncio.get_event_loop()\ntask = asyncio.ensure_future(server)\nsignal(SIGINT, lambda s, f: loop.stop())\ntry:\n loop.run_forever()\nexcept:\n loop.stop()\n", "path": "examples/run_async.py"}]}
num_tokens_prompt: 1841
num_tokens_diff: 285
problem_id: gh_patches_debug_6849
source: rasdani/github-patches
task_type: git_diff
in_source_id: WordPress__openverse-api-233
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [Bug] A circular import prevents starting the project correctly ## Description <!-- Concisely describe the bug. --> There is a problem with models imports, run the project and see: ``` web_1 | Exception in thread django-main-thread: web_1 | Traceback (most recent call last): web_1 | File "/usr/local/lib/python3.9/threading.py", line 973, in _bootstrap_inner web_1 | self.run() web_1 | File "/usr/local/lib/python3.9/threading.py", line 910, in run web_1 | self._target(*self._args, **self._kwargs) web_1 | File "/usr/local/lib/python3.9/site-packages/django/utils/autoreload.py", line 64, in wrapper web_1 | fn(*args, **kwargs) web_1 | File "/usr/local/lib/python3.9/site-packages/django/core/management/commands/runserver.py", line 110, in inner_run web_1 | autoreload.raise_last_exception() web_1 | File "/usr/local/lib/python3.9/site-packages/django/utils/autoreload.py", line 87, in raise_last_exception web_1 | raise _exception[1] web_1 | File "/usr/local/lib/python3.9/site-packages/django/core/management/__init__.py", line 375, in execute web_1 | autoreload.check_errors(django.setup)() web_1 | File "/usr/local/lib/python3.9/site-packages/django/utils/autoreload.py", line 64, in wrapper web_1 | fn(*args, **kwargs) web_1 | File "/usr/local/lib/python3.9/site-packages/django/__init__.py", line 24, in setup web_1 | apps.populate(settings.INSTALLED_APPS) web_1 | File "/usr/local/lib/python3.9/site-packages/django/apps/registry.py", line 114, in populate web_1 | app_config.import_models() web_1 | File "/usr/local/lib/python3.9/site-packages/django/apps/config.py", line 301, in import_models web_1 | self.models_module = import_module(models_module_name) web_1 | File "/usr/local/lib/python3.9/importlib/__init__.py", line 127, in import_module web_1 | return _bootstrap._gcd_import(name[level:], package, level) web_1 | File "<frozen importlib._bootstrap>", line 1030, in _gcd_import web_1 | File "<frozen importlib._bootstrap>", line 1007, in _find_and_load web_1 | File "<frozen importlib._bootstrap>", line 986, in _find_and_load_unlocked web_1 | File "<frozen importlib._bootstrap>", line 680, in _load_unlocked web_1 | File "<frozen importlib._bootstrap_external>", line 850, in exec_module web_1 | File "<frozen importlib._bootstrap>", line 228, in _call_with_frames_removed web_1 | File "/openverse-api/catalog/api/models/__init__.py", line 1, in <module> web_1 | from catalog.api.models.audio import ( web_1 | File "/openverse-api/catalog/api/models/audio.py", line 2, in <module> web_1 | from catalog.api.models import OpenLedgerModel web_1 | ImportError: cannot import name 'OpenLedgerModel' from partially initialized module 'catalog.api.models' (most likely due to a circular import) (/openverse-api/catalog/api/models/__init__.py) ``` ## Expectation <!-- Concisely describe what you expected to happen. --> The project should start without errors and run normally, passing tests. ## Additional context <!-- Add any other context about the problem here; or delete the section entirely. --> The wrong order is introduced due to the `isort` rules so we should make an exception for these lines or the file. ## Resolution <!-- Replace the [ ] with [x] to check the box. --> - [ ] 🙋 I would be interested in resolving this bug. 
</issue> <code> [start of openverse-api/catalog/api/models/__init__.py] 1 from catalog.api.models.audio import ( 2 AltAudioFile, 3 Audio, 4 AudioList, 5 AudioReport, 6 AudioSet, 7 DeletedAudio, 8 MatureAudio, 9 ) 10 from catalog.api.models.base import OpenLedgerModel 11 from catalog.api.models.image import ( 12 DeletedImage, 13 Image, 14 ImageList, 15 ImageReport, 16 MatureImage, 17 ) 18 from catalog.api.models.media import ( 19 DEINDEXED, 20 DMCA, 21 MATURE, 22 MATURE_FILTERED, 23 NO_ACTION, 24 OTHER, 25 PENDING, 26 ) 27 from catalog.api.models.models import ContentProvider, ShortenedLink, SourceLogo, Tag 28 from catalog.api.models.oauth import ( 29 OAuth2Registration, 30 OAuth2Verification, 31 ThrottledApplication, 32 ) 33 [end of openverse-api/catalog/api/models/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/openverse-api/catalog/api/models/__init__.py b/openverse-api/catalog/api/models/__init__.py --- a/openverse-api/catalog/api/models/__init__.py +++ b/openverse-api/catalog/api/models/__init__.py @@ -1,3 +1,4 @@ +from catalog.api.models.base import OpenLedgerModel # isort:skip from catalog.api.models.audio import ( AltAudioFile, Audio, @@ -7,7 +8,6 @@ DeletedAudio, MatureAudio, ) -from catalog.api.models.base import OpenLedgerModel from catalog.api.models.image import ( DeletedImage, Image,
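The error is purely an ordering problem: `audio.py` does `from catalog.api.models import OpenLedgerModel`, so the package `__init__` must bind `OpenLedgerModel` before it imports `audio` — and the `# isort:skip` comment stops the import sorter from undoing that. A self-contained reproduction, using a throwaway `demo` package written to a temporary directory (the names mirror the report, but the package itself is synthetic):

```python
import os
import sys
import tempfile

pkg_root = tempfile.mkdtemp()
os.mkdir(os.path.join(pkg_root, "demo"))
modules = {
    "base.py": "class OpenLedgerModel:\n    pass\n",
    "audio.py": (
        "from demo import OpenLedgerModel\n"
        "class Audio(OpenLedgerModel):\n    pass\n"
    ),
    # Patched ordering: the dependency-free base import comes first; the real
    # fix pins it there with an `# isort:skip` comment.
    "__init__.py": (
        "from demo.base import OpenLedgerModel  # isort:skip\n"
        "from demo.audio import Audio\n"
    ),
}
for name, body in modules.items():
    with open(os.path.join(pkg_root, "demo", name), "w") as fh:
        fh.write(body)

sys.path.insert(0, pkg_root)
import demo  # succeeds; swapping the two __init__ imports reproduces the
             # "partially initialized module" ImportError from the report
print(demo.Audio)
```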
{"golden_diff": "diff --git a/openverse-api/catalog/api/models/__init__.py b/openverse-api/catalog/api/models/__init__.py\n--- a/openverse-api/catalog/api/models/__init__.py\n+++ b/openverse-api/catalog/api/models/__init__.py\n@@ -1,3 +1,4 @@\n+from catalog.api.models.base import OpenLedgerModel # isort:skip\n from catalog.api.models.audio import (\n AltAudioFile,\n Audio,\n@@ -7,7 +8,6 @@\n DeletedAudio,\n MatureAudio,\n )\n-from catalog.api.models.base import OpenLedgerModel\n from catalog.api.models.image import (\n DeletedImage,\n Image,\n", "issue": "[Bug] A circular import prevents starting the project correctly\n## Description\r\n<!-- Concisely describe the bug. -->\r\nThere is a problem with models imports, run the project and see:\r\n\r\n```\r\nweb_1 | Exception in thread django-main-thread:\r\nweb_1 | Traceback (most recent call last):\r\nweb_1 | File \"/usr/local/lib/python3.9/threading.py\", line 973, in _bootstrap_inner\r\nweb_1 | self.run()\r\nweb_1 | File \"/usr/local/lib/python3.9/threading.py\", line 910, in run\r\nweb_1 | self._target(*self._args, **self._kwargs)\r\nweb_1 | File \"/usr/local/lib/python3.9/site-packages/django/utils/autoreload.py\", line 64, in wrapper\r\nweb_1 | fn(*args, **kwargs)\r\nweb_1 | File \"/usr/local/lib/python3.9/site-packages/django/core/management/commands/runserver.py\", line 110, in inner_run\r\nweb_1 | autoreload.raise_last_exception()\r\nweb_1 | File \"/usr/local/lib/python3.9/site-packages/django/utils/autoreload.py\", line 87, in raise_last_exception\r\nweb_1 | raise _exception[1]\r\nweb_1 | File \"/usr/local/lib/python3.9/site-packages/django/core/management/__init__.py\", line 375, in execute\r\nweb_1 | autoreload.check_errors(django.setup)()\r\nweb_1 | File \"/usr/local/lib/python3.9/site-packages/django/utils/autoreload.py\", line 64, in wrapper\r\nweb_1 | fn(*args, **kwargs)\r\nweb_1 | File \"/usr/local/lib/python3.9/site-packages/django/__init__.py\", line 24, in setup\r\nweb_1 | apps.populate(settings.INSTALLED_APPS)\r\nweb_1 | File \"/usr/local/lib/python3.9/site-packages/django/apps/registry.py\", line 114, in populate\r\nweb_1 | app_config.import_models()\r\nweb_1 | File \"/usr/local/lib/python3.9/site-packages/django/apps/config.py\", line 301, in import_models\r\nweb_1 | self.models_module = import_module(models_module_name)\r\nweb_1 | File \"/usr/local/lib/python3.9/importlib/__init__.py\", line 127, in import_module\r\nweb_1 | return _bootstrap._gcd_import(name[level:], package, level)\r\nweb_1 | File \"<frozen importlib._bootstrap>\", line 1030, in _gcd_import\r\nweb_1 | File \"<frozen importlib._bootstrap>\", line 1007, in _find_and_load\r\nweb_1 | File \"<frozen importlib._bootstrap>\", line 986, in _find_and_load_unlocked\r\nweb_1 | File \"<frozen importlib._bootstrap>\", line 680, in _load_unlocked\r\nweb_1 | File \"<frozen importlib._bootstrap_external>\", line 850, in exec_module\r\nweb_1 | File \"<frozen importlib._bootstrap>\", line 228, in _call_with_frames_removed\r\nweb_1 | File \"/openverse-api/catalog/api/models/__init__.py\", line 1, in <module>\r\nweb_1 | from catalog.api.models.audio import (\r\nweb_1 | File \"/openverse-api/catalog/api/models/audio.py\", line 2, in <module>\r\nweb_1 | from catalog.api.models import OpenLedgerModel\r\nweb_1 | ImportError: cannot import name 'OpenLedgerModel' from partially initialized module 'catalog.api.models' (most likely due to a circular import) (/openverse-api/catalog/api/models/__init__.py)\r\n```\r\n\r\n## Expectation\r\n<!-- Concisely describe what you expected to 
happen. -->\r\nThe project should start without errors and run normally, passing tests.\r\n\r\n## Additional context\r\n<!-- Add any other context about the problem here; or delete the section entirely. -->\r\nThe wrong order is introduced due to the `isort` rules so we should make an exception for these lines or the file.\r\n\r\n## Resolution\r\n<!-- Replace the [ ] with [x] to check the box. -->\r\n- [ ] \ud83d\ude4b I would be interested in resolving this bug.\r\n\n", "before_files": [{"content": "from catalog.api.models.audio import (\n AltAudioFile,\n Audio,\n AudioList,\n AudioReport,\n AudioSet,\n DeletedAudio,\n MatureAudio,\n)\nfrom catalog.api.models.base import OpenLedgerModel\nfrom catalog.api.models.image import (\n DeletedImage,\n Image,\n ImageList,\n ImageReport,\n MatureImage,\n)\nfrom catalog.api.models.media import (\n DEINDEXED,\n DMCA,\n MATURE,\n MATURE_FILTERED,\n NO_ACTION,\n OTHER,\n PENDING,\n)\nfrom catalog.api.models.models import ContentProvider, ShortenedLink, SourceLogo, Tag\nfrom catalog.api.models.oauth import (\n OAuth2Registration,\n OAuth2Verification,\n ThrottledApplication,\n)\n", "path": "openverse-api/catalog/api/models/__init__.py"}]}
num_tokens_prompt: 1748
num_tokens_diff: 140
problem_id: gh_patches_debug_25787
source: rasdani/github-patches
task_type: git_diff
in_source_id: pypa__setuptools-1905
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> TestDepends.testRequire regression in 41.6.0 (py3) While trying to build the new release of setuptools, I get the following test failure: ``` ==================================================================== FAILURES ===================================================================== _____________________________________________________________ TestDepends.testRequire _____________________________________________________________ self = <setuptools.tests.test_setuptools.TestDepends object at 0x7fbfae31d710> @needs_bytecode def testRequire(self): req = Require('Json', '1.0.3', 'json') assert req.name == 'Json' assert req.module == 'json' assert req.requested_version == '1.0.3' assert req.attribute == '__version__' assert req.full_name() == 'Json-1.0.3' from json import __version__ assert req.get_version() == __version__ assert req.version_ok('1.0.9') assert not req.version_ok('0.9.1') assert not req.version_ok('unknown') assert req.is_present() assert req.is_current() req = Require('Json 3000', '03000', 'json', format=LooseVersion) assert req.is_present() assert not req.is_current() assert not req.version_ok('unknown') req = Require('Do-what-I-mean', '1.0', 'd-w-i-m') assert not req.is_present() assert not req.is_current() req = Require('Tests', None, 'tests', homepage="http://example.com") assert req.format is None assert req.attribute is None assert req.requested_version is None assert req.full_name() == 'Tests' assert req.homepage == 'http://example.com' from setuptools.tests import __path__ paths = [os.path.dirname(p) for p in __path__] > assert req.is_present(paths) E AssertionError: assert False E + where False = <bound method Require.is_present of <setuptools.depends.Require object at 0x7fbfae0d0b38>>(['/tmp/portage/dev-python/setuptools-41.6.0/work/setuptools-41.6.0-python3_5/setuptools']) E + where <bound method Require.is_present of <setuptools.depends.Require object at 0x7fbfae0d0b38>> = <setuptools.depends.Require object at 0x7fbfae0d0b38>.is_present setuptools/tests/test_setuptools.py:120: AssertionError ``` I can reproduce it reliably with at least pypy3.6 (7.2.0) & python3.5 (3.5.7). I haven't tested other versions yet. Full build log: [dev-python:setuptools-41.6.0:20191030-083347.log](https://github.com/pypa/setuptools/files/3787797/dev-python.setuptools-41.6.0.20191030-083347.log) </issue> <code> [start of setuptools/_imp.py] 1 """ 2 Re-implementation of find_module and get_frozen_object 3 from the deprecated imp module. 
4 """ 5 6 import os 7 import importlib.util 8 import importlib.machinery 9 10 from .py34compat import module_from_spec 11 12 13 PY_SOURCE = 1 14 PY_COMPILED = 2 15 C_EXTENSION = 3 16 C_BUILTIN = 6 17 PY_FROZEN = 7 18 19 20 def find_module(module, paths=None): 21 """Just like 'imp.find_module()', but with package support""" 22 spec = importlib.util.find_spec(module, paths) 23 if spec is None: 24 raise ImportError("Can't find %s" % module) 25 if not spec.has_location and hasattr(spec, 'submodule_search_locations'): 26 spec = importlib.util.spec_from_loader('__init__.py', spec.loader) 27 28 kind = -1 29 file = None 30 static = isinstance(spec.loader, type) 31 if spec.origin == 'frozen' or static and issubclass( 32 spec.loader, importlib.machinery.FrozenImporter): 33 kind = PY_FROZEN 34 path = None # imp compabilty 35 suffix = mode = '' # imp compability 36 elif spec.origin == 'built-in' or static and issubclass( 37 spec.loader, importlib.machinery.BuiltinImporter): 38 kind = C_BUILTIN 39 path = None # imp compabilty 40 suffix = mode = '' # imp compability 41 elif spec.has_location: 42 path = spec.origin 43 suffix = os.path.splitext(path)[1] 44 mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb' 45 46 if suffix in importlib.machinery.SOURCE_SUFFIXES: 47 kind = PY_SOURCE 48 elif suffix in importlib.machinery.BYTECODE_SUFFIXES: 49 kind = PY_COMPILED 50 elif suffix in importlib.machinery.EXTENSION_SUFFIXES: 51 kind = C_EXTENSION 52 53 if kind in {PY_SOURCE, PY_COMPILED}: 54 file = open(path, mode) 55 else: 56 path = None 57 suffix = mode = '' 58 59 return file, path, (suffix, mode, kind) 60 61 62 def get_frozen_object(module, paths=None): 63 spec = importlib.util.find_spec(module, paths) 64 if not spec: 65 raise ImportError("Can't find %s" % module) 66 return spec.loader.get_code(module) 67 68 69 def get_module(module, paths, info): 70 spec = importlib.util.find_spec(module, paths) 71 if not spec: 72 raise ImportError("Can't find %s" % module) 73 return module_from_spec(spec) 74 [end of setuptools/_imp.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setuptools/_imp.py b/setuptools/_imp.py --- a/setuptools/_imp.py +++ b/setuptools/_imp.py @@ -17,9 +17,18 @@ PY_FROZEN = 7 +def find_spec(module, paths): + finder = ( + importlib.machinery.PathFinder().find_spec + if isinstance(paths, list) else + importlib.util.find_spec + ) + return finder(module, paths) + + def find_module(module, paths=None): """Just like 'imp.find_module()', but with package support""" - spec = importlib.util.find_spec(module, paths) + spec = find_spec(module, paths) if spec is None: raise ImportError("Can't find %s" % module) if not spec.has_location and hasattr(spec, 'submodule_search_locations'): @@ -60,14 +69,14 @@ def get_frozen_object(module, paths=None): - spec = importlib.util.find_spec(module, paths) + spec = find_spec(module, paths) if not spec: raise ImportError("Can't find %s" % module) return spec.loader.get_code(module) def get_module(module, paths, info): - spec = importlib.util.find_spec(module, paths) + spec = find_spec(module, paths) if not spec: raise ImportError("Can't find %s" % module) return module_from_spec(spec)
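The root cause is that `importlib.util.find_spec(name, package)` only consults its second argument when resolving *relative* names; an explicit list of search directories is silently ignored and lookup falls back to `sys.path`, which is why `Require.is_present(paths)` stopped finding modules. `importlib.machinery.PathFinder` does honour an explicit path list, and the new `find_spec()` helper dispatches on the argument type. A minimal demonstration of the difference:

```python
import importlib.machinery
import importlib.util
import os
import tempfile

d = tempfile.mkdtemp()
with open(os.path.join(d, "not_on_sys_path.py"), "w") as fh:
    fh.write("x = 1\n")

# Absolute name, so the second argument is ignored and sys.path is searched:
print(importlib.util.find_spec("not_on_sys_path", [d]))  # -> None
# PathFinder searches exactly the directories it is given:
print(importlib.machinery.PathFinder().find_spec("not_on_sys_path", [d]))  # -> ModuleSpec
```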
{"golden_diff": "diff --git a/setuptools/_imp.py b/setuptools/_imp.py\n--- a/setuptools/_imp.py\n+++ b/setuptools/_imp.py\n@@ -17,9 +17,18 @@\n PY_FROZEN = 7\n \n \n+def find_spec(module, paths):\n+ finder = (\n+ importlib.machinery.PathFinder().find_spec\n+ if isinstance(paths, list) else\n+ importlib.util.find_spec\n+ )\n+ return finder(module, paths)\n+\n+\n def find_module(module, paths=None):\n \"\"\"Just like 'imp.find_module()', but with package support\"\"\"\n- spec = importlib.util.find_spec(module, paths)\n+ spec = find_spec(module, paths)\n if spec is None:\n raise ImportError(\"Can't find %s\" % module)\n if not spec.has_location and hasattr(spec, 'submodule_search_locations'):\n@@ -60,14 +69,14 @@\n \n \n def get_frozen_object(module, paths=None):\n- spec = importlib.util.find_spec(module, paths)\n+ spec = find_spec(module, paths)\n if not spec:\n raise ImportError(\"Can't find %s\" % module)\n return spec.loader.get_code(module)\n \n \n def get_module(module, paths, info):\n- spec = importlib.util.find_spec(module, paths)\n+ spec = find_spec(module, paths)\n if not spec:\n raise ImportError(\"Can't find %s\" % module)\n return module_from_spec(spec)\n", "issue": "TestDepends.testRequire regression in 41.6.0 (py3)\nWhile trying to build the new release of setuptools, I get the following test failure:\r\n\r\n```\r\n==================================================================== FAILURES =====================================================================\r\n_____________________________________________________________ TestDepends.testRequire _____________________________________________________________\r\n\r\nself = <setuptools.tests.test_setuptools.TestDepends object at 0x7fbfae31d710>\r\n\r\n @needs_bytecode\r\n def testRequire(self):\r\n req = Require('Json', '1.0.3', 'json')\r\n \r\n assert req.name == 'Json'\r\n assert req.module == 'json'\r\n assert req.requested_version == '1.0.3'\r\n assert req.attribute == '__version__'\r\n assert req.full_name() == 'Json-1.0.3'\r\n \r\n from json import __version__\r\n assert req.get_version() == __version__\r\n assert req.version_ok('1.0.9')\r\n assert not req.version_ok('0.9.1')\r\n assert not req.version_ok('unknown')\r\n \r\n assert req.is_present()\r\n assert req.is_current()\r\n \r\n req = Require('Json 3000', '03000', 'json', format=LooseVersion)\r\n assert req.is_present()\r\n assert not req.is_current()\r\n assert not req.version_ok('unknown')\r\n \r\n req = Require('Do-what-I-mean', '1.0', 'd-w-i-m')\r\n assert not req.is_present()\r\n assert not req.is_current()\r\n \r\n req = Require('Tests', None, 'tests', homepage=\"http://example.com\")\r\n assert req.format is None\r\n assert req.attribute is None\r\n assert req.requested_version is None\r\n assert req.full_name() == 'Tests'\r\n assert req.homepage == 'http://example.com'\r\n \r\n from setuptools.tests import __path__\r\n paths = [os.path.dirname(p) for p in __path__]\r\n> assert req.is_present(paths)\r\nE AssertionError: assert False\r\nE + where False = <bound method Require.is_present of <setuptools.depends.Require object at 0x7fbfae0d0b38>>(['/tmp/portage/dev-python/setuptools-41.6.0/work/setuptools-41.6.0-python3_5/setuptools'])\r\nE + where <bound method Require.is_present of <setuptools.depends.Require object at 0x7fbfae0d0b38>> = <setuptools.depends.Require object at 0x7fbfae0d0b38>.is_present\r\n\r\nsetuptools/tests/test_setuptools.py:120: AssertionError\r\n```\r\n\r\nI can reproduce it reliably with at least pypy3.6 (7.2.0) & python3.5 (3.5.7). 
I haven't tested other versions yet.\r\n\r\nFull build log: [dev-python:setuptools-41.6.0:20191030-083347.log](https://github.com/pypa/setuptools/files/3787797/dev-python.setuptools-41.6.0.20191030-083347.log)\r\n\r\n\n", "before_files": [{"content": "\"\"\"\nRe-implementation of find_module and get_frozen_object\nfrom the deprecated imp module.\n\"\"\"\n\nimport os\nimport importlib.util\nimport importlib.machinery\n\nfrom .py34compat import module_from_spec\n\n\nPY_SOURCE = 1\nPY_COMPILED = 2\nC_EXTENSION = 3\nC_BUILTIN = 6\nPY_FROZEN = 7\n\n\ndef find_module(module, paths=None):\n \"\"\"Just like 'imp.find_module()', but with package support\"\"\"\n spec = importlib.util.find_spec(module, paths)\n if spec is None:\n raise ImportError(\"Can't find %s\" % module)\n if not spec.has_location and hasattr(spec, 'submodule_search_locations'):\n spec = importlib.util.spec_from_loader('__init__.py', spec.loader)\n\n kind = -1\n file = None\n static = isinstance(spec.loader, type)\n if spec.origin == 'frozen' or static and issubclass(\n spec.loader, importlib.machinery.FrozenImporter):\n kind = PY_FROZEN\n path = None # imp compabilty\n suffix = mode = '' # imp compability\n elif spec.origin == 'built-in' or static and issubclass(\n spec.loader, importlib.machinery.BuiltinImporter):\n kind = C_BUILTIN\n path = None # imp compabilty\n suffix = mode = '' # imp compability\n elif spec.has_location:\n path = spec.origin\n suffix = os.path.splitext(path)[1]\n mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb'\n\n if suffix in importlib.machinery.SOURCE_SUFFIXES:\n kind = PY_SOURCE\n elif suffix in importlib.machinery.BYTECODE_SUFFIXES:\n kind = PY_COMPILED\n elif suffix in importlib.machinery.EXTENSION_SUFFIXES:\n kind = C_EXTENSION\n\n if kind in {PY_SOURCE, PY_COMPILED}:\n file = open(path, mode)\n else:\n path = None\n suffix = mode = ''\n\n return file, path, (suffix, mode, kind)\n\n\ndef get_frozen_object(module, paths=None):\n spec = importlib.util.find_spec(module, paths)\n if not spec:\n raise ImportError(\"Can't find %s\" % module)\n return spec.loader.get_code(module)\n\n\ndef get_module(module, paths, info):\n spec = importlib.util.find_spec(module, paths)\n if not spec:\n raise ImportError(\"Can't find %s\" % module)\n return module_from_spec(spec)\n", "path": "setuptools/_imp.py"}]}
num_tokens_prompt: 1934
num_tokens_diff: 324
problem_id: gh_patches_debug_7219
source: rasdani/github-patches
task_type: git_diff
in_source_id: spack__spack-18478
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> nn-c uses invalid self.compiler.pic_flag? (breaks nn-c build, via elmerfem build) These lines fail, because there is no such member, and looking at other packages, it seems that flags like ``` self.compiler.cc_pic_flag self.compiler.cxx_pic_flag self.compiler.fc_pic_flag #or ? self.compiler.f77_pic_flag ``` would be appropriate. https://github.com/spack/spack/blob/601f97d8a50b1840df9b056a34256b6dd2b54ce3/var/spack/repos/builtin/packages/nn-c/package.py#L29-L31 I triggered this on recent `devel` (today) by ``` spack install --test=root elmerfem@devel +mpi +hypre +lua +mumps +openmp +scatt2d +trilinos +zoltan ``` </issue> <code> [start of var/spack/repos/builtin/packages/nn-c/package.py] 1 # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other 2 # Spack Project Developers. See the top-level COPYRIGHT file for details. 3 # 4 # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 6 from spack import * 7 8 9 class NnC(AutotoolsPackage): 10 """nn: Natural Neighbours interpolation. nn is a C code 11 for Natural Neighbours interpolation of 2D scattered data. 12 It provides a C library and a command line utility nnbathy.""" 13 14 homepage = "https://github.com/sakov/nn-c" 15 git = "https://github.com/sakov/nn-c.git" 16 17 version('master', branch='master') 18 version('1.86.2', commit='343c7784d38d3270d75d450569fc0b64767c37e9') 19 20 variant('pic', default=True, 21 description='Produce position-independent code (for shared libs)') 22 23 configure_directory = 'nn' 24 25 def configure_args(self): 26 args = [] 27 if '+pic' in self.spec: 28 args.extend([ 29 'CFLAGS={0}'.format(self.compiler.pic_flag), 30 'CXXFLAGS={0}'.format(self.compiler.pic_flag), 31 'FFLAGS={0}'.format(self.compiler.pic_flag) 32 ]) 33 return args 34 [end of var/spack/repos/builtin/packages/nn-c/package.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/var/spack/repos/builtin/packages/nn-c/package.py b/var/spack/repos/builtin/packages/nn-c/package.py --- a/var/spack/repos/builtin/packages/nn-c/package.py +++ b/var/spack/repos/builtin/packages/nn-c/package.py @@ -26,8 +26,8 @@ args = [] if '+pic' in self.spec: args.extend([ - 'CFLAGS={0}'.format(self.compiler.pic_flag), - 'CXXFLAGS={0}'.format(self.compiler.pic_flag), - 'FFLAGS={0}'.format(self.compiler.pic_flag) + 'CFLAGS={0}'.format(self.compiler.cc_pic_flag), + 'CXXFLAGS={0}'.format(self.compiler.cxx_pic_flag), + 'FFLAGS={0}'.format(self.compiler.fc_pic_flag) ]) return args
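The fix itself is just the rename to the per-language attributes (`cc_pic_flag`, `cxx_pic_flag`, `fc_pic_flag`) that replaced the removed `compiler.pic_flag`. For packages that must also build against older Spack releases, a defensive variant is possible — this is an illustrative sketch, not part of the upstream change, and the `-fPIC` fallback is an assumption about GCC-compatible compilers:

```python
class _FakeCompiler:  # stand-in so the sketch runs outside Spack
    cc_pic_flag = cxx_pic_flag = "-fPIC"

def pic_env_args(compiler):
    """Resolve per-language PIC flags, tolerating pre-split compiler objects."""
    return [
        "CFLAGS={0}".format(getattr(compiler, "cc_pic_flag", "-fPIC")),
        "CXXFLAGS={0}".format(getattr(compiler, "cxx_pic_flag", "-fPIC")),
        "FFLAGS={0}".format(getattr(compiler, "fc_pic_flag", "-fPIC")),
    ]

print(pic_env_args(_FakeCompiler()))  # fc_pic_flag takes the fallback here
```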
{"golden_diff": "diff --git a/var/spack/repos/builtin/packages/nn-c/package.py b/var/spack/repos/builtin/packages/nn-c/package.py\n--- a/var/spack/repos/builtin/packages/nn-c/package.py\n+++ b/var/spack/repos/builtin/packages/nn-c/package.py\n@@ -26,8 +26,8 @@\n args = []\n if '+pic' in self.spec:\n args.extend([\n- 'CFLAGS={0}'.format(self.compiler.pic_flag),\n- 'CXXFLAGS={0}'.format(self.compiler.pic_flag),\n- 'FFLAGS={0}'.format(self.compiler.pic_flag)\n+ 'CFLAGS={0}'.format(self.compiler.cc_pic_flag),\n+ 'CXXFLAGS={0}'.format(self.compiler.cxx_pic_flag),\n+ 'FFLAGS={0}'.format(self.compiler.fc_pic_flag)\n ])\n return args\n", "issue": "nn-c uses invalid self.compiler.pic_flag? (breaks nn-c build, via elmerfem build)\nThese lines fail, because there is no such member, and looking at other packages, it seems that flags like\r\n```\r\nself.compiler.cc_pic_flag\r\nself.compiler.cxx_pic_flag\r\nself.compiler.fc_pic_flag\r\n#or ?\r\nself.compiler.f77_pic_flag\r\n```\r\nwould be appropriate.\r\n\r\nhttps://github.com/spack/spack/blob/601f97d8a50b1840df9b056a34256b6dd2b54ce3/var/spack/repos/builtin/packages/nn-c/package.py#L29-L31\r\n\r\nI triggered this on recent `devel` (today) by\r\n```\r\nspack install --test=root elmerfem@devel +mpi +hypre +lua +mumps +openmp +scatt2d +trilinos +zoltan\r\n```\n", "before_files": [{"content": "# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other\n# Spack Project Developers. See the top-level COPYRIGHT file for details.\n#\n# SPDX-License-Identifier: (Apache-2.0 OR MIT)\n\nfrom spack import *\n\n\nclass NnC(AutotoolsPackage):\n \"\"\"nn: Natural Neighbours interpolation. nn is a C code\n for Natural Neighbours interpolation of 2D scattered data.\n It provides a C library and a command line utility nnbathy.\"\"\"\n\n homepage = \"https://github.com/sakov/nn-c\"\n git = \"https://github.com/sakov/nn-c.git\"\n\n version('master', branch='master')\n version('1.86.2', commit='343c7784d38d3270d75d450569fc0b64767c37e9')\n\n variant('pic', default=True,\n description='Produce position-independent code (for shared libs)')\n\n configure_directory = 'nn'\n\n def configure_args(self):\n args = []\n if '+pic' in self.spec:\n args.extend([\n 'CFLAGS={0}'.format(self.compiler.pic_flag),\n 'CXXFLAGS={0}'.format(self.compiler.pic_flag),\n 'FFLAGS={0}'.format(self.compiler.pic_flag)\n ])\n return args\n", "path": "var/spack/repos/builtin/packages/nn-c/package.py"}]}
num_tokens_prompt: 1127
num_tokens_diff: 188
problem_id: gh_patches_debug_392
source: rasdani/github-patches
task_type: git_diff
in_source_id: Nitrate__Nitrate-527
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Remove dependency mock Use `unittest.mock` instead. </issue> <code> [start of setup.py] 1 # -*- coding: utf-8 -*- 2 3 from setuptools import setup, find_packages 4 5 6 with open('VERSION.txt', 'r') as f: 7 pkg_version = f.read().strip() 8 9 10 def get_long_description(): 11 with open('README.rst', 'r') as f: 12 return f.read() 13 14 15 install_requires = [ 16 'beautifulsoup4 >= 4.1.1', 17 'django >= 2.0,<3.0', 18 'django-contrib-comments == 1.9.1', 19 'django-tinymce == 2.7.0', 20 'django-uuslug == 1.1.8', 21 'html2text', 22 'odfpy >= 0.9.6', 23 'python-bugzilla', 24 'xmltodict', 25 'kobo == 0.9.0' 26 ] 27 28 extras_require = { 29 'mysql': ['mysqlclient >= 1.2.3'], 30 'pgsql': ['psycopg2 == 2.7.5'], 31 32 # Required for tcms.auth.backends.KerberosBackend 33 'krbauth': [ 34 'kerberos == 1.2.5' 35 ], 36 37 # Packages for building documentation 38 'docs': [ 39 'Sphinx >= 1.1.2', 40 'sphinx_rtd_theme', 41 ], 42 43 # Necessary packages for running tests 44 'tests': [ 45 'beautifulsoup4', 46 'coverage', 47 'factory_boy', 48 'flake8', 49 'mock', 50 'pytest', 51 'pytest-cov', 52 'pytest-django', 53 ], 54 55 # Contain tools that assists the development 56 'devtools': [ 57 'django-debug-toolbar', 58 'tox', 59 'django-extensions', 60 'pygraphviz', 61 ], 62 63 # Required packages required to run async tasks 64 'async': [ 65 'celery == 4.2.0', 66 ], 67 68 'multiauth': [ 69 'social-auth-app-django == 3.1.0', 70 ] 71 } 72 73 setup( 74 name='nitrate-tcms', 75 version=pkg_version, 76 description='A full-featured Test Case Management System', 77 long_description=get_long_description(), 78 author='Nitrate Team', 79 maintainer='Chenxiong Qi', 80 maintainer_email='[email protected]', 81 url='https://github.com/Nitrate/Nitrate/', 82 license='GPLv2+', 83 keywords='test case', 84 install_requires=install_requires, 85 extras_require=extras_require, 86 python_requires='>=3.6', 87 package_dir={'': 'src'}, 88 packages=find_packages('src', exclude=['test*']), 89 include_package_data=True, 90 zip_safe=False, 91 classifiers=[ 92 'Framework :: Django', 93 'Framework :: Django :: 2.0', 94 'Framework :: Django :: 2.1', 95 'Framework :: Django :: 2.2', 96 'Intended Audience :: Developers', 97 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)', 98 'Programming Language :: Python :: 3', 99 'Programming Language :: Python :: 3.6', 100 'Programming Language :: Python :: 3.7', 101 'Programming Language :: Python :: 3 :: Only', 102 'Topic :: Software Development :: Quality Assurance', 103 'Topic :: Software Development :: Testing', 104 ], 105 project_urls={ 106 'Issue Tracker': 'https://github.com/Nitrate/Nitrate/issues', 107 'Source Code': 'https://github.com/Nitrate/Nitrate', 108 'Documentation': 'https://nitrate.readthedocs.io/', 109 }, 110 ) 111 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -46,7 +46,6 @@ 'coverage', 'factory_boy', 'flake8', - 'mock', 'pytest', 'pytest-cov', 'pytest-django',
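Dropping the `mock` requirement works because the same API has shipped in the standard library as `unittest.mock` since Python 3.3, and this `setup.py` already declares `python_requires='>=3.6'`. Call sites change only in their import line; a minimal sketch of the common usage (`Mock` and `patch` are illustrative picks, not a survey of Nitrate's tests):

```python
import os
from unittest.mock import Mock, patch  # drop-in replacement for `from mock import ...`

stub = Mock(return_value=42)
assert stub() == 42 and stub.call_count == 1

with patch("os.getcwd", return_value="/stubbed"):
    assert os.getcwd() == "/stubbed"  # patched only inside the context manager
```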
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -46,7 +46,6 @@\n 'coverage',\n 'factory_boy',\n 'flake8',\n- 'mock',\n 'pytest',\n 'pytest-cov',\n 'pytest-django',\n", "issue": "Remove dependency mock\nUse `unittest.mock` instead.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nfrom setuptools import setup, find_packages\n\n\nwith open('VERSION.txt', 'r') as f:\n pkg_version = f.read().strip()\n\n\ndef get_long_description():\n with open('README.rst', 'r') as f:\n return f.read()\n\n\ninstall_requires = [\n 'beautifulsoup4 >= 4.1.1',\n 'django >= 2.0,<3.0',\n 'django-contrib-comments == 1.9.1',\n 'django-tinymce == 2.7.0',\n 'django-uuslug == 1.1.8',\n 'html2text',\n 'odfpy >= 0.9.6',\n 'python-bugzilla',\n 'xmltodict',\n 'kobo == 0.9.0'\n]\n\nextras_require = {\n 'mysql': ['mysqlclient >= 1.2.3'],\n 'pgsql': ['psycopg2 == 2.7.5'],\n\n # Required for tcms.auth.backends.KerberosBackend\n 'krbauth': [\n 'kerberos == 1.2.5'\n ],\n\n # Packages for building documentation\n 'docs': [\n 'Sphinx >= 1.1.2',\n 'sphinx_rtd_theme',\n ],\n\n # Necessary packages for running tests\n 'tests': [\n 'beautifulsoup4',\n 'coverage',\n 'factory_boy',\n 'flake8',\n 'mock',\n 'pytest',\n 'pytest-cov',\n 'pytest-django',\n ],\n\n # Contain tools that assists the development\n 'devtools': [\n 'django-debug-toolbar',\n 'tox',\n 'django-extensions',\n 'pygraphviz',\n ],\n\n # Required packages required to run async tasks\n 'async': [\n 'celery == 4.2.0',\n ],\n\n 'multiauth': [\n 'social-auth-app-django == 3.1.0',\n ]\n}\n\nsetup(\n name='nitrate-tcms',\n version=pkg_version,\n description='A full-featured Test Case Management System',\n long_description=get_long_description(),\n author='Nitrate Team',\n maintainer='Chenxiong Qi',\n maintainer_email='[email protected]',\n url='https://github.com/Nitrate/Nitrate/',\n license='GPLv2+',\n keywords='test case',\n install_requires=install_requires,\n extras_require=extras_require,\n python_requires='>=3.6',\n package_dir={'': 'src'},\n packages=find_packages('src', exclude=['test*']),\n include_package_data=True,\n zip_safe=False,\n classifiers=[\n 'Framework :: Django',\n 'Framework :: Django :: 2.0',\n 'Framework :: Django :: 2.1',\n 'Framework :: Django :: 2.2',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Software Development :: Quality Assurance',\n 'Topic :: Software Development :: Testing',\n ],\n project_urls={\n 'Issue Tracker': 'https://github.com/Nitrate/Nitrate/issues',\n 'Source Code': 'https://github.com/Nitrate/Nitrate',\n 'Documentation': 'https://nitrate.readthedocs.io/',\n },\n)\n", "path": "setup.py"}]}
num_tokens_prompt: 1552
num_tokens_diff: 69
problem_id: gh_patches_debug_9398
source: rasdani/github-patches
task_type: git_diff
in_source_id: saulpw__visidata-1890
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> fixed width saver truncates data if columns are not fully expanded **Small description** If you save or syscopy a table as `fixed` format, and the visible column width is less than the width of the data in the column, the data is truncated. Also, the resulting file is not a valid fixed width format file as the columns are not aligned with the headers. **Expected result** Saving or copying a table when the columns are not fully expanded should yield the same result as when the columns are expanded. **Actual result with screenshot** ![image](https://user-images.githubusercontent.com/2499066/231616002-553a1a46-0b56-4950-b3eb-90507a77b823.png) **Steps to reproduce with sample data and a .vd** [test-vd-fixed.zip](https://github.com/saulpw/visidata/files/11217144/test-vd-fixed.zip) **Additional context** saul.pw/VisiData v2.11 </issue> <code> [start of visidata/loaders/fixed_width.py] 1 2 from visidata import VisiData, vd, Sheet, Column, Progress, SequenceSheet 3 4 5 vd.option('fixed_rows', 1000, 'number of rows to check for fixed width columns') 6 vd.option('fixed_maxcols', 0, 'max number of fixed-width columns to create (0 is no max)') 7 8 @VisiData.api 9 def open_fixed(vd, p): 10 return FixedWidthColumnsSheet(p.name, source=p, headerlines=[]) 11 12 class FixedWidthColumn(Column): 13 def __init__(self, name, i, j, **kwargs): 14 super().__init__(name, **kwargs) 15 self.i, self.j = i, j 16 17 def calcValue(self, row): 18 return row[0][self.i:self.j] 19 20 def putValue(self, row, value): 21 value = str(value)[:self.j-self.i] 22 j = self.j or len(row) 23 row[0] = row[0][:self.i] + '%-*s' % (j-self.i, value) + row[0][self.j:] 24 25 def columnize(rows): 26 'Generate (i,j) indexes for fixed-width columns found in rows' 27 28 ## find all character columns that are not spaces ever 29 allNonspaces = set() 30 for r in rows: 31 for i, ch in enumerate(r): 32 if not ch.isspace(): 33 allNonspaces.add(i) 34 35 colstart = 0 36 prev = 0 37 38 # collapse fields 39 for i in allNonspaces: 40 if i > prev+1: 41 yield colstart, i 42 colstart = i 43 prev = i 44 45 yield colstart, prev+1 # final column gets rest of line 46 47 48 class FixedWidthColumnsSheet(SequenceSheet): 49 rowtype = 'lines' # rowdef: [line] (wrapping in list makes it unique and modifiable) 50 def addRow(self, row, index=None): 51 Sheet.addRow(self, row, index=index) 52 53 def iterload(self): 54 itsource = iter(self.source) 55 56 # compute fixed width columns from first fixed_rows lines 57 maxcols = self.options.fixed_maxcols 58 self.columns = [] 59 fixedRows = list([x] for x in self.optlines(itsource, 'fixed_rows')) 60 for i, j in columnize(list(r[0] for r in fixedRows)): 61 if maxcols and self.nCols >= maxcols-1: 62 self.addColumn(FixedWidthColumn('', i, None)) 63 break 64 else: 65 self.addColumn(FixedWidthColumn('', i, j)) 66 67 yield from fixedRows 68 69 self.setColNames(self.headerlines) 70 71 yield from ([line] for line in itsource) 72 73 def setCols(self, headerlines): 74 self.headerlines = headerlines 75 76 77 @VisiData.api 78 def save_fixed(vd, p, *vsheets): 79 with p.open(mode='w', encoding=vsheets[0].options.save_encoding) as fp: 80 for sheet in vsheets: 81 if len(vsheets) > 1: 82 fp.write('%s\n\n' % sheet.name) 83 84 widths = {} # Column -> width:int 85 # headers 86 for col in Progress(sheet.visibleCols, gerund='sizing'): 87 maxWidth = col.getMaxWidth(sheet.rows) 88 widths[col] = col.width if col.width >= maxWidth else 
sheet.options.default_width or maxWidth 89 fp.write(('{0:%s} ' % widths[col]).format(col.name)) 90 fp.write('\n') 91 92 # rows 93 with Progress(gerund='saving'): 94 for dispvals in sheet.iterdispvals(format=True): 95 for col, val in dispvals.items(): 96 fp.write(('{0:%s%s.%s} ' % ('>' if vd.isNumeric(col) else '<', widths[col], widths[col])).format(val)) 97 fp.write('\n') 98 99 vd.status('%s save finished' % p) 100 [end of visidata/loaders/fixed_width.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/visidata/loaders/fixed_width.py b/visidata/loaders/fixed_width.py --- a/visidata/loaders/fixed_width.py +++ b/visidata/loaders/fixed_width.py @@ -84,8 +84,7 @@ widths = {} # Column -> width:int # headers for col in Progress(sheet.visibleCols, gerund='sizing'): - maxWidth = col.getMaxWidth(sheet.rows) - widths[col] = col.width if col.width >= maxWidth else sheet.options.default_width or maxWidth + widths[col] = col.getMaxWidth(sheet.rows) #1849 fp.write(('{0:%s} ' % widths[col]).format(col.name)) fp.write('\n')
{"golden_diff": "diff --git a/visidata/loaders/fixed_width.py b/visidata/loaders/fixed_width.py\n--- a/visidata/loaders/fixed_width.py\n+++ b/visidata/loaders/fixed_width.py\n@@ -84,8 +84,7 @@\n widths = {} # Column -> width:int\n # headers\n for col in Progress(sheet.visibleCols, gerund='sizing'):\n- maxWidth = col.getMaxWidth(sheet.rows)\n- widths[col] = col.width if col.width >= maxWidth else sheet.options.default_width or maxWidth\n+ widths[col] = col.getMaxWidth(sheet.rows) #1849 \n fp.write(('{0:%s} ' % widths[col]).format(col.name))\n fp.write('\\n')\n", "issue": "fixed width saver truncates data if columns are not fully expanded\n**Small description**\r\n\r\nIf you save or syscopy a table as `fixed` format, and the visible column width is less than the width of the data in the column, the data is truncated. Also, the resulting file is not a valid fixed width format file as the columns are not aligned with the headers.\r\n\r\n**Expected result**\r\n\r\nSaving or copying a table when the columns are not fully expanded should yield the same result as when the columns are expanded.\r\n\r\n**Actual result with screenshot**\r\n![image](https://user-images.githubusercontent.com/2499066/231616002-553a1a46-0b56-4950-b3eb-90507a77b823.png)\r\n\r\n**Steps to reproduce with sample data and a .vd**\r\n[test-vd-fixed.zip](https://github.com/saulpw/visidata/files/11217144/test-vd-fixed.zip)\r\n\r\n**Additional context**\r\nsaul.pw/VisiData v2.11\r\n\n", "before_files": [{"content": "\nfrom visidata import VisiData, vd, Sheet, Column, Progress, SequenceSheet\n\n\nvd.option('fixed_rows', 1000, 'number of rows to check for fixed width columns')\nvd.option('fixed_maxcols', 0, 'max number of fixed-width columns to create (0 is no max)')\n\[email protected]\ndef open_fixed(vd, p):\n return FixedWidthColumnsSheet(p.name, source=p, headerlines=[])\n\nclass FixedWidthColumn(Column):\n def __init__(self, name, i, j, **kwargs):\n super().__init__(name, **kwargs)\n self.i, self.j = i, j\n\n def calcValue(self, row):\n return row[0][self.i:self.j]\n\n def putValue(self, row, value):\n value = str(value)[:self.j-self.i]\n j = self.j or len(row)\n row[0] = row[0][:self.i] + '%-*s' % (j-self.i, value) + row[0][self.j:]\n\ndef columnize(rows):\n 'Generate (i,j) indexes for fixed-width columns found in rows'\n\n ## find all character columns that are not spaces ever\n allNonspaces = set()\n for r in rows:\n for i, ch in enumerate(r):\n if not ch.isspace():\n allNonspaces.add(i)\n\n colstart = 0\n prev = 0\n\n # collapse fields\n for i in allNonspaces:\n if i > prev+1:\n yield colstart, i\n colstart = i\n prev = i\n\n yield colstart, prev+1 # final column gets rest of line\n\n\nclass FixedWidthColumnsSheet(SequenceSheet):\n rowtype = 'lines' # rowdef: [line] (wrapping in list makes it unique and modifiable)\n def addRow(self, row, index=None):\n Sheet.addRow(self, row, index=index)\n\n def iterload(self):\n itsource = iter(self.source)\n\n # compute fixed width columns from first fixed_rows lines\n maxcols = self.options.fixed_maxcols\n self.columns = []\n fixedRows = list([x] for x in self.optlines(itsource, 'fixed_rows'))\n for i, j in columnize(list(r[0] for r in fixedRows)):\n if maxcols and self.nCols >= maxcols-1:\n self.addColumn(FixedWidthColumn('', i, None))\n break\n else:\n self.addColumn(FixedWidthColumn('', i, j))\n\n yield from fixedRows\n\n self.setColNames(self.headerlines)\n\n yield from ([line] for line in itsource)\n\n def setCols(self, headerlines):\n self.headerlines = headerlines\n\n\[email 
protected]\ndef save_fixed(vd, p, *vsheets):\n with p.open(mode='w', encoding=vsheets[0].options.save_encoding) as fp:\n for sheet in vsheets:\n if len(vsheets) > 1:\n fp.write('%s\\n\\n' % sheet.name)\n\n widths = {} # Column -> width:int\n # headers\n for col in Progress(sheet.visibleCols, gerund='sizing'):\n maxWidth = col.getMaxWidth(sheet.rows)\n widths[col] = col.width if col.width >= maxWidth else sheet.options.default_width or maxWidth\n fp.write(('{0:%s} ' % widths[col]).format(col.name))\n fp.write('\\n')\n\n # rows\n with Progress(gerund='saving'):\n for dispvals in sheet.iterdispvals(format=True):\n for col, val in dispvals.items():\n fp.write(('{0:%s%s.%s} ' % ('>' if vd.isNumeric(col) else '<', widths[col], widths[col])).format(val))\n fp.write('\\n')\n\n vd.status('%s save finished' % p)\n", "path": "visidata/loaders/fixed_width.py"}]}
1,824
165
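A minimal sketch (not VisiData code) of the sizing rule the golden diff above adopts: derive each output column's width from the data itself, never from the current on-screen column width. `headers` and `rows` here are hypothetical stand-ins for the sheet's visible columns and rows:

```python
def fixed_width_lines(headers, rows):
    # Width of a column = max over its header text and every formatted cell.
    widths = {
        h: max([len(h)] + [len(str(r.get(h, ""))) for r in rows])
        for h in headers
    }
    out = ["".join(f"{h:<{widths[h]}} " for h in headers)]
    for r in rows:
        out.append("".join(f"{str(r.get(h, '')):<{widths[h]}} " for h in headers))
    return out

if __name__ == "__main__":
    rows = [{"a": "short", "b": 123456789}, {"a": "a much longer value", "b": 7}]
    print("\n".join(fixed_width_lines(["a", "b"], rows)))
```

Because widths come from `getMaxWidth` over the rows rather than `col.width`, the saved file stays a valid fixed-width file even when the on-screen columns are collapsed.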
gh_patches_debug_4240
rasdani/github-patches
git_diff
liqd__adhocracy4-210
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Keep html time field optional even if a DateTimeField is set to be required Time is optional in the backend but the html input field still gets the required attribute if the the DateTimeField is initializes with `required=True` The time Widget should always be initialized without required. </issue> <code> [start of adhocracy4/forms/widgets.py] 1 import datetime 2 3 from django.contrib.staticfiles.storage import staticfiles_storage 4 from django.forms import widgets as form_widgets 5 from django.template.loader import render_to_string 6 from django.utils.timezone import localtime 7 8 9 class DateTimeInput(form_widgets.SplitDateTimeWidget): 10 def __init__(self, time_label='', time_default=None, *args, **kwargs): 11 super().__init__(*args, **kwargs) 12 self.time_label = time_label 13 self.time_default = time_default or datetime.time(hour=0, minute=0) 14 15 class Media: 16 js = ( 17 staticfiles_storage.url('datepicker.js'), 18 ) 19 css = {'all': [ 20 staticfiles_storage.url('datepicker.css'), 21 ]} 22 23 def render(self, name, value, attrs=None): 24 date_attrs = self.build_attrs(attrs) 25 date_attrs.update({ 26 'class': 'datepicker', 27 'placeholder': self.widgets[0].format_value(datetime.date.today()), 28 'id': attrs['id'] + '_date' 29 }) 30 time_attrs = self.build_attrs(attrs) 31 time_attrs.update({ 32 'class': 'timepicker', 33 'placeholder': self.widgets[1].format_value( 34 self.get_default_time()), 35 'id': attrs['id'] + '_time' 36 }) 37 38 if isinstance(value, datetime.datetime): 39 value = localtime(value) 40 date = value.date() 41 time = value.time() 42 else: 43 # value's just a list in case of an error 44 date = value[0] if value else None 45 time = value[1] if value else None 46 47 return render_to_string( 48 'a4forms/datetime_input.html', { 49 'date': self.widgets[0].render( 50 name + '_0', 51 date, 52 date_attrs 53 ), 54 'time': self.widgets[1].render( 55 name + '_1', 56 time, 57 time_attrs 58 ), 59 'time_label': { 60 'label': self.time_label, 61 'id_for_label': attrs['id'] + '_time' 62 }, 63 }) 64 65 def id_for_label(self, id_): 66 if id_: 67 id_ += '_date' 68 return id_ 69 70 def get_default_time(self): 71 time_widget = self.widgets[1] 72 73 if not self.time_default: 74 return time_widget.format_value(datetime.time(hour=0, minute=0)) 75 elif isinstance(self.time_default, (datetime.time, datetime.datetime)): 76 return time_widget.format_value(self.time_default) 77 else: 78 return self.time_default 79 [end of adhocracy4/forms/widgets.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/adhocracy4/forms/widgets.py b/adhocracy4/forms/widgets.py --- a/adhocracy4/forms/widgets.py +++ b/adhocracy4/forms/widgets.py @@ -32,7 +32,8 @@ 'class': 'timepicker', 'placeholder': self.widgets[1].format_value( self.get_default_time()), - 'id': attrs['id'] + '_time' + 'id': attrs['id'] + '_time', + 'required': False }) if isinstance(value, datetime.datetime):
{"golden_diff": "diff --git a/adhocracy4/forms/widgets.py b/adhocracy4/forms/widgets.py\n--- a/adhocracy4/forms/widgets.py\n+++ b/adhocracy4/forms/widgets.py\n@@ -32,7 +32,8 @@\n 'class': 'timepicker',\n 'placeholder': self.widgets[1].format_value(\n self.get_default_time()),\n- 'id': attrs['id'] + '_time'\n+ 'id': attrs['id'] + '_time',\n+ 'required': False\n })\n \n if isinstance(value, datetime.datetime):\n", "issue": "Keep html time field optional even if a DateTimeField is set to be required\nTime is optional in the backend but the html input field still gets the required attribute if the the DateTimeField is initializes with `required=True`\r\nThe time Widget should always be initialized without required.\n", "before_files": [{"content": "import datetime\n\nfrom django.contrib.staticfiles.storage import staticfiles_storage\nfrom django.forms import widgets as form_widgets\nfrom django.template.loader import render_to_string\nfrom django.utils.timezone import localtime\n\n\nclass DateTimeInput(form_widgets.SplitDateTimeWidget):\n def __init__(self, time_label='', time_default=None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.time_label = time_label\n self.time_default = time_default or datetime.time(hour=0, minute=0)\n\n class Media:\n js = (\n staticfiles_storage.url('datepicker.js'),\n )\n css = {'all': [\n staticfiles_storage.url('datepicker.css'),\n ]}\n\n def render(self, name, value, attrs=None):\n date_attrs = self.build_attrs(attrs)\n date_attrs.update({\n 'class': 'datepicker',\n 'placeholder': self.widgets[0].format_value(datetime.date.today()),\n 'id': attrs['id'] + '_date'\n })\n time_attrs = self.build_attrs(attrs)\n time_attrs.update({\n 'class': 'timepicker',\n 'placeholder': self.widgets[1].format_value(\n self.get_default_time()),\n 'id': attrs['id'] + '_time'\n })\n\n if isinstance(value, datetime.datetime):\n value = localtime(value)\n date = value.date()\n time = value.time()\n else:\n # value's just a list in case of an error\n date = value[0] if value else None\n time = value[1] if value else None\n\n return render_to_string(\n 'a4forms/datetime_input.html', {\n 'date': self.widgets[0].render(\n name + '_0',\n date,\n date_attrs\n ),\n 'time': self.widgets[1].render(\n name + '_1',\n time,\n time_attrs\n ),\n 'time_label': {\n 'label': self.time_label,\n 'id_for_label': attrs['id'] + '_time'\n },\n })\n\n def id_for_label(self, id_):\n if id_:\n id_ += '_date'\n return id_\n\n def get_default_time(self):\n time_widget = self.widgets[1]\n\n if not self.time_default:\n return time_widget.format_value(datetime.time(hour=0, minute=0))\n elif isinstance(self.time_default, (datetime.time, datetime.datetime)):\n return time_widget.format_value(self.time_default)\n else:\n return self.time_default\n", "path": "adhocracy4/forms/widgets.py"}]}
1,275
123
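A minimal sketch of the pattern the diff above applies, assuming a configured Django project; the subclass name is hypothetical. Django drops boolean widget attributes whose value is `False`, so forcing `required: False` on the time subwidget keeps the rendered `<input>` optional even when the field itself is required:

```python
from django import forms

class OptionalTimeSplitWidget(forms.SplitDateTimeWidget):
    def get_context(self, name, value, attrs):
        context = super().get_context(name, value, attrs)
        # subwidgets[0] is the date input, subwidgets[1] the time input;
        # a False value tells Django to omit the boolean attribute entirely.
        context["widget"]["subwidgets"][1]["attrs"]["required"] = False
        return context
```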
gh_patches_debug_24120
rasdani/github-patches
git_diff
conan-io__conan-center-index-11233
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [request] kcov/40 ### Package Details * Package Name/Version: **kcov/40** * Changelog: **https://github.com/SimonKagstrom/kcov/blob/master/ChangeLog** Hello, Currently conan-center provides only 38 release, I would like to have latest release (40) also available. I'll provides a pull request. </issue> <code> [start of recipes/kcov/all/conanfile.py] 1 import os 2 from conans import ConanFile, CMake, tools 3 from conans.errors import ConanInvalidConfiguration 4 5 6 class KcovConan(ConanFile): 7 name = "kcov" 8 license = "GPL-2.0" 9 url = "https://github.com/conan-io/conan-center-index/" 10 homepage = "http://simonkagstrom.github.io/kcov/index.html" 11 description = "Code coverage tool for compiled programs, Python and Bash\ 12 which uses debugging information to collect and report data without\ 13 special compilation options" 14 topics = ("coverage", "linux", "debug") 15 settings = "os", "compiler", "build_type", "arch" 16 exports_sources = "CMakeLists.txt", "patches/**" 17 requires = ["zlib/1.2.11", 18 "libiberty/9.1.0", 19 "libcurl/7.64.1", 20 "elfutils/0.180"] 21 generators = "cmake" 22 _cmake = None 23 _source_subfolder = "source_subfolder" 24 _build_subfolder = "build_subfolder" 25 26 def configure(self): 27 if self.settings.os == "Windows": 28 raise ConanInvalidConfiguration( 29 "kcov can not be built on windows.") 30 31 def source(self): 32 tools.get(**self.conan_data["sources"][self.version]) 33 extracted_dir = self.name + "-" + self.version 34 os.rename(extracted_dir, self._source_subfolder) 35 36 def _patch_sources(self): 37 for patch in self.conan_data["patches"][self.version]: 38 tools.patch(**patch) 39 40 def _configure_cmake(self): 41 if self._cmake is not None: 42 return self._cmake 43 self._cmake = CMake(self) 44 self._cmake.configure(build_folder=self._build_subfolder) 45 return self._cmake 46 47 def build(self): 48 self._patch_sources() 49 cmake = self._configure_cmake() 50 cmake.build() 51 52 def package(self): 53 cmake = self._configure_cmake() 54 cmake.install() 55 tools.rmdir(os.path.join(self.package_folder, "share")) 56 self.copy("COPYING*", dst="licenses", src=self._source_subfolder) 57 58 def package_info(self): 59 bindir = os.path.join(self.package_folder, "bin") 60 self.output.info("Appending PATH environment variable: {}" 61 .format(bindir)) 62 self.env_info.PATH.append(bindir) 63 [end of recipes/kcov/all/conanfile.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/recipes/kcov/all/conanfile.py b/recipes/kcov/all/conanfile.py --- a/recipes/kcov/all/conanfile.py +++ b/recipes/kcov/all/conanfile.py @@ -1,8 +1,8 @@ import os -from conans import ConanFile, CMake, tools +from conan import ConanFile +from conans import CMake, tools from conans.errors import ConanInvalidConfiguration - class KcovConan(ConanFile): name = "kcov" license = "GPL-2.0" @@ -14,9 +14,9 @@ topics = ("coverage", "linux", "debug") settings = "os", "compiler", "build_type", "arch" exports_sources = "CMakeLists.txt", "patches/**" - requires = ["zlib/1.2.11", + requires = ["zlib/1.2.12", "libiberty/9.1.0", - "libcurl/7.64.1", + "libcurl/7.83.1", "elfutils/0.180"] generators = "cmake" _cmake = None @@ -60,3 +60,4 @@ self.output.info("Appending PATH environment variable: {}" .format(bindir)) self.env_info.PATH.append(bindir) + self.cpp_info.includedirs = []
{"golden_diff": "diff --git a/recipes/kcov/all/conanfile.py b/recipes/kcov/all/conanfile.py\n--- a/recipes/kcov/all/conanfile.py\n+++ b/recipes/kcov/all/conanfile.py\n@@ -1,8 +1,8 @@\n import os\n-from conans import ConanFile, CMake, tools\n+from conan import ConanFile\n+from conans import CMake, tools\n from conans.errors import ConanInvalidConfiguration\n \n-\n class KcovConan(ConanFile):\n name = \"kcov\"\n license = \"GPL-2.0\"\n@@ -14,9 +14,9 @@\n topics = (\"coverage\", \"linux\", \"debug\")\n settings = \"os\", \"compiler\", \"build_type\", \"arch\"\n exports_sources = \"CMakeLists.txt\", \"patches/**\"\n- requires = [\"zlib/1.2.11\",\n+ requires = [\"zlib/1.2.12\",\n \"libiberty/9.1.0\",\n- \"libcurl/7.64.1\",\n+ \"libcurl/7.83.1\",\n \"elfutils/0.180\"]\n generators = \"cmake\"\n _cmake = None\n@@ -60,3 +60,4 @@\n self.output.info(\"Appending PATH environment variable: {}\"\n .format(bindir))\n self.env_info.PATH.append(bindir)\n+ self.cpp_info.includedirs = []\n", "issue": "[request] kcov/40\n### Package Details\r\n * Package Name/Version: **kcov/40**\r\n * Changelog: **https://github.com/SimonKagstrom/kcov/blob/master/ChangeLog**\r\n\r\nHello,\r\n\r\nCurrently conan-center provides only 38 release, I would like to have latest release (40) also available.\r\nI'll provides a pull request.\r\n\n", "before_files": [{"content": "import os\nfrom conans import ConanFile, CMake, tools\nfrom conans.errors import ConanInvalidConfiguration\n\n\nclass KcovConan(ConanFile):\n name = \"kcov\"\n license = \"GPL-2.0\"\n url = \"https://github.com/conan-io/conan-center-index/\"\n homepage = \"http://simonkagstrom.github.io/kcov/index.html\"\n description = \"Code coverage tool for compiled programs, Python and Bash\\\n which uses debugging information to collect and report data without\\\n special compilation options\"\n topics = (\"coverage\", \"linux\", \"debug\")\n settings = \"os\", \"compiler\", \"build_type\", \"arch\"\n exports_sources = \"CMakeLists.txt\", \"patches/**\"\n requires = [\"zlib/1.2.11\",\n \"libiberty/9.1.0\",\n \"libcurl/7.64.1\",\n \"elfutils/0.180\"]\n generators = \"cmake\"\n _cmake = None\n _source_subfolder = \"source_subfolder\"\n _build_subfolder = \"build_subfolder\"\n\n def configure(self):\n if self.settings.os == \"Windows\":\n raise ConanInvalidConfiguration(\n \"kcov can not be built on windows.\")\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n extracted_dir = self.name + \"-\" + self.version\n os.rename(extracted_dir, self._source_subfolder)\n\n def _patch_sources(self):\n for patch in self.conan_data[\"patches\"][self.version]:\n tools.patch(**patch)\n\n def _configure_cmake(self):\n if self._cmake is not None:\n return self._cmake\n self._cmake = CMake(self)\n self._cmake.configure(build_folder=self._build_subfolder)\n return self._cmake\n\n def build(self):\n self._patch_sources()\n cmake = self._configure_cmake()\n cmake.build()\n\n def package(self):\n cmake = self._configure_cmake()\n cmake.install()\n tools.rmdir(os.path.join(self.package_folder, \"share\"))\n self.copy(\"COPYING*\", dst=\"licenses\", src=self._source_subfolder)\n\n def package_info(self):\n bindir = os.path.join(self.package_folder, \"bin\")\n self.output.info(\"Appending PATH environment variable: {}\"\n .format(bindir))\n self.env_info.PATH.append(bindir)\n", "path": "recipes/kcov/all/conanfile.py"}]}
1,279
326
gh_patches_debug_8416
rasdani/github-patches
git_diff
optuna__optuna-449
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> ValueError when calling suggest_categorical with int and str **Conditions** - Optuna version: 0.13.0 - Python version: 3.7.3 - OS: Windows 10 Education - Machine Learning library to be optimized: none **Code to reproduce** ``` def objective(trial: optuna.Trial): x = trial.suggest_categorical("x", [1, "0"]) print(x) optuna.create_study( study_name="test_" + now_string(), storage="sqlite:///tmp/example.db").optimize(objective, n_trials=10) ``` **Error messages, stack traces, or logs** ``` Traceback (most recent call last): File "C:\Users\imri\github\scoring-model\venv\lib\site-packages\optuna\study.py", line 468, in _run_trial result = func(trial) File "~\github\scoring-model\tests\TestOptuna.py", line 12, in objective x = trial.suggest_categorical("x", [1, "0"]) File "~\github\scoring-model\venv\lib\site-packages\optuna\trial.py", line 337, in suggest_categorical return self._suggest(name, distributions.CategoricalDistribution(choices=choices)) File "~\github\scoring-model\venv\lib\site-packages\optuna\trial.py", line 457, in _suggest return self._set_new_param_or_get_existing(name, param_value, distribution) File "~\github\scoring-model\venv\lib\site-packages\optuna\trial.py", line 462, in _set_new_param_or_get_existing param_value_in_internal_repr = distribution.to_internal_repr(param_value) File "~\github\scoring-model\venv\lib\site-packages\optuna\distributions.py", line 236, in to_internal_repr return self.choices.index(param_value_in_external_repr) ValueError: tuple.index(x): x not in tuple ``` </issue> <code> [start of optuna/samplers/random.py] 1 import numpy 2 3 from optuna import distributions 4 from optuna.samplers.base import BaseSampler 5 from optuna import types 6 7 if types.TYPE_CHECKING: 8 from typing import Any # NOQA 9 from typing import Dict # NOQA 10 from typing import Optional # NOQA 11 12 from optuna.distributions import BaseDistribution # NOQA 13 from optuna.structs import FrozenTrial # NOQA 14 from optuna.study import InTrialStudy # NOQA 15 16 17 class RandomSampler(BaseSampler): 18 """Sampler using random sampling. 19 20 Example: 21 22 .. code:: 23 24 >>> study = optuna.create_study(sampler=RandomSampler()) 25 >>> study.optimize(objective, direction='minimize') 26 27 Args: 28 seed: Seed for random number generator. 
29 """ 30 31 def __init__(self, seed=None): 32 # type: (Optional[int]) -> None 33 34 self.seed = seed 35 self.rng = numpy.random.RandomState(seed) 36 37 def infer_relative_search_space(self, study, trial): 38 # type: (InTrialStudy, FrozenTrial) -> Dict[str, BaseDistribution] 39 40 return {} 41 42 def sample_relative(self, study, trial, search_space): 43 # type: (InTrialStudy, FrozenTrial, Dict[str, BaseDistribution]) -> Dict[str, Any] 44 45 return {} 46 47 def sample_independent(self, study, trial, param_name, param_distribution): 48 # type: (InTrialStudy, FrozenTrial, str, distributions.BaseDistribution) -> Any 49 """Please consult the documentation for :func:`BaseSampler.sample_independent`.""" 50 51 if isinstance(param_distribution, distributions.UniformDistribution): 52 return self.rng.uniform(param_distribution.low, param_distribution.high) 53 elif isinstance(param_distribution, distributions.LogUniformDistribution): 54 log_low = numpy.log(param_distribution.low) 55 log_high = numpy.log(param_distribution.high) 56 return float(numpy.exp(self.rng.uniform(log_low, log_high))) 57 elif isinstance(param_distribution, distributions.DiscreteUniformDistribution): 58 q = param_distribution.q 59 r = param_distribution.high - param_distribution.low 60 # [low, high] is shifted to [0, r] to align sampled values at regular intervals. 61 low = 0 - 0.5 * q 62 high = r + 0.5 * q 63 s = self.rng.uniform(low, high) 64 v = numpy.round(s / q) * q + param_distribution.low 65 # v may slightly exceed range due to round-off errors. 66 return float(min(max(v, param_distribution.low), param_distribution.high)) 67 elif isinstance(param_distribution, distributions.IntUniformDistribution): 68 # numpy.random.randint includes low but excludes high. 69 return self.rng.randint(param_distribution.low, param_distribution.high + 1) 70 elif isinstance(param_distribution, distributions.CategoricalDistribution): 71 choices = param_distribution.choices 72 return self.rng.choice(choices) 73 else: 74 raise NotImplementedError 75 [end of optuna/samplers/random.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/optuna/samplers/random.py b/optuna/samplers/random.py --- a/optuna/samplers/random.py +++ b/optuna/samplers/random.py @@ -69,6 +69,7 @@ return self.rng.randint(param_distribution.low, param_distribution.high + 1) elif isinstance(param_distribution, distributions.CategoricalDistribution): choices = param_distribution.choices - return self.rng.choice(choices) + index = self.rng.randint(0, len(choices)) + return choices[index] else: raise NotImplementedError
{"golden_diff": "diff --git a/optuna/samplers/random.py b/optuna/samplers/random.py\n--- a/optuna/samplers/random.py\n+++ b/optuna/samplers/random.py\n@@ -69,6 +69,7 @@\n return self.rng.randint(param_distribution.low, param_distribution.high + 1)\n elif isinstance(param_distribution, distributions.CategoricalDistribution):\n choices = param_distribution.choices\n- return self.rng.choice(choices)\n+ index = self.rng.randint(0, len(choices))\n+ return choices[index]\n else:\n raise NotImplementedError\n", "issue": "ValueError when calling suggest_categorical with int and str\n**Conditions**\r\n- Optuna version: 0.13.0\r\n- Python version: 3.7.3\r\n- OS: Windows 10 Education\r\n- Machine Learning library to be optimized: none\r\n\r\n**Code to reproduce**\r\n```\r\ndef objective(trial: optuna.Trial):\r\n x = trial.suggest_categorical(\"x\", [1, \"0\"])\r\n print(x)\r\noptuna.create_study( study_name=\"test_\" + now_string(), storage=\"sqlite:///tmp/example.db\").optimize(objective, n_trials=10)\r\n```\r\n\r\n**Error messages, stack traces, or logs**\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"C:\\Users\\imri\\github\\scoring-model\\venv\\lib\\site-packages\\optuna\\study.py\", line 468, in _run_trial\r\n result = func(trial)\r\n File \"~\\github\\scoring-model\\tests\\TestOptuna.py\", line 12, in objective\r\n x = trial.suggest_categorical(\"x\", [1, \"0\"])\r\n File \"~\\github\\scoring-model\\venv\\lib\\site-packages\\optuna\\trial.py\", line 337, in suggest_categorical\r\n return self._suggest(name, distributions.CategoricalDistribution(choices=choices))\r\n File \"~\\github\\scoring-model\\venv\\lib\\site-packages\\optuna\\trial.py\", line 457, in _suggest\r\n return self._set_new_param_or_get_existing(name, param_value, distribution)\r\n File \"~\\github\\scoring-model\\venv\\lib\\site-packages\\optuna\\trial.py\", line 462, in _set_new_param_or_get_existing\r\n param_value_in_internal_repr = distribution.to_internal_repr(param_value)\r\n File \"~\\github\\scoring-model\\venv\\lib\\site-packages\\optuna\\distributions.py\", line 236, in to_internal_repr\r\n return self.choices.index(param_value_in_external_repr)\r\nValueError: tuple.index(x): x not in tuple\r\n```\r\n\r\n\n", "before_files": [{"content": "import numpy\n\nfrom optuna import distributions\nfrom optuna.samplers.base import BaseSampler\nfrom optuna import types\n\nif types.TYPE_CHECKING:\n from typing import Any # NOQA\n from typing import Dict # NOQA\n from typing import Optional # NOQA\n\n from optuna.distributions import BaseDistribution # NOQA\n from optuna.structs import FrozenTrial # NOQA\n from optuna.study import InTrialStudy # NOQA\n\n\nclass RandomSampler(BaseSampler):\n \"\"\"Sampler using random sampling.\n\n Example:\n\n .. 
code::\n\n >>> study = optuna.create_study(sampler=RandomSampler())\n >>> study.optimize(objective, direction='minimize')\n\n Args:\n seed: Seed for random number generator.\n \"\"\"\n\n def __init__(self, seed=None):\n # type: (Optional[int]) -> None\n\n self.seed = seed\n self.rng = numpy.random.RandomState(seed)\n\n def infer_relative_search_space(self, study, trial):\n # type: (InTrialStudy, FrozenTrial) -> Dict[str, BaseDistribution]\n\n return {}\n\n def sample_relative(self, study, trial, search_space):\n # type: (InTrialStudy, FrozenTrial, Dict[str, BaseDistribution]) -> Dict[str, Any]\n\n return {}\n\n def sample_independent(self, study, trial, param_name, param_distribution):\n # type: (InTrialStudy, FrozenTrial, str, distributions.BaseDistribution) -> Any\n \"\"\"Please consult the documentation for :func:`BaseSampler.sample_independent`.\"\"\"\n\n if isinstance(param_distribution, distributions.UniformDistribution):\n return self.rng.uniform(param_distribution.low, param_distribution.high)\n elif isinstance(param_distribution, distributions.LogUniformDistribution):\n log_low = numpy.log(param_distribution.low)\n log_high = numpy.log(param_distribution.high)\n return float(numpy.exp(self.rng.uniform(log_low, log_high)))\n elif isinstance(param_distribution, distributions.DiscreteUniformDistribution):\n q = param_distribution.q\n r = param_distribution.high - param_distribution.low\n # [low, high] is shifted to [0, r] to align sampled values at regular intervals.\n low = 0 - 0.5 * q\n high = r + 0.5 * q\n s = self.rng.uniform(low, high)\n v = numpy.round(s / q) * q + param_distribution.low\n # v may slightly exceed range due to round-off errors.\n return float(min(max(v, param_distribution.low), param_distribution.high))\n elif isinstance(param_distribution, distributions.IntUniformDistribution):\n # numpy.random.randint includes low but excludes high.\n return self.rng.randint(param_distribution.low, param_distribution.high + 1)\n elif isinstance(param_distribution, distributions.CategoricalDistribution):\n choices = param_distribution.choices\n return self.rng.choice(choices)\n else:\n raise NotImplementedError\n", "path": "optuna/samplers/random.py"}]}
1,756
124
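A short reproduction of the root cause fixed above, assuming numpy is installed: `RandomState.choice` coerces a mixed-type tuple to a single numpy dtype, so the sampled value is a `numpy.str_` that may no longer be a member of the original `choices` tuple, and `tuple.index` then raises. Drawing an index with `randint` and subscripting preserves the original objects, which is exactly what the golden diff does:

```python
import numpy

rng = numpy.random.RandomState(0)
choices = (1, "0")

coerced = rng.choice(choices)                      # numpy coerces (1, "0") -> array(['1', '0'])
preserved = choices[rng.randint(0, len(choices))]  # original object, type intact

print(type(coerced), repr(coerced))      # <class 'numpy.str_'> — the int 1 became the string '1'
print(type(preserved), repr(preserved))  # <class 'int'> or <class 'str'>, as originally given
```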
gh_patches_debug_24056
rasdani/github-patches
git_diff
pypi__warehouse-2574
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Improve sorting on simple page I'd like to submit a patch for this but I have a few questions :) First I'll describe what I'd like to do... ## sort by version number See https://pypi.org/simple/pre-commit/ You'll notice that `0.10.0` erroneously sorts *before* `0.2.0` (I'd like to fix this) ## investigation I've found the code which does this sorting [here](https://github.com/pypa/warehouse/blob/3bdfe5a89cc9a922ee97304c98384c24822a09ee/warehouse/legacy/api/simple.py#L76-L89) This seems to just sort by filename, but by inspecting and viewing [this page](https://pypi.org/simple/pre-commit-mirror-maker/) I notice it seems to ignore `_` vs. `-` (which is good, that's what I want to continue to happen but I'm just not seeing it from the code!) ## other questions The `File` objects which come back from the database contain a `.version` attribute that I'd like to use to participate in sorting, my main question is: **Can I depend on this version to be a valid [PEP440](https://www.python.org/dev/peps/pep-0440/) version and use something like `pkg_resources.parse_version`?** I'd basically like to replicate something close to the sorting which @chriskuehl's [dumb-pypi](https://github.com/chriskuehl/dumb-pypi) does [here](https://github.com/chriskuehl/dumb-pypi/blob/fd0f93fc2e82cbd9bae41b3c60c5f006b2319c60/dumb_pypi/main.py#L77-L91). Thanks in advance :) --- **Good First Issue**: This issue is good for first time contributors. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, please feel free to ask them here, [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev). </issue> <code> [start of warehouse/legacy/api/simple.py] 1 # Licensed under the Apache License, Version 2.0 (the "License"); 2 # you may not use this file except in compliance with the License. 3 # You may obtain a copy of the License at 4 # 5 # http://www.apache.org/licenses/LICENSE-2.0 6 # 7 # Unless required by applicable law or agreed to in writing, software 8 # distributed under the License is distributed on an "AS IS" BASIS, 9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 10 # See the License for the specific language governing permissions and 11 # limitations under the License. 
12 13 from pyramid.httpexceptions import HTTPMovedPermanently 14 from pyramid.view import view_config 15 from sqlalchemy import func 16 from sqlalchemy.orm import joinedload 17 18 from warehouse.cache.http import cache_control 19 from warehouse.cache.origin import origin_cache 20 from warehouse.packaging.models import JournalEntry, File, Project, Release 21 22 23 @view_config( 24 route_name="legacy.api.simple.index", 25 renderer="legacy/api/simple/index.html", 26 decorator=[ 27 cache_control(10 * 60), # 10 minutes 28 origin_cache( 29 1 * 24 * 60 * 60, # 1 day 30 stale_while_revalidate=5 * 60, # 5 minutes 31 stale_if_error=1 * 24 * 60 * 60, # 1 day 32 ), 33 ], 34 ) 35 def simple_index(request): 36 # Get the latest serial number 37 serial = request.db.query(func.max(JournalEntry.id)).scalar() or 0 38 request.response.headers["X-PyPI-Last-Serial"] = str(serial) 39 40 # Fetch the name and normalized name for all of our projects 41 projects = ( 42 request.db.query(Project.name, Project.normalized_name) 43 .order_by(Project.normalized_name) 44 .all() 45 ) 46 47 return {"projects": projects} 48 49 50 @view_config( 51 route_name="legacy.api.simple.detail", 52 renderer="legacy/api/simple/detail.html", 53 decorator=[ 54 cache_control(10 * 60), # 10 minutes 55 origin_cache( 56 1 * 24 * 60 * 60, # 1 day 57 stale_while_revalidate=5 * 60, # 5 minutes 58 stale_if_error=1 * 24 * 60 * 60, # 1 day 59 ), 60 ], 61 ) 62 def simple_detail(project, request): 63 # TODO: Handle files which are not hosted on PyPI 64 65 # Make sure that we're using the normalized version of the URL. 66 if (project.normalized_name != 67 request.matchdict.get("name", project.normalized_name)): 68 return HTTPMovedPermanently( 69 request.current_route_path(name=project.normalized_name), 70 ) 71 72 # Get the latest serial number for this project. 73 request.response.headers["X-PyPI-Last-Serial"] = str(project.last_serial) 74 75 # Get all of the files for this project. 76 files = ( 77 request.db.query(File) 78 .options(joinedload(File.release)) 79 .filter( 80 File.name == project.name, 81 File.version.in_( 82 request.db.query(Release) 83 .filter(Release.project == project) 84 .with_entities(Release.version) 85 ) 86 ) 87 .order_by(File.filename) 88 .all() 89 ) 90 91 return {"project": project, "files": files} 92 [end of warehouse/legacy/api/simple.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/warehouse/legacy/api/simple.py b/warehouse/legacy/api/simple.py --- a/warehouse/legacy/api/simple.py +++ b/warehouse/legacy/api/simple.py @@ -10,6 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. + +from packaging.version import parse from pyramid.httpexceptions import HTTPMovedPermanently from pyramid.view import view_config from sqlalchemy import func @@ -73,7 +75,7 @@ request.response.headers["X-PyPI-Last-Serial"] = str(project.last_serial) # Get all of the files for this project. - files = ( + files = sorted( request.db.query(File) .options(joinedload(File.release)) .filter( @@ -84,8 +86,8 @@ .with_entities(Release.version) ) ) - .order_by(File.filename) - .all() + .all(), + key=lambda f: (parse(f.version), f.packagetype) ) return {"project": project, "files": files}
{"golden_diff": "diff --git a/warehouse/legacy/api/simple.py b/warehouse/legacy/api/simple.py\n--- a/warehouse/legacy/api/simple.py\n+++ b/warehouse/legacy/api/simple.py\n@@ -10,6 +10,8 @@\n # See the License for the specific language governing permissions and\n # limitations under the License.\n \n+\n+from packaging.version import parse\n from pyramid.httpexceptions import HTTPMovedPermanently\n from pyramid.view import view_config\n from sqlalchemy import func\n@@ -73,7 +75,7 @@\n request.response.headers[\"X-PyPI-Last-Serial\"] = str(project.last_serial)\n \n # Get all of the files for this project.\n- files = (\n+ files = sorted(\n request.db.query(File)\n .options(joinedload(File.release))\n .filter(\n@@ -84,8 +86,8 @@\n .with_entities(Release.version)\n )\n )\n- .order_by(File.filename)\n- .all()\n+ .all(),\n+ key=lambda f: (parse(f.version), f.packagetype)\n )\n \n return {\"project\": project, \"files\": files}\n", "issue": "Improve sorting on simple page\nI'd like to submit a patch for this but I have a few questions :)\r\n\r\nFirst I'll describe what I'd like to do...\r\n\r\n## sort by version number\r\n\r\nSee https://pypi.org/simple/pre-commit/\r\n\r\nYou'll notice that `0.10.0` erroneously sorts *before* `0.2.0` (I'd like to fix this)\r\n\r\n## investigation\r\n\r\nI've found the code which does this sorting [here](https://github.com/pypa/warehouse/blob/3bdfe5a89cc9a922ee97304c98384c24822a09ee/warehouse/legacy/api/simple.py#L76-L89)\r\n\r\nThis seems to just sort by filename, but by inspecting and viewing [this page](https://pypi.org/simple/pre-commit-mirror-maker/) I notice it seems to ignore `_` vs. `-` (which is good, that's what I want to continue to happen but I'm just not seeing it from the code!)\r\n\r\n## other questions\r\n\r\nThe `File` objects which come back from the database contain a `.version` attribute that I'd like to use to participate in sorting, my main question is: **Can I depend on this version to be a valid [PEP440](https://www.python.org/dev/peps/pep-0440/) version and use something like `pkg_resources.parse_version`?**\r\n\r\nI'd basically like to replicate something close to the sorting which @chriskuehl's [dumb-pypi](https://github.com/chriskuehl/dumb-pypi) does [here](https://github.com/chriskuehl/dumb-pypi/blob/fd0f93fc2e82cbd9bae41b3c60c5f006b2319c60/dumb_pypi/main.py#L77-L91).\r\n\r\nThanks in advance :)\r\n\r\n---\r\n\r\n**Good First Issue**: This issue is good for first time contributors. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). 
If you are working on this issue and have questions, please feel free to ask them here, [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev).\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom pyramid.httpexceptions import HTTPMovedPermanently\nfrom pyramid.view import view_config\nfrom sqlalchemy import func\nfrom sqlalchemy.orm import joinedload\n\nfrom warehouse.cache.http import cache_control\nfrom warehouse.cache.origin import origin_cache\nfrom warehouse.packaging.models import JournalEntry, File, Project, Release\n\n\n@view_config(\n route_name=\"legacy.api.simple.index\",\n renderer=\"legacy/api/simple/index.html\",\n decorator=[\n cache_control(10 * 60), # 10 minutes\n origin_cache(\n 1 * 24 * 60 * 60, # 1 day\n stale_while_revalidate=5 * 60, # 5 minutes\n stale_if_error=1 * 24 * 60 * 60, # 1 day\n ),\n ],\n)\ndef simple_index(request):\n # Get the latest serial number\n serial = request.db.query(func.max(JournalEntry.id)).scalar() or 0\n request.response.headers[\"X-PyPI-Last-Serial\"] = str(serial)\n\n # Fetch the name and normalized name for all of our projects\n projects = (\n request.db.query(Project.name, Project.normalized_name)\n .order_by(Project.normalized_name)\n .all()\n )\n\n return {\"projects\": projects}\n\n\n@view_config(\n route_name=\"legacy.api.simple.detail\",\n renderer=\"legacy/api/simple/detail.html\",\n decorator=[\n cache_control(10 * 60), # 10 minutes\n origin_cache(\n 1 * 24 * 60 * 60, # 1 day\n stale_while_revalidate=5 * 60, # 5 minutes\n stale_if_error=1 * 24 * 60 * 60, # 1 day\n ),\n ],\n)\ndef simple_detail(project, request):\n # TODO: Handle files which are not hosted on PyPI\n\n # Make sure that we're using the normalized version of the URL.\n if (project.normalized_name !=\n request.matchdict.get(\"name\", project.normalized_name)):\n return HTTPMovedPermanently(\n request.current_route_path(name=project.normalized_name),\n )\n\n # Get the latest serial number for this project.\n request.response.headers[\"X-PyPI-Last-Serial\"] = str(project.last_serial)\n\n # Get all of the files for this project.\n files = (\n request.db.query(File)\n .options(joinedload(File.release))\n .filter(\n File.name == project.name,\n File.version.in_(\n request.db.query(Release)\n .filter(Release.project == project)\n .with_entities(Release.version)\n )\n )\n .order_by(File.filename)\n .all()\n )\n\n return {\"project\": project, \"files\": files}\n", "path": "warehouse/legacy/api/simple.py"}]}
2,009
250
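The sort key adopted above, isolated into a two-line demonstration (assuming the `packaging` library is installed): PEP 440 parsing makes `0.10.0` sort after `0.2.0` instead of lexicographically before it.

```python
from packaging.version import parse

versions = ["0.10.0", "0.2.0", "0.9.1"]

print(sorted(versions))             # ['0.10.0', '0.2.0', '0.9.1'] — string sort, wrong
print(sorted(versions, key=parse))  # ['0.2.0', '0.9.1', '0.10.0'] — PEP 440 order
```

The golden diff additionally ties on `packagetype` via `key=lambda f: (parse(f.version), f.packagetype)` so files of one release stay grouped deterministically.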
gh_patches_debug_9708
rasdani/github-patches
git_diff
praw-dev__praw-1810
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Failed to upload a video. **Describe the bug** Failed to upload a video. **To Reproduce** Steps to reproduce the behavior: submit any video **Code/Logs** ``` >>> s = sbrdt.submit_video ('video', 'myvideo.mp4') Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/home/gaspar/.local/lib/python3.9/site-packages/praw/models/reddit/subreddit.py", line 1383, in submit_video video_poster_url=self._upload_media(thumbnail_path)[0], File "/home/gaspar/.local/lib/python3.9/site-packages/praw/models/reddit/subreddit.py", line 695, in _upload_media with open(media_path, "rb") as media: FileNotFoundError: [Errno 2] No such file or directory: '/home/gaspar/.local/lib/python3.9/site-packages/praw/images/PRAW logo.png' ``` **System Info** - OS: Arch Linux - Python: 3.9.5 - PRAW Version: 7.4.0 </issue> <code> [start of setup.py] 1 """praw setup.py""" 2 3 import re 4 from codecs import open 5 from os import path 6 7 from setuptools import find_packages, setup 8 9 PACKAGE_NAME = "praw" 10 HERE = path.abspath(path.dirname(__file__)) 11 with open(path.join(HERE, "README.rst"), encoding="utf-8") as fp: 12 README = fp.read() 13 with open(path.join(HERE, PACKAGE_NAME, "const.py"), encoding="utf-8") as fp: 14 VERSION = re.search('__version__ = "([^"]+)"', fp.read()).group(1) 15 16 extras = { 17 "ci": ["coveralls"], 18 "dev": ["packaging"], 19 "lint": [ 20 "pre-commit", 21 "sphinx", 22 "sphinx_rtd_theme", 23 ], 24 "readthedocs": ["sphinx", "sphinx_rtd_theme"], 25 "test": [ 26 "betamax >=0.8, <0.9", 27 "betamax-matchers >=0.3.0, <0.5", 28 "pytest >=2.7.3", 29 ], 30 } 31 extras["dev"] += extras["lint"] + extras["test"] 32 33 setup( 34 name=PACKAGE_NAME, 35 author="Bryce Boe", 36 author_email="[email protected]", 37 python_requires="~=3.6", 38 classifiers=[ 39 "Development Status :: 5 - Production/Stable", 40 "Environment :: Console", 41 "Intended Audience :: Developers", 42 "License :: OSI Approved :: BSD License", 43 "Natural Language :: English", 44 "Operating System :: OS Independent", 45 "Programming Language :: Python", 46 "Programming Language :: Python :: 3", 47 "Programming Language :: Python :: 3.6", 48 "Programming Language :: Python :: 3.7", 49 "Programming Language :: Python :: 3.8", 50 "Programming Language :: Python :: 3.9", 51 "Programming Language :: Python :: 3.10", 52 "Topic :: Utilities", 53 ], 54 description=( 55 "PRAW, an acronym for `Python Reddit API Wrapper`, is a python package that" 56 " allows for simple access to reddit's API." 57 ), 58 extras_require=extras, 59 install_requires=[ 60 "prawcore >=2.1, <3", 61 "update_checker >=0.18", 62 "websocket-client >=0.54.0", 63 ], 64 keywords="reddit api wrapper", 65 license="Simplified BSD License", 66 long_description=README, 67 package_data={"": ["LICENSE.txt"], PACKAGE_NAME: ["*.ini", "images/*.jpg"]}, 68 packages=find_packages(exclude=["tests", "tests.*", "tools", "tools.*"]), 69 project_urls={ 70 "Change Log": "https://praw.readthedocs.io/en/latest/package_info/change_log.html", 71 "Documentation": "https://praw.readthedocs.io/", 72 "Issue Tracker": "https://github.com/praw-dev/praw/issues", 73 "Source Code": "https://github.com/praw-dev/praw", 74 }, 75 version=VERSION, 76 ) 77 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -64,7 +64,7 @@ keywords="reddit api wrapper", license="Simplified BSD License", long_description=README, - package_data={"": ["LICENSE.txt"], PACKAGE_NAME: ["*.ini", "images/*.jpg"]}, + package_data={"": ["LICENSE.txt"], PACKAGE_NAME: ["*.ini", "images/*.png"]}, packages=find_packages(exclude=["tests", "tests.*", "tools", "tools.*"]), project_urls={ "Change Log": "https://praw.readthedocs.io/en/latest/package_info/change_log.html",
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -64,7 +64,7 @@\n keywords=\"reddit api wrapper\",\n license=\"Simplified BSD License\",\n long_description=README,\n- package_data={\"\": [\"LICENSE.txt\"], PACKAGE_NAME: [\"*.ini\", \"images/*.jpg\"]},\n+ package_data={\"\": [\"LICENSE.txt\"], PACKAGE_NAME: [\"*.ini\", \"images/*.png\"]},\n packages=find_packages(exclude=[\"tests\", \"tests.*\", \"tools\", \"tools.*\"]),\n project_urls={\n \"Change Log\": \"https://praw.readthedocs.io/en/latest/package_info/change_log.html\",\n", "issue": "Failed to upload a video.\n**Describe the bug**\r\nFailed to upload a video.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\nsubmit any video\r\n\r\n**Code/Logs**\r\n```\r\n>>> s = sbrdt.submit_video ('video', 'myvideo.mp4')\r\nTraceback (most recent call last):\r\n File \"<stdin>\", line 1, in <module>\r\n File \"/home/gaspar/.local/lib/python3.9/site-packages/praw/models/reddit/subreddit.py\", line 1383, in submit_video\r\n video_poster_url=self._upload_media(thumbnail_path)[0],\r\n File \"/home/gaspar/.local/lib/python3.9/site-packages/praw/models/reddit/subreddit.py\", line 695, in _upload_media\r\n with open(media_path, \"rb\") as media:\r\nFileNotFoundError: [Errno 2] No such file or directory: '/home/gaspar/.local/lib/python3.9/site-packages/praw/images/PRAW logo.png'\r\n```\r\n\r\n**System Info**\r\n - OS: Arch Linux\r\n - Python: 3.9.5\r\n - PRAW Version: 7.4.0\r\n\n", "before_files": [{"content": "\"\"\"praw setup.py\"\"\"\n\nimport re\nfrom codecs import open\nfrom os import path\n\nfrom setuptools import find_packages, setup\n\nPACKAGE_NAME = \"praw\"\nHERE = path.abspath(path.dirname(__file__))\nwith open(path.join(HERE, \"README.rst\"), encoding=\"utf-8\") as fp:\n README = fp.read()\nwith open(path.join(HERE, PACKAGE_NAME, \"const.py\"), encoding=\"utf-8\") as fp:\n VERSION = re.search('__version__ = \"([^\"]+)\"', fp.read()).group(1)\n\nextras = {\n \"ci\": [\"coveralls\"],\n \"dev\": [\"packaging\"],\n \"lint\": [\n \"pre-commit\",\n \"sphinx\",\n \"sphinx_rtd_theme\",\n ],\n \"readthedocs\": [\"sphinx\", \"sphinx_rtd_theme\"],\n \"test\": [\n \"betamax >=0.8, <0.9\",\n \"betamax-matchers >=0.3.0, <0.5\",\n \"pytest >=2.7.3\",\n ],\n}\nextras[\"dev\"] += extras[\"lint\"] + extras[\"test\"]\n\nsetup(\n name=PACKAGE_NAME,\n author=\"Bryce Boe\",\n author_email=\"[email protected]\",\n python_requires=\"~=3.6\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Console\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Topic :: Utilities\",\n ],\n description=(\n \"PRAW, an acronym for `Python Reddit API Wrapper`, is a python package that\"\n \" allows for simple access to reddit's API.\"\n ),\n extras_require=extras,\n install_requires=[\n \"prawcore >=2.1, <3\",\n \"update_checker >=0.18\",\n \"websocket-client >=0.54.0\",\n ],\n keywords=\"reddit api wrapper\",\n license=\"Simplified BSD License\",\n long_description=README,\n package_data={\"\": [\"LICENSE.txt\"], PACKAGE_NAME: [\"*.ini\", \"images/*.jpg\"]},\n 
packages=find_packages(exclude=[\"tests\", \"tests.*\", \"tools\", \"tools.*\"]),\n project_urls={\n \"Change Log\": \"https://praw.readthedocs.io/en/latest/package_info/change_log.html\",\n \"Documentation\": \"https://praw.readthedocs.io/\",\n \"Issue Tracker\": \"https://github.com/praw-dev/praw/issues\",\n \"Source Code\": \"https://github.com/praw-dev/praw\",\n },\n version=VERSION,\n)\n", "path": "setup.py"}]}
1,572
146
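The bug above is a one-character packaging mismatch: `package_data` shipped `images/*.jpg` while the bundled asset is a `.png`, so the installed package lacked the file the code opens at runtime. A small, hypothetical helper for catching this class of mismatch before release (the directory path in the comment is illustrative):

```python
from pathlib import Path

def unmatched_assets(directory, patterns):
    """Files in `directory` that no package_data glob pattern covers."""
    present = {p for p in Path(directory).iterdir() if p.is_file()}
    matched = {p for pattern in patterns for p in Path(directory).glob(pattern)}
    return sorted(str(p) for p in present - matched)

# With the pre-fix glob this would report the bundled PNG:
# unmatched_assets("praw/images", ["*.jpg"])  ->  ['praw/images/PRAW logo.png']
```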
gh_patches_debug_35228
rasdani/github-patches
git_diff
mirumee__ariadne-529
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> OpenTracing plugin performs deepcopy of resolver's args, which fails when file upload for larger file is used. OpenTracing performs deep copy of arguments passed to the resolver function when args filtering is used (eg. to hide passwords), but this apparently fails there's larger uploaded file in the args. Potential fix would be default filter that replaces uploaded files with cheap str representation (eg. `<UploadedFile(name="test.jpg", type="image/jpeg", size=44100)>`) before custom filtering logic is ran next. </issue> <code> [start of ariadne/contrib/tracing/opentracing.py] 1 from copy import deepcopy 2 from functools import partial 3 from inspect import isawaitable 4 from typing import Any, Callable, Dict, Optional 5 6 from graphql import GraphQLResolveInfo 7 from opentracing import Scope, Tracer, global_tracer 8 from opentracing.ext import tags 9 10 from ...types import ContextValue, Extension, Resolver 11 from .utils import format_path, should_trace 12 13 ArgFilter = Callable[[Dict[str, Any], GraphQLResolveInfo], Dict[str, Any]] 14 15 16 class OpenTracingExtension(Extension): 17 _arg_filter: Optional[ArgFilter] 18 _root_scope: Scope 19 _tracer: Tracer 20 21 def __init__(self, *, arg_filter: Optional[ArgFilter] = None): 22 self._arg_filter = arg_filter 23 self._tracer = global_tracer() 24 self._root_scope = None 25 26 def request_started(self, context: ContextValue): 27 self._root_scope = self._tracer.start_active_span("GraphQL Query") 28 self._root_scope.span.set_tag(tags.COMPONENT, "graphql") 29 30 def request_finished(self, context: ContextValue): 31 self._root_scope.close() 32 33 async def resolve( 34 self, next_: Resolver, parent: Any, info: GraphQLResolveInfo, **kwargs 35 ): 36 if not should_trace(info): 37 result = next_(parent, info, **kwargs) 38 if isawaitable(result): 39 result = await result 40 return result 41 42 with self._tracer.start_active_span(info.field_name) as scope: 43 span = scope.span 44 span.set_tag(tags.COMPONENT, "graphql") 45 span.set_tag("graphql.parentType", info.parent_type.name) 46 47 graphql_path = ".".join( 48 map(str, format_path(info.path)) # pylint: disable=bad-builtin 49 ) 50 span.set_tag("graphql.path", graphql_path) 51 52 if kwargs: 53 filtered_kwargs = self.filter_resolver_args(kwargs, info) 54 for kwarg, value in filtered_kwargs.items(): 55 span.set_tag(f"graphql.param.{kwarg}", value) 56 57 result = next_(parent, info, **kwargs) 58 if isawaitable(result): 59 result = await result 60 return result 61 62 def filter_resolver_args( 63 self, args: Dict[str, Any], info: GraphQLResolveInfo 64 ) -> Dict[str, Any]: 65 if not self._arg_filter: 66 return args 67 68 return self._arg_filter(deepcopy(args), info) 69 70 71 class OpenTracingExtensionSync(OpenTracingExtension): 72 def resolve( 73 self, next_: Resolver, parent: Any, info: GraphQLResolveInfo, **kwargs 74 ): # pylint: disable=invalid-overridden-method 75 if not should_trace(info): 76 result = next_(parent, info, **kwargs) 77 return result 78 79 with self._tracer.start_active_span(info.field_name) as scope: 80 span = scope.span 81 span.set_tag(tags.COMPONENT, "graphql") 82 span.set_tag("graphql.parentType", info.parent_type.name) 83 84 graphql_path = ".".join( 85 map(str, format_path(info.path)) # pylint: disable=bad-builtin 86 ) 87 span.set_tag("graphql.path", graphql_path) 88 89 if kwargs: 90 filtered_kwargs = self.filter_resolver_args(kwargs, info) 91 for kwarg, value in 
filtered_kwargs.items(): 92 span.set_tag(f"graphql.param.{kwarg}", value) 93 94 result = next_(parent, info, **kwargs) 95 return result 96 97 98 def opentracing_extension(*, arg_filter: Optional[ArgFilter] = None): 99 return partial(OpenTracingExtension, arg_filter=arg_filter) 100 101 102 def opentracing_extension_sync(*, arg_filter: Optional[ArgFilter] = None): 103 return partial(OpenTracingExtensionSync, arg_filter=arg_filter) 104 [end of ariadne/contrib/tracing/opentracing.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ariadne/contrib/tracing/opentracing.py b/ariadne/contrib/tracing/opentracing.py --- a/ariadne/contrib/tracing/opentracing.py +++ b/ariadne/contrib/tracing/opentracing.py @@ -1,11 +1,13 @@ -from copy import deepcopy +import cgi +import os from functools import partial from inspect import isawaitable -from typing import Any, Callable, Dict, Optional +from typing import Any, Callable, Dict, Optional, Union from graphql import GraphQLResolveInfo from opentracing import Scope, Tracer, global_tracer from opentracing.ext import tags +from starlette.datastructures import UploadFile from ...types import ContextValue, Extension, Resolver from .utils import format_path, should_trace @@ -62,10 +64,12 @@ def filter_resolver_args( self, args: Dict[str, Any], info: GraphQLResolveInfo ) -> Dict[str, Any]: + args_to_trace = copy_args_for_tracing(args) + if not self._arg_filter: - return args + return args_to_trace - return self._arg_filter(deepcopy(args), info) + return self._arg_filter(args_to_trace, info) class OpenTracingExtensionSync(OpenTracingExtension): @@ -101,3 +105,34 @@ def opentracing_extension_sync(*, arg_filter: Optional[ArgFilter] = None): return partial(OpenTracingExtensionSync, arg_filter=arg_filter) + + +def copy_args_for_tracing(value: Any) -> Any: + if isinstance(value, dict): + return {k: copy_args_for_tracing(v) for k, v in value.items()} + if isinstance(value, list): + return [copy_args_for_tracing(v) for v in value] + if isinstance(value, (UploadFile, cgi.FieldStorage)): + return repr_upload_file(value) + return value + + +def repr_upload_file(upload_file: Union[UploadFile, cgi.FieldStorage]) -> str: + filename = upload_file.filename + + if isinstance(upload_file, cgi.FieldStorage): + mime_type = upload_file.type + else: + mime_type = upload_file.content_type + + if upload_file.file is None and isinstance(upload_file, cgi.FieldStorage): + size = len(upload_file.value) if upload_file.value is not None else 0 + else: + file_ = upload_file.file + file_.seek(0, os.SEEK_END) + size = file_.tell() + file_.seek(0) + + return ( + f"{type(upload_file)}(mime_type={mime_type}, size={size}, filename={filename})" + )
{"golden_diff": "diff --git a/ariadne/contrib/tracing/opentracing.py b/ariadne/contrib/tracing/opentracing.py\n--- a/ariadne/contrib/tracing/opentracing.py\n+++ b/ariadne/contrib/tracing/opentracing.py\n@@ -1,11 +1,13 @@\n-from copy import deepcopy\n+import cgi\n+import os\n from functools import partial\n from inspect import isawaitable\n-from typing import Any, Callable, Dict, Optional\n+from typing import Any, Callable, Dict, Optional, Union\n \n from graphql import GraphQLResolveInfo\n from opentracing import Scope, Tracer, global_tracer\n from opentracing.ext import tags\n+from starlette.datastructures import UploadFile\n \n from ...types import ContextValue, Extension, Resolver\n from .utils import format_path, should_trace\n@@ -62,10 +64,12 @@\n def filter_resolver_args(\n self, args: Dict[str, Any], info: GraphQLResolveInfo\n ) -> Dict[str, Any]:\n+ args_to_trace = copy_args_for_tracing(args)\n+\n if not self._arg_filter:\n- return args\n+ return args_to_trace\n \n- return self._arg_filter(deepcopy(args), info)\n+ return self._arg_filter(args_to_trace, info)\n \n \n class OpenTracingExtensionSync(OpenTracingExtension):\n@@ -101,3 +105,34 @@\n \n def opentracing_extension_sync(*, arg_filter: Optional[ArgFilter] = None):\n return partial(OpenTracingExtensionSync, arg_filter=arg_filter)\n+\n+\n+def copy_args_for_tracing(value: Any) -> Any:\n+ if isinstance(value, dict):\n+ return {k: copy_args_for_tracing(v) for k, v in value.items()}\n+ if isinstance(value, list):\n+ return [copy_args_for_tracing(v) for v in value]\n+ if isinstance(value, (UploadFile, cgi.FieldStorage)):\n+ return repr_upload_file(value)\n+ return value\n+\n+\n+def repr_upload_file(upload_file: Union[UploadFile, cgi.FieldStorage]) -> str:\n+ filename = upload_file.filename\n+\n+ if isinstance(upload_file, cgi.FieldStorage):\n+ mime_type = upload_file.type\n+ else:\n+ mime_type = upload_file.content_type\n+\n+ if upload_file.file is None and isinstance(upload_file, cgi.FieldStorage):\n+ size = len(upload_file.value) if upload_file.value is not None else 0\n+ else:\n+ file_ = upload_file.file\n+ file_.seek(0, os.SEEK_END)\n+ size = file_.tell()\n+ file_.seek(0)\n+\n+ return (\n+ f\"{type(upload_file)}(mime_type={mime_type}, size={size}, filename={filename})\"\n+ )\n", "issue": "OpenTracing plugin performs deepcopy of resolver's args, which fails when file upload for larger file is used.\nOpenTracing performs deep copy of arguments passed to the resolver function when args filtering is used (eg. to hide passwords), but this apparently fails there's larger uploaded file in the args.\r\n\r\nPotential fix would be default filter that replaces uploaded files with cheap str representation (eg. 
`<UploadedFile(name=\"test.jpg\", type=\"image/jpeg\", size=44100)>`) before custom filtering logic is ran next.\n", "before_files": [{"content": "from copy import deepcopy\nfrom functools import partial\nfrom inspect import isawaitable\nfrom typing import Any, Callable, Dict, Optional\n\nfrom graphql import GraphQLResolveInfo\nfrom opentracing import Scope, Tracer, global_tracer\nfrom opentracing.ext import tags\n\nfrom ...types import ContextValue, Extension, Resolver\nfrom .utils import format_path, should_trace\n\nArgFilter = Callable[[Dict[str, Any], GraphQLResolveInfo], Dict[str, Any]]\n\n\nclass OpenTracingExtension(Extension):\n _arg_filter: Optional[ArgFilter]\n _root_scope: Scope\n _tracer: Tracer\n\n def __init__(self, *, arg_filter: Optional[ArgFilter] = None):\n self._arg_filter = arg_filter\n self._tracer = global_tracer()\n self._root_scope = None\n\n def request_started(self, context: ContextValue):\n self._root_scope = self._tracer.start_active_span(\"GraphQL Query\")\n self._root_scope.span.set_tag(tags.COMPONENT, \"graphql\")\n\n def request_finished(self, context: ContextValue):\n self._root_scope.close()\n\n async def resolve(\n self, next_: Resolver, parent: Any, info: GraphQLResolveInfo, **kwargs\n ):\n if not should_trace(info):\n result = next_(parent, info, **kwargs)\n if isawaitable(result):\n result = await result\n return result\n\n with self._tracer.start_active_span(info.field_name) as scope:\n span = scope.span\n span.set_tag(tags.COMPONENT, \"graphql\")\n span.set_tag(\"graphql.parentType\", info.parent_type.name)\n\n graphql_path = \".\".join(\n map(str, format_path(info.path)) # pylint: disable=bad-builtin\n )\n span.set_tag(\"graphql.path\", graphql_path)\n\n if kwargs:\n filtered_kwargs = self.filter_resolver_args(kwargs, info)\n for kwarg, value in filtered_kwargs.items():\n span.set_tag(f\"graphql.param.{kwarg}\", value)\n\n result = next_(parent, info, **kwargs)\n if isawaitable(result):\n result = await result\n return result\n\n def filter_resolver_args(\n self, args: Dict[str, Any], info: GraphQLResolveInfo\n ) -> Dict[str, Any]:\n if not self._arg_filter:\n return args\n\n return self._arg_filter(deepcopy(args), info)\n\n\nclass OpenTracingExtensionSync(OpenTracingExtension):\n def resolve(\n self, next_: Resolver, parent: Any, info: GraphQLResolveInfo, **kwargs\n ): # pylint: disable=invalid-overridden-method\n if not should_trace(info):\n result = next_(parent, info, **kwargs)\n return result\n\n with self._tracer.start_active_span(info.field_name) as scope:\n span = scope.span\n span.set_tag(tags.COMPONENT, \"graphql\")\n span.set_tag(\"graphql.parentType\", info.parent_type.name)\n\n graphql_path = \".\".join(\n map(str, format_path(info.path)) # pylint: disable=bad-builtin\n )\n span.set_tag(\"graphql.path\", graphql_path)\n\n if kwargs:\n filtered_kwargs = self.filter_resolver_args(kwargs, info)\n for kwarg, value in filtered_kwargs.items():\n span.set_tag(f\"graphql.param.{kwarg}\", value)\n\n result = next_(parent, info, **kwargs)\n return result\n\n\ndef opentracing_extension(*, arg_filter: Optional[ArgFilter] = None):\n return partial(OpenTracingExtension, arg_filter=arg_filter)\n\n\ndef opentracing_extension_sync(*, arg_filter: Optional[ArgFilter] = None):\n return partial(OpenTracingExtensionSync, arg_filter=arg_filter)\n", "path": "ariadne/contrib/tracing/opentracing.py"}]}
1675
624
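A minimal sketch of what the patched filter in the ariadne record above does at runtime. It is illustrative only and assumes the Starlette release contemporary with this patch, in which `UploadFile` still accepts `filename=` and `file=` keyword arguments; `copy_args_for_tracing` and the repr format come straight from the golden diff.

```python
# Sketch: the patched copy_args_for_tracing never deep-copies the payload;
# it swaps file-like args for the cheap repr built by repr_upload_file.
# Assumes an older Starlette UploadFile(filename=..., file=...) constructor.
from io import BytesIO
from starlette.datastructures import UploadFile
from ariadne.contrib.tracing.opentracing import copy_args_for_tracing

args = {"image": UploadFile(filename="test.jpg", file=BytesIO(b"\x00" * 44100))}
traced = copy_args_for_tracing(args)

# The 44 kB body is never copied; the span tag only ever sees a string like
# "<class 'starlette.datastructures.UploadFile'>(mime_type=, size=44100, filename=test.jpg)"
print(traced["image"])
```

Custom `arg_filter` callables now receive this shallow, tracing-safe copy instead of a `deepcopy` of the raw resolver kwargs.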
gh_patches_debug_9307
rasdani/github-patches
git_diff
streamlink__streamlink-4210
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> plugins.tviplayer: unable to handle CNN Portugal ### Checklist - [X] This is a plugin issue and not a different kind of issue - [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink) - [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22) - [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master) ### Streamlink version Latest stable release ### Description - issue: - the new `tviplayer` plugin is unable to handle https://tviplayer.iol.pt/direto/CNN - of note, the previous TVI 24 became CNN Portugal after #4199. - to reproduce: ```sh streamlink https://tviplayer.iol.pt/direto/CNN ``` ```sh [cli][info] Found matching plugin tviplayer for URL https://tviplayer.iol.pt/direto/CNN error: Unable to validate response text: Unable to parse HTML: Unicode strings with encoding declaration are not supported. Please use bytes input or XML fragments without declaration. ('<?xml version=\'1.0\' encoding=\'U ...) ``` ### Debug log ```text streamlink --loglevel debug https://tviplayer.iol.pt/direto/CNN [cli][debug] OS: Linux-5.10.0-9-amd64-x86_64-with-glibc2.31 [cli][debug] Python: 3.9.2 [cli][debug] Streamlink: 3.0.2 [cli][debug] Requests(2.26.0), Socks(1.7.1), Websocket(1.2.1) [cli][debug] Arguments: [cli][debug] url=https://tviplayer.iol.pt/direto/CNN [cli][debug] --loglevel=debug [cli][info] Found matching plugin tviplayer for URL https://tviplayer.iol.pt/direto/CNN error: Unable to validate response text: Unable to parse HTML: Unicode strings with encoding declaration are not supported. Please use bytes input or XML fragments without declaration. ('<?xml version=\'1.0\' encoding=\'U ...) ``` </issue> <code> [start of src/streamlink/utils/parse.py] 1 import json 2 import re 3 from urllib.parse import parse_qsl 4 5 from lxml.etree import HTML, XML 6 7 from streamlink.plugin import PluginError 8 9 10 def _parse(parser, data, name, exception, schema, *args, **kwargs): 11 try: 12 parsed = parser(data, *args, **kwargs) 13 except Exception as err: 14 snippet = repr(data) 15 if len(snippet) > 35: 16 snippet = f"{snippet[:35]} ..." 17 18 raise exception(f"Unable to parse {name}: {err} ({snippet})") 19 20 if schema: 21 parsed = schema.validate(parsed, name=name, exception=exception) 22 23 return parsed 24 25 26 def parse_json( 27 data, 28 name="JSON", 29 exception=PluginError, 30 schema=None, 31 *args, **kwargs 32 ): 33 """Wrapper around json.loads. 34 35 Provides these extra features: 36 - Wraps errors in custom exception with a snippet of the data in the message 37 """ 38 return _parse(json.loads, data, name, exception, schema, *args, **kwargs) 39 40 41 def parse_html( 42 data, 43 name="HTML", 44 exception=PluginError, 45 schema=None, 46 *args, **kwargs 47 ): 48 """Wrapper around lxml.etree.HTML with some extras. 49 50 Provides these extra features: 51 - Wraps errors in custom exception with a snippet of the data in the message 52 """ 53 return _parse(HTML, data, name, exception, schema, *args, **kwargs) 54 55 56 def parse_xml( 57 data, 58 ignore_ns=False, 59 invalid_char_entities=False, 60 name="XML", 61 exception=PluginError, 62 schema=None, 63 *args, **kwargs 64 ): 65 """Wrapper around lxml.etree.XML with some extras. 
66 67 Provides these extra features: 68 - Handles incorrectly encoded XML 69 - Allows stripping namespace information 70 - Wraps errors in custom exception with a snippet of the data in the message 71 """ 72 if isinstance(data, str): 73 data = bytes(data, "utf8") 74 if ignore_ns: 75 data = re.sub(br"\s+xmlns=\"(.+?)\"", b"", data) 76 if invalid_char_entities: 77 data = re.sub(br"&(?!(?:#(?:[0-9]+|[Xx][0-9A-Fa-f]+)|[A-Za-z0-9]+);)", b"&amp;", data) 78 79 return _parse(XML, data, name, exception, schema, *args, **kwargs) 80 81 82 def parse_qsd( 83 data, 84 name="query string", 85 exception=PluginError, 86 schema=None, 87 *args, **kwargs 88 ): 89 """Parses a query string into a dict. 90 91 Provides these extra features: 92 - Unlike parse_qs and parse_qsl, duplicate keys are not preserved in favor of a simpler return value 93 - Wraps errors in custom exception with a snippet of the data in the message 94 """ 95 return _parse(lambda d: dict(parse_qsl(d, *args, **kwargs)), data, name, exception, schema) 96 [end of src/streamlink/utils/parse.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/streamlink/utils/parse.py b/src/streamlink/utils/parse.py --- a/src/streamlink/utils/parse.py +++ b/src/streamlink/utils/parse.py @@ -48,8 +48,12 @@ """Wrapper around lxml.etree.HTML with some extras. Provides these extra features: + - Removes XML declarations of invalid XHTML5 documents - Wraps errors in custom exception with a snippet of the data in the message """ + if isinstance(data, str) and data.lstrip().startswith("<?xml"): + data = re.sub(r"^\s*<\?xml.+?\?>", "", data) + return _parse(HTML, data, name, exception, schema, *args, **kwargs)
{"golden_diff": "diff --git a/src/streamlink/utils/parse.py b/src/streamlink/utils/parse.py\n--- a/src/streamlink/utils/parse.py\n+++ b/src/streamlink/utils/parse.py\n@@ -48,8 +48,12 @@\n \"\"\"Wrapper around lxml.etree.HTML with some extras.\n \n Provides these extra features:\n+ - Removes XML declarations of invalid XHTML5 documents\n - Wraps errors in custom exception with a snippet of the data in the message\n \"\"\"\n+ if isinstance(data, str) and data.lstrip().startswith(\"<?xml\"):\n+ data = re.sub(r\"^\\s*<\\?xml.+?\\?>\", \"\", data)\n+\n return _parse(HTML, data, name, exception, schema, *args, **kwargs)\n", "issue": "plugins.tviplayer: unable to handle CNN Portugal\n### Checklist\n\n- [X] This is a plugin issue and not a different kind of issue\n- [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink)\n- [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22)\n- [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master)\n\n### Streamlink version\n\nLatest stable release\n\n### Description\n\n- issue:\r\n - the new `tviplayer` plugin is unable to handle https://tviplayer.iol.pt/direto/CNN \r\n - of note, the previous TVI 24 became CNN Portugal after #4199.\r\n\r\n- to reproduce:\r\n ```sh\r\n streamlink https://tviplayer.iol.pt/direto/CNN\r\n ```\r\n ```sh\r\n [cli][info] Found matching plugin tviplayer for URL https://tviplayer.iol.pt/direto/CNN\r\n error: Unable to validate response text: Unable to parse HTML: Unicode strings with encoding declaration are not supported. Please use bytes input or XML fragments without declaration. ('<?xml version=\\'1.0\\' encoding=\\'U ...)\r\n ```\r\n\r\n\r\n\n\n### Debug log\n\n```text\nstreamlink --loglevel debug https://tviplayer.iol.pt/direto/CNN\r\n[cli][debug] OS: Linux-5.10.0-9-amd64-x86_64-with-glibc2.31\r\n[cli][debug] Python: 3.9.2\r\n[cli][debug] Streamlink: 3.0.2\r\n[cli][debug] Requests(2.26.0), Socks(1.7.1), Websocket(1.2.1)\r\n[cli][debug] Arguments:\r\n[cli][debug] url=https://tviplayer.iol.pt/direto/CNN\r\n[cli][debug] --loglevel=debug\r\n[cli][info] Found matching plugin tviplayer for URL https://tviplayer.iol.pt/direto/CNN\r\nerror: Unable to validate response text: Unable to parse HTML: Unicode strings with encoding declaration are not supported. Please use bytes input or XML fragments without declaration. 
('<?xml version=\\'1.0\\' encoding=\\'U ...)\n```\n\n", "before_files": [{"content": "import json\nimport re\nfrom urllib.parse import parse_qsl\n\nfrom lxml.etree import HTML, XML\n\nfrom streamlink.plugin import PluginError\n\n\ndef _parse(parser, data, name, exception, schema, *args, **kwargs):\n try:\n parsed = parser(data, *args, **kwargs)\n except Exception as err:\n snippet = repr(data)\n if len(snippet) > 35:\n snippet = f\"{snippet[:35]} ...\"\n\n raise exception(f\"Unable to parse {name}: {err} ({snippet})\")\n\n if schema:\n parsed = schema.validate(parsed, name=name, exception=exception)\n\n return parsed\n\n\ndef parse_json(\n data,\n name=\"JSON\",\n exception=PluginError,\n schema=None,\n *args, **kwargs\n):\n \"\"\"Wrapper around json.loads.\n\n Provides these extra features:\n - Wraps errors in custom exception with a snippet of the data in the message\n \"\"\"\n return _parse(json.loads, data, name, exception, schema, *args, **kwargs)\n\n\ndef parse_html(\n data,\n name=\"HTML\",\n exception=PluginError,\n schema=None,\n *args, **kwargs\n):\n \"\"\"Wrapper around lxml.etree.HTML with some extras.\n\n Provides these extra features:\n - Wraps errors in custom exception with a snippet of the data in the message\n \"\"\"\n return _parse(HTML, data, name, exception, schema, *args, **kwargs)\n\n\ndef parse_xml(\n data,\n ignore_ns=False,\n invalid_char_entities=False,\n name=\"XML\",\n exception=PluginError,\n schema=None,\n *args, **kwargs\n):\n \"\"\"Wrapper around lxml.etree.XML with some extras.\n\n Provides these extra features:\n - Handles incorrectly encoded XML\n - Allows stripping namespace information\n - Wraps errors in custom exception with a snippet of the data in the message\n \"\"\"\n if isinstance(data, str):\n data = bytes(data, \"utf8\")\n if ignore_ns:\n data = re.sub(br\"\\s+xmlns=\\\"(.+?)\\\"\", b\"\", data)\n if invalid_char_entities:\n data = re.sub(br\"&(?!(?:#(?:[0-9]+|[Xx][0-9A-Fa-f]+)|[A-Za-z0-9]+);)\", b\"&amp;\", data)\n\n return _parse(XML, data, name, exception, schema, *args, **kwargs)\n\n\ndef parse_qsd(\n data,\n name=\"query string\",\n exception=PluginError,\n schema=None,\n *args, **kwargs\n):\n \"\"\"Parses a query string into a dict.\n\n Provides these extra features:\n - Unlike parse_qs and parse_qsl, duplicate keys are not preserved in favor of a simpler return value\n - Wraps errors in custom exception with a snippet of the data in the message\n \"\"\"\n return _parse(lambda d: dict(parse_qsl(d, *args, **kwargs)), data, name, exception, schema)\n", "path": "src/streamlink/utils/parse.py"}]}
1940
166
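Why that one-line guard fixes the tviplayer failure: lxml's `HTML()` refuses `str` input that still carries an XML encoding declaration, which is exactly what the TVI player page serves. A self-contained reproduction (the sample markup is invented; the regex is the one from the golden diff):

```python
import re
from lxml.etree import HTML

data = "<?xml version='1.0' encoding='UTF-8'?>\n<html><body>ok</body></html>"

# HTML(data) at this point raises:
#   ValueError: Unicode strings with encoding declaration are not supported.
#   Please use bytes input or XML fragments without declaration.
if data.lstrip().startswith("<?xml"):
    data = re.sub(r"^\s*<\?xml.+?\?>", "", data)

tree = HTML(data)                 # parses fine once the declaration is gone
print(tree.findtext(".//body"))   # -> ok
```

Stripping the declaration, rather than re-encoding the document to bytes as `parse_xml` does, presumably keeps `parse_html`'s behavior identical for every other caller.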
gh_patches_debug_24490
rasdani/github-patches
git_diff
ansible__ansible-lint-3437
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> no-handler: should not react on when-conditions containing "and" or "or" <!--- Verify first that your issue is not already reported on GitHub --> <!--- Also test if the latest release and master branch are affected too --> ##### Summary Right now the rule `Tasks that run when changed should likely be handlers` (which BTW, i am a big fan of) would produce findings for all of this lines: `when: mytask.changed` `when: mytask is changed` ... `when: mytask is changed and wartherIsNice|bool` While i totally agree that the first two examples are bad practices and should produce a linter warning, i would not agree, that the last example should. ##### Proposed solution As mentioned in #419 i could imagine of splitting up E503 into two rules, one of which reacts to single conditions and one for more complex conditions involving `and` or `or` - that way both could be skipped/disabled seperately. As @ssbarnea pointed out, it might also be a solution to disable the check completeley for complex conditons. ##### Issue Type - Bug Report - ansible installation method: OS package - ansible-lint installation method: pip </issue> <code> [start of src/ansiblelint/rules/no_handler.py] 1 # Copyright (c) 2016 Will Thames <[email protected]> 2 # 3 # Permission is hereby granted, free of charge, to any person obtaining a copy 4 # of this software and associated documentation files (the "Software"), to deal 5 # in the Software without restriction, including without limitation the rights 6 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 # copies of the Software, and to permit persons to whom the Software is 8 # furnished to do so, subject to the following conditions: 9 # 10 # The above copyright notice and this permission notice shall be included in 11 # all copies or substantial portions of the Software. 12 # 13 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 # THE SOFTWARE. 20 21 """UseHandlerRatherThanWhenChangedRule used with ansible-lint.""" 22 from __future__ import annotations 23 24 import sys 25 from typing import TYPE_CHECKING 26 27 from ansiblelint.rules import AnsibleLintRule 28 29 if TYPE_CHECKING: 30 from ansiblelint.file_utils import Lintable 31 from ansiblelint.utils import Task 32 33 34 def _changed_in_when(item: str) -> bool: 35 if not isinstance(item, str): 36 return False 37 item_list = item.split() 38 39 if {"and", "not"} & set(item_list): 40 return False 41 return any( 42 changed in item 43 for changed in [ 44 ".changed", 45 "|changed", 46 '["changed"]', 47 "['changed']", 48 "is changed", 49 ] 50 ) 51 52 53 class UseHandlerRatherThanWhenChangedRule(AnsibleLintRule): 54 """Tasks that run when changed should likely be handlers.""" 55 56 id = "no-handler" 57 description = ( 58 "If a task has a ``when: result.changed`` setting, it is effectively " 59 "acting as a handler. You could use ``notify`` and move that task to " 60 "``handlers``." 
61 ) 62 link = "https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html#handlers" 63 severity = "MEDIUM" 64 tags = ["idiom"] 65 version_added = "historic" 66 67 def matchtask( 68 self, 69 task: Task, 70 file: Lintable | None = None, 71 ) -> bool | str: 72 if task["__ansible_action_type__"] != "task": 73 return False 74 75 when = task.get("when") 76 77 if isinstance(when, list): 78 for item in when: 79 if _changed_in_when(item): 80 return True 81 if isinstance(when, str): 82 return _changed_in_when(when) 83 return False 84 85 86 if "pytest" in sys.modules: 87 import pytest 88 89 from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports 90 from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports 91 92 @pytest.mark.parametrize( 93 ("test_file", "failures"), 94 ( 95 pytest.param("examples/playbooks/no_handler_fail.yml", 7, id="fail"), 96 pytest.param("examples/playbooks/no_handler_pass.yml", 0, id="pass"), 97 ), 98 ) 99 def test_no_handler( 100 default_rules_collection: RulesCollection, 101 test_file: str, 102 failures: int, 103 ) -> None: 104 """Test rule matches.""" 105 results = Runner(test_file, rules=default_rules_collection).run() 106 assert len(results) == failures 107 for result in results: 108 assert result.tag == "no-handler" 109 [end of src/ansiblelint/rules/no_handler.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/ansiblelint/rules/no_handler.py b/src/ansiblelint/rules/no_handler.py --- a/src/ansiblelint/rules/no_handler.py +++ b/src/ansiblelint/rules/no_handler.py @@ -36,7 +36,7 @@ return False item_list = item.split() - if {"and", "not"} & set(item_list): + if {"and", "or", "not"} & set(item_list): return False return any( changed in item @@ -75,9 +75,9 @@ when = task.get("when") if isinstance(when, list): - for item in when: - if _changed_in_when(item): - return True + if len(when) > 1: + return False + return _changed_in_when(when[0]) if isinstance(when, str): return _changed_in_when(when) return False @@ -92,7 +92,7 @@ @pytest.mark.parametrize( ("test_file", "failures"), ( - pytest.param("examples/playbooks/no_handler_fail.yml", 7, id="fail"), + pytest.param("examples/playbooks/no_handler_fail.yml", 5, id="fail"), pytest.param("examples/playbooks/no_handler_pass.yml", 0, id="pass"), ), )
{"golden_diff": "diff --git a/src/ansiblelint/rules/no_handler.py b/src/ansiblelint/rules/no_handler.py\n--- a/src/ansiblelint/rules/no_handler.py\n+++ b/src/ansiblelint/rules/no_handler.py\n@@ -36,7 +36,7 @@\n return False\n item_list = item.split()\n \n- if {\"and\", \"not\"} & set(item_list):\n+ if {\"and\", \"or\", \"not\"} & set(item_list):\n return False\n return any(\n changed in item\n@@ -75,9 +75,9 @@\n when = task.get(\"when\")\n \n if isinstance(when, list):\n- for item in when:\n- if _changed_in_when(item):\n- return True\n+ if len(when) > 1:\n+ return False\n+ return _changed_in_when(when[0])\n if isinstance(when, str):\n return _changed_in_when(when)\n return False\n@@ -92,7 +92,7 @@\n @pytest.mark.parametrize(\n (\"test_file\", \"failures\"),\n (\n- pytest.param(\"examples/playbooks/no_handler_fail.yml\", 7, id=\"fail\"),\n+ pytest.param(\"examples/playbooks/no_handler_fail.yml\", 5, id=\"fail\"),\n pytest.param(\"examples/playbooks/no_handler_pass.yml\", 0, id=\"pass\"),\n ),\n )\n", "issue": "no-handler: should not react on when-conditions containing \"and\" or \"or\"\n<!--- Verify first that your issue is not already reported on GitHub -->\r\n<!--- Also test if the latest release and master branch are affected too -->\r\n\r\n##### Summary\r\nRight now the rule `Tasks that run when changed should likely be handlers` (which BTW, i am a big fan of) would produce findings for all of this lines:\r\n\r\n`when: mytask.changed`\r\n`when: mytask is changed`\r\n...\r\n`when: mytask is changed and wartherIsNice|bool`\r\n\r\nWhile i totally agree that the first two examples are bad practices and should produce a linter warning, i would not agree, that the last example should.\r\n\r\n##### Proposed solution\r\n\r\nAs mentioned in #419 i could imagine of splitting up E503 into two rules, one of which reacts to single conditions and one for more complex conditions involving `and` or `or` - that way both could be skipped/disabled seperately.\r\n\r\nAs @ssbarnea pointed out, it might also be a solution to disable the check completeley for complex conditons.\r\n\r\n##### Issue Type\r\n\r\n- Bug Report\r\n\r\n\r\n- ansible installation method: OS package\r\n- ansible-lint installation method: pip\r\n\r\n\r\n\r\n\r\n\n", "before_files": [{"content": "# Copyright (c) 2016 Will Thames <[email protected]>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\n\n\"\"\"UseHandlerRatherThanWhenChangedRule used with ansible-lint.\"\"\"\nfrom __future__ import annotations\n\nimport sys\nfrom typing import TYPE_CHECKING\n\nfrom ansiblelint.rules import AnsibleLintRule\n\nif TYPE_CHECKING:\n from ansiblelint.file_utils import Lintable\n from ansiblelint.utils import Task\n\n\ndef _changed_in_when(item: str) -> bool:\n if not isinstance(item, str):\n return False\n item_list = item.split()\n\n if {\"and\", \"not\"} & set(item_list):\n return False\n return any(\n changed in item\n for changed in [\n \".changed\",\n \"|changed\",\n '[\"changed\"]',\n \"['changed']\",\n \"is changed\",\n ]\n )\n\n\nclass UseHandlerRatherThanWhenChangedRule(AnsibleLintRule):\n \"\"\"Tasks that run when changed should likely be handlers.\"\"\"\n\n id = \"no-handler\"\n description = (\n \"If a task has a ``when: result.changed`` setting, it is effectively \"\n \"acting as a handler. You could use ``notify`` and move that task to \"\n \"``handlers``.\"\n )\n link = \"https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html#handlers\"\n severity = \"MEDIUM\"\n tags = [\"idiom\"]\n version_added = \"historic\"\n\n def matchtask(\n self,\n task: Task,\n file: Lintable | None = None,\n ) -> bool | str:\n if task[\"__ansible_action_type__\"] != \"task\":\n return False\n\n when = task.get(\"when\")\n\n if isinstance(when, list):\n for item in when:\n if _changed_in_when(item):\n return True\n if isinstance(when, str):\n return _changed_in_when(when)\n return False\n\n\nif \"pytest\" in sys.modules:\n import pytest\n\n from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports\n from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports\n\n @pytest.mark.parametrize(\n (\"test_file\", \"failures\"),\n (\n pytest.param(\"examples/playbooks/no_handler_fail.yml\", 7, id=\"fail\"),\n pytest.param(\"examples/playbooks/no_handler_pass.yml\", 0, id=\"pass\"),\n ),\n )\n def test_no_handler(\n default_rules_collection: RulesCollection,\n test_file: str,\n failures: int,\n ) -> None:\n \"\"\"Test rule matches.\"\"\"\n results = Runner(test_file, rules=default_rules_collection).run()\n assert len(results) == failures\n for result in results:\n assert result.tag == \"no-handler\"\n", "path": "src/ansiblelint/rules/no_handler.py"}]}
1848
304
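What the ansible-lint patch changes, shown as a self-contained sketch that mirrors the patched `_changed_in_when` helper (the condition strings are illustrative; the logic is taken from the golden diff):

```python
def _changed_in_when(item):
    # Logic from the golden diff: any boolean combinator disables the rule.
    if not isinstance(item, str):
        return False
    if {"and", "or", "not"} & set(item.split()):
        return False
    return any(
        marker in item
        for marker in [".changed", "|changed", '["changed"]', "['changed']", "is changed"]
    )

assert _changed_in_when("mytask is changed")                      # still flagged
assert _changed_in_when("mytask.changed")                         # still flagged
assert not _changed_in_when("mytask is changed and nice | bool")  # ignored now
assert not _changed_in_when("mytask is changed or forced")        # ignored now
```

The patched `matchtask` additionally returns `False` for any multi-entry `when:` list, so stacked conditions (an implicit AND) are no longer reported either, which is why the fail fixture's expected count drops from 7 to 5.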
gh_patches_debug_7454
rasdani/github-patches
git_diff
alltheplaces__alltheplaces-3333
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Spider target_au is broken During the global build at 2021-05-26-14-42-23, spider **target_au** failed with **0 features** and **16 errors**. Here's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/target_au.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/target_au.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/target_au.geojson)) </issue> <code> [start of locations/spiders/target_au.py] 1 import scrapy 2 3 from locations.hours import OpeningHours 4 from locations.items import GeojsonPointItem 5 6 7 class TargetAUSpider(scrapy.Spider): 8 name = "target_au" 9 item_attributes = { 'brand': "Target", 'brand_wikidata': "Q7685854" } 10 allowed_domains = ["target.com.au"] 11 states = ["nsw","vic","qld","nt", "act", "sa", "tas", "wa"] 12 headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0", 13 "Referer": "https://www.target.com.au/store-finder"} 14 15 custom_settings = {'DOWNLOAD_DELAY' : 0.5,} 16 17 def start_requests(self): 18 url = "https://www.target.com.au/store-finder/state/{}" 19 for state in self.states: 20 yield scrapy.Request(url.format(state),headers=self.headers, callback=self.parse) 21 22 23 def parse(self, response): 24 store_links = response.xpath('//a[@class="table-tap-canonical"]/@href').getall() 25 for link in store_links: 26 yield scrapy.Request(response.urljoin(link), callback=self.parse_store, headers=self.headers) 27 28 def _parse_hour_str(self, hour_string): 29 time_, am_pm = tuple(hour_string.split(" ")) 30 hour, min = tuple(time_.split(":")) 31 hour = int(hour) 32 if am_pm == "PM": 33 hour += 12 34 return f"{hour}:{min}" 35 36 def parse_hours(self, hours_node): 37 opening_hours = OpeningHours() 38 days = hours_node.xpath(".//dt/text()").getall() 39 hours = hours_node.xpath(".//dd/text()").getall() 40 for idx, day in enumerate(days): 41 store_hours = hours[idx] 42 if "–" not in store_hours or ":" not in store_hours: 43 continue 44 parts = store_hours.strip().split(" – ") 45 open_time = self._parse_hour_str(parts[0]) 46 close_time = self._parse_hour_str(parts[1]) 47 opening_hours.add_range(day[0:2], open_time, close_time) 48 49 return opening_hours.as_opening_hours() 50 51 52 53 def parse_store(self, response): 54 store_name = response.xpath("//h4/text()").get().replace("Target – ","") 55 address_header = response.xpath("//span[@itemprop='streetAddress']/strong/text()").get() 56 address = " ".join(response.xpath("//span[@itemprop='streetAddress']/text()").getall()).strip() 57 if address_header: 58 address = address_header + " " + address 59 locality = response.xpath("//span[@itemprop='addressLocality']/text()").get() 60 region = response.xpath("//span[@itemprop='addressRegion']/text()").get() 61 post_code = response.xpath("//span[@itemprop='postalCode']/text()").get() 62 phone_number = response.xpath("//span[@itemprop='telephone']/text()").get() 63 hours_section = response.xpath("(//dl)[1]")[0] 64 opening_hours = self.parse_hours(hours_section) 65 lat = response.xpath("//div[@data-embedded-json='store-content-data']//@data-lat").get() 66 lon = response.xpath("//div[@data-embedded-json='store-content-data']//@data-lng").get() 67 68 yield GeojsonPointItem(lat=lat, 69 lon=lon, 70 name=store_name, 71 addr_full=address, 72 city=locality, 73 state=region, 74 postcode=post_code, 75 
country="AU", 76 phone=phone_number, 77 website=response.url, 78 opening_hours=opening_hours, 79 ref=response.url.split("/")[-1]) 80 [end of locations/spiders/target_au.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/locations/spiders/target_au.py b/locations/spiders/target_au.py --- a/locations/spiders/target_au.py +++ b/locations/spiders/target_au.py @@ -26,6 +26,8 @@ yield scrapy.Request(response.urljoin(link), callback=self.parse_store, headers=self.headers) def _parse_hour_str(self, hour_string): + if hour_string == "Midnight": + return self._parse_hour_str("12:00 AM") time_, am_pm = tuple(hour_string.split(" ")) hour, min = tuple(time_.split(":")) hour = int(hour)
{"golden_diff": "diff --git a/locations/spiders/target_au.py b/locations/spiders/target_au.py\n--- a/locations/spiders/target_au.py\n+++ b/locations/spiders/target_au.py\n@@ -26,6 +26,8 @@\n yield scrapy.Request(response.urljoin(link), callback=self.parse_store, headers=self.headers)\n \n def _parse_hour_str(self, hour_string):\n+ if hour_string == \"Midnight\":\n+ return self._parse_hour_str(\"12:00 AM\")\n time_, am_pm = tuple(hour_string.split(\" \"))\n hour, min = tuple(time_.split(\":\"))\n hour = int(hour)\n", "issue": "Spider target_au is broken\nDuring the global build at 2021-05-26-14-42-23, spider **target_au** failed with **0 features** and **16 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/target_au.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/target_au.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/target_au.geojson))\n", "before_files": [{"content": "import scrapy\n\nfrom locations.hours import OpeningHours\nfrom locations.items import GeojsonPointItem\n\n\nclass TargetAUSpider(scrapy.Spider):\n name = \"target_au\"\n item_attributes = { 'brand': \"Target\", 'brand_wikidata': \"Q7685854\" }\n allowed_domains = [\"target.com.au\"]\n states = [\"nsw\",\"vic\",\"qld\",\"nt\", \"act\", \"sa\", \"tas\", \"wa\"]\n headers = {\"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0\",\n \"Referer\": \"https://www.target.com.au/store-finder\"}\n\n custom_settings = {'DOWNLOAD_DELAY' : 0.5,}\n\n def start_requests(self):\n url = \"https://www.target.com.au/store-finder/state/{}\"\n for state in self.states:\n yield scrapy.Request(url.format(state),headers=self.headers, callback=self.parse)\n\n\n def parse(self, response):\n store_links = response.xpath('//a[@class=\"table-tap-canonical\"]/@href').getall()\n for link in store_links:\n yield scrapy.Request(response.urljoin(link), callback=self.parse_store, headers=self.headers)\n\n def _parse_hour_str(self, hour_string):\n time_, am_pm = tuple(hour_string.split(\" \"))\n hour, min = tuple(time_.split(\":\"))\n hour = int(hour)\n if am_pm == \"PM\":\n hour += 12\n return f\"{hour}:{min}\"\n\n def parse_hours(self, hours_node):\n opening_hours = OpeningHours()\n days = hours_node.xpath(\".//dt/text()\").getall()\n hours = hours_node.xpath(\".//dd/text()\").getall()\n for idx, day in enumerate(days):\n store_hours = hours[idx]\n if \"\u2013\" not in store_hours or \":\" not in store_hours:\n continue\n parts = store_hours.strip().split(\" \u2013 \")\n open_time = self._parse_hour_str(parts[0])\n close_time = self._parse_hour_str(parts[1])\n opening_hours.add_range(day[0:2], open_time, close_time)\n \n return opening_hours.as_opening_hours()\n\n\n\n def parse_store(self, response):\n store_name = response.xpath(\"//h4/text()\").get().replace(\"Target \u2013 \",\"\")\n address_header = response.xpath(\"//span[@itemprop='streetAddress']/strong/text()\").get()\n address = \" \".join(response.xpath(\"//span[@itemprop='streetAddress']/text()\").getall()).strip()\n if address_header:\n address = address_header + \" \" + address\n locality = response.xpath(\"//span[@itemprop='addressLocality']/text()\").get()\n region = response.xpath(\"//span[@itemprop='addressRegion']/text()\").get()\n post_code = response.xpath(\"//span[@itemprop='postalCode']/text()\").get()\n phone_number = response.xpath(\"//span[@itemprop='telephone']/text()\").get()\n 
hours_section = response.xpath(\"(//dl)[1]\")[0]\n opening_hours = self.parse_hours(hours_section)\n lat = response.xpath(\"//div[@data-embedded-json='store-content-data']//@data-lat\").get()\n lon = response.xpath(\"//div[@data-embedded-json='store-content-data']//@data-lng\").get()\n\n yield GeojsonPointItem(lat=lat,\n lon=lon,\n name=store_name,\n addr_full=address,\n city=locality,\n state=region,\n postcode=post_code,\n country=\"AU\",\n phone=phone_number,\n website=response.url,\n opening_hours=opening_hours,\n ref=response.url.split(\"/\")[-1]) \n", "path": "locations/spiders/target_au.py"}]}
1701
142
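The target_au crash and fix in isolation. This sketch is adapted from the spider (a free function instead of a method, and `min` renamed to `minute` so it does not shadow the builtin); the behavior matches the golden diff, including routing "Midnight" through "12:00 AM":

```python
def _parse_hour_str(hour_string):
    if hour_string == "Midnight":                  # the added special case
        return _parse_hour_str("12:00 AM")
    # Before the fix, "Midnight" reached the line below and blew up:
    #   ValueError: not enough values to unpack (expected 2, got 1)
    time_, am_pm = tuple(hour_string.split(" "))
    hour, minute = time_.split(":")
    hour = int(hour)
    if am_pm == "PM":
        hour += 12
    return f"{hour}:{minute}"

print(_parse_hour_str("9:00 PM"))    # -> 21:00
print(_parse_hour_str("Midnight"))   # -> 12:00 (as the patch defines it)
```

Target's store pages had started listing "Midnight" in their opening hours, which is what made every store parse fail and produced the 0-feature run in the issue.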
gh_patches_debug_22562
rasdani/github-patches
git_diff
liqd__a4-meinberlin-2372
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> text commenting results show module detail when only text commenting module used for project, module detail also shown in results tab </issue> <code> [start of meinberlin/apps/documents/views.py] 1 from django.http import Http404 2 from django.http.response import HttpResponseRedirect 3 from django.urls import reverse 4 from django.utils.translation import ugettext_lazy as _ 5 from django.views import generic 6 7 from adhocracy4.dashboard import mixins as dashboard_mixins 8 from adhocracy4.projects.mixins import ProjectMixin 9 from adhocracy4.rules import mixins as rules_mixins 10 from meinberlin.apps.contrib import mixins as contrib_mixins 11 from meinberlin.apps.exports.views import DashboardExportView 12 13 from . import models 14 15 16 class DocumentDashboardView(ProjectMixin, 17 dashboard_mixins.DashboardBaseMixin, 18 dashboard_mixins.DashboardComponentMixin, 19 generic.TemplateView): 20 template_name = 'meinberlin_documents/document_dashboard.html' 21 permission_required = 'a4projects.change_project' 22 23 def get_permission_object(self): 24 return self.project 25 26 27 class ChapterDetailView(ProjectMixin, 28 rules_mixins.PermissionRequiredMixin, 29 generic.DetailView, 30 contrib_mixins.DisplayProjectOrModuleMixin): 31 model = models.Chapter 32 permission_required = 'meinberlin_documents.view_chapter' 33 get_context_from_object = True 34 35 def dispatch(self, request, *args, **kwargs): 36 # Redirect first chapter view to the project detail page 37 res = super().dispatch(request, *args, **kwargs) 38 chapter = self.get_object() 39 if self.request.path == chapter.get_absolute_url() \ 40 and chapter == self.chapter_list.first(): 41 return HttpResponseRedirect(self.project.get_absolute_url()) 42 else: 43 return res 44 45 def get_context_data(self, **kwargs): 46 context = super(ChapterDetailView, self).get_context_data(**kwargs) 47 context['chapter_list'] = self.chapter_list 48 return context 49 50 @property 51 def chapter_list(self): 52 return models.Chapter.objects.filter(module=self.module) 53 54 55 class DocumentDetailView(ChapterDetailView): 56 get_context_from_object = False 57 58 def get_object(self): 59 first_chapter = models.Chapter.objects \ 60 .filter(module=self.module) \ 61 .first() 62 63 if not first_chapter: 64 raise Http404(_('Document has no chapters defined.')) 65 return first_chapter 66 67 68 class ParagraphDetailView(ProjectMixin, 69 rules_mixins.PermissionRequiredMixin, 70 generic.DetailView): 71 model = models.Paragraph 72 permission_required = 'meinberlin_documents.view_paragraph' 73 74 75 class DocumentDashboardExportView(DashboardExportView): 76 template_name = 'meinberlin_exports/export_dashboard.html' 77 78 def get_context_data(self, **kwargs): 79 context = super().get_context_data(**kwargs) 80 context['comment_export'] = reverse( 81 'a4dashboard:document-comment-export', 82 kwargs={'module_slug': self.module.slug}) 83 return context 84 [end of meinberlin/apps/documents/views.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/meinberlin/apps/documents/views.py b/meinberlin/apps/documents/views.py --- a/meinberlin/apps/documents/views.py +++ b/meinberlin/apps/documents/views.py @@ -1,5 +1,4 @@ from django.http import Http404 -from django.http.response import HttpResponseRedirect from django.urls import reverse from django.utils.translation import ugettext_lazy as _ from django.views import generic @@ -32,16 +31,6 @@ permission_required = 'meinberlin_documents.view_chapter' get_context_from_object = True - def dispatch(self, request, *args, **kwargs): - # Redirect first chapter view to the project detail page - res = super().dispatch(request, *args, **kwargs) - chapter = self.get_object() - if self.request.path == chapter.get_absolute_url() \ - and chapter == self.chapter_list.first(): - return HttpResponseRedirect(self.project.get_absolute_url()) - else: - return res - def get_context_data(self, **kwargs): context = super(ChapterDetailView, self).get_context_data(**kwargs) context['chapter_list'] = self.chapter_list
{"golden_diff": "diff --git a/meinberlin/apps/documents/views.py b/meinberlin/apps/documents/views.py\n--- a/meinberlin/apps/documents/views.py\n+++ b/meinberlin/apps/documents/views.py\n@@ -1,5 +1,4 @@\n from django.http import Http404\n-from django.http.response import HttpResponseRedirect\n from django.urls import reverse\n from django.utils.translation import ugettext_lazy as _\n from django.views import generic\n@@ -32,16 +31,6 @@\n permission_required = 'meinberlin_documents.view_chapter'\n get_context_from_object = True\n \n- def dispatch(self, request, *args, **kwargs):\n- # Redirect first chapter view to the project detail page\n- res = super().dispatch(request, *args, **kwargs)\n- chapter = self.get_object()\n- if self.request.path == chapter.get_absolute_url() \\\n- and chapter == self.chapter_list.first():\n- return HttpResponseRedirect(self.project.get_absolute_url())\n- else:\n- return res\n-\n def get_context_data(self, **kwargs):\n context = super(ChapterDetailView, self).get_context_data(**kwargs)\n context['chapter_list'] = self.chapter_list\n", "issue": "text commenting results show module detail \nwhen only text commenting module used for project, module detail also shown in results tab\n", "before_files": [{"content": "from django.http import Http404\nfrom django.http.response import HttpResponseRedirect\nfrom django.urls import reverse\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.views import generic\n\nfrom adhocracy4.dashboard import mixins as dashboard_mixins\nfrom adhocracy4.projects.mixins import ProjectMixin\nfrom adhocracy4.rules import mixins as rules_mixins\nfrom meinberlin.apps.contrib import mixins as contrib_mixins\nfrom meinberlin.apps.exports.views import DashboardExportView\n\nfrom . import models\n\n\nclass DocumentDashboardView(ProjectMixin,\n dashboard_mixins.DashboardBaseMixin,\n dashboard_mixins.DashboardComponentMixin,\n generic.TemplateView):\n template_name = 'meinberlin_documents/document_dashboard.html'\n permission_required = 'a4projects.change_project'\n\n def get_permission_object(self):\n return self.project\n\n\nclass ChapterDetailView(ProjectMixin,\n rules_mixins.PermissionRequiredMixin,\n generic.DetailView,\n contrib_mixins.DisplayProjectOrModuleMixin):\n model = models.Chapter\n permission_required = 'meinberlin_documents.view_chapter'\n get_context_from_object = True\n\n def dispatch(self, request, *args, **kwargs):\n # Redirect first chapter view to the project detail page\n res = super().dispatch(request, *args, **kwargs)\n chapter = self.get_object()\n if self.request.path == chapter.get_absolute_url() \\\n and chapter == self.chapter_list.first():\n return HttpResponseRedirect(self.project.get_absolute_url())\n else:\n return res\n\n def get_context_data(self, **kwargs):\n context = super(ChapterDetailView, self).get_context_data(**kwargs)\n context['chapter_list'] = self.chapter_list\n return context\n\n @property\n def chapter_list(self):\n return models.Chapter.objects.filter(module=self.module)\n\n\nclass DocumentDetailView(ChapterDetailView):\n get_context_from_object = False\n\n def get_object(self):\n first_chapter = models.Chapter.objects \\\n .filter(module=self.module) \\\n .first()\n\n if not first_chapter:\n raise Http404(_('Document has no chapters defined.'))\n return first_chapter\n\n\nclass ParagraphDetailView(ProjectMixin,\n rules_mixins.PermissionRequiredMixin,\n generic.DetailView):\n model = models.Paragraph\n permission_required = 'meinberlin_documents.view_paragraph'\n\n\nclass 
DocumentDashboardExportView(DashboardExportView):\n template_name = 'meinberlin_exports/export_dashboard.html'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['comment_export'] = reverse(\n 'a4dashboard:document-comment-export',\n kwargs={'module_slug': self.module.slug})\n return context\n", "path": "meinberlin/apps/documents/views.py"}]}
1303
258
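Note on the meinberlin diff above: the entire fix is the deletion of the `dispatch` override on `ChapterDetailView`, together with its now-unused `HttpResponseRedirect` import. That override redirected the first chapter's URL to the project detail page, and, reading the issue, that redirect appears to be how the module detail leaked into the results tab for single-module projects; with it removed, the chapter simply renders at its own URL.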
gh_patches_debug_1924
rasdani/github-patches
git_diff
cobbler__cobbler-1265
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> build_reporting fails if empty string in ignorelist The default configuration in the ubuntu 12.04 cobbler 2.6.5 package has the following in `/etc/settings`: ``` build_reporting_ignorelist = [""] ``` The code that reads this value is in `install_post_report.py`, and the condition that determines whether to send a build report email is: ``` for prefix in settings.build_reporting_ignorelist: if name.lower().startswith(prefix) == True: sendmail = False ``` With the default configuration, this check always succeeds, and **mail is not sent**. Fix the issue by modifying the condition to: ``` if prefix != '' and name.lower().startswith(prefix): ``` </issue> <code> [start of cobbler/modules/install_post_report.py] 1 # (c) 2008-2009 2 # Jeff Schroeder <[email protected]> 3 # Michael DeHaan <michael.dehaan AT gmail> 4 # 5 # License: GPLv2+ 6 7 # Post install trigger for cobbler to 8 # send out a pretty email report that 9 # contains target information. 10 11 import distutils.sysconfig 12 import sys 13 import os 14 import traceback 15 16 plib = distutils.sysconfig.get_python_lib() 17 mod_path="%s/cobbler" % plib 18 sys.path.insert(0, mod_path) 19 20 from utils import _ 21 import smtplib 22 import sys 23 import cobbler.templar as templar 24 from cobbler.cexceptions import CX 25 import utils 26 27 def register(): 28 # this pure python trigger acts as if it were a legacy shell-trigger, but is much faster. 29 # the return of this method indicates the trigger type 30 return "/var/lib/cobbler/triggers/install/post/*" 31 32 def run(api, args, logger): 33 # FIXME: make everything use the logger 34 35 settings = api.settings() 36 37 # go no further if this feature is turned off 38 if not str(settings.build_reporting_enabled).lower() in [ "1", "yes", "y", "true"]: 39 return 0 40 41 objtype = args[0] # "target" or "profile" 42 name = args[1] # name of target or profile 43 boot_ip = args[2] # ip or "?" 
44 45 if objtype == "system": 46 target = api.find_system(name) 47 else: 48 target = api.find_profile(name) 49 50 # collapse the object down to a rendered datastructure 51 target = utils.blender(api, False, target) 52 53 if target == {}: 54 raise CX("failure looking up target") 55 56 to_addr = settings.build_reporting_email 57 if to_addr == "": 58 return 0 59 60 # add the ability to specify an MTA for servers that don't run their own 61 smtp_server = settings.build_reporting_smtp_server 62 if smtp_server == "": 63 smtp_server = "localhost" 64 65 # use a custom from address or fall back to a reasonable default 66 from_addr = settings.build_reporting_sender 67 if from_addr == "": 68 from_addr = "cobbler@%s" % settings.server 69 70 subject = settings.build_reporting_subject 71 if subject == "": 72 subject = '[Cobbler] install complete ' 73 74 to_addr = ",".join(to_addr) 75 metadata = { 76 "from_addr" : from_addr, 77 "to_addr" : to_addr, 78 "subject" : subject, 79 "boot_ip" : boot_ip 80 } 81 metadata.update(target) 82 83 input_template = open("/etc/cobbler/reporting/build_report_email.template") 84 input_data = input_template.read() 85 input_template.close() 86 87 message = templar.Templar(api._config).render(input_data, metadata, None) 88 89 # for debug, call 90 # print message 91 92 sendmail = True 93 for prefix in settings.build_reporting_ignorelist: 94 if name.lower().startswith(prefix) == True: 95 sendmail = False 96 97 if sendmail == True: 98 # Send the mail 99 # FIXME: on error, return non-zero 100 server_handle = smtplib.SMTP(smtp_server) 101 server_handle.sendmail(from_addr, to_addr.split(','), message) 102 server_handle.quit() 103 104 return 0 105 106 107 108 109 [end of cobbler/modules/install_post_report.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/cobbler/modules/install_post_report.py b/cobbler/modules/install_post_report.py --- a/cobbler/modules/install_post_report.py +++ b/cobbler/modules/install_post_report.py @@ -91,7 +91,7 @@ sendmail = True for prefix in settings.build_reporting_ignorelist: - if name.lower().startswith(prefix) == True: + if prefix != '' and name.lower().startswith(prefix): sendmail = False if sendmail == True:
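Why this one-line guard fixes the report suppression: in Python, `str.startswith('')` returns `True` for every string, so the shipped default of `build_reporting_ignorelist = [""]` matched every system and profile name and silently disabled all build-report mail. The sketch below is illustrative only — `should_send` and the sample hostnames are made-up stand-ins for the trigger logic, not part of cobbler's API.

```python
# Illustrative sketch (not cobbler code): an empty prefix matches everything,
# because str.startswith('') is True for any string.
assert "web01.example.com".startswith("")

def should_send(name, ignorelist):
    # Mirrors the patched trigger logic: skip empty prefixes explicitly.
    for prefix in ignorelist:
        if prefix != '' and name.lower().startswith(prefix):
            return False
    return True

print(should_send("web01.example.com", [""]))   # True  -> mail goes out again
print(should_send("test-vm42", ["test-"]))      # False -> still ignored
```

An arguably cleaner upstream fix would be shipping an empty list (`[]`) as the packaged default, but the patch keeps the trigger robust against empty entries either way.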
{"golden_diff": "diff --git a/cobbler/modules/install_post_report.py b/cobbler/modules/install_post_report.py\n--- a/cobbler/modules/install_post_report.py\n+++ b/cobbler/modules/install_post_report.py\n@@ -91,7 +91,7 @@\n \n sendmail = True\n for prefix in settings.build_reporting_ignorelist:\n- if name.lower().startswith(prefix) == True:\n+ if prefix != '' and name.lower().startswith(prefix):\n sendmail = False\n \n if sendmail == True:\n", "issue": "build_reporting fails if empty string in ignorelist\nThe default configuration in the ubuntu 12.04 cobbler 2.6.5 package has the following in `/etc/settings`:\n\n```\nbuild_reporting_ignorelist = [\"\"]\n```\n\nThe code that reads this value is in `install_post_report.py`, and the condition that determines whether to send a build report email is:\n\n```\nfor prefix in settings.build_reporting_ignorelist:\n if name.lower().startswith(prefix) == True:\n sendmail = False\n```\n\nWith the default configuration, this check always succeeds, and **mail is not sent**.\n\nFix the issue by modifying the condition to:\n\n```\n if prefix != '' and name.lower().startswith(prefix):\n```\n\n", "before_files": [{"content": "# (c) 2008-2009\n# Jeff Schroeder <[email protected]>\n# Michael DeHaan <michael.dehaan AT gmail>\n#\n# License: GPLv2+\n\n# Post install trigger for cobbler to\n# send out a pretty email report that\n# contains target information.\n\nimport distutils.sysconfig\nimport sys\nimport os\nimport traceback\n\nplib = distutils.sysconfig.get_python_lib()\nmod_path=\"%s/cobbler\" % plib\nsys.path.insert(0, mod_path)\n\nfrom utils import _\nimport smtplib\nimport sys\nimport cobbler.templar as templar\nfrom cobbler.cexceptions import CX\nimport utils\n\ndef register():\n # this pure python trigger acts as if it were a legacy shell-trigger, but is much faster.\n # the return of this method indicates the trigger type\n return \"/var/lib/cobbler/triggers/install/post/*\"\n\ndef run(api, args, logger):\n # FIXME: make everything use the logger\n\n settings = api.settings()\n\n # go no further if this feature is turned off\n if not str(settings.build_reporting_enabled).lower() in [ \"1\", \"yes\", \"y\", \"true\"]:\n return 0\n\n objtype = args[0] # \"target\" or \"profile\"\n name = args[1] # name of target or profile\n boot_ip = args[2] # ip or \"?\"\n\n if objtype == \"system\":\n target = api.find_system(name)\n else:\n target = api.find_profile(name)\n\n # collapse the object down to a rendered datastructure\n target = utils.blender(api, False, target)\n\n if target == {}:\n raise CX(\"failure looking up target\")\n\n to_addr = settings.build_reporting_email\n if to_addr == \"\":\n return 0\n\n # add the ability to specify an MTA for servers that don't run their own\n smtp_server = settings.build_reporting_smtp_server\n if smtp_server == \"\":\n smtp_server = \"localhost\"\n\n # use a custom from address or fall back to a reasonable default\n from_addr = settings.build_reporting_sender\n if from_addr == \"\":\n from_addr = \"cobbler@%s\" % settings.server\n\n subject = settings.build_reporting_subject\n if subject == \"\":\n subject = '[Cobbler] install complete '\n\n to_addr = \",\".join(to_addr)\n metadata = {\n \"from_addr\" : from_addr,\n \"to_addr\" : to_addr,\n \"subject\" : subject,\n \"boot_ip\" : boot_ip\n }\n metadata.update(target)\n\n input_template = open(\"/etc/cobbler/reporting/build_report_email.template\")\n input_data = input_template.read()\n input_template.close()\n\n message = templar.Templar(api._config).render(input_data, 
metadata, None)\n \n # for debug, call\n # print message\n\n sendmail = True\n for prefix in settings.build_reporting_ignorelist:\n if name.lower().startswith(prefix) == True:\n sendmail = False\n\n if sendmail == True:\n # Send the mail\n # FIXME: on error, return non-zero\n server_handle = smtplib.SMTP(smtp_server)\n server_handle.sendmail(from_addr, to_addr.split(','), message)\n server_handle.quit()\n\n return 0\n\n\n\n\n", "path": "cobbler/modules/install_post_report.py"}]}
1670
112
gh_patches_debug_6862
rasdani/github-patches
git_diff
doccano__doccano-1654
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> I can't add members in the Django admin page. I can't add members in the Django admin page. steps - Add a member in the admin page (click a SAVE button). - <img width="1273" alt="スクリーンショット 2022-01-27 9 52 17" src="https://user-images.githubusercontent.com/20487308/151271702-bf60ae7e-f131-45fe-8314-e7726e90f90c.png"> - However, I get a 500 error. - <img width="1085" alt="スクリーンショット 2022-01-27 9 53 08" src="https://user-images.githubusercontent.com/20487308/151271872-c3fa75e8-c491-4aff-b88e-c9d970406ede.png"> - The endpoints of the POST request are different between admin page and member page. - `POST /admin/members/member/add/` - `POST /v1/projects/1/members` Environment --------- doccano v1.5.5 </issue> <code> [start of backend/members/models.py] 1 from django.conf import settings 2 from django.contrib.auth.models import User 3 from django.core.exceptions import ValidationError 4 from django.db import models 5 6 from django.db.models import Manager 7 8 from api.models import Project 9 from roles.models import Role 10 11 12 class MemberManager(Manager): 13 14 def can_update(self, project: int, member_id: int, new_role: str) -> bool: 15 """The project needs at least 1 admin. 16 17 Args: 18 project: The project id. 19 member_id: The member id. 20 new_role: The new role name. 21 22 Returns: 23 Whether the mapping can be updated or not. 24 """ 25 queryset = self.filter( 26 project=project, role__name=settings.ROLE_PROJECT_ADMIN 27 ) 28 if queryset.count() > 1: 29 return True 30 else: 31 admin = queryset.first() 32 # we can change the role except for the only admin. 33 return admin.id != member_id or new_role == settings.ROLE_PROJECT_ADMIN 34 35 def has_role(self, project_id: int, user: User, role_name: str): 36 return self.filter(project=project_id, user=user, role__name=role_name).exists() 37 38 39 class Member(models.Model): 40 user = models.ForeignKey( 41 to=User, 42 on_delete=models.CASCADE, 43 related_name='role_mappings' 44 ) 45 project = models.ForeignKey( 46 to=Project, 47 on_delete=models.CASCADE, 48 related_name='role_mappings' 49 ) 50 role = models.ForeignKey( 51 to=Role, 52 on_delete=models.CASCADE 53 ) 54 created_at = models.DateTimeField(auto_now_add=True) 55 updated_at = models.DateTimeField(auto_now=True) 56 objects = MemberManager() 57 58 def clean(self): 59 members = self.objects.exclude(id=self.id) 60 if members.filter(user=self.user, project=self.project).exists(): 61 message = 'This user is already assigned to a role in this project.' 62 raise ValidationError(message) 63 64 @property 65 def username(self): 66 return self.user.username 67 68 class Meta: 69 unique_together = ('user', 'project') 70 [end of backend/members/models.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/backend/members/models.py b/backend/members/models.py --- a/backend/members/models.py +++ b/backend/members/models.py @@ -56,7 +56,7 @@ objects = MemberManager() def clean(self): - members = self.objects.exclude(id=self.id) + members = self.__class__.objects.exclude(id=self.id) if members.filter(user=self.user, project=self.project).exists(): message = 'This user is already assigned to a role in this project.' raise ValidationError(message)
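Why `self.objects` caused the 500: Django exposes managers through a descriptor that deliberately raises `AttributeError` when accessed from a model *instance*, and the admin's `ModelForm` calls `full_clean()` — and therefore `Member.clean()` — on save, so the bad lookup blew up exactly there. The REST endpoint survived presumably because DRF serializers don't invoke model `clean()` by default. Below is a minimal, Django-free sketch of the descriptor guard; the real one lives in `django.db.models.manager.ManagerDescriptor`, so the class and message here are simplified assumptions.

```python
# Simplified stand-in for Django's ManagerDescriptor (not the real code).
class ManagerDescriptor:
    def __init__(self, manager):
        self.manager = manager

    def __get__(self, instance, cls=None):
        # Django forbids manager access from instances on purpose.
        if instance is not None:
            raise AttributeError(
                f"Manager isn't accessible via {cls.__name__} instances"
            )
        return self.manager

class Member:
    objects = ManagerDescriptor("<MemberManager>")

m = Member()
print(Member.objects)            # OK: class-level access
print(m.__class__.objects)       # OK: the patched spelling
try:
    m.objects                    # what the buggy clean() did
except AttributeError as exc:
    print(exc)                   # the error behind the admin's 500
```

Writing `Member.objects` directly would work just as well as `self.__class__.objects`; the class-attribute spelling merely keeps `clean()` agnostic to subclassing.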
{"golden_diff": "diff --git a/backend/members/models.py b/backend/members/models.py\n--- a/backend/members/models.py\n+++ b/backend/members/models.py\n@@ -56,7 +56,7 @@\n objects = MemberManager()\n \n def clean(self):\n- members = self.objects.exclude(id=self.id)\n+ members = self.__class__.objects.exclude(id=self.id)\n if members.filter(user=self.user, project=self.project).exists():\n message = 'This user is already assigned to a role in this project.'\n raise ValidationError(message)\n", "issue": "I can't add members in the Django admin page.\nI can't add members in the Django admin page.\r\n\r\nsteps\r\n- Add a member in the admin page (click a SAVE button).\r\n - <img width=\"1273\" alt=\"\u30b9\u30af\u30ea\u30fc\u30f3\u30b7\u30e7\u30c3\u30c8 2022-01-27 9 52 17\" src=\"https://user-images.githubusercontent.com/20487308/151271702-bf60ae7e-f131-45fe-8314-e7726e90f90c.png\">\r\n- However, I get a 500 error.\r\n - <img width=\"1085\" alt=\"\u30b9\u30af\u30ea\u30fc\u30f3\u30b7\u30e7\u30c3\u30c8 2022-01-27 9 53 08\" src=\"https://user-images.githubusercontent.com/20487308/151271872-c3fa75e8-c491-4aff-b88e-c9d970406ede.png\">\r\n- The endpoints of the POST request are different between admin page and member page.\r\n - `POST /admin/members/member/add/`\r\n - `POST /v1/projects/1/members`\r\n\r\nEnvironment\r\n---------\r\ndoccano v1.5.5\r\n\n", "before_files": [{"content": "from django.conf import settings\nfrom django.contrib.auth.models import User\nfrom django.core.exceptions import ValidationError\nfrom django.db import models\n\nfrom django.db.models import Manager\n\nfrom api.models import Project\nfrom roles.models import Role\n\n\nclass MemberManager(Manager):\n\n def can_update(self, project: int, member_id: int, new_role: str) -> bool:\n \"\"\"The project needs at least 1 admin.\n\n Args:\n project: The project id.\n member_id: The member id.\n new_role: The new role name.\n\n Returns:\n Whether the mapping can be updated or not.\n \"\"\"\n queryset = self.filter(\n project=project, role__name=settings.ROLE_PROJECT_ADMIN\n )\n if queryset.count() > 1:\n return True\n else:\n admin = queryset.first()\n # we can change the role except for the only admin.\n return admin.id != member_id or new_role == settings.ROLE_PROJECT_ADMIN\n\n def has_role(self, project_id: int, user: User, role_name: str):\n return self.filter(project=project_id, user=user, role__name=role_name).exists()\n\n\nclass Member(models.Model):\n user = models.ForeignKey(\n to=User,\n on_delete=models.CASCADE,\n related_name='role_mappings'\n )\n project = models.ForeignKey(\n to=Project,\n on_delete=models.CASCADE,\n related_name='role_mappings'\n )\n role = models.ForeignKey(\n to=Role,\n on_delete=models.CASCADE\n )\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n objects = MemberManager()\n\n def clean(self):\n members = self.objects.exclude(id=self.id)\n if members.filter(user=self.user, project=self.project).exists():\n message = 'This user is already assigned to a role in this project.'\n raise ValidationError(message)\n\n @property\n def username(self):\n return self.user.username\n\n class Meta:\n unique_together = ('user', 'project')\n", "path": "backend/members/models.py"}]}
1405
117