| Column | Type | Stats |
|---|---|---|
| problem_id | string | lengths 18-22 |
| source | string | 1 class (single value) |
| task_type | string | 1 class (single value) |
| in_source_id | string | lengths 13-58 |
| prompt | string | lengths 1.1k-10.2k |
| golden_diff | string | lengths 151-4.94k |
| verification_info | string | lengths 582-21k |
| num_tokens | int64 | 271-2.05k |
| num_tokens_diff | int64 | 47-1.02k |
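A minimal sketch of loading and inspecting this data with the `datasets` library is shown below. It assumes the preview corresponds to a Hugging Face Hub dataset named `rasdani/github-patches` (the value of the `source` column) and that the records live in a `train` split; both names are assumptions, not confirmed by the preview itself.

```python
from datasets import load_dataset

# Repo id taken from the `source` column above; the split name is an assumption.
ds = load_dataset("rasdani/github-patches", split="train")

row = ds[0]
print(row["problem_id"])      # e.g. "gh_patches_debug_1771"
print(row["num_tokens"])      # prompt length in tokens
print(row["prompt"][:200])    # issue text, buggy files, and patch instructions
```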
gh_patches_debug_1771 | rasdani/github-patches | git_diff | matrix-org__synapse-11690 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add a capabilties flag for MSC3440
Per https://github.com/matrix-org/matrix-doc/commit/fc81bbd836955876c931c95277249981b3e2778c
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `synapse/rest/client/capabilities.py`
Content:
```
1 # Copyright 2019 New Vector
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 import logging
15 from typing import TYPE_CHECKING, Tuple
16
17 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, MSC3244_CAPABILITIES
18 from synapse.http.server import HttpServer
19 from synapse.http.servlet import RestServlet
20 from synapse.http.site import SynapseRequest
21 from synapse.types import JsonDict
22
23 from ._base import client_patterns
24
25 if TYPE_CHECKING:
26 from synapse.server import HomeServer
27
28 logger = logging.getLogger(__name__)
29
30
31 class CapabilitiesRestServlet(RestServlet):
32 """End point to expose the capabilities of the server."""
33
34 PATTERNS = client_patterns("/capabilities$")
35
36 def __init__(self, hs: "HomeServer"):
37 super().__init__()
38 self.hs = hs
39 self.config = hs.config
40 self.auth = hs.get_auth()
41 self.auth_handler = hs.get_auth_handler()
42
43 async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
44 await self.auth.get_user_by_req(request, allow_guest=True)
45 change_password = self.auth_handler.can_change_password()
46
47 response: JsonDict = {
48 "capabilities": {
49 "m.room_versions": {
50 "default": self.config.server.default_room_version.identifier,
51 "available": {
52 v.identifier: v.disposition
53 for v in KNOWN_ROOM_VERSIONS.values()
54 },
55 },
56 "m.change_password": {"enabled": change_password},
57 }
58 }
59
60 if self.config.experimental.msc3244_enabled:
61 response["capabilities"]["m.room_versions"][
62 "org.matrix.msc3244.room_capabilities"
63 ] = MSC3244_CAPABILITIES
64
65 if self.config.experimental.msc3283_enabled:
66 response["capabilities"]["org.matrix.msc3283.set_displayname"] = {
67 "enabled": self.config.registration.enable_set_displayname
68 }
69 response["capabilities"]["org.matrix.msc3283.set_avatar_url"] = {
70 "enabled": self.config.registration.enable_set_avatar_url
71 }
72 response["capabilities"]["org.matrix.msc3283.3pid_changes"] = {
73 "enabled": self.config.registration.enable_3pid_changes
74 }
75
76 return 200, response
77
78
79 def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
80 CapabilitiesRestServlet(hs).register(http_server)
81
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/synapse/rest/client/capabilities.py b/synapse/rest/client/capabilities.py
--- a/synapse/rest/client/capabilities.py
+++ b/synapse/rest/client/capabilities.py
@@ -73,6 +73,9 @@
"enabled": self.config.registration.enable_3pid_changes
}
+ if self.config.experimental.msc3440_enabled:
+ response["capabilities"]["io.element.thread"] = {"enabled": True}
+
return 200, response
| {"golden_diff": "diff --git a/synapse/rest/client/capabilities.py b/synapse/rest/client/capabilities.py\n--- a/synapse/rest/client/capabilities.py\n+++ b/synapse/rest/client/capabilities.py\n@@ -73,6 +73,9 @@\n \"enabled\": self.config.registration.enable_3pid_changes\n }\n \n+ if self.config.experimental.msc3440_enabled:\n+ response[\"capabilities\"][\"io.element.thread\"] = {\"enabled\": True}\n+\n return 200, response\n", "issue": "Add a capabilties flag for MSC3440\nPer https://github.com/matrix-org/matrix-doc/commit/fc81bbd836955876c931c95277249981b3e2778c\r\n\r\n\n", "before_files": [{"content": "# Copyright 2019 New Vector\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport logging\nfrom typing import TYPE_CHECKING, Tuple\n\nfrom synapse.api.room_versions import KNOWN_ROOM_VERSIONS, MSC3244_CAPABILITIES\nfrom synapse.http.server import HttpServer\nfrom synapse.http.servlet import RestServlet\nfrom synapse.http.site import SynapseRequest\nfrom synapse.types import JsonDict\n\nfrom ._base import client_patterns\n\nif TYPE_CHECKING:\n from synapse.server import HomeServer\n\nlogger = logging.getLogger(__name__)\n\n\nclass CapabilitiesRestServlet(RestServlet):\n \"\"\"End point to expose the capabilities of the server.\"\"\"\n\n PATTERNS = client_patterns(\"/capabilities$\")\n\n def __init__(self, hs: \"HomeServer\"):\n super().__init__()\n self.hs = hs\n self.config = hs.config\n self.auth = hs.get_auth()\n self.auth_handler = hs.get_auth_handler()\n\n async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:\n await self.auth.get_user_by_req(request, allow_guest=True)\n change_password = self.auth_handler.can_change_password()\n\n response: JsonDict = {\n \"capabilities\": {\n \"m.room_versions\": {\n \"default\": self.config.server.default_room_version.identifier,\n \"available\": {\n v.identifier: v.disposition\n for v in KNOWN_ROOM_VERSIONS.values()\n },\n },\n \"m.change_password\": {\"enabled\": change_password},\n }\n }\n\n if self.config.experimental.msc3244_enabled:\n response[\"capabilities\"][\"m.room_versions\"][\n \"org.matrix.msc3244.room_capabilities\"\n ] = MSC3244_CAPABILITIES\n\n if self.config.experimental.msc3283_enabled:\n response[\"capabilities\"][\"org.matrix.msc3283.set_displayname\"] = {\n \"enabled\": self.config.registration.enable_set_displayname\n }\n response[\"capabilities\"][\"org.matrix.msc3283.set_avatar_url\"] = {\n \"enabled\": self.config.registration.enable_set_avatar_url\n }\n response[\"capabilities\"][\"org.matrix.msc3283.3pid_changes\"] = {\n \"enabled\": self.config.registration.enable_3pid_changes\n }\n\n return 200, response\n\n\ndef register_servlets(hs: \"HomeServer\", http_server: HttpServer) -> None:\n CapabilitiesRestServlet(hs).register(http_server)\n", "path": "synapse/rest/client/capabilities.py"}], "after_files": [{"content": "# Copyright 2019 New Vector\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a 
copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport logging\nfrom typing import TYPE_CHECKING, Tuple\n\nfrom synapse.api.room_versions import KNOWN_ROOM_VERSIONS, MSC3244_CAPABILITIES\nfrom synapse.http.server import HttpServer\nfrom synapse.http.servlet import RestServlet\nfrom synapse.http.site import SynapseRequest\nfrom synapse.types import JsonDict\n\nfrom ._base import client_patterns\n\nif TYPE_CHECKING:\n from synapse.server import HomeServer\n\nlogger = logging.getLogger(__name__)\n\n\nclass CapabilitiesRestServlet(RestServlet):\n \"\"\"End point to expose the capabilities of the server.\"\"\"\n\n PATTERNS = client_patterns(\"/capabilities$\")\n\n def __init__(self, hs: \"HomeServer\"):\n super().__init__()\n self.hs = hs\n self.config = hs.config\n self.auth = hs.get_auth()\n self.auth_handler = hs.get_auth_handler()\n\n async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:\n await self.auth.get_user_by_req(request, allow_guest=True)\n change_password = self.auth_handler.can_change_password()\n\n response: JsonDict = {\n \"capabilities\": {\n \"m.room_versions\": {\n \"default\": self.config.server.default_room_version.identifier,\n \"available\": {\n v.identifier: v.disposition\n for v in KNOWN_ROOM_VERSIONS.values()\n },\n },\n \"m.change_password\": {\"enabled\": change_password},\n }\n }\n\n if self.config.experimental.msc3244_enabled:\n response[\"capabilities\"][\"m.room_versions\"][\n \"org.matrix.msc3244.room_capabilities\"\n ] = MSC3244_CAPABILITIES\n\n if self.config.experimental.msc3283_enabled:\n response[\"capabilities\"][\"org.matrix.msc3283.set_displayname\"] = {\n \"enabled\": self.config.registration.enable_set_displayname\n }\n response[\"capabilities\"][\"org.matrix.msc3283.set_avatar_url\"] = {\n \"enabled\": self.config.registration.enable_set_avatar_url\n }\n response[\"capabilities\"][\"org.matrix.msc3283.3pid_changes\"] = {\n \"enabled\": self.config.registration.enable_3pid_changes\n }\n\n if self.config.experimental.msc3440_enabled:\n response[\"capabilities\"][\"io.element.thread\"] = {\"enabled\": True}\n\n return 200, response\n\n\ndef register_servlets(hs: \"HomeServer\", http_server: HttpServer) -> None:\n CapabilitiesRestServlet(hs).register(http_server)\n", "path": "synapse/rest/client/capabilities.py"}]} | 1,131 | 116 |
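The `verification_info` blob shown above is a JSON string whose keys mirror the rest of the row: `golden_diff`, `issue`, `before_files`, and `after_files`, where each file entry is a dict with `content` and `path`. The sketch below unpacks one record; it reuses the assumed repo id and split name from the loading example, and the field handling reflects only what is visible in this preview (for instance, `content` can be null in `after_files` when the patch deletes a file).

```python
import json

from datasets import load_dataset

ds = load_dataset("rasdani/github-patches", split="train")  # names assumed as above
info = json.loads(ds[0]["verification_info"])

print(info["issue"][:120])                  # the GitHub issue text
print(info["golden_diff"].splitlines()[0])  # first line of the reference patch

# before_files / after_files are lists of {"content": ..., "path": ...} dicts;
# in after_files the content may be None when the patch removes the file.
for entry in info["before_files"] + info["after_files"]:
    print(entry["path"], len(entry["content"] or ""))
```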
gh_patches_debug_35477 | rasdani/github-patches | git_diff | sunpy__sunpy-4485 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove sunpy.instr.aia.aiaprep
The `aiaprep` function should be removed from the `sunpy.instr` subpackage. Any AIA specific functionality should transition to the new [`aiapy` package](https://gitlab.com/LMSAL_HUB/aia_hub/aiapy).
This is part of the broader goal of moving instrument-specific functionality out of the core sunpy package and into affiliated packages.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sunpy/instr/aia.py`
Content:
```
1 """
2 This module provides processing routines for data captured with the AIA
3 instrument on SDO.
4 """
5 import numpy as np
6
7 import astropy.units as u
8
9 from sunpy.map.sources.sdo import AIAMap, HMIMap
10 from sunpy.util.decorators import deprecated
11
12 __all__ = ['aiaprep']
13
14
15 @deprecated("2.0", alternative="`register` in aiapy (https://aiapy.readthedocs.io) for converting \
16 AIA images to level 1.5")
17 def aiaprep(aiamap):
18 """
19 Processes a level 1 `~sunpy.map.sources.sdo.AIAMap` into a level 1.5
20 `~sunpy.map.sources.sdo.AIAMap`.
21
22 Rotates, scales and translates the image so that solar North is aligned
23 with the y axis, each pixel is 0.6 arcsec across, and the center of the
24 Sun is at the center of the image. The actual transformation is done by Map's
25 `~sunpy.map.mapbase.GenericMap.rotate` method.
26
27 This function is similar in functionality to ``aia_prep`` in SSWIDL, but
28 it does not use the same transformation to rotate the image and it handles
29 the meta data differently. It should therefore not be expected to produce
30 the same results.
31
32 Parameters
33 ----------
34 aiamap : `~sunpy.map.sources.sdo.AIAMap`
35 A `sunpy.map.Map` from AIA.
36
37 Returns
38 -------
39 `~sunpy.map.sources.sdo.AIAMap`:
40 A level 1.5 copy of `~sunpy.map.sources.sdo.AIAMap`.
41
42 Notes
43 -----
44 This routine modifies the header information to the standard PCi_j WCS
45 formalism. The FITS header resulting in saving a file after this
46 procedure will therefore differ from the original file.
47 """
48
49 if not isinstance(aiamap, (AIAMap, HMIMap)):
50 raise ValueError("Input must be an AIAMap or HMIMap.")
51
52 # Target scale is 0.6 arcsec/pixel, but this needs to be adjusted if the map
53 # has already been rescaled.
54 if ((aiamap.scale[0] / 0.6).round() != 1.0 * u.arcsec / u.pix
55 and aiamap.data.shape != (4096, 4096)):
56 scale = (aiamap.scale[0] / 0.6).round() * 0.6 * u.arcsec
57 else:
58 scale = 0.6 * u.arcsec # pragma: no cover # can't test this because it needs a full res image
59 scale_factor = aiamap.scale[0] / scale
60
61 tempmap = aiamap.rotate(recenter=True, scale=scale_factor.value, missing=aiamap.min())
62
63 # extract center from padded aiamap.rotate output
64 # crpix1 and crpix2 will be equal (recenter=True), as aiaprep does not work with submaps
65 center = np.floor(tempmap.meta['crpix1'])
66 range_side = (center + np.array([-1, 1]) * aiamap.data.shape[0] / 2) * u.pix
67 newmap = tempmap.submap(u.Quantity([range_side[0], range_side[0]]),
68 u.Quantity([range_side[1] - 1 * u.pix,
69 range_side[1] - 1 * u.pix]))
70
71 newmap.meta['r_sun'] = newmap.meta['rsun_obs'] / newmap.meta['cdelt1']
72 newmap.meta['lvl_num'] = 1.5
73 newmap.meta['bitpix'] = -64
74
75 return newmap
76
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sunpy/instr/aia.py b/sunpy/instr/aia.py
deleted file mode 100644
--- a/sunpy/instr/aia.py
+++ /dev/null
@@ -1,75 +0,0 @@
-"""
-This module provides processing routines for data captured with the AIA
-instrument on SDO.
-"""
-import numpy as np
-
-import astropy.units as u
-
-from sunpy.map.sources.sdo import AIAMap, HMIMap
-from sunpy.util.decorators import deprecated
-
-__all__ = ['aiaprep']
-
-
-@deprecated("2.0", alternative="`register` in aiapy (https://aiapy.readthedocs.io) for converting \
-AIA images to level 1.5")
-def aiaprep(aiamap):
- """
- Processes a level 1 `~sunpy.map.sources.sdo.AIAMap` into a level 1.5
- `~sunpy.map.sources.sdo.AIAMap`.
-
- Rotates, scales and translates the image so that solar North is aligned
- with the y axis, each pixel is 0.6 arcsec across, and the center of the
- Sun is at the center of the image. The actual transformation is done by Map's
- `~sunpy.map.mapbase.GenericMap.rotate` method.
-
- This function is similar in functionality to ``aia_prep`` in SSWIDL, but
- it does not use the same transformation to rotate the image and it handles
- the meta data differently. It should therefore not be expected to produce
- the same results.
-
- Parameters
- ----------
- aiamap : `~sunpy.map.sources.sdo.AIAMap`
- A `sunpy.map.Map` from AIA.
-
- Returns
- -------
- `~sunpy.map.sources.sdo.AIAMap`:
- A level 1.5 copy of `~sunpy.map.sources.sdo.AIAMap`.
-
- Notes
- -----
- This routine modifies the header information to the standard PCi_j WCS
- formalism. The FITS header resulting in saving a file after this
- procedure will therefore differ from the original file.
- """
-
- if not isinstance(aiamap, (AIAMap, HMIMap)):
- raise ValueError("Input must be an AIAMap or HMIMap.")
-
- # Target scale is 0.6 arcsec/pixel, but this needs to be adjusted if the map
- # has already been rescaled.
- if ((aiamap.scale[0] / 0.6).round() != 1.0 * u.arcsec / u.pix
- and aiamap.data.shape != (4096, 4096)):
- scale = (aiamap.scale[0] / 0.6).round() * 0.6 * u.arcsec
- else:
- scale = 0.6 * u.arcsec # pragma: no cover # can't test this because it needs a full res image
- scale_factor = aiamap.scale[0] / scale
-
- tempmap = aiamap.rotate(recenter=True, scale=scale_factor.value, missing=aiamap.min())
-
- # extract center from padded aiamap.rotate output
- # crpix1 and crpix2 will be equal (recenter=True), as aiaprep does not work with submaps
- center = np.floor(tempmap.meta['crpix1'])
- range_side = (center + np.array([-1, 1]) * aiamap.data.shape[0] / 2) * u.pix
- newmap = tempmap.submap(u.Quantity([range_side[0], range_side[0]]),
- u.Quantity([range_side[1] - 1 * u.pix,
- range_side[1] - 1 * u.pix]))
-
- newmap.meta['r_sun'] = newmap.meta['rsun_obs'] / newmap.meta['cdelt1']
- newmap.meta['lvl_num'] = 1.5
- newmap.meta['bitpix'] = -64
-
- return newmap
| {"golden_diff": "diff --git a/sunpy/instr/aia.py b/sunpy/instr/aia.py\ndeleted file mode 100644\n--- a/sunpy/instr/aia.py\n+++ /dev/null\n@@ -1,75 +0,0 @@\n-\"\"\"\n-This module provides processing routines for data captured with the AIA\n-instrument on SDO.\n-\"\"\"\n-import numpy as np\n-\n-import astropy.units as u\n-\n-from sunpy.map.sources.sdo import AIAMap, HMIMap\n-from sunpy.util.decorators import deprecated\n-\n-__all__ = ['aiaprep']\n-\n-\n-@deprecated(\"2.0\", alternative=\"`register` in aiapy (https://aiapy.readthedocs.io) for converting \\\n-AIA images to level 1.5\")\n-def aiaprep(aiamap):\n- \"\"\"\n- Processes a level 1 `~sunpy.map.sources.sdo.AIAMap` into a level 1.5\n- `~sunpy.map.sources.sdo.AIAMap`.\n-\n- Rotates, scales and translates the image so that solar North is aligned\n- with the y axis, each pixel is 0.6 arcsec across, and the center of the\n- Sun is at the center of the image. The actual transformation is done by Map's\n- `~sunpy.map.mapbase.GenericMap.rotate` method.\n-\n- This function is similar in functionality to ``aia_prep`` in SSWIDL, but\n- it does not use the same transformation to rotate the image and it handles\n- the meta data differently. It should therefore not be expected to produce\n- the same results.\n-\n- Parameters\n- ----------\n- aiamap : `~sunpy.map.sources.sdo.AIAMap`\n- A `sunpy.map.Map` from AIA.\n-\n- Returns\n- -------\n- `~sunpy.map.sources.sdo.AIAMap`:\n- A level 1.5 copy of `~sunpy.map.sources.sdo.AIAMap`.\n-\n- Notes\n- -----\n- This routine modifies the header information to the standard PCi_j WCS\n- formalism. The FITS header resulting in saving a file after this\n- procedure will therefore differ from the original file.\n- \"\"\"\n-\n- if not isinstance(aiamap, (AIAMap, HMIMap)):\n- raise ValueError(\"Input must be an AIAMap or HMIMap.\")\n-\n- # Target scale is 0.6 arcsec/pixel, but this needs to be adjusted if the map\n- # has already been rescaled.\n- if ((aiamap.scale[0] / 0.6).round() != 1.0 * u.arcsec / u.pix\n- and aiamap.data.shape != (4096, 4096)):\n- scale = (aiamap.scale[0] / 0.6).round() * 0.6 * u.arcsec\n- else:\n- scale = 0.6 * u.arcsec # pragma: no cover # can't test this because it needs a full res image\n- scale_factor = aiamap.scale[0] / scale\n-\n- tempmap = aiamap.rotate(recenter=True, scale=scale_factor.value, missing=aiamap.min())\n-\n- # extract center from padded aiamap.rotate output\n- # crpix1 and crpix2 will be equal (recenter=True), as aiaprep does not work with submaps\n- center = np.floor(tempmap.meta['crpix1'])\n- range_side = (center + np.array([-1, 1]) * aiamap.data.shape[0] / 2) * u.pix\n- newmap = tempmap.submap(u.Quantity([range_side[0], range_side[0]]),\n- u.Quantity([range_side[1] - 1 * u.pix,\n- range_side[1] - 1 * u.pix]))\n-\n- newmap.meta['r_sun'] = newmap.meta['rsun_obs'] / newmap.meta['cdelt1']\n- newmap.meta['lvl_num'] = 1.5\n- newmap.meta['bitpix'] = -64\n-\n- return newmap\n", "issue": "Remove sunpy.instr.aia.aiaprep\nThe `aiaprep` function should be removed from the `sunpy.instr` subpackage. 
Any AIA specific functionality should transition to the new [`aiapy` package](https://gitlab.com/LMSAL_HUB/aia_hub/aiapy).\r\n\r\nThis is part of the broader goal of moving instrument-specific functionality out of the core sunpy package and into affiliated packages.\n", "before_files": [{"content": "\"\"\"\nThis module provides processing routines for data captured with the AIA\ninstrument on SDO.\n\"\"\"\nimport numpy as np\n\nimport astropy.units as u\n\nfrom sunpy.map.sources.sdo import AIAMap, HMIMap\nfrom sunpy.util.decorators import deprecated\n\n__all__ = ['aiaprep']\n\n\n@deprecated(\"2.0\", alternative=\"`register` in aiapy (https://aiapy.readthedocs.io) for converting \\\nAIA images to level 1.5\")\ndef aiaprep(aiamap):\n \"\"\"\n Processes a level 1 `~sunpy.map.sources.sdo.AIAMap` into a level 1.5\n `~sunpy.map.sources.sdo.AIAMap`.\n\n Rotates, scales and translates the image so that solar North is aligned\n with the y axis, each pixel is 0.6 arcsec across, and the center of the\n Sun is at the center of the image. The actual transformation is done by Map's\n `~sunpy.map.mapbase.GenericMap.rotate` method.\n\n This function is similar in functionality to ``aia_prep`` in SSWIDL, but\n it does not use the same transformation to rotate the image and it handles\n the meta data differently. It should therefore not be expected to produce\n the same results.\n\n Parameters\n ----------\n aiamap : `~sunpy.map.sources.sdo.AIAMap`\n A `sunpy.map.Map` from AIA.\n\n Returns\n -------\n `~sunpy.map.sources.sdo.AIAMap`:\n A level 1.5 copy of `~sunpy.map.sources.sdo.AIAMap`.\n\n Notes\n -----\n This routine modifies the header information to the standard PCi_j WCS\n formalism. The FITS header resulting in saving a file after this\n procedure will therefore differ from the original file.\n \"\"\"\n\n if not isinstance(aiamap, (AIAMap, HMIMap)):\n raise ValueError(\"Input must be an AIAMap or HMIMap.\")\n\n # Target scale is 0.6 arcsec/pixel, but this needs to be adjusted if the map\n # has already been rescaled.\n if ((aiamap.scale[0] / 0.6).round() != 1.0 * u.arcsec / u.pix\n and aiamap.data.shape != (4096, 4096)):\n scale = (aiamap.scale[0] / 0.6).round() * 0.6 * u.arcsec\n else:\n scale = 0.6 * u.arcsec # pragma: no cover # can't test this because it needs a full res image\n scale_factor = aiamap.scale[0] / scale\n\n tempmap = aiamap.rotate(recenter=True, scale=scale_factor.value, missing=aiamap.min())\n\n # extract center from padded aiamap.rotate output\n # crpix1 and crpix2 will be equal (recenter=True), as aiaprep does not work with submaps\n center = np.floor(tempmap.meta['crpix1'])\n range_side = (center + np.array([-1, 1]) * aiamap.data.shape[0] / 2) * u.pix\n newmap = tempmap.submap(u.Quantity([range_side[0], range_side[0]]),\n u.Quantity([range_side[1] - 1 * u.pix,\n range_side[1] - 1 * u.pix]))\n\n newmap.meta['r_sun'] = newmap.meta['rsun_obs'] / newmap.meta['cdelt1']\n newmap.meta['lvl_num'] = 1.5\n newmap.meta['bitpix'] = -64\n\n return newmap\n", "path": "sunpy/instr/aia.py"}], "after_files": [{"content": null, "path": "sunpy/instr/aia.py"}]} | 1,323 | 942 |
gh_patches_debug_4218 | rasdani/github-patches | git_diff | biolab__orange3-4252 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Validation: Remove deprecated constructor calls
**Describe the bug**
Our tests (and code?) use deprecated calls, resulting in many of the following lines in output of tests:
`DeprecationWarning: calling Validation's constructor with data and learners is deprecated;`
**To Reproduce**
Run tests or look at travis.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `Orange/ensembles/stack.py`
Content:
```
1 import numpy as np
2
3 from Orange.base import Learner, Model
4 from Orange.modelling import Fitter
5 from Orange.classification import LogisticRegressionLearner
6 from Orange.classification.base_classification import LearnerClassification
7 from Orange.data import Domain, ContinuousVariable, Table
8 from Orange.evaluation import CrossValidation
9 from Orange.regression import RidgeRegressionLearner
10 from Orange.regression.base_regression import LearnerRegression
11
12
13 __all__ = ['StackedLearner', 'StackedClassificationLearner',
14 'StackedRegressionLearner', 'StackedFitter']
15
16
17 class StackedModel(Model):
18 def __init__(self, models, aggregate, use_prob=True, domain=None):
19 super().__init__(domain=domain)
20 self.models = models
21 self.aggregate = aggregate
22 self.use_prob = use_prob
23
24 def predict_storage(self, data):
25 if self.use_prob:
26 probs = [m(data, Model.Probs) for m in self.models]
27 X = np.hstack(probs)
28 else:
29 pred = [m(data) for m in self.models]
30 X = np.column_stack(pred)
31 Y = np.repeat(np.nan, X.shape[0])
32 stacked_data = data.transform(self.aggregate.domain)
33 stacked_data.X = X
34 stacked_data.Y = Y
35 return self.aggregate(
36 stacked_data, Model.ValueProbs if self.use_prob else Model.Value)
37
38
39 class StackedLearner(Learner):
40 """
41 Constructs a stacked model by fitting an aggregator
42 over the results of base models.
43
44 K-fold cross-validation is used to get predictions of the base learners
45 and fit the aggregator to obtain a stacked model.
46
47 Args:
48 learners (list):
49 list of `Learner`s used for base models
50
51 aggregate (Learner):
52 Learner used to fit the meta model, aggregating predictions
53 of base models
54
55 k (int):
56 number of folds for cross-validation
57
58 Returns:
59 instance of StackedModel
60 """
61
62 __returns__ = StackedModel
63
64 def __init__(self, learners, aggregate, k=5, preprocessors=None):
65 super().__init__(preprocessors=preprocessors)
66 self.learners = learners
67 self.aggregate = aggregate
68 self.k = k
69 self.params = vars()
70
71 def fit_storage(self, data):
72 res = CrossValidation(data, self.learners, k=self.k)
73 if data.domain.class_var.is_discrete:
74 X = np.hstack(res.probabilities)
75 use_prob = True
76 else:
77 X = res.predicted.T
78 use_prob = False
79 dom = Domain([ContinuousVariable('f{}'.format(i + 1))
80 for i in range(X.shape[1])],
81 data.domain.class_var)
82 stacked_data = data.transform(dom)
83 stacked_data.X = X
84 stacked_data.Y = res.actual
85 models = [l(data) for l in self.learners]
86 aggregate_model = self.aggregate(stacked_data)
87 return StackedModel(models, aggregate_model, use_prob=use_prob,
88 domain=data.domain)
89
90
91 class StackedClassificationLearner(StackedLearner, LearnerClassification):
92 """
93 Subclass of StackedLearner intended for classification tasks.
94
95 Same as the super class, but has a default
96 classification-specific aggregator (`LogisticRegressionLearner`).
97 """
98
99 def __init__(self, learners, aggregate=LogisticRegressionLearner(), k=5,
100 preprocessors=None):
101 super().__init__(learners, aggregate, k=k, preprocessors=preprocessors)
102
103
104 class StackedRegressionLearner(StackedLearner, LearnerRegression):
105 """
106 Subclass of StackedLearner intended for regression tasks.
107
108 Same as the super class, but has a default
109 regression-specific aggregator (`RidgeRegressionLearner`).
110 """
111 def __init__(self, learners, aggregate=RidgeRegressionLearner(), k=5,
112 preprocessors=None):
113 super().__init__(learners, aggregate, k=k, preprocessors=preprocessors)
114
115
116 class StackedFitter(Fitter):
117 __fits__ = {'classification': StackedClassificationLearner,
118 'regression': StackedRegressionLearner}
119
120 def __init__(self, learners, **kwargs):
121 kwargs['learners'] = learners
122 super().__init__(**kwargs)
123
124
125 if __name__ == '__main__':
126 import Orange
127 iris = Table('iris')
128 knn = Orange.modelling.KNNLearner()
129 tree = Orange.modelling.TreeLearner()
130 sl = StackedFitter([tree, knn])
131 m = sl(iris[::2])
132 print(m(iris[1::2], Model.Value))
133
134 housing = Table('housing')
135 sl = StackedFitter([tree, knn])
136 m = sl(housing[::2])
137 print(list(zip(housing[1:10:2].Y, m(housing[1:10:2], Model.Value))))
138
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/Orange/ensembles/stack.py b/Orange/ensembles/stack.py
--- a/Orange/ensembles/stack.py
+++ b/Orange/ensembles/stack.py
@@ -69,7 +69,8 @@
self.params = vars()
def fit_storage(self, data):
- res = CrossValidation(data, self.learners, k=self.k)
+ cv = CrossValidation(k=self.k)
+ res = cv(data, self.learners)
if data.domain.class_var.is_discrete:
X = np.hstack(res.probabilities)
use_prob = True
| {"golden_diff": "diff --git a/Orange/ensembles/stack.py b/Orange/ensembles/stack.py\n--- a/Orange/ensembles/stack.py\n+++ b/Orange/ensembles/stack.py\n@@ -69,7 +69,8 @@\n self.params = vars()\n \n def fit_storage(self, data):\n- res = CrossValidation(data, self.learners, k=self.k)\n+ cv = CrossValidation(k=self.k)\n+ res = cv(data, self.learners)\n if data.domain.class_var.is_discrete:\n X = np.hstack(res.probabilities)\n use_prob = True\n", "issue": "Validation: Remove deprecated constructor calls\n**Describe the bug**\r\nOur tests (and code?) use deprecated calls, resulting in many of the following lines in output of tests:\r\n`DeprecationWarning: calling Validation's constructor with data and learners is deprecated;`\r\n\r\n**To Reproduce**\r\nRun tests or look at travis.\n", "before_files": [{"content": "import numpy as np\n\nfrom Orange.base import Learner, Model\nfrom Orange.modelling import Fitter\nfrom Orange.classification import LogisticRegressionLearner\nfrom Orange.classification.base_classification import LearnerClassification\nfrom Orange.data import Domain, ContinuousVariable, Table\nfrom Orange.evaluation import CrossValidation\nfrom Orange.regression import RidgeRegressionLearner\nfrom Orange.regression.base_regression import LearnerRegression\n\n\n__all__ = ['StackedLearner', 'StackedClassificationLearner',\n 'StackedRegressionLearner', 'StackedFitter']\n\n\nclass StackedModel(Model):\n def __init__(self, models, aggregate, use_prob=True, domain=None):\n super().__init__(domain=domain)\n self.models = models\n self.aggregate = aggregate\n self.use_prob = use_prob\n\n def predict_storage(self, data):\n if self.use_prob:\n probs = [m(data, Model.Probs) for m in self.models]\n X = np.hstack(probs)\n else:\n pred = [m(data) for m in self.models]\n X = np.column_stack(pred)\n Y = np.repeat(np.nan, X.shape[0])\n stacked_data = data.transform(self.aggregate.domain)\n stacked_data.X = X\n stacked_data.Y = Y\n return self.aggregate(\n stacked_data, Model.ValueProbs if self.use_prob else Model.Value)\n\n\nclass StackedLearner(Learner):\n \"\"\"\n Constructs a stacked model by fitting an aggregator\n over the results of base models.\n\n K-fold cross-validation is used to get predictions of the base learners\n and fit the aggregator to obtain a stacked model.\n\n Args:\n learners (list):\n list of `Learner`s used for base models\n\n aggregate (Learner):\n Learner used to fit the meta model, aggregating predictions\n of base models\n\n k (int):\n number of folds for cross-validation\n\n Returns:\n instance of StackedModel\n \"\"\"\n\n __returns__ = StackedModel\n\n def __init__(self, learners, aggregate, k=5, preprocessors=None):\n super().__init__(preprocessors=preprocessors)\n self.learners = learners\n self.aggregate = aggregate\n self.k = k\n self.params = vars()\n\n def fit_storage(self, data):\n res = CrossValidation(data, self.learners, k=self.k)\n if data.domain.class_var.is_discrete:\n X = np.hstack(res.probabilities)\n use_prob = True\n else:\n X = res.predicted.T\n use_prob = False\n dom = Domain([ContinuousVariable('f{}'.format(i + 1))\n for i in range(X.shape[1])],\n data.domain.class_var)\n stacked_data = data.transform(dom)\n stacked_data.X = X\n stacked_data.Y = res.actual\n models = [l(data) for l in self.learners]\n aggregate_model = self.aggregate(stacked_data)\n return StackedModel(models, aggregate_model, use_prob=use_prob,\n domain=data.domain)\n\n\nclass StackedClassificationLearner(StackedLearner, LearnerClassification):\n \"\"\"\n Subclass of StackedLearner 
intended for classification tasks.\n\n Same as the super class, but has a default\n classification-specific aggregator (`LogisticRegressionLearner`).\n \"\"\"\n\n def __init__(self, learners, aggregate=LogisticRegressionLearner(), k=5,\n preprocessors=None):\n super().__init__(learners, aggregate, k=k, preprocessors=preprocessors)\n\n\nclass StackedRegressionLearner(StackedLearner, LearnerRegression):\n \"\"\"\n Subclass of StackedLearner intended for regression tasks.\n\n Same as the super class, but has a default\n regression-specific aggregator (`RidgeRegressionLearner`).\n \"\"\"\n def __init__(self, learners, aggregate=RidgeRegressionLearner(), k=5,\n preprocessors=None):\n super().__init__(learners, aggregate, k=k, preprocessors=preprocessors)\n\n\nclass StackedFitter(Fitter):\n __fits__ = {'classification': StackedClassificationLearner,\n 'regression': StackedRegressionLearner}\n\n def __init__(self, learners, **kwargs):\n kwargs['learners'] = learners\n super().__init__(**kwargs)\n\n\nif __name__ == '__main__':\n import Orange\n iris = Table('iris')\n knn = Orange.modelling.KNNLearner()\n tree = Orange.modelling.TreeLearner()\n sl = StackedFitter([tree, knn])\n m = sl(iris[::2])\n print(m(iris[1::2], Model.Value))\n\n housing = Table('housing')\n sl = StackedFitter([tree, knn])\n m = sl(housing[::2])\n print(list(zip(housing[1:10:2].Y, m(housing[1:10:2], Model.Value))))\n", "path": "Orange/ensembles/stack.py"}], "after_files": [{"content": "import numpy as np\n\nfrom Orange.base import Learner, Model\nfrom Orange.modelling import Fitter\nfrom Orange.classification import LogisticRegressionLearner\nfrom Orange.classification.base_classification import LearnerClassification\nfrom Orange.data import Domain, ContinuousVariable, Table\nfrom Orange.evaluation import CrossValidation\nfrom Orange.regression import RidgeRegressionLearner\nfrom Orange.regression.base_regression import LearnerRegression\n\n\n__all__ = ['StackedLearner', 'StackedClassificationLearner',\n 'StackedRegressionLearner', 'StackedFitter']\n\n\nclass StackedModel(Model):\n def __init__(self, models, aggregate, use_prob=True, domain=None):\n super().__init__(domain=domain)\n self.models = models\n self.aggregate = aggregate\n self.use_prob = use_prob\n\n def predict_storage(self, data):\n if self.use_prob:\n probs = [m(data, Model.Probs) for m in self.models]\n X = np.hstack(probs)\n else:\n pred = [m(data) for m in self.models]\n X = np.column_stack(pred)\n Y = np.repeat(np.nan, X.shape[0])\n stacked_data = data.transform(self.aggregate.domain)\n stacked_data.X = X\n stacked_data.Y = Y\n return self.aggregate(\n stacked_data, Model.ValueProbs if self.use_prob else Model.Value)\n\n\nclass StackedLearner(Learner):\n \"\"\"\n Constructs a stacked model by fitting an aggregator\n over the results of base models.\n\n K-fold cross-validation is used to get predictions of the base learners\n and fit the aggregator to obtain a stacked model.\n\n Args:\n learners (list):\n list of `Learner`s used for base models\n\n aggregate (Learner):\n Learner used to fit the meta model, aggregating predictions\n of base models\n\n k (int):\n number of folds for cross-validation\n\n Returns:\n instance of StackedModel\n \"\"\"\n\n __returns__ = StackedModel\n\n def __init__(self, learners, aggregate, k=5, preprocessors=None):\n super().__init__(preprocessors=preprocessors)\n self.learners = learners\n self.aggregate = aggregate\n self.k = k\n self.params = vars()\n\n def fit_storage(self, data):\n cv = CrossValidation(k=self.k)\n res = 
cv(data, self.learners)\n if data.domain.class_var.is_discrete:\n X = np.hstack(res.probabilities)\n use_prob = True\n else:\n X = res.predicted.T\n use_prob = False\n dom = Domain([ContinuousVariable('f{}'.format(i + 1))\n for i in range(X.shape[1])],\n data.domain.class_var)\n stacked_data = data.transform(dom)\n stacked_data.X = X\n stacked_data.Y = res.actual\n models = [l(data) for l in self.learners]\n aggregate_model = self.aggregate(stacked_data)\n return StackedModel(models, aggregate_model, use_prob=use_prob,\n domain=data.domain)\n\n\nclass StackedClassificationLearner(StackedLearner, LearnerClassification):\n \"\"\"\n Subclass of StackedLearner intended for classification tasks.\n\n Same as the super class, but has a default\n classification-specific aggregator (`LogisticRegressionLearner`).\n \"\"\"\n\n def __init__(self, learners, aggregate=LogisticRegressionLearner(), k=5,\n preprocessors=None):\n super().__init__(learners, aggregate, k=k, preprocessors=preprocessors)\n\n\nclass StackedRegressionLearner(StackedLearner, LearnerRegression):\n \"\"\"\n Subclass of StackedLearner intended for regression tasks.\n\n Same as the super class, but has a default\n regression-specific aggregator (`RidgeRegressionLearner`).\n \"\"\"\n def __init__(self, learners, aggregate=RidgeRegressionLearner(), k=5,\n preprocessors=None):\n super().__init__(learners, aggregate, k=k, preprocessors=preprocessors)\n\n\nclass StackedFitter(Fitter):\n __fits__ = {'classification': StackedClassificationLearner,\n 'regression': StackedRegressionLearner}\n\n def __init__(self, learners, **kwargs):\n kwargs['learners'] = learners\n super().__init__(**kwargs)\n\n\nif __name__ == '__main__':\n import Orange\n iris = Table('iris')\n knn = Orange.modelling.KNNLearner()\n tree = Orange.modelling.TreeLearner()\n sl = StackedFitter([tree, knn])\n m = sl(iris[::2])\n print(m(iris[1::2], Model.Value))\n\n housing = Table('housing')\n sl = StackedFitter([tree, knn])\n m = sl(housing[::2])\n print(list(zip(housing[1:10:2].Y, m(housing[1:10:2], Model.Value))))\n", "path": "Orange/ensembles/stack.py"}]} | 1,731 | 139 |
gh_patches_debug_26061 | rasdani/github-patches | git_diff | huggingface__transformers-7153 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Convert 12-1 and 6-1 en-de models from AllenNLP
https://github.com/jungokasai/deep-shallow#download-trained-deep-shallow-models
+ These should be FSMT models, so can be part of #6940 or done after.
+ They should be uploaded to the AllenNLP namespace. If stas takes this, they can start in stas/ and I will move them.
+ model card(s) should link to the original repo and paper.
+ I hope same en-de tokenizer already ported.
+ Would be interesting to compare BLEU to the initial models in that PR. There is no ensemble so we should be able to reported scores pretty well.
+ **Ideally** this requires 0 lines of checked in python code, besides maybe an integration test.
Desired Signature:
```python
model = FSMT.from_pretrained('allen_nlp/en-de-12-1')
```
Weights can be downloaded with gdown https://pypi.org/project/gdown/
```bash
pip install gdown
gdown https://drive.google.com/uc?id=1x_G2cjvM1nW5hjAB8-vWxRqtQTlmIaQU
```
@stas00 if you are blocked in the late stages of #6940 and have extra cycles, you could give this a whirl. We could also wait for that to be finalized and then either of us can take this.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/fsmt/gen-card-allenai-wmt19.py`
Content:
```
1 #!/usr/bin/env python
2
3 # Usage:
4 # ./gen-card-allenai-wmt19.py
5
6 import os
7 from pathlib import Path
8
9 def write_model_card(model_card_dir, src_lang, tgt_lang, model_name):
10
11 texts = {
12 "en": "Machine learning is great, isn't it?",
13 "ru": "Машинное обучение - это здорово, не так ли?",
14 "de": "Maschinelles Lernen ist großartig, nicht wahr?",
15 }
16
17 # BLUE scores as follows:
18 # "pair": [fairseq, transformers]
19 scores = {
20 "wmt19-de-en-6-6-base": [0, 38.37],
21 "wmt19-de-en-6-6-big": [0, 39.90],
22 }
23 pair = f"{src_lang}-{tgt_lang}"
24
25 readme = f"""
26 ---
27
28 language: {src_lang}, {tgt_lang}
29 thumbnail:
30 tags:
31 - translation
32 - wmt19
33 - allenai
34 license: Apache 2.0
35 datasets:
36 - http://www.statmt.org/wmt19/ ([test-set](http://matrix.statmt.org/test_sets/newstest2019.tgz?1556572561))
37 metrics:
38 - http://www.statmt.org/wmt19/metrics-task.html
39 ---
40
41 # FSMT
42
43 ## Model description
44
45 This is a ported version of fairseq-based wmt19 transformer created by [jungokasai]](https://github.com/jungokasai/) @ allenai for {src_lang}-{tgt_lang}.
46
47 2 models are available:
48
49 * [wmt19-de-en-6-6-big](https://huggingface.co/allenai/wmt19-de-en-6-6-big)
50 * [wmt19-de-en-6-6-base](https://huggingface.co/allenai/wmt19-de-en-6-6-base)
51
52 ## Intended uses & limitations
53
54 #### How to use
55
56 ```python
57 from transformers.tokenization_fsmt import FSMTTokenizer
58 from transformers.modeling_fsmt import FSMTForConditionalGeneration
59 mname = "allenai/{model_name}"
60 tokenizer = FSMTTokenizer.from_pretrained(mname)
61 model = FSMTForConditionalGeneration.from_pretrained(mname)
62
63 input = "{texts[src_lang]}"
64 input_ids = tokenizer.encode(input, return_tensors="pt")
65 outputs = model.generate(input_ids)
66 decoded = tokenizer.decode(outputs[0], skip_special_tokens=True)
67 print(decoded) # {texts[tgt_lang]}
68
69 ```
70
71 #### Limitations and bias
72
73
74 ## Training data
75
76 Pretrained weights were left identical to the original model released by the researcher.
77
78 ## Eval results
79
80 Here are the BLEU scores:
81
82 model | transformers
83 -------|---------|----------
84 {model_name} | {scores[model_name][1]}
85
86 The score was calculated using this code:
87
88 ```bash
89 git clone https://github.com/huggingface/transformers
90 cd transformers
91 export PAIR={pair}
92 export DATA_DIR=data/$PAIR
93 export SAVE_DIR=data/$PAIR
94 export BS=8
95 export NUM_BEAMS=5
96 mkdir -p $DATA_DIR
97 sacrebleu -t wmt19 -l $PAIR --echo src > $DATA_DIR/val.source
98 sacrebleu -t wmt19 -l $PAIR --echo ref > $DATA_DIR/val.target
99 echo $PAIR
100 PYTHONPATH="src:examples/seq2seq" python examples/seq2seq/run_eval.py allenai/{model_name} $DATA_DIR/val.source $SAVE_DIR/test_translations.txt --reference_path $DATA_DIR/val.target --score_path $SAVE_DIR/test_bleu.json --bs $BS --task translation --num_beams $NUM_BEAMS
101 ```
102
103 """
104 model_card_dir.mkdir(parents=True, exist_ok=True)
105 path = os.path.join(model_card_dir, "README.md")
106 print(f"Generating {path}")
107 with open(path, "w", encoding="utf-8") as f:
108 f.write(readme)
109
110 # make sure we are under the root of the project
111 repo_dir = Path(__file__).resolve().parent.parent.parent
112 model_cards_dir = repo_dir / "model_cards"
113
114 for model_name in ["wmt19-de-en-6-6-base", "wmt19-de-en-6-6-big"]:
115 model_card_dir = model_cards_dir / "allenai" / model_name
116 write_model_card(model_card_dir, src_lang="de", tgt_lang="en", model_name=model_name)
117
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scripts/fsmt/gen-card-allenai-wmt19.py b/scripts/fsmt/gen-card-allenai-wmt19.py
--- a/scripts/fsmt/gen-card-allenai-wmt19.py
+++ b/scripts/fsmt/gen-card-allenai-wmt19.py
@@ -42,13 +42,26 @@
## Model description
-This is a ported version of fairseq-based wmt19 transformer created by [jungokasai]](https://github.com/jungokasai/) @ allenai for {src_lang}-{tgt_lang}.
+This is a ported version of fairseq-based [wmt19 transformer](https://github.com/jungokasai/deep-shallow/) for {src_lang}-{tgt_lang}.
+
+For more details, please, see [Deep Encoder, Shallow Decoder: Reevaluating the Speed-Quality Tradeoff in Machine Translation](https://arxiv.org/abs/2006.10369).
2 models are available:
* [wmt19-de-en-6-6-big](https://huggingface.co/allenai/wmt19-de-en-6-6-big)
* [wmt19-de-en-6-6-base](https://huggingface.co/allenai/wmt19-de-en-6-6-base)
+```
+@misc{{kasai2020deep,
+ title={{Deep Encoder, Shallow Decoder: Reevaluating the Speed-Quality Tradeoff in Machine Translation}},
+ author={{Jungo Kasai and Nikolaos Pappas and Hao Peng and James Cross and Noah A. Smith}},
+ year={{2020}},
+ eprint={{2006.10369}},
+ archivePrefix={{arXiv}},
+ primaryClass={{cs.CL}}
+}}
+```
+
## Intended uses & limitations
#### How to use
@@ -73,7 +86,7 @@
## Training data
-Pretrained weights were left identical to the original model released by the researcher.
+Pretrained weights were left identical to the original model released by allenai. For more details, please, see the [paper](https://arxiv.org/abs/2006.10369).
## Eval results
| {"golden_diff": "diff --git a/scripts/fsmt/gen-card-allenai-wmt19.py b/scripts/fsmt/gen-card-allenai-wmt19.py\n--- a/scripts/fsmt/gen-card-allenai-wmt19.py\n+++ b/scripts/fsmt/gen-card-allenai-wmt19.py\n@@ -42,13 +42,26 @@\n \n ## Model description\n \n-This is a ported version of fairseq-based wmt19 transformer created by [jungokasai]](https://github.com/jungokasai/) @ allenai for {src_lang}-{tgt_lang}.\n+This is a ported version of fairseq-based [wmt19 transformer](https://github.com/jungokasai/deep-shallow/) for {src_lang}-{tgt_lang}.\n+\n+For more details, please, see [Deep Encoder, Shallow Decoder: Reevaluating the Speed-Quality Tradeoff in Machine Translation](https://arxiv.org/abs/2006.10369).\n \n 2 models are available:\n \n * [wmt19-de-en-6-6-big](https://huggingface.co/allenai/wmt19-de-en-6-6-big)\n * [wmt19-de-en-6-6-base](https://huggingface.co/allenai/wmt19-de-en-6-6-base)\n \n+```\n+@misc{{kasai2020deep,\n+ title={{Deep Encoder, Shallow Decoder: Reevaluating the Speed-Quality Tradeoff in Machine Translation}},\n+ author={{Jungo Kasai and Nikolaos Pappas and Hao Peng and James Cross and Noah A. Smith}},\n+ year={{2020}},\n+ eprint={{2006.10369}},\n+ archivePrefix={{arXiv}},\n+ primaryClass={{cs.CL}}\n+}}\n+```\n+\n ## Intended uses & limitations\n \n #### How to use\n@@ -73,7 +86,7 @@\n \n ## Training data\n \n-Pretrained weights were left identical to the original model released by the researcher.\n+Pretrained weights were left identical to the original model released by allenai. For more details, please, see the [paper](https://arxiv.org/abs/2006.10369).\n \n ## Eval results\n", "issue": "Convert 12-1 and 6-1 en-de models from AllenNLP\nhttps://github.com/jungokasai/deep-shallow#download-trained-deep-shallow-models\r\n\r\n+ These should be FSMT models, so can be part of #6940 or done after. \r\n+ They should be uploaded to the AllenNLP namespace. If stas takes this, they can start in stas/ and I will move them.\r\n+ model card(s) should link to the original repo and paper.\r\n+ I hope same en-de tokenizer already ported.\r\n+ Would be interesting to compare BLEU to the initial models in that PR. There is no ensemble so we should be able to reported scores pretty well.\r\n+ **Ideally** this requires 0 lines of checked in python code, besides maybe an integration test.\r\n\r\nDesired Signature:\r\n```python\r\nmodel = FSMT.from_pretrained('allen_nlp/en-de-12-1')\r\n```\r\n\r\nWeights can be downloaded with gdown https://pypi.org/project/gdown/\r\n\r\n```bash\r\npip install gdown\r\ngdown https://drive.google.com/uc?id=1x_G2cjvM1nW5hjAB8-vWxRqtQTlmIaQU\r\n```\r\n\r\n@stas00 if you are blocked in the late stages of #6940 and have extra cycles, you could give this a whirl. 
We could also wait for that to be finalized and then either of us can take this.\n", "before_files": [{"content": "#!/usr/bin/env python\n\n# Usage:\n# ./gen-card-allenai-wmt19.py\n\nimport os\nfrom pathlib import Path\n\ndef write_model_card(model_card_dir, src_lang, tgt_lang, model_name):\n\n texts = {\n \"en\": \"Machine learning is great, isn't it?\",\n \"ru\": \"\u041c\u0430\u0448\u0438\u043d\u043d\u043e\u0435 \u043e\u0431\u0443\u0447\u0435\u043d\u0438\u0435 - \u044d\u0442\u043e \u0437\u0434\u043e\u0440\u043e\u0432\u043e, \u043d\u0435 \u0442\u0430\u043a \u043b\u0438?\",\n \"de\": \"Maschinelles Lernen ist gro\u00dfartig, nicht wahr?\",\n }\n\n # BLUE scores as follows:\n # \"pair\": [fairseq, transformers]\n scores = {\n \"wmt19-de-en-6-6-base\": [0, 38.37],\n \"wmt19-de-en-6-6-big\": [0, 39.90],\n }\n pair = f\"{src_lang}-{tgt_lang}\"\n\n readme = f\"\"\"\n---\n\nlanguage: {src_lang}, {tgt_lang}\nthumbnail:\ntags:\n- translation\n- wmt19\n- allenai\nlicense: Apache 2.0\ndatasets:\n- http://www.statmt.org/wmt19/ ([test-set](http://matrix.statmt.org/test_sets/newstest2019.tgz?1556572561))\nmetrics:\n- http://www.statmt.org/wmt19/metrics-task.html\n---\n\n# FSMT\n\n## Model description\n\nThis is a ported version of fairseq-based wmt19 transformer created by [jungokasai]](https://github.com/jungokasai/) @ allenai for {src_lang}-{tgt_lang}.\n\n2 models are available:\n\n* [wmt19-de-en-6-6-big](https://huggingface.co/allenai/wmt19-de-en-6-6-big)\n* [wmt19-de-en-6-6-base](https://huggingface.co/allenai/wmt19-de-en-6-6-base)\n\n## Intended uses & limitations\n\n#### How to use\n\n```python\nfrom transformers.tokenization_fsmt import FSMTTokenizer\nfrom transformers.modeling_fsmt import FSMTForConditionalGeneration\nmname = \"allenai/{model_name}\"\ntokenizer = FSMTTokenizer.from_pretrained(mname)\nmodel = FSMTForConditionalGeneration.from_pretrained(mname)\n\ninput = \"{texts[src_lang]}\"\ninput_ids = tokenizer.encode(input, return_tensors=\"pt\")\noutputs = model.generate(input_ids)\ndecoded = tokenizer.decode(outputs[0], skip_special_tokens=True)\nprint(decoded) # {texts[tgt_lang]}\n\n```\n\n#### Limitations and bias\n\n\n## Training data\n\nPretrained weights were left identical to the original model released by the researcher.\n\n## Eval results\n\nHere are the BLEU scores:\n\nmodel | transformers\n-------|---------|----------\n{model_name} | {scores[model_name][1]}\n\nThe score was calculated using this code:\n\n```bash\ngit clone https://github.com/huggingface/transformers\ncd transformers\nexport PAIR={pair}\nexport DATA_DIR=data/$PAIR\nexport SAVE_DIR=data/$PAIR\nexport BS=8\nexport NUM_BEAMS=5\nmkdir -p $DATA_DIR\nsacrebleu -t wmt19 -l $PAIR --echo src > $DATA_DIR/val.source\nsacrebleu -t wmt19 -l $PAIR --echo ref > $DATA_DIR/val.target\necho $PAIR\nPYTHONPATH=\"src:examples/seq2seq\" python examples/seq2seq/run_eval.py allenai/{model_name} $DATA_DIR/val.source $SAVE_DIR/test_translations.txt --reference_path $DATA_DIR/val.target --score_path $SAVE_DIR/test_bleu.json --bs $BS --task translation --num_beams $NUM_BEAMS\n```\n\n\"\"\"\n model_card_dir.mkdir(parents=True, exist_ok=True)\n path = os.path.join(model_card_dir, \"README.md\")\n print(f\"Generating {path}\")\n with open(path, \"w\", encoding=\"utf-8\") as f:\n f.write(readme)\n\n# make sure we are under the root of the project\nrepo_dir = Path(__file__).resolve().parent.parent.parent\nmodel_cards_dir = repo_dir / \"model_cards\"\n\nfor model_name in [\"wmt19-de-en-6-6-base\", \"wmt19-de-en-6-6-big\"]:\n model_card_dir = 
model_cards_dir / \"allenai\" / model_name\n write_model_card(model_card_dir, src_lang=\"de\", tgt_lang=\"en\", model_name=model_name)\n", "path": "scripts/fsmt/gen-card-allenai-wmt19.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\n# Usage:\n# ./gen-card-allenai-wmt19.py\n\nimport os\nfrom pathlib import Path\n\ndef write_model_card(model_card_dir, src_lang, tgt_lang, model_name):\n\n texts = {\n \"en\": \"Machine learning is great, isn't it?\",\n \"ru\": \"\u041c\u0430\u0448\u0438\u043d\u043d\u043e\u0435 \u043e\u0431\u0443\u0447\u0435\u043d\u0438\u0435 - \u044d\u0442\u043e \u0437\u0434\u043e\u0440\u043e\u0432\u043e, \u043d\u0435 \u0442\u0430\u043a \u043b\u0438?\",\n \"de\": \"Maschinelles Lernen ist gro\u00dfartig, nicht wahr?\",\n }\n\n # BLUE scores as follows:\n # \"pair\": [fairseq, transformers]\n scores = {\n \"wmt19-de-en-6-6-base\": [0, 38.37],\n \"wmt19-de-en-6-6-big\": [0, 39.90],\n }\n pair = f\"{src_lang}-{tgt_lang}\"\n\n readme = f\"\"\"\n---\n\nlanguage: {src_lang}, {tgt_lang}\nthumbnail:\ntags:\n- translation\n- wmt19\n- allenai\nlicense: Apache 2.0\ndatasets:\n- http://www.statmt.org/wmt19/ ([test-set](http://matrix.statmt.org/test_sets/newstest2019.tgz?1556572561))\nmetrics:\n- http://www.statmt.org/wmt19/metrics-task.html\n---\n\n# FSMT\n\n## Model description\n\nThis is a ported version of fairseq-based [wmt19 transformer](https://github.com/jungokasai/deep-shallow/) for {src_lang}-{tgt_lang}.\n\nFor more details, please, see [Deep Encoder, Shallow Decoder: Reevaluating the Speed-Quality Tradeoff in Machine Translation](https://arxiv.org/abs/2006.10369).\n\n2 models are available:\n\n* [wmt19-de-en-6-6-big](https://huggingface.co/allenai/wmt19-de-en-6-6-big)\n* [wmt19-de-en-6-6-base](https://huggingface.co/allenai/wmt19-de-en-6-6-base)\n\n```\n@misc{{kasai2020deep,\n title={{Deep Encoder, Shallow Decoder: Reevaluating the Speed-Quality Tradeoff in Machine Translation}},\n author={{Jungo Kasai and Nikolaos Pappas and Hao Peng and James Cross and Noah A. Smith}},\n year={{2020}},\n eprint={{2006.10369}},\n archivePrefix={{arXiv}},\n primaryClass={{cs.CL}}\n}}\n```\n\n## Intended uses & limitations\n\n#### How to use\n\n```python\nfrom transformers.tokenization_fsmt import FSMTTokenizer\nfrom transformers.modeling_fsmt import FSMTForConditionalGeneration\nmname = \"allenai/{model_name}\"\ntokenizer = FSMTTokenizer.from_pretrained(mname)\nmodel = FSMTForConditionalGeneration.from_pretrained(mname)\n\ninput = \"{texts[src_lang]}\"\ninput_ids = tokenizer.encode(input, return_tensors=\"pt\")\noutputs = model.generate(input_ids)\ndecoded = tokenizer.decode(outputs[0], skip_special_tokens=True)\nprint(decoded) # {texts[tgt_lang]}\n\n```\n\n#### Limitations and bias\n\n\n## Training data\n\nPretrained weights were left identical to the original model released by allenai. 
For more details, please, see the [paper](https://arxiv.org/abs/2006.10369).\n\n## Eval results\n\nHere are the BLEU scores:\n\nmodel | transformers\n-------|---------|----------\n{model_name} | {scores[model_name][1]}\n\nThe score was calculated using this code:\n\n```bash\ngit clone https://github.com/huggingface/transformers\ncd transformers\nexport PAIR={pair}\nexport DATA_DIR=data/$PAIR\nexport SAVE_DIR=data/$PAIR\nexport BS=8\nexport NUM_BEAMS=5\nmkdir -p $DATA_DIR\nsacrebleu -t wmt19 -l $PAIR --echo src > $DATA_DIR/val.source\nsacrebleu -t wmt19 -l $PAIR --echo ref > $DATA_DIR/val.target\necho $PAIR\nPYTHONPATH=\"src:examples/seq2seq\" python examples/seq2seq/run_eval.py allenai/{model_name} $DATA_DIR/val.source $SAVE_DIR/test_translations.txt --reference_path $DATA_DIR/val.target --score_path $SAVE_DIR/test_bleu.json --bs $BS --task translation --num_beams $NUM_BEAMS\n```\n\n\"\"\"\n model_card_dir.mkdir(parents=True, exist_ok=True)\n path = os.path.join(model_card_dir, \"README.md\")\n print(f\"Generating {path}\")\n with open(path, \"w\", encoding=\"utf-8\") as f:\n f.write(readme)\n\n# make sure we are under the root of the project\nrepo_dir = Path(__file__).resolve().parent.parent.parent\nmodel_cards_dir = repo_dir / \"model_cards\"\n\nfor model_name in [\"wmt19-de-en-6-6-base\", \"wmt19-de-en-6-6-big\"]:\n model_card_dir = model_cards_dir / \"allenai\" / model_name\n write_model_card(model_card_dir, src_lang=\"de\", tgt_lang=\"en\", model_name=model_name)\n", "path": "scripts/fsmt/gen-card-allenai-wmt19.py"}]} | 1,826 | 508 |
gh_patches_debug_4677 | rasdani/github-patches | git_diff | svthalia__concrexit-2011 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Use API v2 for pizza orders
### Is your feature request related to a problem? Please describe.
For some reason (#1931) the payment widget of the order overview page is broken.
### Describe the solution you'd like
Instead of debugging the existing code, I prefer switching to API v2 immediately.
### Motivation
Better
### Describe alternatives you've considered
Debugging
### Additional context
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/pizzas/api/v2/admin/urls.py`
Content:
```
1 """Events app API v2 urls."""
2 from django.urls import path
3
4 from events.api.v2.admin.views import EventRegistrationAdminFieldsView
5 from pizzas.api.v2.admin.views import (
6 FoodEventAdminListView,
7 FoodEventAdminDetailView,
8 FoodOrderAdminDetailView,
9 FoodEventOrdersAdminListView,
10 ProductsAdminListView,
11 ProductAdminDetailAPIView,
12 )
13
14 app_name = "food"
15
16 urlpatterns = [
17 path("food/events/", FoodEventAdminListView.as_view(), name="food-events-index"),
18 path(
19 "food/events/<int:pk>/",
20 FoodEventAdminDetailView.as_view(),
21 name="food-event-detail",
22 ),
23 path(
24 "food/events/<int:pk>/orders/",
25 FoodEventOrdersAdminListView.as_view(),
26 name="food-event-orders",
27 ),
28 path(
29 "food/events/<int:event_id>/orders/<int:pk>/",
30 FoodOrderAdminDetailView.as_view(),
31 name="event-registration-detail",
32 ),
33 path("food/products/", ProductsAdminListView.as_view(), name="food-products-index"),
34 path(
35 "food/products/<int:pk>/",
36 ProductAdminDetailAPIView.as_view(),
37 name="food-product-detail",
38 ),
39 ]
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/website/pizzas/api/v2/admin/urls.py b/website/pizzas/api/v2/admin/urls.py
--- a/website/pizzas/api/v2/admin/urls.py
+++ b/website/pizzas/api/v2/admin/urls.py
@@ -28,7 +28,7 @@
path(
"food/events/<int:event_id>/orders/<int:pk>/",
FoodOrderAdminDetailView.as_view(),
- name="event-registration-detail",
+ name="food-event-order-detail",
),
path("food/products/", ProductsAdminListView.as_view(), name="food-products-index"),
path(
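The patch above only renames the route; nothing about the view changes. As a rough illustration of what the new, self-describing name buys, here is a minimal single-file Django sketch. The view, the flat URLconf and the missing API namespacing are stand-ins for illustration, not concrexit's actual code:
```python
# Hypothetical single-file sketch: reversing the renamed route by name.
from django.conf import settings

settings.configure(ROOT_URLCONF=__name__, DEBUG=True)

from django.http import HttpResponse
from django.urls import path, reverse


def food_order_detail(request, event_id, pk):
    # Stand-in view; the real project uses FoodOrderAdminDetailView.
    return HttpResponse(f"order {pk} for event {event_id}")


urlpatterns = [
    path(
        "food/events/<int:event_id>/orders/<int:pk>/",
        food_order_detail,
        name="food-event-order-detail",  # the corrected, descriptive name
    ),
]

# Reversing by name keeps clients and tests independent of the literal URL.
print(reverse("food-event-order-detail", kwargs={"event_id": 1, "pk": 2}))
# -> /food/events/1/orders/2/
```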
| {"golden_diff": "diff --git a/website/pizzas/api/v2/admin/urls.py b/website/pizzas/api/v2/admin/urls.py\n--- a/website/pizzas/api/v2/admin/urls.py\n+++ b/website/pizzas/api/v2/admin/urls.py\n@@ -28,7 +28,7 @@\n path(\n \"food/events/<int:event_id>/orders/<int:pk>/\",\n FoodOrderAdminDetailView.as_view(),\n- name=\"event-registration-detail\",\n+ name=\"food-event-order-detail\",\n ),\n path(\"food/products/\", ProductsAdminListView.as_view(), name=\"food-products-index\"),\n path(\n", "issue": "Use API v2 for pizza orders\n### Is your feature request related to a problem? Please describe.\r\nFor some reason (#1931) the payment widget of the order overview page is broken. \r\n\r\n### Describe the solution you'd like\r\nInstead of debugging the existing code, I prefer switching API v2 immedaitely.\r\n\r\n### Motivation\r\nBetter\r\n\r\n### Describe alternatives you've considered\r\nDebugging \r\n\r\n### Additional context\r\n\n", "before_files": [{"content": "\"\"\"Events app API v2 urls.\"\"\"\nfrom django.urls import path\n\nfrom events.api.v2.admin.views import EventRegistrationAdminFieldsView\nfrom pizzas.api.v2.admin.views import (\n FoodEventAdminListView,\n FoodEventAdminDetailView,\n FoodOrderAdminDetailView,\n FoodEventOrdersAdminListView,\n ProductsAdminListView,\n ProductAdminDetailAPIView,\n)\n\napp_name = \"food\"\n\nurlpatterns = [\n path(\"food/events/\", FoodEventAdminListView.as_view(), name=\"food-events-index\"),\n path(\n \"food/events/<int:pk>/\",\n FoodEventAdminDetailView.as_view(),\n name=\"food-event-detail\",\n ),\n path(\n \"food/events/<int:pk>/orders/\",\n FoodEventOrdersAdminListView.as_view(),\n name=\"food-event-orders\",\n ),\n path(\n \"food/events/<int:event_id>/orders/<int:pk>/\",\n FoodOrderAdminDetailView.as_view(),\n name=\"event-registration-detail\",\n ),\n path(\"food/products/\", ProductsAdminListView.as_view(), name=\"food-products-index\"),\n path(\n \"food/products/<int:pk>/\",\n ProductAdminDetailAPIView.as_view(),\n name=\"food-product-detail\",\n ),\n]\n", "path": "website/pizzas/api/v2/admin/urls.py"}], "after_files": [{"content": "\"\"\"Events app API v2 urls.\"\"\"\nfrom django.urls import path\n\nfrom events.api.v2.admin.views import EventRegistrationAdminFieldsView\nfrom pizzas.api.v2.admin.views import (\n FoodEventAdminListView,\n FoodEventAdminDetailView,\n FoodOrderAdminDetailView,\n FoodEventOrdersAdminListView,\n ProductsAdminListView,\n ProductAdminDetailAPIView,\n)\n\napp_name = \"food\"\n\nurlpatterns = [\n path(\"food/events/\", FoodEventAdminListView.as_view(), name=\"food-events-index\"),\n path(\n \"food/events/<int:pk>/\",\n FoodEventAdminDetailView.as_view(),\n name=\"food-event-detail\",\n ),\n path(\n \"food/events/<int:pk>/orders/\",\n FoodEventOrdersAdminListView.as_view(),\n name=\"food-event-orders\",\n ),\n path(\n \"food/events/<int:event_id>/orders/<int:pk>/\",\n FoodOrderAdminDetailView.as_view(),\n name=\"food-event-order-detail\",\n ),\n path(\"food/products/\", ProductsAdminListView.as_view(), name=\"food-products-index\"),\n path(\n \"food/products/<int:pk>/\",\n ProductAdminDetailAPIView.as_view(),\n name=\"food-product-detail\",\n ),\n]\n", "path": "website/pizzas/api/v2/admin/urls.py"}]} | 672 | 134 |
gh_patches_debug_39976 | rasdani/github-patches | git_diff | carpentries__amy-2250 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cannot view upcoming teaching opportunities
Hi @maneesha
Could you please help me with this?
The blue button at https://amy.carpentries.org/dashboard/instructor/
as shown in the screenshot below

leads to "Server Error (500)" at https://amy.carpentries.org/dashboard/instructor/teaching_opportunities/
as shown in the screenshot below

Tried a few times at different hours.
Thank you in advance.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `amy/recruitment/templatetags/instructorrecruitment.py`
Content:
```
1 from datetime import timedelta
2 from typing import Sequence, Union
3
4 from django import template
5 from django.conf import settings
6
7 from recruitment.models import (
8 InstructorRecruitment,
9 InstructorRecruitmentSignup,
10 RecruitmentPriority,
11 )
12 from workshops.models import Event
13
14 register = template.Library()
15
16
17 @register.simple_tag
18 def is_instructor_recruitment_enabled() -> bool:
19 try:
20 return bool(settings.INSTRUCTOR_RECRUITMENT_ENABLED)
21 except AttributeError:
22 return False
23
24
25 @register.simple_tag
26 def get_event_conflicts(events: Sequence[Event], event: Event) -> list[Event]:
27 conflicts: list[Event] = []
28
29 for event_to_check in events:
30 if event == event_to_check:
31 continue
32
33 if event.start <= event_to_check.end and event.end >= event_to_check.start:
34 conflicts.append(event_to_check)
35
36 return conflicts
37
38
39 @register.simple_tag
40 def get_events_nearby(
41 events: Sequence[Event], event: Event, days_before: int = 14, days_after: int = 14
42 ) -> list[Event]:
43 nearby: list[Event] = []
44
45 for event_to_check in events:
46 if event == event_to_check:
47 continue
48
49 if (
50 event.start - timedelta(days=days_before) <= event_to_check.end
51 and event.end + timedelta(days=days_after) >= event_to_check.start
52 ):
53 nearby.append(event_to_check)
54
55 return nearby
56
57
58 @register.simple_tag
59 def get_signup_conflicts(
60 signups: Sequence[InstructorRecruitmentSignup], recruitment: InstructorRecruitment
61 ) -> list[InstructorRecruitmentSignup]:
62 conflicts: list[InstructorRecruitmentSignup] = []
63
64 for signup_to_check in signups:
65 if recruitment == signup_to_check.recruitment:
66 continue
67
68 if (
69 recruitment.event.start <= signup_to_check.recruitment.event.end
70 and recruitment.event.end >= signup_to_check.recruitment.event.start
71 ):
72 conflicts.append(signup_to_check)
73
74 return conflicts
75
76
77 @register.filter
78 def priority_label(value: Union[int, RecruitmentPriority]) -> str:
79 return RecruitmentPriority(value).label
80
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/amy/recruitment/templatetags/instructorrecruitment.py b/amy/recruitment/templatetags/instructorrecruitment.py
--- a/amy/recruitment/templatetags/instructorrecruitment.py
+++ b/amy/recruitment/templatetags/instructorrecruitment.py
@@ -23,13 +23,21 @@
@register.simple_tag
-def get_event_conflicts(events: Sequence[Event], event: Event) -> list[Event]:
+def get_event_conflicts(events_to_check: Sequence[Event], event: Event) -> list[Event]:
conflicts: list[Event] = []
- for event_to_check in events:
+ # event must have start and end dates, otherwise we can't get conflicts
+ if not (event.start and event.end):
+ return conflicts
+
+ for event_to_check in events_to_check:
if event == event_to_check:
continue
+ # event getting checked must have start and end dates
+ if not (event_to_check.start and event_to_check.end):
+ continue
+
if event.start <= event_to_check.end and event.end >= event_to_check.start:
conflicts.append(event_to_check)
@@ -38,14 +46,26 @@
@register.simple_tag
def get_events_nearby(
- events: Sequence[Event], event: Event, days_before: int = 14, days_after: int = 14
+ events_to_check: Sequence[Event],
+ event: Event,
+ days_before: int = 14,
+ days_after: int = 14,
) -> list[Event]:
+ """Get events nearby another event time-wise."""
nearby: list[Event] = []
- for event_to_check in events:
+ # event must have start and end dates, otherwise we can't get nearby events
+ if not (event.start and event.end):
+ return nearby
+
+ for event_to_check in events_to_check:
if event == event_to_check:
continue
+ # event getting checked must have start and end dates
+ if not (event_to_check.start and event_to_check.end):
+ continue
+
if (
event.start - timedelta(days=days_before) <= event_to_check.end
and event.end + timedelta(days=days_after) >= event_to_check.start
@@ -57,14 +77,26 @@
@register.simple_tag
def get_signup_conflicts(
- signups: Sequence[InstructorRecruitmentSignup], recruitment: InstructorRecruitment
+ signups_to_check: Sequence[InstructorRecruitmentSignup],
+ recruitment: InstructorRecruitment,
) -> list[InstructorRecruitmentSignup]:
conflicts: list[InstructorRecruitmentSignup] = []
- for signup_to_check in signups:
+ # recruitment event must have start and end dates, otherwise we can't get conflicts
+ if not (recruitment.event.start and recruitment.event.end):
+ return conflicts
+
+ for signup_to_check in signups_to_check:
if recruitment == signup_to_check.recruitment:
continue
+ # event getting checked must have start and end dates
+ if not (
+ signup_to_check.recruitment.event.start
+ and signup_to_check.recruitment.event.end
+ ):
+ continue
+
if (
recruitment.event.start <= signup_to_check.recruitment.event.end
and recruitment.event.end >= signup_to_check.recruitment.event.start
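The core of the patch is the early-return guard for events that lack a start or end date; previously the `None <= date` comparison raised a `TypeError` and produced the 500. A minimal, self-contained sketch of that guard pattern, using a plain dataclass stand-in rather than AMY's Django models, might look like this:
```python
# Sketch of the guard pattern; `Event` here is a stand-in dataclass, not AMY's model.
from dataclasses import dataclass
from datetime import date
from typing import List, Optional, Sequence


@dataclass
class Event:
    slug: str
    start: Optional[date]
    end: Optional[date]


def get_event_conflicts(events_to_check: Sequence[Event], event: Event) -> List[Event]:
    conflicts: List[Event] = []
    # Without both dates there is nothing to compare, so bail out early instead
    # of letting `None <= date` raise (the crash behind the 500 in the issue).
    if not (event.start and event.end):
        return conflicts
    for other in events_to_check:
        if other is event:
            continue
        if not (other.start and other.end):
            continue
        if event.start <= other.end and event.end >= other.start:
            conflicts.append(other)
    return conflicts


events = [
    Event("a", date(2022, 5, 1), date(2022, 5, 3)),
    Event("b", date(2022, 5, 2), date(2022, 5, 4)),
    Event("c", None, None),  # an entry like this previously triggered the crash
]
print([e.slug for e in get_event_conflicts(events, events[0])])  # ['b']
```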
| {"golden_diff": "diff --git a/amy/recruitment/templatetags/instructorrecruitment.py b/amy/recruitment/templatetags/instructorrecruitment.py\n--- a/amy/recruitment/templatetags/instructorrecruitment.py\n+++ b/amy/recruitment/templatetags/instructorrecruitment.py\n@@ -23,13 +23,21 @@\n \n \n @register.simple_tag\n-def get_event_conflicts(events: Sequence[Event], event: Event) -> list[Event]:\n+def get_event_conflicts(events_to_check: Sequence[Event], event: Event) -> list[Event]:\n conflicts: list[Event] = []\n \n- for event_to_check in events:\n+ # event must have start and end dates, otherwise we can't get conflicts\n+ if not (event.start and event.end):\n+ return conflicts\n+\n+ for event_to_check in events_to_check:\n if event == event_to_check:\n continue\n \n+ # event getting checked must have start and end dates\n+ if not (event_to_check.start and event_to_check.end):\n+ continue\n+\n if event.start <= event_to_check.end and event.end >= event_to_check.start:\n conflicts.append(event_to_check)\n \n@@ -38,14 +46,26 @@\n \n @register.simple_tag\n def get_events_nearby(\n- events: Sequence[Event], event: Event, days_before: int = 14, days_after: int = 14\n+ events_to_check: Sequence[Event],\n+ event: Event,\n+ days_before: int = 14,\n+ days_after: int = 14,\n ) -> list[Event]:\n+ \"\"\"Get events nearby another event time-wise.\"\"\"\n nearby: list[Event] = []\n \n- for event_to_check in events:\n+ # event must have start and end dates, otherwise we can't get nearby events\n+ if not (event.start and event.end):\n+ return nearby\n+\n+ for event_to_check in events_to_check:\n if event == event_to_check:\n continue\n \n+ # event getting checked must have start and end dates\n+ if not (event_to_check.start and event_to_check.end):\n+ continue\n+\n if (\n event.start - timedelta(days=days_before) <= event_to_check.end\n and event.end + timedelta(days=days_after) >= event_to_check.start\n@@ -57,14 +77,26 @@\n \n @register.simple_tag\n def get_signup_conflicts(\n- signups: Sequence[InstructorRecruitmentSignup], recruitment: InstructorRecruitment\n+ signups_to_check: Sequence[InstructorRecruitmentSignup],\n+ recruitment: InstructorRecruitment,\n ) -> list[InstructorRecruitmentSignup]:\n conflicts: list[InstructorRecruitmentSignup] = []\n \n- for signup_to_check in signups:\n+ # recruitment event must have start and end dates, otherwise we can't get conflicts\n+ if not (recruitment.event.start and recruitment.event.end):\n+ return conflicts\n+\n+ for signup_to_check in signups_to_check:\n if recruitment == signup_to_check.recruitment:\n continue\n \n+ # event getting checked must have start and end dates\n+ if not (\n+ signup_to_check.recruitment.event.start\n+ and signup_to_check.recruitment.event.end\n+ ):\n+ continue\n+\n if (\n recruitment.event.start <= signup_to_check.recruitment.event.end\n and recruitment.event.end >= signup_to_check.recruitment.event.start\n", "issue": "Cannot view upcoming teaching opportunities\nHi @maneesha \r\nCould you please help me with this?\r\n\r\nThe blue button at https://amy.carpentries.org/dashboard/instructor/\r\nas shown in the screenshot below\r\n\r\n\r\nleads to \"Server Error (500)\" at https://amy.carpentries.org/dashboard/instructor/teaching_opportunities/\r\nas shown in the screenshot below\r\n\r\n\r\nTried a few times at different hours.\r\n\r\nThank you in advance.\n", "before_files": [{"content": "from datetime import timedelta\nfrom typing import Sequence, Union\n\nfrom django import template\nfrom django.conf import settings\n\nfrom 
recruitment.models import (\n InstructorRecruitment,\n InstructorRecruitmentSignup,\n RecruitmentPriority,\n)\nfrom workshops.models import Event\n\nregister = template.Library()\n\n\[email protected]_tag\ndef is_instructor_recruitment_enabled() -> bool:\n try:\n return bool(settings.INSTRUCTOR_RECRUITMENT_ENABLED)\n except AttributeError:\n return False\n\n\[email protected]_tag\ndef get_event_conflicts(events: Sequence[Event], event: Event) -> list[Event]:\n conflicts: list[Event] = []\n\n for event_to_check in events:\n if event == event_to_check:\n continue\n\n if event.start <= event_to_check.end and event.end >= event_to_check.start:\n conflicts.append(event_to_check)\n\n return conflicts\n\n\[email protected]_tag\ndef get_events_nearby(\n events: Sequence[Event], event: Event, days_before: int = 14, days_after: int = 14\n) -> list[Event]:\n nearby: list[Event] = []\n\n for event_to_check in events:\n if event == event_to_check:\n continue\n\n if (\n event.start - timedelta(days=days_before) <= event_to_check.end\n and event.end + timedelta(days=days_after) >= event_to_check.start\n ):\n nearby.append(event_to_check)\n\n return nearby\n\n\[email protected]_tag\ndef get_signup_conflicts(\n signups: Sequence[InstructorRecruitmentSignup], recruitment: InstructorRecruitment\n) -> list[InstructorRecruitmentSignup]:\n conflicts: list[InstructorRecruitmentSignup] = []\n\n for signup_to_check in signups:\n if recruitment == signup_to_check.recruitment:\n continue\n\n if (\n recruitment.event.start <= signup_to_check.recruitment.event.end\n and recruitment.event.end >= signup_to_check.recruitment.event.start\n ):\n conflicts.append(signup_to_check)\n\n return conflicts\n\n\[email protected]\ndef priority_label(value: Union[int, RecruitmentPriority]) -> str:\n return RecruitmentPriority(value).label\n", "path": "amy/recruitment/templatetags/instructorrecruitment.py"}], "after_files": [{"content": "from datetime import timedelta\nfrom typing import Sequence, Union\n\nfrom django import template\nfrom django.conf import settings\n\nfrom recruitment.models import (\n InstructorRecruitment,\n InstructorRecruitmentSignup,\n RecruitmentPriority,\n)\nfrom workshops.models import Event\n\nregister = template.Library()\n\n\[email protected]_tag\ndef is_instructor_recruitment_enabled() -> bool:\n try:\n return bool(settings.INSTRUCTOR_RECRUITMENT_ENABLED)\n except AttributeError:\n return False\n\n\[email protected]_tag\ndef get_event_conflicts(events_to_check: Sequence[Event], event: Event) -> list[Event]:\n conflicts: list[Event] = []\n\n # event must have start and end dates, otherwise we can't get conflicts\n if not (event.start and event.end):\n return conflicts\n\n for event_to_check in events_to_check:\n if event == event_to_check:\n continue\n\n # event getting checked must have start and end dates\n if not (event_to_check.start and event_to_check.end):\n continue\n\n if event.start <= event_to_check.end and event.end >= event_to_check.start:\n conflicts.append(event_to_check)\n\n return conflicts\n\n\[email protected]_tag\ndef get_events_nearby(\n events_to_check: Sequence[Event],\n event: Event,\n days_before: int = 14,\n days_after: int = 14,\n) -> list[Event]:\n \"\"\"Get events nearby another event time-wise.\"\"\"\n nearby: list[Event] = []\n\n # event must have start and end dates, otherwise we can't get nearby events\n if not (event.start and event.end):\n return nearby\n\n for event_to_check in events_to_check:\n if event == event_to_check:\n continue\n\n # event getting checked must 
have start and end dates\n if not (event_to_check.start and event_to_check.end):\n continue\n\n if (\n event.start - timedelta(days=days_before) <= event_to_check.end\n and event.end + timedelta(days=days_after) >= event_to_check.start\n ):\n nearby.append(event_to_check)\n\n return nearby\n\n\[email protected]_tag\ndef get_signup_conflicts(\n signups_to_check: Sequence[InstructorRecruitmentSignup],\n recruitment: InstructorRecruitment,\n) -> list[InstructorRecruitmentSignup]:\n conflicts: list[InstructorRecruitmentSignup] = []\n\n # recruitment event must have start and end dates, otherwise we can't get conflicts\n if not (recruitment.event.start and recruitment.event.end):\n return conflicts\n\n for signup_to_check in signups_to_check:\n if recruitment == signup_to_check.recruitment:\n continue\n\n # event getting checked must have start and end dates\n if not (\n signup_to_check.recruitment.event.start\n and signup_to_check.recruitment.event.end\n ):\n continue\n\n if (\n recruitment.event.start <= signup_to_check.recruitment.event.end\n and recruitment.event.end >= signup_to_check.recruitment.event.start\n ):\n conflicts.append(signup_to_check)\n\n return conflicts\n\n\[email protected]\ndef priority_label(value: Union[int, RecruitmentPriority]) -> str:\n return RecruitmentPriority(value).label\n", "path": "amy/recruitment/templatetags/instructorrecruitment.py"}]} | 1,099 | 766 |
gh_patches_debug_10303 | rasdani/github-patches | git_diff | jupyterhub__jupyterhub-41 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Invalid argument: '--cookie-name=jupyter-hub-token-MYUSERNAME'
I can't run cells in Python notebooks; I get a popup saying `The kernel appears to have died`.

Here's the log from running JupyterHub from the master branch:
```
[IPKernelApp] CRITICAL | Bad config encountered during initialization:
[IPKernelApp] CRITICAL | Invalid argument: '--cookie-name=jupyter-hub-token-ben'
2014-09-14 20:53:59.865 [SingleUserNotebookApp] WARNING | KernelRestarter: restart failed
2014-09-14 20:53:59.866 [SingleUserNotebookApp] WARNING | Kernel ec8afdad-2a1e-47f2-8202-d3644832c13e died, removing from map.
ERROR:root:kernel ec8afdad-2a1e-47f2-8202-d3644832c13e restarted failed!
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `jupyterhub/singleuserapp.py`
Content:
```
1 #!/usr/bin/env python
2 """Extend regular notebook server to be aware of multiuser things."""
3
4 # Copyright (c) Jupyter Development Team.
5 # Distributed under the terms of the Modified BSD License.
6
7 import os
8
9 import requests
10
11 from tornado import ioloop
12 from tornado import web
13
14 from IPython.utils.traitlets import Unicode
15
16 from IPython.html.notebookapp import NotebookApp
17
18 from IPython.html.utils import url_path_join
19
20
21 from distutils.version import LooseVersion as V
22
23 import IPython
24 if V(IPython.__version__) < V('2.2'):
25 raise ImportError("JupyterHub Requires IPython >= 2.2, found %s" % IPython.__version__)
26
27 # Define two methods to attach to AuthenticatedHandler,
28 # which authenticate via the central auth server.
29
30
31 def verify_token(self, token):
32 """monkeypatch method for token verification"""
33 token_cache = self.settings['token_cache']
34 if token in token_cache:
35 # we've seen this token before, don't ask upstream again
36 return token_cache[token]
37
38 hub_api_url = self.settings['hub_api_url']
39 hub_api_key = self.settings['hub_api_key']
40 r = requests.get(url_path_join(
41 hub_api_url, "authorizations", token,
42 ),
43 headers = {'Authorization' : 'token %s' % hub_api_key}
44 )
45 if r.status_code == 404:
46 data = {'user' : ''}
47 else:
48 r.raise_for_status()
49 data = r.json()
50 token_cache[token] = data
51 return data
52
53
54 def get_current_user(self):
55 """alternative get_current_user to query the central server"""
56 my_user = self.settings['user']
57 token = self.get_cookie(self.cookie_name, '')
58 if token:
59 auth_data = self.verify_token(token)
60 if not auth_data:
61 # treat invalid token the same as no token
62 return None
63 user = auth_data['user']
64 if user == my_user:
65 return user
66 else:
67 return None
68 else:
69 self.log.debug("No token cookie")
70 return None
71
72
73 # register new hub related command-line aliases
74 aliases = NotebookApp.aliases.get_default_value()
75 aliases.update({
76 'user' : 'SingleUserNotebookApp.user',
77 'cookie-name': 'SingleUserNotebookApp.cookie_name',
78 'hub-prefix': 'SingleUserNotebookApp.hub_prefix',
79 'hub-api-url': 'SingleUserNotebookApp.hub_api_url',
80 'base-url': 'SingleUserNotebookApp.base_url',
81 })
82
83
84 class SingleUserNotebookApp(NotebookApp):
85 """A Subclass of the regular NotebookApp that is aware of the parent multiuser context."""
86 user = Unicode(config=True)
87 cookie_name = Unicode(config=True)
88 hub_prefix = Unicode(config=True)
89 hub_api_url = Unicode(config=True)
90 aliases = aliases
91 open_browser = False
92
93 def _confirm_exit(self):
94 # disable the exit confirmation for background notebook processes
95 ioloop.IOLoop.instance().stop()
96
97 def init_webapp(self):
98 # monkeypatch authentication to use the hub
99 from IPython.html.base.handlers import AuthenticatedHandler
100 AuthenticatedHandler.verify_token = verify_token
101 AuthenticatedHandler.get_current_user = get_current_user
102
103 # load the hub related settings into the tornado settings dict
104 env = os.environ
105 s = self.webapp_settings
106 s['token_cache'] = {}
107 s['user'] = self.user
108 s['hub_api_key'] = env.pop('JPY_API_TOKEN', '')
109 s['cookie_secret'] = env.pop('JPY_COOKIE_SECRET', '')
110 s['cookie_name'] = self.cookie_name
111 s['login_url'] = url_path_join(self.hub_prefix, 'login')
112 s['hub_api_url'] = self.hub_api_url
113 super(SingleUserNotebookApp, self).init_webapp()
114
115
116 def main():
117 return SingleUserNotebookApp.launch_instance()
118
119
120 if __name__ == "__main__":
121 main()
122
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/jupyterhub/singleuserapp.py b/jupyterhub/singleuserapp.py
--- a/jupyterhub/singleuserapp.py
+++ b/jupyterhub/singleuserapp.py
@@ -94,6 +94,11 @@
# disable the exit confirmation for background notebook processes
ioloop.IOLoop.instance().stop()
+ def init_kernel_argv(self):
+ """construct the kernel arguments"""
+ # FIXME: This is 2.x-compat, remove when 3.x is requred
+ self.kernel_argv = ["--profile-dir", self.profile_dir.location]
+
def init_webapp(self):
# monkeypatch authentication to use the hub
from IPython.html.base.handlers import AuthenticatedHandler
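The idea behind the patch is that flags understood only by the single-user notebook server (such as `--cookie-name`) must not be forwarded to the kernels it launches; the patch achieves that by rebuilding `kernel_argv` with just the profile directory. The sketch below illustrates the same separation in plain Python; the flag list is illustrative, not IPython's actual alias table:
```python
# Standalone sketch: keep server-only flags away from the kernel command line.
from typing import List

# Illustrative set of flags only the notebook server understands (assumption,
# not IPython's real alias table).
SERVER_ONLY_FLAGS = {"--cookie-name", "--hub-prefix", "--hub-api-url", "--user", "--base-url"}


def kernel_argv(server_argv: List[str]) -> List[str]:
    """Return only the arguments that a spawned kernel can understand."""
    kept: List[str] = []
    skip_value = False
    for arg in server_argv:
        if skip_value:          # drop the value token of a '--flag value' pair
            skip_value = False
            continue
        name = arg.split("=", 1)[0]
        if name in SERVER_ONLY_FLAGS:
            skip_value = "=" not in arg
            continue
        kept.append(arg)
    return kept


argv = ["--user=ben", "--cookie-name=jupyter-hub-token-ben", "--profile-dir", "/tmp/profile"]
print(kernel_argv(argv))  # ['--profile-dir', '/tmp/profile']
```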
| {"golden_diff": "diff --git a/jupyterhub/singleuserapp.py b/jupyterhub/singleuserapp.py\n--- a/jupyterhub/singleuserapp.py\n+++ b/jupyterhub/singleuserapp.py\n@@ -94,6 +94,11 @@\n # disable the exit confirmation for background notebook processes\n ioloop.IOLoop.instance().stop()\n \n+ def init_kernel_argv(self):\n+ \"\"\"construct the kernel arguments\"\"\"\n+ # FIXME: This is 2.x-compat, remove when 3.x is requred\n+ self.kernel_argv = [\"--profile-dir\", self.profile_dir.location]\n+ \n def init_webapp(self):\n # monkeypatch authentication to use the hub\n from IPython.html.base.handlers import AuthenticatedHandler\n", "issue": "Invalid argument: '--cookie-name=jupyter-hub-token-MYUSERNAME'\nI can't run cells in py notebooks, I got a popup `The kernel appears to have died`\n\nHere's the log running jupyterhub from master branch\n\n```\n[IPKernelApp] CRITICAL | Bad config encountered during initialization:\n[IPKernelApp] CRITICAL | Invalid argument: '--cookie-name=jupyter-hub-token-ben'\n2014-09-14 20:53:59.865 [SingleUserNotebookApp] WARNING | KernelRestarter: restart failed\n2014-09-14 20:53:59.866 [SingleUserNotebookApp] WARNING | Kernel ec8afdad-2a1e-47f2-8202-d3644832c13e died, removing from map.\nERROR:root:kernel ec8afdad-2a1e-47f2-8202-d3644832c13e restarted failed!\n\n```\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\"\"\"Extend regular notebook server to be aware of multiuser things.\"\"\"\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nimport os\n\nimport requests\n\nfrom tornado import ioloop\nfrom tornado import web\n\nfrom IPython.utils.traitlets import Unicode\n\nfrom IPython.html.notebookapp import NotebookApp\n\nfrom IPython.html.utils import url_path_join\n\n\nfrom distutils.version import LooseVersion as V\n\nimport IPython\nif V(IPython.__version__) < V('2.2'):\n raise ImportError(\"JupyterHub Requires IPython >= 2.2, found %s\" % IPython.__version__)\n\n# Define two methods to attach to AuthenticatedHandler,\n# which authenticate via the central auth server.\n\n\ndef verify_token(self, token):\n \"\"\"monkeypatch method for token verification\"\"\"\n token_cache = self.settings['token_cache']\n if token in token_cache:\n # we've seen this token before, don't ask upstream again\n return token_cache[token]\n \n hub_api_url = self.settings['hub_api_url']\n hub_api_key = self.settings['hub_api_key']\n r = requests.get(url_path_join(\n hub_api_url, \"authorizations\", token,\n ),\n headers = {'Authorization' : 'token %s' % hub_api_key}\n )\n if r.status_code == 404:\n data = {'user' : ''}\n else:\n r.raise_for_status()\n data = r.json()\n token_cache[token] = data\n return data\n\n\ndef get_current_user(self):\n \"\"\"alternative get_current_user to query the central server\"\"\"\n my_user = self.settings['user']\n token = self.get_cookie(self.cookie_name, '')\n if token:\n auth_data = self.verify_token(token)\n if not auth_data:\n # treat invalid token the same as no token\n return None\n user = auth_data['user']\n if user == my_user:\n return user\n else:\n return None\n else:\n self.log.debug(\"No token cookie\")\n return None\n\n\n# register new hub related command-line aliases\naliases = NotebookApp.aliases.get_default_value()\naliases.update({\n 'user' : 'SingleUserNotebookApp.user',\n 'cookie-name': 'SingleUserNotebookApp.cookie_name',\n 'hub-prefix': 'SingleUserNotebookApp.hub_prefix',\n 'hub-api-url': 'SingleUserNotebookApp.hub_api_url',\n 'base-url': 
'SingleUserNotebookApp.base_url',\n})\n\n\nclass SingleUserNotebookApp(NotebookApp):\n \"\"\"A Subclass of the regular NotebookApp that is aware of the parent multiuser context.\"\"\"\n user = Unicode(config=True)\n cookie_name = Unicode(config=True)\n hub_prefix = Unicode(config=True)\n hub_api_url = Unicode(config=True)\n aliases = aliases\n open_browser = False\n \n def _confirm_exit(self):\n # disable the exit confirmation for background notebook processes\n ioloop.IOLoop.instance().stop()\n \n def init_webapp(self):\n # monkeypatch authentication to use the hub\n from IPython.html.base.handlers import AuthenticatedHandler\n AuthenticatedHandler.verify_token = verify_token\n AuthenticatedHandler.get_current_user = get_current_user\n \n # load the hub related settings into the tornado settings dict\n env = os.environ\n s = self.webapp_settings\n s['token_cache'] = {}\n s['user'] = self.user\n s['hub_api_key'] = env.pop('JPY_API_TOKEN', '')\n s['cookie_secret'] = env.pop('JPY_COOKIE_SECRET', '')\n s['cookie_name'] = self.cookie_name\n s['login_url'] = url_path_join(self.hub_prefix, 'login')\n s['hub_api_url'] = self.hub_api_url\n super(SingleUserNotebookApp, self).init_webapp()\n\n\ndef main():\n return SingleUserNotebookApp.launch_instance()\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "jupyterhub/singleuserapp.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\"\"\"Extend regular notebook server to be aware of multiuser things.\"\"\"\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nimport os\n\nimport requests\n\nfrom tornado import ioloop\nfrom tornado import web\n\nfrom IPython.utils.traitlets import Unicode\n\nfrom IPython.html.notebookapp import NotebookApp\n\nfrom IPython.html.utils import url_path_join\n\n\nfrom distutils.version import LooseVersion as V\n\nimport IPython\nif V(IPython.__version__) < V('2.2'):\n raise ImportError(\"JupyterHub Requires IPython >= 2.2, found %s\" % IPython.__version__)\n\n# Define two methods to attach to AuthenticatedHandler,\n# which authenticate via the central auth server.\n\n\ndef verify_token(self, token):\n \"\"\"monkeypatch method for token verification\"\"\"\n token_cache = self.settings['token_cache']\n if token in token_cache:\n # we've seen this token before, don't ask upstream again\n return token_cache[token]\n \n hub_api_url = self.settings['hub_api_url']\n hub_api_key = self.settings['hub_api_key']\n r = requests.get(url_path_join(\n hub_api_url, \"authorizations\", token,\n ),\n headers = {'Authorization' : 'token %s' % hub_api_key}\n )\n if r.status_code == 404:\n data = {'user' : ''}\n else:\n r.raise_for_status()\n data = r.json()\n token_cache[token] = data\n return data\n\n\ndef get_current_user(self):\n \"\"\"alternative get_current_user to query the central server\"\"\"\n my_user = self.settings['user']\n token = self.get_cookie(self.cookie_name, '')\n if token:\n auth_data = self.verify_token(token)\n if not auth_data:\n # treat invalid token the same as no token\n return None\n user = auth_data['user']\n if user == my_user:\n return user\n else:\n return None\n else:\n self.log.debug(\"No token cookie\")\n return None\n\n\n# register new hub related command-line aliases\naliases = NotebookApp.aliases.get_default_value()\naliases.update({\n 'user' : 'SingleUserNotebookApp.user',\n 'cookie-name': 'SingleUserNotebookApp.cookie_name',\n 'hub-prefix': 'SingleUserNotebookApp.hub_prefix',\n 'hub-api-url': 'SingleUserNotebookApp.hub_api_url',\n 
'base-url': 'SingleUserNotebookApp.base_url',\n})\n\n\nclass SingleUserNotebookApp(NotebookApp):\n \"\"\"A Subclass of the regular NotebookApp that is aware of the parent multiuser context.\"\"\"\n user = Unicode(config=True)\n cookie_name = Unicode(config=True)\n hub_prefix = Unicode(config=True)\n hub_api_url = Unicode(config=True)\n aliases = aliases\n open_browser = False\n \n def _confirm_exit(self):\n # disable the exit confirmation for background notebook processes\n ioloop.IOLoop.instance().stop()\n \n def init_kernel_argv(self):\n \"\"\"construct the kernel arguments\"\"\"\n # FIXME: This is 2.x-compat, remove when 3.x is requred\n self.kernel_argv = [\"--profile-dir\", self.profile_dir.location]\n \n def init_webapp(self):\n # monkeypatch authentication to use the hub\n from IPython.html.base.handlers import AuthenticatedHandler\n AuthenticatedHandler.verify_token = verify_token\n AuthenticatedHandler.get_current_user = get_current_user\n \n # load the hub related settings into the tornado settings dict\n env = os.environ\n s = self.webapp_settings\n s['token_cache'] = {}\n s['user'] = self.user\n s['hub_api_key'] = env.pop('JPY_API_TOKEN', '')\n s['cookie_secret'] = env.pop('JPY_COOKIE_SECRET', '')\n s['cookie_name'] = self.cookie_name\n s['login_url'] = url_path_join(self.hub_prefix, 'login')\n s['hub_api_url'] = self.hub_api_url\n super(SingleUserNotebookApp, self).init_webapp()\n\n\ndef main():\n return SingleUserNotebookApp.launch_instance()\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "jupyterhub/singleuserapp.py"}]} | 1,634 | 161 |
gh_patches_debug_60363 | rasdani/github-patches | git_diff | GeotrekCE__Geotrek-admin-2598 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug: intervention display depends on the zoom level
Hello, on 2.49.0, depending on the selected zoom level, some interventions appear and disappear. Dragging the map also triggers this behaviour:
https://user-images.githubusercontent.com/45095227/108825316-2719b200-75c3-11eb-8406-11d57bcd2531.mov
I'll also take this opportunity to mention that I couldn't change the opacity of the linear features to make them more visible (see: https://github.com/GeotrekCE/Geotrek-admin/issues/2554).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `geotrek/maintenance/filters.py`
Content:
```
1 from django.db.models import Q
2 from django.conf import settings
3 from django.contrib.contenttypes.models import ContentType
4 from django.utils.translation import gettext_lazy as _
5 from django_filters import ChoiceFilter, MultipleChoiceFilter
6
7 from mapentity.filters import PolygonFilter, PythonPolygonFilter
8
9 from geotrek.core.models import Topology
10 from geotrek.authent.filters import StructureRelatedFilterSet
11 from geotrek.common.filters import RightFilter
12 from geotrek.zoning.filters import ZoningFilterSet
13 from geotrek.zoning.models import City, District
14
15 from .models import Intervention, Project
16
17 if 'geotrek.signage' in settings.INSTALLED_APPS:
18 from geotrek.signage.models import Blade
19
20
21 class PolygonInterventionFilterMixin(object):
22 def get_geom(self, value):
23 return value
24
25 def filter(self, qs, values):
26 if not values:
27 return qs
28 lookup = self.lookup_expr
29
30 blade_content_type = ContentType.objects.get_for_model(Blade)
31 topologies = []
32 for value in values:
33 topologies += Topology.objects.filter(**{'geom__%s' % lookup: self.get_geom(value)}).values_list('id', flat=True)
34 topologies_intervention = Intervention.objects.existing().filter(target_id__in=topologies).exclude(
35 target_type=blade_content_type).distinct('pk').values_list('id', flat=True)
36
37 interventions = list(topologies_intervention)
38 if 'geotrek.signage' in settings.INSTALLED_APPS:
39 blades = list(Blade.objects.filter(signage__in=topologies).values_list('id', flat=True))
40 blades_intervention = Intervention.objects.existing().filter(target_id__in=blades,
41 target_type=blade_content_type).values_list('id',
42 flat=True)
43 interventions.extend(blades_intervention)
44 if hasattr(self, 'lookup_queryset_in'):
45 lookup_queryset = self.lookup_queryset_in
46 else:
47 lookup_queryset = 'pk__in'
48 qs = qs.filter(**{'%s' % lookup_queryset: interventions})
49 return qs
50
51
52 class PolygonTopologyFilter(PolygonInterventionFilterMixin, PolygonFilter):
53 pass
54
55
56 class ProjectIntersectionFilterCity(PolygonInterventionFilterMixin, RightFilter):
57 model = City
58
59 def __init__(self, *args, **kwargs):
60 super(ProjectIntersectionFilterCity, self).__init__(*args, **kwargs)
61 self.lookup_expr = 'intersects'
62 self.lookup_queryset_in = 'interventions__in'
63
64 def get_geom(self, value):
65 return value.geom
66
67
68 class ProjectIntersectionFilterDistrict(PolygonInterventionFilterMixin, RightFilter):
69 model = District
70
71 def __init__(self, *args, **kwargs):
72 super(ProjectIntersectionFilterDistrict, self).__init__(*args, **kwargs)
73 self.lookup_expr = 'intersects'
74 self.lookup_queryset_in = 'interventions__in'
75
76 def get_geom(self, value):
77 return value.geom
78
79
80 class InterventionFilterSet(ZoningFilterSet, StructureRelatedFilterSet):
81 ON_CHOICES = (('infrastructure', _("Infrastructure")), ('signage', _("Signage")), ('blade', _("Blade")),
82 ('topology', _("Path")), ('trek', _("Trek")), ('poi', _("POI")), ('service', _("Service")),
83 ('trail', _("Trail")))
84 bbox = PolygonTopologyFilter(lookup_expr='intersects')
85 year = MultipleChoiceFilter(choices=Intervention.objects.year_choices(),
86 field_name='date', lookup_expr='year', label=_("Year"))
87 on = ChoiceFilter(field_name='target_type__model', choices=ON_CHOICES, label=_("On"), empty_label=_("On"))
88
89 class Meta(StructureRelatedFilterSet.Meta):
90 model = Intervention
91 fields = StructureRelatedFilterSet.Meta.fields + [
92 'status', 'type', 'stake', 'subcontracting', 'project', 'on',
93 ]
94
95
96 class ProjectFilterSet(StructureRelatedFilterSet):
97 bbox = PythonPolygonFilter(field_name='geom')
98 year = MultipleChoiceFilter(
99 label=_("Year of activity"), method='filter_year',
100 choices=lambda: Project.objects.year_choices() # Could change over time
101 )
102 city = ProjectIntersectionFilterCity(label=_('City'), required=False)
103 district = ProjectIntersectionFilterDistrict(label=_('District'), required=False)
104
105 class Meta(StructureRelatedFilterSet.Meta):
106 model = Project
107 fields = StructureRelatedFilterSet.Meta.fields + [
108 'year', 'type', 'domain', 'contractors', 'project_owner',
109 'project_manager', 'founders'
110 ]
111
112 def filter_year(self, qs, name, values):
113 q = Q()
114 for value in values:
115 q |= Q(begin_year__lte=value, end_year__gte=value)
116 return qs.filter(q)
117
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/geotrek/maintenance/filters.py b/geotrek/maintenance/filters.py
--- a/geotrek/maintenance/filters.py
+++ b/geotrek/maintenance/filters.py
@@ -25,6 +25,9 @@
def filter(self, qs, values):
if not values:
return qs
+ if not isinstance(values, list):
+ values = [values]
+
lookup = self.lookup_expr
blade_content_type = ContentType.objects.get_for_model(Blade)
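The fix boils down to normalising the filter input: the bbox filter hands the method a single geometry while the other filters hand it a list, so the method now coerces scalars into a one-element list before iterating. A self-contained sketch of that idiom, with plain strings standing in for GEOS geometries and querysets, could look like this:
```python
# Sketch of the scalar-vs-list normalisation; strings stand in for geometries.
from typing import Any, Iterable, List, Union


def normalise(values: Union[Any, List[Any]]) -> List[Any]:
    """Accept either one value or a list of values, always return a list."""
    if not values:
        return []
    if not isinstance(values, list):
        values = [values]
    return values


def filter_ids(values: Union[str, List[str]], known: Iterable[str]) -> List[str]:
    wanted = set(normalise(values))
    return [k for k in known if k in wanted]


known = ["zoom-bug", "opacity", "other"]
print(filter_ids("zoom-bug", known))                # single value, like the bbox filter
print(filter_ids(["zoom-bug", "opacity"], known))   # list, like the multi-choice filters
```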
| {"golden_diff": "diff --git a/geotrek/maintenance/filters.py b/geotrek/maintenance/filters.py\n--- a/geotrek/maintenance/filters.py\n+++ b/geotrek/maintenance/filters.py\n@@ -25,6 +25,9 @@\n def filter(self, qs, values):\n if not values:\n return qs\n+ if not isinstance(values, list):\n+ values = [values]\n+\n lookup = self.lookup_expr\n \n blade_content_type = ContentType.objects.get_for_model(Blade)\n", "issue": "Bug affichage intervention selon niveau de zoom\nBonjour, sur la 2.49.0 selon le niveau de zoom s\u00e9lectionn\u00e9, certaines interventions apparaissent et disparaissent. Faire glisser la carte provoque aussi ce comportement :\r\n\r\n\r\nhttps://user-images.githubusercontent.com/45095227/108825316-2719b200-75c3-11eb-8406-11d57bcd2531.mov\r\n\r\nJ'en profite pour dire que je n'ai pas pu changer l'opacit\u00e9 des lin\u00e9aires pour qu'ils soient plus visibles ( voir : https://github.com/GeotrekCE/Geotrek-admin/issues/2554)\n", "before_files": [{"content": "from django.db.models import Q\nfrom django.conf import settings\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.utils.translation import gettext_lazy as _\nfrom django_filters import ChoiceFilter, MultipleChoiceFilter\n\nfrom mapentity.filters import PolygonFilter, PythonPolygonFilter\n\nfrom geotrek.core.models import Topology\nfrom geotrek.authent.filters import StructureRelatedFilterSet\nfrom geotrek.common.filters import RightFilter\nfrom geotrek.zoning.filters import ZoningFilterSet\nfrom geotrek.zoning.models import City, District\n\nfrom .models import Intervention, Project\n\nif 'geotrek.signage' in settings.INSTALLED_APPS:\n from geotrek.signage.models import Blade\n\n\nclass PolygonInterventionFilterMixin(object):\n def get_geom(self, value):\n return value\n\n def filter(self, qs, values):\n if not values:\n return qs\n lookup = self.lookup_expr\n\n blade_content_type = ContentType.objects.get_for_model(Blade)\n topologies = []\n for value in values:\n topologies += Topology.objects.filter(**{'geom__%s' % lookup: self.get_geom(value)}).values_list('id', flat=True)\n topologies_intervention = Intervention.objects.existing().filter(target_id__in=topologies).exclude(\n target_type=blade_content_type).distinct('pk').values_list('id', flat=True)\n\n interventions = list(topologies_intervention)\n if 'geotrek.signage' in settings.INSTALLED_APPS:\n blades = list(Blade.objects.filter(signage__in=topologies).values_list('id', flat=True))\n blades_intervention = Intervention.objects.existing().filter(target_id__in=blades,\n target_type=blade_content_type).values_list('id',\n flat=True)\n interventions.extend(blades_intervention)\n if hasattr(self, 'lookup_queryset_in'):\n lookup_queryset = self.lookup_queryset_in\n else:\n lookup_queryset = 'pk__in'\n qs = qs.filter(**{'%s' % lookup_queryset: interventions})\n return qs\n\n\nclass PolygonTopologyFilter(PolygonInterventionFilterMixin, PolygonFilter):\n pass\n\n\nclass ProjectIntersectionFilterCity(PolygonInterventionFilterMixin, RightFilter):\n model = City\n\n def __init__(self, *args, **kwargs):\n super(ProjectIntersectionFilterCity, self).__init__(*args, **kwargs)\n self.lookup_expr = 'intersects'\n self.lookup_queryset_in = 'interventions__in'\n\n def get_geom(self, value):\n return value.geom\n\n\nclass ProjectIntersectionFilterDistrict(PolygonInterventionFilterMixin, RightFilter):\n model = District\n\n def __init__(self, *args, **kwargs):\n super(ProjectIntersectionFilterDistrict, self).__init__(*args, **kwargs)\n self.lookup_expr = 
'intersects'\n self.lookup_queryset_in = 'interventions__in'\n\n def get_geom(self, value):\n return value.geom\n\n\nclass InterventionFilterSet(ZoningFilterSet, StructureRelatedFilterSet):\n ON_CHOICES = (('infrastructure', _(\"Infrastructure\")), ('signage', _(\"Signage\")), ('blade', _(\"Blade\")),\n ('topology', _(\"Path\")), ('trek', _(\"Trek\")), ('poi', _(\"POI\")), ('service', _(\"Service\")),\n ('trail', _(\"Trail\")))\n bbox = PolygonTopologyFilter(lookup_expr='intersects')\n year = MultipleChoiceFilter(choices=Intervention.objects.year_choices(),\n field_name='date', lookup_expr='year', label=_(\"Year\"))\n on = ChoiceFilter(field_name='target_type__model', choices=ON_CHOICES, label=_(\"On\"), empty_label=_(\"On\"))\n\n class Meta(StructureRelatedFilterSet.Meta):\n model = Intervention\n fields = StructureRelatedFilterSet.Meta.fields + [\n 'status', 'type', 'stake', 'subcontracting', 'project', 'on',\n ]\n\n\nclass ProjectFilterSet(StructureRelatedFilterSet):\n bbox = PythonPolygonFilter(field_name='geom')\n year = MultipleChoiceFilter(\n label=_(\"Year of activity\"), method='filter_year',\n choices=lambda: Project.objects.year_choices() # Could change over time\n )\n city = ProjectIntersectionFilterCity(label=_('City'), required=False)\n district = ProjectIntersectionFilterDistrict(label=_('District'), required=False)\n\n class Meta(StructureRelatedFilterSet.Meta):\n model = Project\n fields = StructureRelatedFilterSet.Meta.fields + [\n 'year', 'type', 'domain', 'contractors', 'project_owner',\n 'project_manager', 'founders'\n ]\n\n def filter_year(self, qs, name, values):\n q = Q()\n for value in values:\n q |= Q(begin_year__lte=value, end_year__gte=value)\n return qs.filter(q)\n", "path": "geotrek/maintenance/filters.py"}], "after_files": [{"content": "from django.db.models import Q\nfrom django.conf import settings\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.utils.translation import gettext_lazy as _\nfrom django_filters import ChoiceFilter, MultipleChoiceFilter\n\nfrom mapentity.filters import PolygonFilter, PythonPolygonFilter\n\nfrom geotrek.core.models import Topology\nfrom geotrek.authent.filters import StructureRelatedFilterSet\nfrom geotrek.common.filters import RightFilter\nfrom geotrek.zoning.filters import ZoningFilterSet\nfrom geotrek.zoning.models import City, District\n\nfrom .models import Intervention, Project\n\nif 'geotrek.signage' in settings.INSTALLED_APPS:\n from geotrek.signage.models import Blade\n\n\nclass PolygonInterventionFilterMixin(object):\n def get_geom(self, value):\n return value\n\n def filter(self, qs, values):\n if not values:\n return qs\n if not isinstance(values, list):\n values = [values]\n\n lookup = self.lookup_expr\n\n blade_content_type = ContentType.objects.get_for_model(Blade)\n topologies = []\n for value in values:\n topologies += Topology.objects.filter(**{'geom__%s' % lookup: self.get_geom(value)}).values_list('id', flat=True)\n topologies_intervention = Intervention.objects.existing().filter(target_id__in=topologies).exclude(\n target_type=blade_content_type).distinct('pk').values_list('id', flat=True)\n\n interventions = list(topologies_intervention)\n if 'geotrek.signage' in settings.INSTALLED_APPS:\n blades = list(Blade.objects.filter(signage__in=topologies).values_list('id', flat=True))\n blades_intervention = Intervention.objects.existing().filter(target_id__in=blades,\n target_type=blade_content_type).values_list('id',\n flat=True)\n interventions.extend(blades_intervention)\n if 
hasattr(self, 'lookup_queryset_in'):\n lookup_queryset = self.lookup_queryset_in\n else:\n lookup_queryset = 'pk__in'\n qs = qs.filter(**{'%s' % lookup_queryset: interventions})\n return qs\n\n\nclass PolygonTopologyFilter(PolygonInterventionFilterMixin, PolygonFilter):\n pass\n\n\nclass ProjectIntersectionFilterCity(PolygonInterventionFilterMixin, RightFilter):\n model = City\n\n def __init__(self, *args, **kwargs):\n super(ProjectIntersectionFilterCity, self).__init__(*args, **kwargs)\n self.lookup_expr = 'intersects'\n self.lookup_queryset_in = 'interventions__in'\n\n def get_geom(self, value):\n return value.geom\n\n\nclass ProjectIntersectionFilterDistrict(PolygonInterventionFilterMixin, RightFilter):\n model = District\n\n def __init__(self, *args, **kwargs):\n super(ProjectIntersectionFilterDistrict, self).__init__(*args, **kwargs)\n self.lookup_expr = 'intersects'\n self.lookup_queryset_in = 'interventions__in'\n\n def get_geom(self, value):\n return value.geom\n\n\nclass InterventionFilterSet(ZoningFilterSet, StructureRelatedFilterSet):\n ON_CHOICES = (('infrastructure', _(\"Infrastructure\")), ('signage', _(\"Signage\")), ('blade', _(\"Blade\")),\n ('topology', _(\"Path\")), ('trek', _(\"Trek\")), ('poi', _(\"POI\")), ('service', _(\"Service\")),\n ('trail', _(\"Trail\")))\n bbox = PolygonTopologyFilter(lookup_expr='intersects')\n year = MultipleChoiceFilter(choices=Intervention.objects.year_choices(),\n field_name='date', lookup_expr='year', label=_(\"Year\"))\n on = ChoiceFilter(field_name='target_type__model', choices=ON_CHOICES, label=_(\"On\"), empty_label=_(\"On\"))\n\n class Meta(StructureRelatedFilterSet.Meta):\n model = Intervention\n fields = StructureRelatedFilterSet.Meta.fields + [\n 'status', 'type', 'stake', 'subcontracting', 'project', 'on',\n ]\n\n\nclass ProjectFilterSet(StructureRelatedFilterSet):\n bbox = PythonPolygonFilter(field_name='geom')\n year = MultipleChoiceFilter(\n label=_(\"Year of activity\"), method='filter_year',\n choices=lambda: Project.objects.year_choices() # Could change over time\n )\n city = ProjectIntersectionFilterCity(label=_('City'), required=False)\n district = ProjectIntersectionFilterDistrict(label=_('District'), required=False)\n\n class Meta(StructureRelatedFilterSet.Meta):\n model = Project\n fields = StructureRelatedFilterSet.Meta.fields + [\n 'year', 'type', 'domain', 'contractors', 'project_owner',\n 'project_manager', 'founders'\n ]\n\n def filter_year(self, qs, name, values):\n q = Q()\n for value in values:\n q |= Q(begin_year__lte=value, end_year__gte=value)\n return qs.filter(q)\n", "path": "geotrek/maintenance/filters.py"}]} | 1,699 | 112 |
gh_patches_debug_7612 | rasdani/github-patches | git_diff | SCons__scons-4013 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
UnicodeDecodeError
Why do I get this error when I type the help command?
Python 3.8.10
Scons 4.2.0

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `SCons/Tool/gcc.py`
Content:
```
1 # MIT License
2 #
3 # Copyright The SCons Foundation
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining
6 # a copy of this software and associated documentation files (the
7 # "Software"), to deal in the Software without restriction, including
8 # without limitation the rights to use, copy, modify, merge, publish,
9 # distribute, sublicense, and/or sell copies of the Software, and to
10 # permit persons to whom the Software is furnished to do so, subject to
11 # the following conditions:
12 #
13 # The above copyright notice and this permission notice shall be included
14 # in all copies or substantial portions of the Software.
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
17 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
18 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19 # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
20 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
22 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23
24 """SCons.Tool.gcc
25
26 Tool-specific initialization for gcc.
27
28 There normally shouldn't be any need to import this module directly.
29 It will usually be imported through the generic SCons.Tool.Tool()
30 selection method.
31
32 """
33
34 from . import cc
35 import re
36 import subprocess
37
38 import SCons.Util
39
40 compilers = ['gcc', 'cc']
41
42
43 def generate(env):
44 """Add Builders and construction variables for gcc to an Environment."""
45
46 if 'CC' not in env:
47 env['CC'] = env.Detect(compilers) or compilers[0]
48
49 cc.generate(env)
50
51 if env['PLATFORM'] in ['cygwin', 'win32']:
52 env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
53 else:
54 env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC')
55 # determine compiler version
56 version = detect_version(env, env['CC'])
57 if version:
58 env['CCVERSION'] = version
59
60
61 def exists(env):
62 # is executable, and is a GNU compiler (or accepts '--version' at least)
63 return detect_version(env, env.Detect(env.get('CC', compilers)))
64
65
66 def detect_version(env, cc):
67 """Return the version of the GNU compiler, or None if it is not a GNU compiler."""
68 version = None
69 cc = env.subst(cc)
70 if not cc:
71 return version
72
73 # -dumpversion was added in GCC 3.0. As long as we're supporting
74 # GCC versions older than that, we should use --version and a
75 # regular expression.
76 # pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'],
77 pipe=SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['--version'],
78 stdin='devnull',
79 stderr='devnull',
80 stdout=subprocess.PIPE)
81 if pipe.wait() != 0:
82 return version
83
84 with pipe.stdout:
85 # -dumpversion variant:
86 # line = pipe.stdout.read().strip()
87 # --version variant:
88 line = SCons.Util.to_str(pipe.stdout.readline())
89 # Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer:
90 # So continue with reading to let the child process actually terminate.
91 while SCons.Util.to_str(pipe.stdout.readline()):
92 pass
93
94
95 # -dumpversion variant:
96 # if line:
97 # version = line
98 # --version variant:
99 match = re.search(r'[0-9]+(\.[0-9]+)+', line)
100 if match:
101 version = match.group(0)
102
103 return version
104
105 # Local Variables:
106 # tab-width:4
107 # indent-tabs-mode:nil
108 # End:
109 # vim: set expandtab tabstop=4 shiftwidth=4:
110
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/SCons/Tool/gcc.py b/SCons/Tool/gcc.py
--- a/SCons/Tool/gcc.py
+++ b/SCons/Tool/gcc.py
@@ -88,7 +88,8 @@
line = SCons.Util.to_str(pipe.stdout.readline())
# Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer:
# So continue with reading to let the child process actually terminate.
- while SCons.Util.to_str(pipe.stdout.readline()):
+ # We don't need to know the rest of the data, so don't bother decoding.
+ while pipe.stdout.readline():
pass
| {"golden_diff": "diff --git a/SCons/Tool/gcc.py b/SCons/Tool/gcc.py\n--- a/SCons/Tool/gcc.py\n+++ b/SCons/Tool/gcc.py\n@@ -88,7 +88,8 @@\n line = SCons.Util.to_str(pipe.stdout.readline())\n # Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer:\n # So continue with reading to let the child process actually terminate.\n- while SCons.Util.to_str(pipe.stdout.readline()):\n+ # We don't need to know the rest of the data, so don't bother decoding.\n+ while pipe.stdout.readline():\n pass\n", "issue": "UnicodeDecodeError\nWhy i get error if type help command?\r\nPython 3.8.10\r\nScons 4.2.0\r\n\r\n\nUnicodeDecodeError\nWhy i get error if type help command?\r\nPython 3.8.10\r\nScons 4.2.0\r\n\r\n\n", "before_files": [{"content": "# MIT License\n#\n# Copyright The SCons Foundation\n#\n# Permission is hereby granted, free of charge, to any person obtaining\n# a copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the Software, and to\n# permit persons to whom the Software is furnished to do so, subject to\n# the following conditions:\n#\n# The above copyright notice and this permission notice shall be included\n# in all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\n# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\n# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\"\"\"SCons.Tool.gcc\n\nTool-specific initialization for gcc.\n\nThere normally shouldn't be any need to import this module directly.\nIt will usually be imported through the generic SCons.Tool.Tool()\nselection method.\n\n\"\"\"\n\nfrom . import cc\nimport re\nimport subprocess\n\nimport SCons.Util\n\ncompilers = ['gcc', 'cc']\n\n\ndef generate(env):\n \"\"\"Add Builders and construction variables for gcc to an Environment.\"\"\"\n\n if 'CC' not in env:\n env['CC'] = env.Detect(compilers) or compilers[0]\n\n cc.generate(env)\n\n if env['PLATFORM'] in ['cygwin', 'win32']:\n env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')\n else:\n env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC')\n # determine compiler version\n version = detect_version(env, env['CC'])\n if version:\n env['CCVERSION'] = version\n\n\ndef exists(env):\n # is executable, and is a GNU compiler (or accepts '--version' at least)\n return detect_version(env, env.Detect(env.get('CC', compilers)))\n\n\ndef detect_version(env, cc):\n \"\"\"Return the version of the GNU compiler, or None if it is not a GNU compiler.\"\"\"\n version = None\n cc = env.subst(cc)\n if not cc:\n return version\n\n # -dumpversion was added in GCC 3.0. 
As long as we're supporting\n # GCC versions older than that, we should use --version and a\n # regular expression.\n # pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'],\n pipe=SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['--version'],\n stdin='devnull',\n stderr='devnull',\n stdout=subprocess.PIPE)\n if pipe.wait() != 0:\n return version\n\n with pipe.stdout:\n # -dumpversion variant:\n # line = pipe.stdout.read().strip()\n # --version variant:\n line = SCons.Util.to_str(pipe.stdout.readline())\n # Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer:\n # So continue with reading to let the child process actually terminate.\n while SCons.Util.to_str(pipe.stdout.readline()):\n pass\n\n\n # -dumpversion variant:\n # if line:\n # version = line\n # --version variant:\n match = re.search(r'[0-9]+(\\.[0-9]+)+', line)\n if match:\n version = match.group(0)\n\n return version\n\n# Local Variables:\n# tab-width:4\n# indent-tabs-mode:nil\n# End:\n# vim: set expandtab tabstop=4 shiftwidth=4:\n", "path": "SCons/Tool/gcc.py"}], "after_files": [{"content": "# MIT License\n#\n# Copyright The SCons Foundation\n#\n# Permission is hereby granted, free of charge, to any person obtaining\n# a copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the Software, and to\n# permit persons to whom the Software is furnished to do so, subject to\n# the following conditions:\n#\n# The above copyright notice and this permission notice shall be included\n# in all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\n# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\n# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\"\"\"SCons.Tool.gcc\n\nTool-specific initialization for gcc.\n\nThere normally shouldn't be any need to import this module directly.\nIt will usually be imported through the generic SCons.Tool.Tool()\nselection method.\n\n\"\"\"\n\nfrom . import cc\nimport re\nimport subprocess\n\nimport SCons.Util\n\ncompilers = ['gcc', 'cc']\n\n\ndef generate(env):\n \"\"\"Add Builders and construction variables for gcc to an Environment.\"\"\"\n\n if 'CC' not in env:\n env['CC'] = env.Detect(compilers) or compilers[0]\n\n cc.generate(env)\n\n if env['PLATFORM'] in ['cygwin', 'win32']:\n env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')\n else:\n env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC')\n # determine compiler version\n version = detect_version(env, env['CC'])\n if version:\n env['CCVERSION'] = version\n\n\ndef exists(env):\n # is executable, and is a GNU compiler (or accepts '--version' at least)\n return detect_version(env, env.Detect(env.get('CC', compilers)))\n\n\ndef detect_version(env, cc):\n \"\"\"Return the version of the GNU compiler, or None if it is not a GNU compiler.\"\"\"\n version = None\n cc = env.subst(cc)\n if not cc:\n return version\n\n # -dumpversion was added in GCC 3.0. 
As long as we're supporting\n # GCC versions older than that, we should use --version and a\n # regular expression.\n # pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'],\n pipe=SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['--version'],\n stdin='devnull',\n stderr='devnull',\n stdout=subprocess.PIPE)\n if pipe.wait() != 0:\n return version\n\n with pipe.stdout:\n # -dumpversion variant:\n # line = pipe.stdout.read().strip()\n # --version variant:\n line = SCons.Util.to_str(pipe.stdout.readline())\n # Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer:\n # So continue with reading to let the child process actually terminate.\n # We don't need to know the rest of the data, so don't bother decoding.\n while pipe.stdout.readline():\n pass\n\n\n # -dumpversion variant:\n # if line:\n # version = line\n # --version variant:\n match = re.search(r'[0-9]+(\\.[0-9]+)+', line)\n if match:\n version = match.group(0)\n\n return version\n\n# Local Variables:\n# tab-width:4\n# indent-tabs-mode:nil\n# End:\n# vim: set expandtab tabstop=4 shiftwidth=4:\n", "path": "SCons/Tool/gcc.py"}]} | 1,537 | 139 |
gh_patches_debug_23911 | rasdani/github-patches | git_diff | pymedusa__Medusa-10131 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Update PMS library not working
Hi,
All boxes from Notifications -> Plex Media server are completed with the correct information (token, user, password, server ip:port) and I'm receiving the blue box with (error trying to update plex).
What is surprising is that, when using the Test Plex Media Server, the test is OK even if the Token field is empty.
Error received:
Thread_2 :: [] PLEX: Unauthorized. Please set TOKEN or USERNAME and PASSWORD in Plex settings
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `medusa/server/api/v2/notifications.py`
Content:
```
1 # coding=utf-8
2 """Request handler for notifications data."""
3 from __future__ import unicode_literals
4
5
6 import logging
7 import re
8
9 from medusa import app, notifiers, ui
10 from medusa.logger.adapters.style import BraceAdapter
11 from medusa.server.api.v2.base import BaseRequestHandler
12 from medusa.tv.series import Series, SeriesIdentifier
13
14 log = BraceAdapter(logging.getLogger(__name__))
15 log.logger.addHandler(logging.NullHandler())
16
17
18 class NotificationsHandler(BaseRequestHandler):
19 """Notifications data request handler."""
20
21 #: resource name
22 name = 'notifications'
23 #: identifier
24 identifier = ('resource', r'\w+')
25 #: path param
26 path_param = ('path_param', r'\w+')
27 #: allowed HTTP methods
28 allowed_methods = ('GET', 'POST')
29
30 def post(self, resource, path_param=None):
31 """Post Notifications actions for a specific external source.
32
33 :param resource: a resource name
34 :param path_param:
35 :type path_param: str
36 """
37 if resource is None:
38 return self._bad_request('You must provide a notifications resource name')
39
40 available_resources = (
41 'kodi', 'plexserver', 'plexhome', 'emby', 'nmj', 'nmjv2', 'trakt'
42 )
43
44 if resource not in available_resources:
45 return self._bad_request(f"Resource must be one of {', '.join(available_resources)}")
46
47 # Convert 'camelCase' to 'resource_snake_case'
48 resource_function_name = resource + '_' + re.sub('([A-Z]+)', r'_\1', path_param).lower()
49 resource_function = getattr(self, resource_function_name, None)
50
51 if resource_function is None:
52 log.error('Unable to get function "{func}" for resource "{resource}"',
53 {'func': resource_function_name, 'resource': path_param})
54 return self._bad_request('{key} is a invalid resource'.format(key=path_param))
55
56 return resource_function()
57
58 def kodi_update(self):
59 """Update kodi's show library."""
60 if app.KODI_UPDATE_ONLYFIRST:
61 host = app.KODI_HOST[0].strip()
62 else:
63 host = ', '.join(app.KODI_HOST)
64
65 if notifiers.kodi_notifier.update_library():
66 ui.notifications.message(f'Library update command sent to KODI host(s): {host}')
67 else:
68 ui.notifications.error(f'Unable to contact one or more KODI host(s): {host}')
69
70 return self._created()
71
72 def emby_update(self):
73 """Update emby's show library."""
74 show_slug = self.get_argument('showslug', '')
75 show = None
76
77 if show_slug:
78 show_identifier = SeriesIdentifier.from_slug(show_slug)
79 if not show_identifier:
80 return self._bad_request('Invalid show slug')
81
82 show = Series.find_by_identifier(show_identifier)
83 if not show:
84 return self._not_found('Series not found')
85
86 if notifiers.emby_notifier.update_library(show):
87 ui.notifications.message(f'Library update command sent to Emby host: {app.EMBY_HOST}')
88 else:
89 ui.notifications.error(f'Unable to contact Emby host: {app.EMBY_HOST}')
90
91 return self._created()
92
93 def plex_update(self):
94 """Update plex's show library."""
95 if None is notifiers.plex_notifier.update_library():
96 ui.notifications.message(
97 'Library update command sent to Plex Media Server host: {host}'.format(host=', '.join(app.PLEX_SERVER_HOST)))
98 else:
99 ui.notifications.error('Unable to contact Plex Media Server host: {host}'.format(host=', '.join(app.PLEX_SERVER_HOST)))
100
101 return self._created()
102
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/medusa/server/api/v2/notifications.py b/medusa/server/api/v2/notifications.py
--- a/medusa/server/api/v2/notifications.py
+++ b/medusa/server/api/v2/notifications.py
@@ -38,7 +38,7 @@
return self._bad_request('You must provide a notifications resource name')
available_resources = (
- 'kodi', 'plexserver', 'plexhome', 'emby', 'nmj', 'nmjv2', 'trakt'
+ 'kodi', 'plexserver', 'plexhome', 'emby', 'nmj', 'nmjv2', 'trakt', 'plex'
)
if resource not in available_resources:
@@ -92,10 +92,10 @@
def plex_update(self):
"""Update plex's show library."""
- if None is notifiers.plex_notifier.update_library():
+ if not notifiers.plex_notifier.update_library():
ui.notifications.message(
- 'Library update command sent to Plex Media Server host: {host}'.format(host=', '.join(app.PLEX_SERVER_HOST)))
+ f"Library update command sent to Plex Media Server host: {', '.join(app.PLEX_SERVER_HOST)}")
else:
- ui.notifications.error('Unable to contact Plex Media Server host: {host}'.format(host=', '.join(app.PLEX_SERVER_HOST)))
+ ui.notifications.error(f"Unable to contact Plex Media Server host: {', '.join(app.PLEX_SERVER_HOST)}")
return self._created()
| {"golden_diff": "diff --git a/medusa/server/api/v2/notifications.py b/medusa/server/api/v2/notifications.py\n--- a/medusa/server/api/v2/notifications.py\n+++ b/medusa/server/api/v2/notifications.py\n@@ -38,7 +38,7 @@\n return self._bad_request('You must provide a notifications resource name')\n \n available_resources = (\n- 'kodi', 'plexserver', 'plexhome', 'emby', 'nmj', 'nmjv2', 'trakt'\n+ 'kodi', 'plexserver', 'plexhome', 'emby', 'nmj', 'nmjv2', 'trakt', 'plex'\n )\n \n if resource not in available_resources:\n@@ -92,10 +92,10 @@\n \n def plex_update(self):\n \"\"\"Update plex's show library.\"\"\"\n- if None is notifiers.plex_notifier.update_library():\n+ if not notifiers.plex_notifier.update_library():\n ui.notifications.message(\n- 'Library update command sent to Plex Media Server host: {host}'.format(host=', '.join(app.PLEX_SERVER_HOST)))\n+ f\"Library update command sent to Plex Media Server host: {', '.join(app.PLEX_SERVER_HOST)}\")\n else:\n- ui.notifications.error('Unable to contact Plex Media Server host: {host}'.format(host=', '.join(app.PLEX_SERVER_HOST)))\n+ ui.notifications.error(f\"Unable to contact Plex Media Server host: {', '.join(app.PLEX_SERVER_HOST)}\")\n \n return self._created()\n", "issue": "Update PMS library not working\nHi,\r\n\r\nAll boxes from Notifications -> Plex Media server are completed with the correct information (token, user, password, server ip:port) and I`m receiving the blue box with (error trying to update plex).\r\nWhat is surprisingly, when using the Test Plex Media Server, the test is ok even if the Token field is empty.\r\n\r\nError received:\r\nThread_2 :: [] PLEX: Unauthorized. Please set TOKEN or USERNAME and PASSWORD in Plex settings\n", "before_files": [{"content": "# coding=utf-8\n\"\"\"Request handler for notifications data.\"\"\"\nfrom __future__ import unicode_literals\n\n\nimport logging\nimport re\n\nfrom medusa import app, notifiers, ui\nfrom medusa.logger.adapters.style import BraceAdapter\nfrom medusa.server.api.v2.base import BaseRequestHandler\nfrom medusa.tv.series import Series, SeriesIdentifier\n\nlog = BraceAdapter(logging.getLogger(__name__))\nlog.logger.addHandler(logging.NullHandler())\n\n\nclass NotificationsHandler(BaseRequestHandler):\n \"\"\"Notifications data request handler.\"\"\"\n\n #: resource name\n name = 'notifications'\n #: identifier\n identifier = ('resource', r'\\w+')\n #: path param\n path_param = ('path_param', r'\\w+')\n #: allowed HTTP methods\n allowed_methods = ('GET', 'POST')\n\n def post(self, resource, path_param=None):\n \"\"\"Post Notifications actions for a specific external source.\n\n :param resource: a resource name\n :param path_param:\n :type path_param: str\n \"\"\"\n if resource is None:\n return self._bad_request('You must provide a notifications resource name')\n\n available_resources = (\n 'kodi', 'plexserver', 'plexhome', 'emby', 'nmj', 'nmjv2', 'trakt'\n )\n\n if resource not in available_resources:\n return self._bad_request(f\"Resource must be one of {', '.join(available_resources)}\")\n\n # Convert 'camelCase' to 'resource_snake_case'\n resource_function_name = resource + '_' + re.sub('([A-Z]+)', r'_\\1', path_param).lower()\n resource_function = getattr(self, resource_function_name, None)\n\n if resource_function is None:\n log.error('Unable to get function \"{func}\" for resource \"{resource}\"',\n {'func': resource_function_name, 'resource': path_param})\n return self._bad_request('{key} is a invalid resource'.format(key=path_param))\n\n return resource_function()\n\n def 
kodi_update(self):\n \"\"\"Update kodi's show library.\"\"\"\n if app.KODI_UPDATE_ONLYFIRST:\n host = app.KODI_HOST[0].strip()\n else:\n host = ', '.join(app.KODI_HOST)\n\n if notifiers.kodi_notifier.update_library():\n ui.notifications.message(f'Library update command sent to KODI host(s): {host}')\n else:\n ui.notifications.error(f'Unable to contact one or more KODI host(s): {host}')\n\n return self._created()\n\n def emby_update(self):\n \"\"\"Update emby's show library.\"\"\"\n show_slug = self.get_argument('showslug', '')\n show = None\n\n if show_slug:\n show_identifier = SeriesIdentifier.from_slug(show_slug)\n if not show_identifier:\n return self._bad_request('Invalid show slug')\n\n show = Series.find_by_identifier(show_identifier)\n if not show:\n return self._not_found('Series not found')\n\n if notifiers.emby_notifier.update_library(show):\n ui.notifications.message(f'Library update command sent to Emby host: {app.EMBY_HOST}')\n else:\n ui.notifications.error(f'Unable to contact Emby host: {app.EMBY_HOST}')\n\n return self._created()\n\n def plex_update(self):\n \"\"\"Update plex's show library.\"\"\"\n if None is notifiers.plex_notifier.update_library():\n ui.notifications.message(\n 'Library update command sent to Plex Media Server host: {host}'.format(host=', '.join(app.PLEX_SERVER_HOST)))\n else:\n ui.notifications.error('Unable to contact Plex Media Server host: {host}'.format(host=', '.join(app.PLEX_SERVER_HOST)))\n\n return self._created()\n", "path": "medusa/server/api/v2/notifications.py"}], "after_files": [{"content": "# coding=utf-8\n\"\"\"Request handler for notifications data.\"\"\"\nfrom __future__ import unicode_literals\n\n\nimport logging\nimport re\n\nfrom medusa import app, notifiers, ui\nfrom medusa.logger.adapters.style import BraceAdapter\nfrom medusa.server.api.v2.base import BaseRequestHandler\nfrom medusa.tv.series import Series, SeriesIdentifier\n\nlog = BraceAdapter(logging.getLogger(__name__))\nlog.logger.addHandler(logging.NullHandler())\n\n\nclass NotificationsHandler(BaseRequestHandler):\n \"\"\"Notifications data request handler.\"\"\"\n\n #: resource name\n name = 'notifications'\n #: identifier\n identifier = ('resource', r'\\w+')\n #: path param\n path_param = ('path_param', r'\\w+')\n #: allowed HTTP methods\n allowed_methods = ('GET', 'POST')\n\n def post(self, resource, path_param=None):\n \"\"\"Post Notifications actions for a specific external source.\n\n :param resource: a resource name\n :param path_param:\n :type path_param: str\n \"\"\"\n if resource is None:\n return self._bad_request('You must provide a notifications resource name')\n\n available_resources = (\n 'kodi', 'plexserver', 'plexhome', 'emby', 'nmj', 'nmjv2', 'trakt', 'plex'\n )\n\n if resource not in available_resources:\n return self._bad_request(f\"Resource must be one of {', '.join(available_resources)}\")\n\n # Convert 'camelCase' to 'resource_snake_case'\n resource_function_name = resource + '_' + re.sub('([A-Z]+)', r'_\\1', path_param).lower()\n resource_function = getattr(self, resource_function_name, None)\n\n if resource_function is None:\n log.error('Unable to get function \"{func}\" for resource \"{resource}\"',\n {'func': resource_function_name, 'resource': path_param})\n return self._bad_request('{key} is a invalid resource'.format(key=path_param))\n\n return resource_function()\n\n def kodi_update(self):\n \"\"\"Update kodi's show library.\"\"\"\n if app.KODI_UPDATE_ONLYFIRST:\n host = app.KODI_HOST[0].strip()\n else:\n host = ', '.join(app.KODI_HOST)\n\n if 
notifiers.kodi_notifier.update_library():\n ui.notifications.message(f'Library update command sent to KODI host(s): {host}')\n else:\n ui.notifications.error(f'Unable to contact one or more KODI host(s): {host}')\n\n return self._created()\n\n def emby_update(self):\n \"\"\"Update emby's show library.\"\"\"\n show_slug = self.get_argument('showslug', '')\n show = None\n\n if show_slug:\n show_identifier = SeriesIdentifier.from_slug(show_slug)\n if not show_identifier:\n return self._bad_request('Invalid show slug')\n\n show = Series.find_by_identifier(show_identifier)\n if not show:\n return self._not_found('Series not found')\n\n if notifiers.emby_notifier.update_library(show):\n ui.notifications.message(f'Library update command sent to Emby host: {app.EMBY_HOST}')\n else:\n ui.notifications.error(f'Unable to contact Emby host: {app.EMBY_HOST}')\n\n return self._created()\n\n def plex_update(self):\n \"\"\"Update plex's show library.\"\"\"\n if not notifiers.plex_notifier.update_library():\n ui.notifications.message(\n f\"Library update command sent to Plex Media Server host: {', '.join(app.PLEX_SERVER_HOST)}\")\n else:\n ui.notifications.error(f\"Unable to contact Plex Media Server host: {', '.join(app.PLEX_SERVER_HOST)}\")\n\n return self._created()\n", "path": "medusa/server/api/v2/notifications.py"}]} | 1,362 | 340 |
gh_patches_debug_529 | rasdani/github-patches | git_diff | pyscript__pyscript-1941 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Py Terminal issues aggregation
### Checklist
- [X] I added a descriptive title
- [X] I searched for other issues and couldn't find a solution or duplication
- [X] I already searched in Google and didn't find any good information or help
### What happened?
We have at least 3 issues in the current PyTerminal:
- [x] errors (on main?) are actually not shown
- [x] escape chars, **only on main**, are not recognized
- [x] the config relative URL is resolved differently between main and worker ... this is probably not strictly related to PyTerminal, but it's surely worth fixing too, because otherwise terminals can't easily switch from main to worker and/or vice-versa (see https://github.com/pyscript/polyscript/issues/77)
### What browsers are you seeing the problem on? (if applicable)
_No response_
### Console info
_No response_
### Additional Context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyscript.core/src/stdlib/pyscript/magic_js.py`
Content:
```
1 import sys
2
3 import js as globalThis
4 from polyscript import js_modules
5 from pyscript.util import NotSupported
6
7 RUNNING_IN_WORKER = not hasattr(globalThis, "document")
8
9
10 # allow `from pyscript.js_modules.xxx import yyy`
11 class JSModule(object):
12 def __init__(self, name):
13 self.name = name
14
15 def __getattr__(self, field):
16 # avoid pyodide looking for non existent fields
17 if not field.startswith("_"):
18 return getattr(getattr(js_modules, self.name), field)
19
20
21 # generate N modules in the system that will proxy the real value
22 for name in globalThis.Reflect.ownKeys(js_modules):
23 sys.modules[f"pyscript.js_modules.{name}"] = JSModule(name)
24 sys.modules["pyscript.js_modules"] = js_modules
25
26 if RUNNING_IN_WORKER:
27 import js
28 import polyscript
29
30 PyWorker = NotSupported(
31 "pyscript.PyWorker",
32 "pyscript.PyWorker works only when running in the main thread",
33 )
34 window = polyscript.xworker.window
35 document = window.document
36 js.document = document
37 sync = polyscript.xworker.sync
38
39 # in workers the display does not have a default ID
40 # but there is a sync utility from xworker
41 def current_target():
42 return polyscript.target
43
44 else:
45 import _pyscript
46 from _pyscript import PyWorker
47
48 window = globalThis
49 document = globalThis.document
50 sync = NotSupported(
51 "pyscript.sync", "pyscript.sync works only when running in a worker"
52 )
53
54 # in MAIN the current element target exist, just use it
55 def current_target():
56 return _pyscript.target
57
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pyscript.core/src/stdlib/pyscript/magic_js.py b/pyscript.core/src/stdlib/pyscript/magic_js.py
--- a/pyscript.core/src/stdlib/pyscript/magic_js.py
+++ b/pyscript.core/src/stdlib/pyscript/magic_js.py
@@ -8,7 +8,7 @@
# allow `from pyscript.js_modules.xxx import yyy`
-class JSModule(object):
+class JSModule:
def __init__(self, name):
self.name = name
| {"golden_diff": "diff --git a/pyscript.core/src/stdlib/pyscript/magic_js.py b/pyscript.core/src/stdlib/pyscript/magic_js.py\n--- a/pyscript.core/src/stdlib/pyscript/magic_js.py\n+++ b/pyscript.core/src/stdlib/pyscript/magic_js.py\n@@ -8,7 +8,7 @@\n \n \n # allow `from pyscript.js_modules.xxx import yyy`\n-class JSModule(object):\n+class JSModule:\n def __init__(self, name):\n self.name = name\n", "issue": "Py Terminal issues aggregation\n### Checklist\r\n\r\n- [X] I added a descriptive title\r\n- [X] I searched for other issues and couldn't find a solution or duplication\r\n- [X] I already searched in Google and didn't find any good information or help\r\n\r\n### What happened?\r\n\r\nWe have at least 3 issues in the current PyTerminal:\r\n\r\n- [x] errors (on main?) are actually not shown\r\n- [x] escape chars, **only on main**, are not recognized\r\n- [x] the config relative URL is resolved differently between main and worker ... this is probably not strictly related to PyTerminal but it's surely worth fixing this too because otherwise terminals can't easily switch from main to worker and/or vice-versa (see https://github.com/pyscript/polyscript/issues/77)\r\n\r\n### What browsers are you seeing the problem on? (if applicable)\r\n\r\n_No response_\r\n\r\n### Console info\r\n\r\n_No response_\r\n\r\n### Additional Context\r\n\r\n_No response_\n", "before_files": [{"content": "import sys\n\nimport js as globalThis\nfrom polyscript import js_modules\nfrom pyscript.util import NotSupported\n\nRUNNING_IN_WORKER = not hasattr(globalThis, \"document\")\n\n\n# allow `from pyscript.js_modules.xxx import yyy`\nclass JSModule(object):\n def __init__(self, name):\n self.name = name\n\n def __getattr__(self, field):\n # avoid pyodide looking for non existent fields\n if not field.startswith(\"_\"):\n return getattr(getattr(js_modules, self.name), field)\n\n\n# generate N modules in the system that will proxy the real value\nfor name in globalThis.Reflect.ownKeys(js_modules):\n sys.modules[f\"pyscript.js_modules.{name}\"] = JSModule(name)\nsys.modules[\"pyscript.js_modules\"] = js_modules\n\nif RUNNING_IN_WORKER:\n import js\n import polyscript\n\n PyWorker = NotSupported(\n \"pyscript.PyWorker\",\n \"pyscript.PyWorker works only when running in the main thread\",\n )\n window = polyscript.xworker.window\n document = window.document\n js.document = document\n sync = polyscript.xworker.sync\n\n # in workers the display does not have a default ID\n # but there is a sync utility from xworker\n def current_target():\n return polyscript.target\n\nelse:\n import _pyscript\n from _pyscript import PyWorker\n\n window = globalThis\n document = globalThis.document\n sync = NotSupported(\n \"pyscript.sync\", \"pyscript.sync works only when running in a worker\"\n )\n\n # in MAIN the current element target exist, just use it\n def current_target():\n return _pyscript.target\n", "path": "pyscript.core/src/stdlib/pyscript/magic_js.py"}], "after_files": [{"content": "import sys\n\nimport js as globalThis\nfrom polyscript import js_modules\nfrom pyscript.util import NotSupported\n\nRUNNING_IN_WORKER = not hasattr(globalThis, \"document\")\n\n\n# allow `from pyscript.js_modules.xxx import yyy`\nclass JSModule:\n def __init__(self, name):\n self.name = name\n\n def __getattr__(self, field):\n # avoid pyodide looking for non existent fields\n if not field.startswith(\"_\"):\n return getattr(getattr(js_modules, self.name), field)\n\n\n# generate N modules in the system that will proxy the real value\nfor name in 
globalThis.Reflect.ownKeys(js_modules):\n sys.modules[f\"pyscript.js_modules.{name}\"] = JSModule(name)\nsys.modules[\"pyscript.js_modules\"] = js_modules\n\nif RUNNING_IN_WORKER:\n import js\n import polyscript\n\n PyWorker = NotSupported(\n \"pyscript.PyWorker\",\n \"pyscript.PyWorker works only when running in the main thread\",\n )\n window = polyscript.xworker.window\n document = window.document\n js.document = document\n sync = polyscript.xworker.sync\n\n # in workers the display does not have a default ID\n # but there is a sync utility from xworker\n def current_target():\n return polyscript.target\n\nelse:\n import _pyscript\n from _pyscript import PyWorker\n\n window = globalThis\n document = globalThis.document\n sync = NotSupported(\n \"pyscript.sync\", \"pyscript.sync works only when running in a worker\"\n )\n\n # in MAIN the current element target exist, just use it\n def current_target():\n return _pyscript.target\n", "path": "pyscript.core/src/stdlib/pyscript/magic_js.py"}]} | 960 | 117 |
gh_patches_debug_36418 | rasdani/github-patches | git_diff | conan-io__conan-center-index-5322 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[request] <pkgconf>/<1.7.4>
The above-mentioned version is newly released by the upstream project and not yet available as a recipe. Please add this version.
The pkgconf recipe retrieves the sources from [https://distfiles.dereferenced.org/pkgconf](https://distfiles.dereferenced.org/pkgconf) but a git repo is available: https://github.com/pkgconf/pkgconf/tree/pkgconf-1.7.4
The distfiles website is reporting an expired SSL certificate if I try to install pkgconf/1.7.3
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `recipes/pkgconf/all/conanfile.py`
Content:
```
1 from conans import ConanFile, Meson, tools
2 import os
3
4 required_conan_version = ">= 1.29.1"
5
6
7 class PkgConfConan(ConanFile):
8 name = "pkgconf"
9 url = "https://github.com/conan-io/conan-center-index"
10 homepage = "https://git.sr.ht/~kaniini/pkgconf"
11 topics = ("conan", "pkgconf")
12 settings = "os", "arch", "compiler", "build_type"
13 license = "ISC"
14 description = "package compiler and linker metadata toolkit"
15 exports_sources = "patches/**"
16 options = {
17 "shared": [True, False],
18 "fPIC": [True, False],
19 }
20 default_options = {
21 "shared": False,
22 "fPIC": True,
23 }
24
25 _meson = None
26
27 @property
28 def _source_subfolder(self):
29 return "source_subfolder"
30
31 @property
32 def _build_subfolder(self):
33 return "build_subfolder"
34
35 def config_options(self):
36 if self.settings.os == "Windows":
37 del self.options.fPIC
38
39 def configure(self):
40 if self.options.shared:
41 del self.options.fPIC
42 del self.settings.compiler.libcxx
43 del self.settings.compiler.cppstd
44
45 def source(self):
46 tools.get(**self.conan_data["sources"][self.version])
47 os.rename("pkgconf-{}".format(self.version), self._source_subfolder)
48
49 def build_requirements(self):
50 self.build_requires("meson/0.56.2")
51
52 @property
53 def _sharedstatedir(self):
54 return os.path.join(self.package_folder, "bin", "share")
55
56 def _configure_meson(self):
57 if self._meson:
58 return self._meson
59 self._meson = Meson(self)
60 self._meson.options["tests"] = False
61 self._meson.options["sharedstatedir"] = self._sharedstatedir
62 self._meson.configure(source_folder=self._source_subfolder, build_folder=self._build_subfolder)
63 return self._meson
64
65 def _patch_sources(self):
66 for patch in self.conan_data["patches"][self.version]:
67 tools.patch(**patch)
68 tools.replace_in_file(os.path.join(self._source_subfolder, "meson.build"),
69 "shared_library(", "library(")
70 if not self.options.shared:
71 tools.replace_in_file(os.path.join(self._source_subfolder, "meson.build"),
72 "'-DLIBPKGCONF_EXPORT'",
73 "'-DPKGCONFIG_IS_STATIC'")
74
75 def build(self):
76 self._patch_sources()
77 meson = self._configure_meson()
78 meson.build()
79
80 def package(self):
81 self.copy("COPYING", src=self._source_subfolder, dst="licenses")
82 meson = self._meson
83 meson.install()
84
85 if self.settings.compiler == "Visual Studio":
86 tools.remove_files_by_mask(os.path.join(self.package_folder, "bin"), "*.pdb")
87 if not self.options.shared:
88 os.rename(os.path.join(self.package_folder, "lib", "libpkgconf.a"),
89 os.path.join(self.package_folder, "lib", "pkgconf.lib"),)
90
91 tools.rmdir(os.path.join(self.package_folder, "share", "man"))
92 os.rename(os.path.join(self.package_folder, "share", "aclocal"),
93 os.path.join(self.package_folder, "bin", "aclocal"))
94 tools.rmdir(os.path.join(self.package_folder, "share"))
95
96 def package_info(self):
97 self.cpp_info.includedirs.append(os.path.join("include", "libpkgconf"))
98 self.cpp_info.libs = ["pkgconf"]
99 if not self.options.shared:
100 self.cpp_info.defines = ["PKGCONFIG_IS_STATIC"]
101
102 bindir = os.path.join(self.package_folder, "bin")
103 self.output.info("Appending PATH env var: {}".format(bindir))
104 self.env_info.PATH.append(bindir)
105
106 exesuffix = ".exe" if self.settings.os == "Windows" else ""
107 pkg_config = os.path.join(bindir, "pkgconf" + exesuffix).replace("\\", "/")
108 self.output.info("Setting PKG_CONFIG env var: {}".format(pkg_config))
109 self.env_info.PKG_CONFIG = pkg_config
110
111 automake_extra_includes = tools.unix_path(os.path.join(self.package_folder , "bin", "aclocal").replace("\\", "/"))
112 self.output.info("Appending AUTOMAKE_CONAN_INCLUDES env var: {}".format(automake_extra_includes))
113 self.env_info.AUTOMAKE_CONAN_INCLUDES.append(automake_extra_includes)
114
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/recipes/pkgconf/all/conanfile.py b/recipes/pkgconf/all/conanfile.py
--- a/recipes/pkgconf/all/conanfile.py
+++ b/recipes/pkgconf/all/conanfile.py
@@ -47,7 +47,7 @@
os.rename("pkgconf-{}".format(self.version), self._source_subfolder)
def build_requirements(self):
- self.build_requires("meson/0.56.2")
+ self.build_requires("meson/0.57.2")
@property
def _sharedstatedir(self):
@@ -65,12 +65,16 @@
def _patch_sources(self):
for patch in self.conan_data["patches"][self.version]:
tools.patch(**patch)
- tools.replace_in_file(os.path.join(self._source_subfolder, "meson.build"),
- "shared_library(", "library(")
+ if tools.Version(self.version) < "1.7.4":
+ tools.replace_in_file(os.path.join(self._source_subfolder, "meson.build"),
+ "shared_library(", "library(")
if not self.options.shared:
tools.replace_in_file(os.path.join(self._source_subfolder, "meson.build"),
"'-DLIBPKGCONF_EXPORT'",
"'-DPKGCONFIG_IS_STATIC'")
+ tools.replace_in_file(os.path.join(self._source_subfolder, "meson.build"),
+ "project('pkgconf', 'c',",
+ "project('pkgconf', 'c',\ndefault_options : ['c_std=gnu99'],")
def build(self):
self._patch_sources()
@@ -92,9 +96,14 @@
os.rename(os.path.join(self.package_folder, "share", "aclocal"),
os.path.join(self.package_folder, "bin", "aclocal"))
tools.rmdir(os.path.join(self.package_folder, "share"))
+ tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
def package_info(self):
- self.cpp_info.includedirs.append(os.path.join("include", "libpkgconf"))
+ if tools.Version(self.version) < "1.7.4":
+ self.cpp_info.includedirs.append("include")
+ else:
+ self.cpp_info.includedirs.append(os.path.join("include", "pkgconf"))
+ self.cpp_info.names["pkg_config"] = "libpkgconf"
self.cpp_info.libs = ["pkgconf"]
if not self.options.shared:
self.cpp_info.defines = ["PKGCONFIG_IS_STATIC"]
| {"golden_diff": "diff --git a/recipes/pkgconf/all/conanfile.py b/recipes/pkgconf/all/conanfile.py\n--- a/recipes/pkgconf/all/conanfile.py\n+++ b/recipes/pkgconf/all/conanfile.py\n@@ -47,7 +47,7 @@\n os.rename(\"pkgconf-{}\".format(self.version), self._source_subfolder)\n \n def build_requirements(self):\n- self.build_requires(\"meson/0.56.2\")\n+ self.build_requires(\"meson/0.57.2\")\n \n @property\n def _sharedstatedir(self):\n@@ -65,12 +65,16 @@\n def _patch_sources(self):\n for patch in self.conan_data[\"patches\"][self.version]:\n tools.patch(**patch)\n- tools.replace_in_file(os.path.join(self._source_subfolder, \"meson.build\"),\n- \"shared_library(\", \"library(\")\n+ if tools.Version(self.version) < \"1.7.4\":\n+ tools.replace_in_file(os.path.join(self._source_subfolder, \"meson.build\"),\n+ \"shared_library(\", \"library(\")\n if not self.options.shared:\n tools.replace_in_file(os.path.join(self._source_subfolder, \"meson.build\"),\n \"'-DLIBPKGCONF_EXPORT'\",\n \"'-DPKGCONFIG_IS_STATIC'\")\n+ tools.replace_in_file(os.path.join(self._source_subfolder, \"meson.build\"),\n+ \"project('pkgconf', 'c',\",\n+ \"project('pkgconf', 'c',\\ndefault_options : ['c_std=gnu99'],\")\n \n def build(self):\n self._patch_sources()\n@@ -92,9 +96,14 @@\n os.rename(os.path.join(self.package_folder, \"share\", \"aclocal\"),\n os.path.join(self.package_folder, \"bin\", \"aclocal\"))\n tools.rmdir(os.path.join(self.package_folder, \"share\"))\n+ tools.rmdir(os.path.join(self.package_folder, \"lib\", \"pkgconfig\"))\n \n def package_info(self):\n- self.cpp_info.includedirs.append(os.path.join(\"include\", \"libpkgconf\"))\n+ if tools.Version(self.version) < \"1.7.4\":\n+ self.cpp_info.includedirs.append(\"include\")\n+ else:\n+ self.cpp_info.includedirs.append(os.path.join(\"include\", \"pkgconf\"))\n+ self.cpp_info.names[\"pkg_config\"] = \"libpkgconf\"\n self.cpp_info.libs = [\"pkgconf\"]\n if not self.options.shared:\n self.cpp_info.defines = [\"PKGCONFIG_IS_STATIC\"]\n", "issue": "[request] <pkgconf>/<1.7.4>\nThe above mentioned version is newly released by the upstream project and not yet available as a recipe. 
Please add this version.\r\n\r\nThe pkgconf recipe retrieves the sources from [https://distfiles.dereferenced.org/pkgconf](https://distfiles.dereferenced.org/pkgconf) but a git repo is available: https://github.com/pkgconf/pkgconf/tree/pkgconf-1.7.4 \r\n\r\nThe distfiles website is reporting an expired SSL cerrtificate if I try to install pkgconf/1.7.3\r\n\n", "before_files": [{"content": "from conans import ConanFile, Meson, tools\nimport os\n\nrequired_conan_version = \">= 1.29.1\"\n\n\nclass PkgConfConan(ConanFile):\n name = \"pkgconf\"\n url = \"https://github.com/conan-io/conan-center-index\"\n homepage = \"https://git.sr.ht/~kaniini/pkgconf\"\n topics = (\"conan\", \"pkgconf\")\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n license = \"ISC\"\n description = \"package compiler and linker metadata toolkit\"\n exports_sources = \"patches/**\"\n options = {\n \"shared\": [True, False],\n \"fPIC\": [True, False],\n }\n default_options = {\n \"shared\": False,\n \"fPIC\": True,\n }\n\n _meson = None\n\n @property\n def _source_subfolder(self):\n return \"source_subfolder\"\n\n @property\n def _build_subfolder(self):\n return \"build_subfolder\"\n\n def config_options(self):\n if self.settings.os == \"Windows\":\n del self.options.fPIC\n\n def configure(self):\n if self.options.shared:\n del self.options.fPIC\n del self.settings.compiler.libcxx\n del self.settings.compiler.cppstd\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n os.rename(\"pkgconf-{}\".format(self.version), self._source_subfolder)\n\n def build_requirements(self):\n self.build_requires(\"meson/0.56.2\")\n\n @property\n def _sharedstatedir(self):\n return os.path.join(self.package_folder, \"bin\", \"share\")\n\n def _configure_meson(self):\n if self._meson:\n return self._meson\n self._meson = Meson(self)\n self._meson.options[\"tests\"] = False\n self._meson.options[\"sharedstatedir\"] = self._sharedstatedir\n self._meson.configure(source_folder=self._source_subfolder, build_folder=self._build_subfolder)\n return self._meson\n\n def _patch_sources(self):\n for patch in self.conan_data[\"patches\"][self.version]:\n tools.patch(**patch)\n tools.replace_in_file(os.path.join(self._source_subfolder, \"meson.build\"),\n \"shared_library(\", \"library(\")\n if not self.options.shared:\n tools.replace_in_file(os.path.join(self._source_subfolder, \"meson.build\"),\n \"'-DLIBPKGCONF_EXPORT'\",\n \"'-DPKGCONFIG_IS_STATIC'\")\n\n def build(self):\n self._patch_sources()\n meson = self._configure_meson()\n meson.build()\n\n def package(self):\n self.copy(\"COPYING\", src=self._source_subfolder, dst=\"licenses\")\n meson = self._meson\n meson.install()\n\n if self.settings.compiler == \"Visual Studio\":\n tools.remove_files_by_mask(os.path.join(self.package_folder, \"bin\"), \"*.pdb\")\n if not self.options.shared:\n os.rename(os.path.join(self.package_folder, \"lib\", \"libpkgconf.a\"),\n os.path.join(self.package_folder, \"lib\", \"pkgconf.lib\"),)\n\n tools.rmdir(os.path.join(self.package_folder, \"share\", \"man\"))\n os.rename(os.path.join(self.package_folder, \"share\", \"aclocal\"),\n os.path.join(self.package_folder, \"bin\", \"aclocal\"))\n tools.rmdir(os.path.join(self.package_folder, \"share\"))\n\n def package_info(self):\n self.cpp_info.includedirs.append(os.path.join(\"include\", \"libpkgconf\"))\n self.cpp_info.libs = [\"pkgconf\"]\n if not self.options.shared:\n self.cpp_info.defines = [\"PKGCONFIG_IS_STATIC\"]\n\n bindir = os.path.join(self.package_folder, \"bin\")\n 
self.output.info(\"Appending PATH env var: {}\".format(bindir))\n self.env_info.PATH.append(bindir)\n\n exesuffix = \".exe\" if self.settings.os == \"Windows\" else \"\"\n pkg_config = os.path.join(bindir, \"pkgconf\" + exesuffix).replace(\"\\\\\", \"/\")\n self.output.info(\"Setting PKG_CONFIG env var: {}\".format(pkg_config))\n self.env_info.PKG_CONFIG = pkg_config\n\n automake_extra_includes = tools.unix_path(os.path.join(self.package_folder , \"bin\", \"aclocal\").replace(\"\\\\\", \"/\"))\n self.output.info(\"Appending AUTOMAKE_CONAN_INCLUDES env var: {}\".format(automake_extra_includes))\n self.env_info.AUTOMAKE_CONAN_INCLUDES.append(automake_extra_includes)\n", "path": "recipes/pkgconf/all/conanfile.py"}], "after_files": [{"content": "from conans import ConanFile, Meson, tools\nimport os\n\nrequired_conan_version = \">= 1.29.1\"\n\n\nclass PkgConfConan(ConanFile):\n name = \"pkgconf\"\n url = \"https://github.com/conan-io/conan-center-index\"\n homepage = \"https://git.sr.ht/~kaniini/pkgconf\"\n topics = (\"conan\", \"pkgconf\")\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n license = \"ISC\"\n description = \"package compiler and linker metadata toolkit\"\n exports_sources = \"patches/**\"\n options = {\n \"shared\": [True, False],\n \"fPIC\": [True, False],\n }\n default_options = {\n \"shared\": False,\n \"fPIC\": True,\n }\n\n _meson = None\n\n @property\n def _source_subfolder(self):\n return \"source_subfolder\"\n\n @property\n def _build_subfolder(self):\n return \"build_subfolder\"\n\n def config_options(self):\n if self.settings.os == \"Windows\":\n del self.options.fPIC\n\n def configure(self):\n if self.options.shared:\n del self.options.fPIC\n del self.settings.compiler.libcxx\n del self.settings.compiler.cppstd\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n os.rename(\"pkgconf-{}\".format(self.version), self._source_subfolder)\n\n def build_requirements(self):\n self.build_requires(\"meson/0.57.2\")\n\n @property\n def _sharedstatedir(self):\n return os.path.join(self.package_folder, \"bin\", \"share\")\n\n def _configure_meson(self):\n if self._meson:\n return self._meson\n self._meson = Meson(self)\n self._meson.options[\"tests\"] = False\n self._meson.options[\"sharedstatedir\"] = self._sharedstatedir\n self._meson.configure(source_folder=self._source_subfolder, build_folder=self._build_subfolder)\n return self._meson\n\n def _patch_sources(self):\n for patch in self.conan_data[\"patches\"][self.version]:\n tools.patch(**patch)\n if tools.Version(self.version) < \"1.7.4\":\n tools.replace_in_file(os.path.join(self._source_subfolder, \"meson.build\"),\n \"shared_library(\", \"library(\")\n if not self.options.shared:\n tools.replace_in_file(os.path.join(self._source_subfolder, \"meson.build\"),\n \"'-DLIBPKGCONF_EXPORT'\",\n \"'-DPKGCONFIG_IS_STATIC'\")\n tools.replace_in_file(os.path.join(self._source_subfolder, \"meson.build\"),\n \"project('pkgconf', 'c',\",\n \"project('pkgconf', 'c',\\ndefault_options : ['c_std=gnu99'],\")\n\n def build(self):\n self._patch_sources()\n meson = self._configure_meson()\n meson.build()\n\n def package(self):\n self.copy(\"COPYING\", src=self._source_subfolder, dst=\"licenses\")\n meson = self._meson\n meson.install()\n\n if self.settings.compiler == \"Visual Studio\":\n tools.remove_files_by_mask(os.path.join(self.package_folder, \"bin\"), \"*.pdb\")\n if not self.options.shared:\n os.rename(os.path.join(self.package_folder, \"lib\", \"libpkgconf.a\"),\n 
os.path.join(self.package_folder, \"lib\", \"pkgconf.lib\"),)\n\n tools.rmdir(os.path.join(self.package_folder, \"share\", \"man\"))\n os.rename(os.path.join(self.package_folder, \"share\", \"aclocal\"),\n os.path.join(self.package_folder, \"bin\", \"aclocal\"))\n tools.rmdir(os.path.join(self.package_folder, \"share\"))\n tools.rmdir(os.path.join(self.package_folder, \"lib\", \"pkgconfig\"))\n\n def package_info(self):\n if tools.Version(self.version) < \"1.7.4\":\n self.cpp_info.includedirs.append(\"include\")\n else:\n self.cpp_info.includedirs.append(os.path.join(\"include\", \"pkgconf\"))\n self.cpp_info.names[\"pkg_config\"] = \"libpkgconf\"\n self.cpp_info.libs = [\"pkgconf\"]\n if not self.options.shared:\n self.cpp_info.defines = [\"PKGCONFIG_IS_STATIC\"]\n\n bindir = os.path.join(self.package_folder, \"bin\")\n self.output.info(\"Appending PATH env var: {}\".format(bindir))\n self.env_info.PATH.append(bindir)\n\n exesuffix = \".exe\" if self.settings.os == \"Windows\" else \"\"\n pkg_config = os.path.join(bindir, \"pkgconf\" + exesuffix).replace(\"\\\\\", \"/\")\n self.output.info(\"Setting PKG_CONFIG env var: {}\".format(pkg_config))\n self.env_info.PKG_CONFIG = pkg_config\n\n automake_extra_includes = tools.unix_path(os.path.join(self.package_folder , \"bin\", \"aclocal\").replace(\"\\\\\", \"/\"))\n self.output.info(\"Appending AUTOMAKE_CONAN_INCLUDES env var: {}\".format(automake_extra_includes))\n self.env_info.AUTOMAKE_CONAN_INCLUDES.append(automake_extra_includes)\n", "path": "recipes/pkgconf/all/conanfile.py"}]} | 1,633 | 563 |
gh_patches_debug_181 | rasdani/github-patches | git_diff | unionai-oss__pandera-1419 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Date type not exported
**Describe the bug**
In the `__all__` list [here](https://github.com/unionai-oss/pandera/blob/37c24d94ae719dcf4cdc36d1f204478539fce74a/pandera/__init__.py#L104-L106), the type `Date` is missing, causing complaints from mypy if you refer to the type as `pa.Date` -- you have to fully qualify it as `pa.typing.common.Date`.
- [x] I have checked that this issue has not already been reported.
- [x] I have confirmed this bug exists on the latest version of pandera.
- [x] (optional) I have confirmed this bug exists on the master branch of pandera.
**Note**: Please read [this guide](https://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports) detailing how to provide the necessary information for us to reproduce your bug.
#### Code Sample, a copy-pastable example
```python
import pandera as pa
# Mypy errors [name-defined]
class ErrorSchema(pa.DataFrameModel):
date_col: pa.Date
# Mypy is happy
class NoErrorSchema(pa.DataFrameModel):
date_col: pa.typing.common.Date
```
#### Expected behavior
No errors from mypy in both cases
#### Desktop (please complete the following information):
- OS: [Manjaro Linux kernel 6.1.60 - 1]
- Browser [Firefox 119.0]
- Version [pandera 0.17.2]
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pandera/__init__.py`
Content:
```
1 """A flexible and expressive pandas validation library."""
2 import platform
3
4 import pandera.backends
5 from pandera import errors, external_config, typing
6 from pandera.accessors import pandas_accessor
7 from pandera.api import extensions
8 from pandera.api.checks import Check
9 from pandera.api.hypotheses import Hypothesis
10 from pandera.api.pandas.array import SeriesSchema
11 from pandera.api.pandas.container import DataFrameSchema
12 from pandera.api.pandas.components import Column, Index, MultiIndex
13 from pandera.api.pandas.model import DataFrameModel, SchemaModel
14 from pandera.api.pandas.model_components import Field, check, dataframe_check
15 from pandera.decorators import check_input, check_io, check_output, check_types
16 from pandera.dtypes import (
17 Bool,
18 Category,
19 Complex,
20 Complex64,
21 Complex128,
22 DataType,
23 Date,
24 DateTime,
25 Decimal,
26 Float,
27 Float16,
28 Float32,
29 Float64,
30 Int,
31 Int8,
32 Int16,
33 Int32,
34 Int64,
35 String,
36 Timedelta,
37 Timestamp,
38 UInt,
39 UInt8,
40 UInt16,
41 UInt32,
42 UInt64,
43 )
44 from pandera.engines.numpy_engine import Object
45 from pandera.engines.pandas_engine import (
46 BOOL,
47 INT8,
48 INT16,
49 INT32,
50 INT64,
51 PANDAS_1_2_0_PLUS,
52 PANDAS_1_3_0_PLUS,
53 STRING,
54 UINT8,
55 UINT16,
56 UINT32,
57 UINT64,
58 pandas_version,
59 )
60
61 import pandera.backends.base.builtin_checks
62 import pandera.backends.base.builtin_hypotheses
63 import pandera.backends.pandas
64
65 from pandera.schema_inference.pandas import infer_schema
66 from pandera.version import __version__
67
68
69 if platform.system() != "Windows":
70 # pylint: disable=ungrouped-imports
71 from pandera.dtypes import Complex256, Float128
72
73
74 try:
75 import dask.dataframe
76
77 from pandera.accessors import dask_accessor
78 except ImportError:
79 pass
80
81
82 try:
83 import pyspark.pandas
84
85 from pandera.accessors import pyspark_accessor
86 except ImportError:
87 pass
88
89 try:
90 import modin.pandas
91
92 from pandera.accessors import modin_accessor
93 except ImportError:
94 pass
95
96 __all__ = [
97 # dtypes
98 "Bool",
99 "Category",
100 "Complex",
101 "Complex64",
102 "Complex128",
103 "Complex256",
104 "DataType",
105 "DateTime",
106 "Float",
107 "Float16",
108 "Float32",
109 "Float64",
110 "Float128",
111 "Int",
112 "Int8",
113 "Int16",
114 "Int32",
115 "Int64",
116 "String",
117 "Timedelta",
118 "Timestamp",
119 "UInt",
120 "UInt8",
121 "UInt16",
122 "UInt32",
123 "UInt64",
124 # numpy_engine
125 "Object",
126 # pandas_engine
127 "BOOL",
128 "INT8",
129 "INT16",
130 "INT32",
131 "INT64",
132 "PANDAS_1_3_0_PLUS",
133 "STRING",
134 "UINT8",
135 "UINT16",
136 "UINT32",
137 "UINT64",
138 # pandera.engines.pandas_engine
139 "pandas_version",
140 # checks
141 "Check",
142 # decorators
143 "check_input",
144 "check_io",
145 "check_output",
146 "check_types",
147 # hypotheses
148 "Hypothesis",
149 # model
150 "DataFrameModel",
151 "SchemaModel",
152 # model_components
153 "Field",
154 "check",
155 "dataframe_check",
156 # schema_components
157 "Column",
158 "Index",
159 "MultiIndex",
160 # schema_inference
161 "infer_schema",
162 # schemas
163 "DataFrameSchema",
164 "SeriesSchema",
165 # version
166 "__version__",
167 ]
168
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pandera/__init__.py b/pandera/__init__.py
--- a/pandera/__init__.py
+++ b/pandera/__init__.py
@@ -101,6 +101,7 @@
"Complex64",
"Complex128",
"Complex256",
+ "Date",
"DataType",
"DateTime",
"Float",
| {"golden_diff": "diff --git a/pandera/__init__.py b/pandera/__init__.py\n--- a/pandera/__init__.py\n+++ b/pandera/__init__.py\n@@ -101,6 +101,7 @@\n \"Complex64\",\n \"Complex128\",\n \"Complex256\",\n+ \"Date\",\n \"DataType\",\n \"DateTime\",\n \"Float\",\n", "issue": "Date type not exported\n**Describe the bug**\r\nIn the `__all__` list [here](https://github.com/unionai-oss/pandera/blob/37c24d94ae719dcf4cdc36d1f204478539fce74a/pandera/__init__.py#L104-L106), the type `Date` is missing, causing complaints from mypy if you refer to the type as `pa.Date` -- you have to fully qualify it as `pa.typing.common.Date`.\r\n\r\n- [x] I have checked that this issue has not already been reported.\r\n- [x] I have confirmed this bug exists on the latest version of pandera.\r\n- [x] (optional) I have confirmed this bug exists on the master branch of pandera.\r\n\r\n**Note**: Please read [this guide](https://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports) detailing how to provide the necessary information for us to reproduce your bug.\r\n\r\n#### Code Sample, a copy-pastable example\r\n\r\n```python\r\nimport pandera as pa\r\n\r\n# Mypy errors [name-defined]\r\nclass ErrorSchema(pa.DataFrameModel):\r\n date_col: pa.Date\r\n\r\n# Mypy is happy\r\nclass NoErrorSchema(pa.DataFrameModel):\r\n date_col: pa.typing.common.Date\r\n```\r\n\r\n#### Expected behavior\r\nNo errors from mypy in both cases\r\n\r\n#### Desktop (please complete the following information):\r\n\r\n - OS: [Manjaro Linux kernel 6.1.60 - 1]\r\n - Browser [Firefox 119.0]\r\n - Version [pandera 0.17.2]\n", "before_files": [{"content": "\"\"\"A flexible and expressive pandas validation library.\"\"\"\nimport platform\n\nimport pandera.backends\nfrom pandera import errors, external_config, typing\nfrom pandera.accessors import pandas_accessor\nfrom pandera.api import extensions\nfrom pandera.api.checks import Check\nfrom pandera.api.hypotheses import Hypothesis\nfrom pandera.api.pandas.array import SeriesSchema\nfrom pandera.api.pandas.container import DataFrameSchema\nfrom pandera.api.pandas.components import Column, Index, MultiIndex\nfrom pandera.api.pandas.model import DataFrameModel, SchemaModel\nfrom pandera.api.pandas.model_components import Field, check, dataframe_check\nfrom pandera.decorators import check_input, check_io, check_output, check_types\nfrom pandera.dtypes import (\n Bool,\n Category,\n Complex,\n Complex64,\n Complex128,\n DataType,\n Date,\n DateTime,\n Decimal,\n Float,\n Float16,\n Float32,\n Float64,\n Int,\n Int8,\n Int16,\n Int32,\n Int64,\n String,\n Timedelta,\n Timestamp,\n UInt,\n UInt8,\n UInt16,\n UInt32,\n UInt64,\n)\nfrom pandera.engines.numpy_engine import Object\nfrom pandera.engines.pandas_engine import (\n BOOL,\n INT8,\n INT16,\n INT32,\n INT64,\n PANDAS_1_2_0_PLUS,\n PANDAS_1_3_0_PLUS,\n STRING,\n UINT8,\n UINT16,\n UINT32,\n UINT64,\n pandas_version,\n)\n\nimport pandera.backends.base.builtin_checks\nimport pandera.backends.base.builtin_hypotheses\nimport pandera.backends.pandas\n\nfrom pandera.schema_inference.pandas import infer_schema\nfrom pandera.version import __version__\n\n\nif platform.system() != \"Windows\":\n # pylint: disable=ungrouped-imports\n from pandera.dtypes import Complex256, Float128\n\n\ntry:\n import dask.dataframe\n\n from pandera.accessors import dask_accessor\nexcept ImportError:\n pass\n\n\ntry:\n import pyspark.pandas\n\n from pandera.accessors import pyspark_accessor\nexcept ImportError:\n pass\n\ntry:\n import modin.pandas\n\n from pandera.accessors import 
modin_accessor\nexcept ImportError:\n pass\n\n__all__ = [\n # dtypes\n \"Bool\",\n \"Category\",\n \"Complex\",\n \"Complex64\",\n \"Complex128\",\n \"Complex256\",\n \"DataType\",\n \"DateTime\",\n \"Float\",\n \"Float16\",\n \"Float32\",\n \"Float64\",\n \"Float128\",\n \"Int\",\n \"Int8\",\n \"Int16\",\n \"Int32\",\n \"Int64\",\n \"String\",\n \"Timedelta\",\n \"Timestamp\",\n \"UInt\",\n \"UInt8\",\n \"UInt16\",\n \"UInt32\",\n \"UInt64\",\n # numpy_engine\n \"Object\",\n # pandas_engine\n \"BOOL\",\n \"INT8\",\n \"INT16\",\n \"INT32\",\n \"INT64\",\n \"PANDAS_1_3_0_PLUS\",\n \"STRING\",\n \"UINT8\",\n \"UINT16\",\n \"UINT32\",\n \"UINT64\",\n # pandera.engines.pandas_engine\n \"pandas_version\",\n # checks\n \"Check\",\n # decorators\n \"check_input\",\n \"check_io\",\n \"check_output\",\n \"check_types\",\n # hypotheses\n \"Hypothesis\",\n # model\n \"DataFrameModel\",\n \"SchemaModel\",\n # model_components\n \"Field\",\n \"check\",\n \"dataframe_check\",\n # schema_components\n \"Column\",\n \"Index\",\n \"MultiIndex\",\n # schema_inference\n \"infer_schema\",\n # schemas\n \"DataFrameSchema\",\n \"SeriesSchema\",\n # version\n \"__version__\",\n]\n", "path": "pandera/__init__.py"}], "after_files": [{"content": "\"\"\"A flexible and expressive pandas validation library.\"\"\"\nimport platform\n\nimport pandera.backends\nfrom pandera import errors, external_config, typing\nfrom pandera.accessors import pandas_accessor\nfrom pandera.api import extensions\nfrom pandera.api.checks import Check\nfrom pandera.api.hypotheses import Hypothesis\nfrom pandera.api.pandas.array import SeriesSchema\nfrom pandera.api.pandas.container import DataFrameSchema\nfrom pandera.api.pandas.components import Column, Index, MultiIndex\nfrom pandera.api.pandas.model import DataFrameModel, SchemaModel\nfrom pandera.api.pandas.model_components import Field, check, dataframe_check\nfrom pandera.decorators import check_input, check_io, check_output, check_types\nfrom pandera.dtypes import (\n Bool,\n Category,\n Complex,\n Complex64,\n Complex128,\n DataType,\n Date,\n DateTime,\n Decimal,\n Float,\n Float16,\n Float32,\n Float64,\n Int,\n Int8,\n Int16,\n Int32,\n Int64,\n String,\n Timedelta,\n Timestamp,\n UInt,\n UInt8,\n UInt16,\n UInt32,\n UInt64,\n)\nfrom pandera.engines.numpy_engine import Object\nfrom pandera.engines.pandas_engine import (\n BOOL,\n INT8,\n INT16,\n INT32,\n INT64,\n PANDAS_1_2_0_PLUS,\n PANDAS_1_3_0_PLUS,\n STRING,\n UINT8,\n UINT16,\n UINT32,\n UINT64,\n pandas_version,\n)\n\nimport pandera.backends.base.builtin_checks\nimport pandera.backends.base.builtin_hypotheses\nimport pandera.backends.pandas\n\nfrom pandera.schema_inference.pandas import infer_schema\nfrom pandera.version import __version__\n\n\nif platform.system() != \"Windows\":\n # pylint: disable=ungrouped-imports\n from pandera.dtypes import Complex256, Float128\n\n\ntry:\n import dask.dataframe\n\n from pandera.accessors import dask_accessor\nexcept ImportError:\n pass\n\n\ntry:\n import pyspark.pandas\n\n from pandera.accessors import pyspark_accessor\nexcept ImportError:\n pass\n\ntry:\n import modin.pandas\n\n from pandera.accessors import modin_accessor\nexcept ImportError:\n pass\n\n__all__ = [\n # dtypes\n \"Bool\",\n \"Category\",\n \"Complex\",\n \"Complex64\",\n \"Complex128\",\n \"Complex256\",\n \"Date\",\n \"DataType\",\n \"DateTime\",\n \"Float\",\n \"Float16\",\n \"Float32\",\n \"Float64\",\n \"Float128\",\n \"Int\",\n \"Int8\",\n \"Int16\",\n \"Int32\",\n \"Int64\",\n \"String\",\n \"Timedelta\",\n 
\"Timestamp\",\n \"UInt\",\n \"UInt8\",\n \"UInt16\",\n \"UInt32\",\n \"UInt64\",\n # numpy_engine\n \"Object\",\n # pandas_engine\n \"BOOL\",\n \"INT8\",\n \"INT16\",\n \"INT32\",\n \"INT64\",\n \"PANDAS_1_3_0_PLUS\",\n \"STRING\",\n \"UINT8\",\n \"UINT16\",\n \"UINT32\",\n \"UINT64\",\n # pandera.engines.pandas_engine\n \"pandas_version\",\n # checks\n \"Check\",\n # decorators\n \"check_input\",\n \"check_io\",\n \"check_output\",\n \"check_types\",\n # hypotheses\n \"Hypothesis\",\n # model\n \"DataFrameModel\",\n \"SchemaModel\",\n # model_components\n \"Field\",\n \"check\",\n \"dataframe_check\",\n # schema_components\n \"Column\",\n \"Index\",\n \"MultiIndex\",\n # schema_inference\n \"infer_schema\",\n # schemas\n \"DataFrameSchema\",\n \"SeriesSchema\",\n # version\n \"__version__\",\n]\n", "path": "pandera/__init__.py"}]} | 1,924 | 91 |
gh_patches_debug_18825 | rasdani/github-patches | git_diff | mesonbuild__meson-5303 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
scanbuild.py doesn't handle scan-build-{7,8}
Here: https://github.com/mesonbuild/meson/blob/1e7aea65e68a43b0319a4a28908daddfec621548/mesonbuild/scripts/scanbuild.py#L39
As with other LLVM/Clang tools (e.g. `llvm-config`), the version number suffix is just the major component since LLVM 7. The LLVM dependency support handles this correctly here: https://github.com/mesonbuild/meson/blob/54db2c9babe6391bba525f92573ceeadb8303e78/mesonbuild/dependencies/dev.py#L208
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mesonbuild/scripts/scanbuild.py`
Content:
```
1 # Copyright 2016 The Meson development team
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6
7 # http://www.apache.org/licenses/LICENSE-2.0
8
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import os
16 import shlex
17 import subprocess
18 import shutil
19 import tempfile
20 from ..environment import detect_ninja
21 from ..mesonlib import Popen_safe
22
23 def scanbuild(exelist, srcdir, blddir, privdir, logdir, args):
24 with tempfile.TemporaryDirectory(dir=privdir) as scandir:
25 meson_cmd = exelist + args
26 build_cmd = exelist + ['-o', logdir, detect_ninja(), '-C', scandir]
27 rc = subprocess.call(meson_cmd + [srcdir, scandir])
28 if rc != 0:
29 return rc
30 return subprocess.call(build_cmd)
31
32 def run(args):
33 srcdir = args[0]
34 blddir = args[1]
35 meson_cmd = args[2:]
36 privdir = os.path.join(blddir, 'meson-private')
37 logdir = os.path.join(blddir, 'meson-logs/scanbuild')
38 shutil.rmtree(logdir, ignore_errors=True)
39 tools = [
40 'scan-build', # base
41 'scan-build-5.0', 'scan-build50', # latest stable release
42 'scan-build-4.0', 'scan-build40', # old stable releases
43 'scan-build-3.9', 'scan-build39',
44 'scan-build-3.8', 'scan-build38',
45 'scan-build-3.7', 'scan-build37',
46 'scan-build-3.6', 'scan-build36',
47 'scan-build-3.5', 'scan-build35',
48 'scan-build-6.0', 'scan-build-devel', # development snapshot
49 ]
50 toolname = 'scan-build'
51 for tool in tools:
52 try:
53 p, out = Popen_safe([tool, '--help'])[:2]
54 except (FileNotFoundError, PermissionError):
55 continue
56 if p.returncode != 0:
57 continue
58 else:
59 toolname = tool
60 break
61
62 if 'SCANBUILD' in os.environ:
63 exelist = shlex.split(os.environ['SCANBUILD'])
64 else:
65 exelist = [toolname]
66
67 try:
68 Popen_safe(exelist + ['--help'])
69 except OSError:
70 print('Could not execute scan-build "%s"' % ' '.join(exelist))
71 return 1
72 return scanbuild(exelist, srcdir, blddir, privdir, logdir, meson_cmd)
73
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mesonbuild/scripts/scanbuild.py b/mesonbuild/scripts/scanbuild.py
--- a/mesonbuild/scripts/scanbuild.py
+++ b/mesonbuild/scripts/scanbuild.py
@@ -38,14 +38,17 @@
shutil.rmtree(logdir, ignore_errors=True)
tools = [
'scan-build', # base
- 'scan-build-5.0', 'scan-build50', # latest stable release
- 'scan-build-4.0', 'scan-build40', # old stable releases
+ 'scan-build-8.0', 'scan-build80',
+ 'scan-build-7.0', 'scan-build70',
+ 'scan-build-6.0', 'scan-build60',
+ 'scan-build-5.0', 'scan-build50',
+ 'scan-build-4.0', 'scan-build40',
'scan-build-3.9', 'scan-build39',
'scan-build-3.8', 'scan-build38',
'scan-build-3.7', 'scan-build37',
'scan-build-3.6', 'scan-build36',
'scan-build-3.5', 'scan-build35',
- 'scan-build-6.0', 'scan-build-devel', # development snapshot
+ 'scan-build-9.0', 'scan-build-devel', # development snapshot
]
toolname = 'scan-build'
for tool in tools:
| {"golden_diff": "diff --git a/mesonbuild/scripts/scanbuild.py b/mesonbuild/scripts/scanbuild.py\n--- a/mesonbuild/scripts/scanbuild.py\n+++ b/mesonbuild/scripts/scanbuild.py\n@@ -38,14 +38,17 @@\n shutil.rmtree(logdir, ignore_errors=True)\n tools = [\n 'scan-build', # base\n- 'scan-build-5.0', 'scan-build50', # latest stable release\n- 'scan-build-4.0', 'scan-build40', # old stable releases\n+ 'scan-build-8.0', 'scan-build80',\n+ 'scan-build-7.0', 'scan-build70',\n+ 'scan-build-6.0', 'scan-build60',\n+ 'scan-build-5.0', 'scan-build50',\n+ 'scan-build-4.0', 'scan-build40',\n 'scan-build-3.9', 'scan-build39',\n 'scan-build-3.8', 'scan-build38',\n 'scan-build-3.7', 'scan-build37',\n 'scan-build-3.6', 'scan-build36',\n 'scan-build-3.5', 'scan-build35',\n- 'scan-build-6.0', 'scan-build-devel', # development snapshot\n+ 'scan-build-9.0', 'scan-build-devel', # development snapshot\n ]\n toolname = 'scan-build'\n for tool in tools:\n", "issue": "scanbuild.py doesn't handle scan-build-{7,8}\nHere: https://github.com/mesonbuild/meson/blob/1e7aea65e68a43b0319a4a28908daddfec621548/mesonbuild/scripts/scanbuild.py#L39\r\n\r\nAs with other LLVM/Clang tools (e.g. `llvm-config`), the version number suffix is just the major component since LLVM 7. The LLVM dependency support handles this correctly here: https://github.com/mesonbuild/meson/blob/54db2c9babe6391bba525f92573ceeadb8303e78/mesonbuild/dependencies/dev.py#L208\n", "before_files": [{"content": "# Copyright 2016 The Meson development team\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport shlex\nimport subprocess\nimport shutil\nimport tempfile\nfrom ..environment import detect_ninja\nfrom ..mesonlib import Popen_safe\n\ndef scanbuild(exelist, srcdir, blddir, privdir, logdir, args):\n with tempfile.TemporaryDirectory(dir=privdir) as scandir:\n meson_cmd = exelist + args\n build_cmd = exelist + ['-o', logdir, detect_ninja(), '-C', scandir]\n rc = subprocess.call(meson_cmd + [srcdir, scandir])\n if rc != 0:\n return rc\n return subprocess.call(build_cmd)\n\ndef run(args):\n srcdir = args[0]\n blddir = args[1]\n meson_cmd = args[2:]\n privdir = os.path.join(blddir, 'meson-private')\n logdir = os.path.join(blddir, 'meson-logs/scanbuild')\n shutil.rmtree(logdir, ignore_errors=True)\n tools = [\n 'scan-build', # base\n 'scan-build-5.0', 'scan-build50', # latest stable release\n 'scan-build-4.0', 'scan-build40', # old stable releases\n 'scan-build-3.9', 'scan-build39',\n 'scan-build-3.8', 'scan-build38',\n 'scan-build-3.7', 'scan-build37',\n 'scan-build-3.6', 'scan-build36',\n 'scan-build-3.5', 'scan-build35',\n 'scan-build-6.0', 'scan-build-devel', # development snapshot\n ]\n toolname = 'scan-build'\n for tool in tools:\n try:\n p, out = Popen_safe([tool, '--help'])[:2]\n except (FileNotFoundError, PermissionError):\n continue\n if p.returncode != 0:\n continue\n else:\n toolname = tool\n break\n\n if 'SCANBUILD' in os.environ:\n exelist = shlex.split(os.environ['SCANBUILD'])\n else:\n exelist = [toolname]\n\n try:\n Popen_safe(exelist + 
['--help'])\n except OSError:\n print('Could not execute scan-build \"%s\"' % ' '.join(exelist))\n return 1\n return scanbuild(exelist, srcdir, blddir, privdir, logdir, meson_cmd)\n", "path": "mesonbuild/scripts/scanbuild.py"}], "after_files": [{"content": "# Copyright 2016 The Meson development team\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport shlex\nimport subprocess\nimport shutil\nimport tempfile\nfrom ..environment import detect_ninja\nfrom ..mesonlib import Popen_safe\n\ndef scanbuild(exelist, srcdir, blddir, privdir, logdir, args):\n with tempfile.TemporaryDirectory(dir=privdir) as scandir:\n meson_cmd = exelist + args\n build_cmd = exelist + ['-o', logdir, detect_ninja(), '-C', scandir]\n rc = subprocess.call(meson_cmd + [srcdir, scandir])\n if rc != 0:\n return rc\n return subprocess.call(build_cmd)\n\ndef run(args):\n srcdir = args[0]\n blddir = args[1]\n meson_cmd = args[2:]\n privdir = os.path.join(blddir, 'meson-private')\n logdir = os.path.join(blddir, 'meson-logs/scanbuild')\n shutil.rmtree(logdir, ignore_errors=True)\n tools = [\n 'scan-build', # base\n 'scan-build-8.0', 'scan-build80',\n 'scan-build-7.0', 'scan-build70',\n 'scan-build-6.0', 'scan-build60',\n 'scan-build-5.0', 'scan-build50',\n 'scan-build-4.0', 'scan-build40',\n 'scan-build-3.9', 'scan-build39',\n 'scan-build-3.8', 'scan-build38',\n 'scan-build-3.7', 'scan-build37',\n 'scan-build-3.6', 'scan-build36',\n 'scan-build-3.5', 'scan-build35',\n 'scan-build-9.0', 'scan-build-devel', # development snapshot\n ]\n toolname = 'scan-build'\n for tool in tools:\n try:\n p, out = Popen_safe([tool, '--help'])[:2]\n except (FileNotFoundError, PermissionError):\n continue\n if p.returncode != 0:\n continue\n else:\n toolname = tool\n break\n\n if 'SCANBUILD' in os.environ:\n exelist = shlex.split(os.environ['SCANBUILD'])\n else:\n exelist = [toolname]\n\n try:\n Popen_safe(exelist + ['--help'])\n except OSError:\n print('Could not execute scan-build \"%s\"' % ' '.join(exelist))\n return 1\n return scanbuild(exelist, srcdir, blddir, privdir, logdir, meson_cmd)\n", "path": "mesonbuild/scripts/scanbuild.py"}]} | 1,240 | 336 |
gh_patches_debug_35482 | rasdani/github-patches | git_diff | ephios-dev__ephios-855 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
StaticI18N catalog not loaded on some views
The shift edit view tries to load `/de.js` while other views like the instance settings are loading `/static/jsi18n/de/djangojs.js` correctly, despite originating from the same `<script type="text/javascript" src="{% statici18n LANGUAGE_CODE %}"></script>` in base.html
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ephios/plugins/simpleresource/models.py`
Content:
```
1 from django.db import models
2
3 from ephios.core.models import Shift
4
5
6 class ResourceCategory(models.Model):
7 name = models.CharField(max_length=50)
8
9 def __str__(self):
10 # pylint: disable=invalid-str-returned
11 return self.name
12
13
14 class Resource(models.Model):
15 title = models.CharField(max_length=100)
16 category = models.ForeignKey(ResourceCategory, on_delete=models.CASCADE)
17
18 def __str__(self):
19 # pylint: disable=invalid-str-returned
20 return self.title
21
22
23 class ResourceAllocation(models.Model):
24 shift = models.ForeignKey(Shift, on_delete=models.CASCADE)
25 resources = models.ManyToManyField(Resource, blank=True)
26
27 def __str__(self):
28 return f"Resource allocation for {self.shift}"
29
```
Path: `ephios/plugins/simpleresource/forms.py`
Content:
```
1 from django.forms import BaseModelFormSet, BooleanField, ModelForm, modelformset_factory
2 from django.forms.formsets import DELETION_FIELD_NAME
3 from django.utils.translation import gettext as _
4 from django_select2.forms import Select2MultipleWidget
5
6 from ephios.core.forms.events import BasePluginFormMixin
7 from ephios.plugins.simpleresource.models import ResourceAllocation, ResourceCategory
8
9
10 class ResourceAllocationForm(BasePluginFormMixin, ModelForm):
11 class Meta:
12 model = ResourceAllocation
13 fields = ["resources"]
14 widgets = {
15 "resources": Select2MultipleWidget,
16 }
17
18 def __init__(self, *args, shift, **kwargs):
19 self.shift = shift
20 try:
21 kwargs.setdefault("instance", ResourceAllocation.objects.get(shift=shift))
22 except ResourceAllocation.DoesNotExist:
23 pass
24 super().__init__(*args, **kwargs)
25
26 def save(self, commit=True):
27 if self.cleaned_data.get("resources"):
28 self.instance.shift = self.shift
29 super().save(commit)
30 elif self.instance.pk:
31 self.instance.delete()
32
33 @property
34 def heading(self):
35 return _("Resource allocation")
36
37 def is_function_active(self):
38 return bool(self.instance.resources.exists())
39
40
41 class BaseResourceCategoryFormset(BaseModelFormSet):
42 def add_fields(self, form, index):
43 super().add_fields(form, index)
44 initial_form_count = self.initial_form_count()
45 if self.can_delete and (self.can_delete_extra or index < initial_form_count):
46 category: ResourceCategory = form.instance
47 form.fields[DELETION_FIELD_NAME] = BooleanField(
48 label=_("Delete"),
49 required=False,
50 disabled=category.pk and category.resource_set.exists(),
51 )
52
53
54 ResourceCategoryFormset = modelformset_factory(
55 ResourceCategory,
56 formset=BaseResourceCategoryFormset,
57 can_delete=True,
58 extra=0,
59 fields=["name"],
60 )
61
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/ephios/plugins/simpleresource/forms.py b/ephios/plugins/simpleresource/forms.py
--- a/ephios/plugins/simpleresource/forms.py
+++ b/ephios/plugins/simpleresource/forms.py
@@ -1,3 +1,4 @@
+from crispy_forms.helper import FormHelper
from django.forms import BaseModelFormSet, BooleanField, ModelForm, modelformset_factory
from django.forms.formsets import DELETION_FIELD_NAME
from django.utils.translation import gettext as _
@@ -17,6 +18,8 @@
def __init__(self, *args, shift, **kwargs):
self.shift = shift
+ self.helper = FormHelper()
+ self.helper.include_media = False
try:
kwargs.setdefault("instance", ResourceAllocation.objects.get(shift=shift))
except ResourceAllocation.DoesNotExist:
diff --git a/ephios/plugins/simpleresource/models.py b/ephios/plugins/simpleresource/models.py
--- a/ephios/plugins/simpleresource/models.py
+++ b/ephios/plugins/simpleresource/models.py
@@ -1,10 +1,11 @@
from django.db import models
+from django.utils.translation import gettext_lazy as _
from ephios.core.models import Shift
class ResourceCategory(models.Model):
- name = models.CharField(max_length=50)
+ name = models.CharField(max_length=50, verbose_name=_("Name"))
def __str__(self):
# pylint: disable=invalid-str-returned
@@ -12,8 +13,10 @@
class Resource(models.Model):
- title = models.CharField(max_length=100)
- category = models.ForeignKey(ResourceCategory, on_delete=models.CASCADE)
+ title = models.CharField(max_length=100, verbose_name=_("Title"))
+ category = models.ForeignKey(
+ ResourceCategory, on_delete=models.CASCADE, verbose_name=_("Category")
+ )
def __str__(self):
# pylint: disable=invalid-str-returned
@@ -22,7 +25,7 @@
class ResourceAllocation(models.Model):
shift = models.ForeignKey(Shift, on_delete=models.CASCADE)
- resources = models.ManyToManyField(Resource, blank=True)
+ resources = models.ManyToManyField(Resource, blank=True, verbose_name=_("Resources"))
def __str__(self):
return f"Resource allocation for {self.shift}"
| {"golden_diff": "diff --git a/ephios/plugins/simpleresource/forms.py b/ephios/plugins/simpleresource/forms.py\n--- a/ephios/plugins/simpleresource/forms.py\n+++ b/ephios/plugins/simpleresource/forms.py\n@@ -1,3 +1,4 @@\n+from crispy_forms.helper import FormHelper\n from django.forms import BaseModelFormSet, BooleanField, ModelForm, modelformset_factory\n from django.forms.formsets import DELETION_FIELD_NAME\n from django.utils.translation import gettext as _\n@@ -17,6 +18,8 @@\n \n def __init__(self, *args, shift, **kwargs):\n self.shift = shift\n+ self.helper = FormHelper()\n+ self.helper.include_media = False\n try:\n kwargs.setdefault(\"instance\", ResourceAllocation.objects.get(shift=shift))\n except ResourceAllocation.DoesNotExist:\ndiff --git a/ephios/plugins/simpleresource/models.py b/ephios/plugins/simpleresource/models.py\n--- a/ephios/plugins/simpleresource/models.py\n+++ b/ephios/plugins/simpleresource/models.py\n@@ -1,10 +1,11 @@\n from django.db import models\n+from django.utils.translation import gettext_lazy as _\n \n from ephios.core.models import Shift\n \n \n class ResourceCategory(models.Model):\n- name = models.CharField(max_length=50)\n+ name = models.CharField(max_length=50, verbose_name=_(\"Name\"))\n \n def __str__(self):\n # pylint: disable=invalid-str-returned\n@@ -12,8 +13,10 @@\n \n \n class Resource(models.Model):\n- title = models.CharField(max_length=100)\n- category = models.ForeignKey(ResourceCategory, on_delete=models.CASCADE)\n+ title = models.CharField(max_length=100, verbose_name=_(\"Title\"))\n+ category = models.ForeignKey(\n+ ResourceCategory, on_delete=models.CASCADE, verbose_name=_(\"Category\")\n+ )\n \n def __str__(self):\n # pylint: disable=invalid-str-returned\n@@ -22,7 +25,7 @@\n \n class ResourceAllocation(models.Model):\n shift = models.ForeignKey(Shift, on_delete=models.CASCADE)\n- resources = models.ManyToManyField(Resource, blank=True)\n+ resources = models.ManyToManyField(Resource, blank=True, verbose_name=_(\"Resources\"))\n \n def __str__(self):\n return f\"Resource allocation for {self.shift}\"\n", "issue": "StaticI18N catalog not loaded on some views\nThe shift edit views tries to load `/de.js` while other views like the instance settings are loading `/static/jsi18n/de/djangojs.js` correctly, despite originating from the same `<script type=\"text/javascript\" src=\"{% statici18n LANGUAGE_CODE %}\"></script>` in base.html\n", "before_files": [{"content": "from django.db import models\n\nfrom ephios.core.models import Shift\n\n\nclass ResourceCategory(models.Model):\n name = models.CharField(max_length=50)\n\n def __str__(self):\n # pylint: disable=invalid-str-returned\n return self.name\n\n\nclass Resource(models.Model):\n title = models.CharField(max_length=100)\n category = models.ForeignKey(ResourceCategory, on_delete=models.CASCADE)\n\n def __str__(self):\n # pylint: disable=invalid-str-returned\n return self.title\n\n\nclass ResourceAllocation(models.Model):\n shift = models.ForeignKey(Shift, on_delete=models.CASCADE)\n resources = models.ManyToManyField(Resource, blank=True)\n\n def __str__(self):\n return f\"Resource allocation for {self.shift}\"\n", "path": "ephios/plugins/simpleresource/models.py"}, {"content": "from django.forms import BaseModelFormSet, BooleanField, ModelForm, modelformset_factory\nfrom django.forms.formsets import DELETION_FIELD_NAME\nfrom django.utils.translation import gettext as _\nfrom django_select2.forms import Select2MultipleWidget\n\nfrom ephios.core.forms.events import BasePluginFormMixin\nfrom 
ephios.plugins.simpleresource.models import ResourceAllocation, ResourceCategory\n\n\nclass ResourceAllocationForm(BasePluginFormMixin, ModelForm):\n class Meta:\n model = ResourceAllocation\n fields = [\"resources\"]\n widgets = {\n \"resources\": Select2MultipleWidget,\n }\n\n def __init__(self, *args, shift, **kwargs):\n self.shift = shift\n try:\n kwargs.setdefault(\"instance\", ResourceAllocation.objects.get(shift=shift))\n except ResourceAllocation.DoesNotExist:\n pass\n super().__init__(*args, **kwargs)\n\n def save(self, commit=True):\n if self.cleaned_data.get(\"resources\"):\n self.instance.shift = self.shift\n super().save(commit)\n elif self.instance.pk:\n self.instance.delete()\n\n @property\n def heading(self):\n return _(\"Resource allocation\")\n\n def is_function_active(self):\n return bool(self.instance.resources.exists())\n\n\nclass BaseResourceCategoryFormset(BaseModelFormSet):\n def add_fields(self, form, index):\n super().add_fields(form, index)\n initial_form_count = self.initial_form_count()\n if self.can_delete and (self.can_delete_extra or index < initial_form_count):\n category: ResourceCategory = form.instance\n form.fields[DELETION_FIELD_NAME] = BooleanField(\n label=_(\"Delete\"),\n required=False,\n disabled=category.pk and category.resource_set.exists(),\n )\n\n\nResourceCategoryFormset = modelformset_factory(\n ResourceCategory,\n formset=BaseResourceCategoryFormset,\n can_delete=True,\n extra=0,\n fields=[\"name\"],\n)\n", "path": "ephios/plugins/simpleresource/forms.py"}], "after_files": [{"content": "from django.db import models\nfrom django.utils.translation import gettext_lazy as _\n\nfrom ephios.core.models import Shift\n\n\nclass ResourceCategory(models.Model):\n name = models.CharField(max_length=50, verbose_name=_(\"Name\"))\n\n def __str__(self):\n # pylint: disable=invalid-str-returned\n return self.name\n\n\nclass Resource(models.Model):\n title = models.CharField(max_length=100, verbose_name=_(\"Title\"))\n category = models.ForeignKey(\n ResourceCategory, on_delete=models.CASCADE, verbose_name=_(\"Category\")\n )\n\n def __str__(self):\n # pylint: disable=invalid-str-returned\n return self.title\n\n\nclass ResourceAllocation(models.Model):\n shift = models.ForeignKey(Shift, on_delete=models.CASCADE)\n resources = models.ManyToManyField(Resource, blank=True, verbose_name=_(\"Resources\"))\n\n def __str__(self):\n return f\"Resource allocation for {self.shift}\"\n", "path": "ephios/plugins/simpleresource/models.py"}, {"content": "from crispy_forms.helper import FormHelper\nfrom django.forms import BaseModelFormSet, BooleanField, ModelForm, modelformset_factory\nfrom django.forms.formsets import DELETION_FIELD_NAME\nfrom django.utils.translation import gettext as _\nfrom django_select2.forms import Select2MultipleWidget\n\nfrom ephios.core.forms.events import BasePluginFormMixin\nfrom ephios.plugins.simpleresource.models import ResourceAllocation, ResourceCategory\n\n\nclass ResourceAllocationForm(BasePluginFormMixin, ModelForm):\n class Meta:\n model = ResourceAllocation\n fields = [\"resources\"]\n widgets = {\n \"resources\": Select2MultipleWidget,\n }\n\n def __init__(self, *args, shift, **kwargs):\n self.shift = shift\n self.helper = FormHelper()\n self.helper.include_media = False\n try:\n kwargs.setdefault(\"instance\", ResourceAllocation.objects.get(shift=shift))\n except ResourceAllocation.DoesNotExist:\n pass\n super().__init__(*args, **kwargs)\n\n def save(self, commit=True):\n if self.cleaned_data.get(\"resources\"):\n 
self.instance.shift = self.shift\n super().save(commit)\n elif self.instance.pk:\n self.instance.delete()\n\n @property\n def heading(self):\n return _(\"Resource allocation\")\n\n def is_function_active(self):\n return bool(self.instance.resources.exists())\n\n\nclass BaseResourceCategoryFormset(BaseModelFormSet):\n def add_fields(self, form, index):\n super().add_fields(form, index)\n initial_form_count = self.initial_form_count()\n if self.can_delete and (self.can_delete_extra or index < initial_form_count):\n category: ResourceCategory = form.instance\n form.fields[DELETION_FIELD_NAME] = BooleanField(\n label=_(\"Delete\"),\n required=False,\n disabled=category.pk and category.resource_set.exists(),\n )\n\n\nResourceCategoryFormset = modelformset_factory(\n ResourceCategory,\n formset=BaseResourceCategoryFormset,\n can_delete=True,\n extra=0,\n fields=[\"name\"],\n)\n", "path": "ephios/plugins/simpleresource/forms.py"}]} | 1,096 | 516 |
gh_patches_debug_37131 | rasdani/github-patches | git_diff | bokeh__bokeh-4963 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Updating of image ColorMapper
The ability to change the low/high of a ColorMapper for an already existing image (adjusting contrast/brightness) through widget callbacks on a server is desired. I could imagine this being accomplished by the following code, which incidentally doesn't throw any errors as is, it just doesn't do anything ;)
```
import numpy as np
from bokeh.plotting import figure, curdoc
from bokeh.layouts import row, layout
from bokeh.models import Slider
from bokeh.models.mappers import LinearColorMapper
from bokeh.palettes import Greys9
def change_image_contrast(attr, old, new):
fig_im.glyph.color_mapper.update(low=graph_min_slider.value, high=graph_max_slider.value)
fig_im.trigger('glyph', fig_im.glyph, fig_im.glyph)
graph_min_slider = Slider(title="Min", start=0, end=99, step=1, value=0)
graph_max_slider = Slider(title="Max", start=1, end=100, step=1, value=100)
graph_min_slider.on_change('value', change_image_contrast)
graph_max_slider.on_change('value', change_image_contrast)
fig = figure(plot_width=500, plot_height=500, x_range=(0, 10), y_range=(0, 10))
fig_im = fig.image(image=[np.random.randint(0, 100, (10, 10), dtype='int16')], x=[0], y=[0], dw=[10], dh=[10],
color_mapper=LinearColorMapper(low=0, high=100, palette=Greys9))
curdoc().add_root(layout([fig], [row([graph_min_slider, graph_max_slider])]))
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bokeh/models/mappers.py`
Content:
```
1 """ Models for mapping values from one range or space to another.
2
3 """
4 from __future__ import absolute_import
5
6 from ..model import Model
7 from ..core.properties import abstract
8 from ..core.properties import Float, Color, Enum, Seq
9 from ..core.enums import Palette
10 from .. import palettes
11
12 @abstract
13 class ColorMapper(Model):
14 """ Base class for color mapper types. `ColorMapper`` is not
15 generally useful to instantiate on its own.
16
17 """
18
19 class LinearColorMapper(ColorMapper):
20 """ Map numbers in a range [*low*, *high*] linearly into a
21 sequence of colors (a palette).
22
23 For example, if the range is [0, 99] and the palette is
24 ``['red', 'green', 'blue']``, the values would be mapped as
25 follows::
26
27 x < 0 : 'red' # values < low are clamped
28 0 >= x < 33 : 'red'
29 33 >= x < 66 : 'green'
30 66 >= x < 99 : 'blue'
31 99 >= x : 'blue' # values > high are clamped
32
33 """
34
35 palette = Seq(Color, help="""
36 A sequence of colors to use as the target palette for mapping.
37
38 This property can also be set as a ``String``, to the name of
39 any of the palettes shown in :ref:`bokeh.palettes`.
40 """).accepts(Enum(Palette), lambda pal: getattr(palettes, pal))
41
42 low = Float(help="""
43 The minimum value of the range to map into the palette. Values below
44 this are clamped to ``low``.
45 """)
46
47 high = Float(help="""
48 The maximum value of the range to map into the palette. Values above
49 this are clamped to ``high``.
50 """)
51
52 # TODO: (jc) what is the color code for transparent?
53 # TODO: (bev) better docstring
54 reserve_color = Color("#ffffff", help="""
55 Used by Abstract Rendering.
56 """)
57
58 # TODO: (bev) better docstring
59 reserve_val = Float(default=None, help="""
60 Used by Abstract Rendering.
61 """)
62
63 def __init__(self, palette=None, **kwargs):
64 if palette is not None: kwargs['palette'] = palette
65 super(LinearColorMapper, self).__init__(**kwargs)
66
67 class LogColorMapper(ColorMapper):
68 """ Map numbers in a range [*low*, *high*] into a
69 sequence of colors (a palette) on a natural logarithm scale.
70
71 For example, if the range is [0, 25] and the palette is
72 ``['red', 'green', 'blue']``, the values would be mapped as
73 follows::
74
75 x < 0 : 'red' # values < low are clamped
76 0 >= x < 2.72 : 'red' # math.e ** 1
77 2.72 >= x < 7.39 : 'green' # math.e ** 2
78 7.39 >= x < 20.09 : 'blue' # math.e ** 3
79 20.09 >= x : 'blue' # values > high are clamped
80
81 .. warning::
82 The LogColorMapper only works for images with scalar values that are
83 non-negative.
84
85 """
86
87 palette = Seq(Color, help="""
88 A sequence of colors to use as the target palette for mapping.
89
90 This property can also be set as a ``String``, to the name of
91 any of the palettes shown in :ref:`bokeh.palettes`.
92 """).accepts(Enum(Palette), lambda pal: getattr(palettes, pal))
93
94 low = Float(help="""
95 The minimum value of the range to map into the palette. Values below
96 this are clamped to ``low``.
97 """)
98
99 high = Float(help="""
100 The maximum value of the range to map into the palette. Values above
101 this are clamped to ``high``.
102 """)
103
104 def __init__(self, palette=None, **kwargs):
105 if palette is not None: kwargs['palette'] = palette
106 super(LogColorMapper, self).__init__(**kwargs)
107
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bokeh/models/mappers.py b/bokeh/models/mappers.py
--- a/bokeh/models/mappers.py
+++ b/bokeh/models/mappers.py
@@ -16,6 +16,17 @@
"""
+ palette = Seq(Color, help="""
+ A sequence of colors to use as the target palette for mapping.
+
+ This property can also be set as a ``String``, to the name of
+ any of the palettes shown in :ref:`bokeh.palettes`.
+ """).accepts(Enum(Palette), lambda pal: getattr(palettes, pal))
+
+ def __init__(self, palette=None, **kwargs):
+ if palette is not None: kwargs['palette'] = palette
+ super(ColorMapper, self).__init__(**kwargs)
+
class LinearColorMapper(ColorMapper):
""" Map numbers in a range [*low*, *high*] linearly into a
sequence of colors (a palette).
@@ -32,13 +43,6 @@
"""
- palette = Seq(Color, help="""
- A sequence of colors to use as the target palette for mapping.
-
- This property can also be set as a ``String``, to the name of
- any of the palettes shown in :ref:`bokeh.palettes`.
- """).accepts(Enum(Palette), lambda pal: getattr(palettes, pal))
-
low = Float(help="""
The minimum value of the range to map into the palette. Values below
this are clamped to ``low``.
@@ -60,10 +64,6 @@
Used by Abstract Rendering.
""")
- def __init__(self, palette=None, **kwargs):
- if palette is not None: kwargs['palette'] = palette
- super(LinearColorMapper, self).__init__(**kwargs)
-
class LogColorMapper(ColorMapper):
""" Map numbers in a range [*low*, *high*] into a
sequence of colors (a palette) on a natural logarithm scale.
@@ -84,13 +84,6 @@
"""
- palette = Seq(Color, help="""
- A sequence of colors to use as the target palette for mapping.
-
- This property can also be set as a ``String``, to the name of
- any of the palettes shown in :ref:`bokeh.palettes`.
- """).accepts(Enum(Palette), lambda pal: getattr(palettes, pal))
-
low = Float(help="""
The minimum value of the range to map into the palette. Values below
this are clamped to ``low``.
@@ -100,7 +93,3 @@
The maximum value of the range to map into the palette. Values above
this are clamped to ``high``.
""")
-
- def __init__(self, palette=None, **kwargs):
- if palette is not None: kwargs['palette'] = palette
- super(LogColorMapper, self).__init__(**kwargs)
| {"golden_diff": "diff --git a/bokeh/models/mappers.py b/bokeh/models/mappers.py\n--- a/bokeh/models/mappers.py\n+++ b/bokeh/models/mappers.py\n@@ -16,6 +16,17 @@\n \n \"\"\"\n \n+ palette = Seq(Color, help=\"\"\"\n+ A sequence of colors to use as the target palette for mapping.\n+\n+ This property can also be set as a ``String``, to the name of\n+ any of the palettes shown in :ref:`bokeh.palettes`.\n+ \"\"\").accepts(Enum(Palette), lambda pal: getattr(palettes, pal))\n+\n+ def __init__(self, palette=None, **kwargs):\n+ if palette is not None: kwargs['palette'] = palette\n+ super(ColorMapper, self).__init__(**kwargs)\n+\n class LinearColorMapper(ColorMapper):\n \"\"\" Map numbers in a range [*low*, *high*] linearly into a\n sequence of colors (a palette).\n@@ -32,13 +43,6 @@\n \n \"\"\"\n \n- palette = Seq(Color, help=\"\"\"\n- A sequence of colors to use as the target palette for mapping.\n-\n- This property can also be set as a ``String``, to the name of\n- any of the palettes shown in :ref:`bokeh.palettes`.\n- \"\"\").accepts(Enum(Palette), lambda pal: getattr(palettes, pal))\n-\n low = Float(help=\"\"\"\n The minimum value of the range to map into the palette. Values below\n this are clamped to ``low``.\n@@ -60,10 +64,6 @@\n Used by Abstract Rendering.\n \"\"\")\n \n- def __init__(self, palette=None, **kwargs):\n- if palette is not None: kwargs['palette'] = palette\n- super(LinearColorMapper, self).__init__(**kwargs)\n-\n class LogColorMapper(ColorMapper):\n \"\"\" Map numbers in a range [*low*, *high*] into a\n sequence of colors (a palette) on a natural logarithm scale.\n@@ -84,13 +84,6 @@\n \n \"\"\"\n \n- palette = Seq(Color, help=\"\"\"\n- A sequence of colors to use as the target palette for mapping.\n-\n- This property can also be set as a ``String``, to the name of\n- any of the palettes shown in :ref:`bokeh.palettes`.\n- \"\"\").accepts(Enum(Palette), lambda pal: getattr(palettes, pal))\n-\n low = Float(help=\"\"\"\n The minimum value of the range to map into the palette. Values below\n this are clamped to ``low``.\n@@ -100,7 +93,3 @@\n The maximum value of the range to map into the palette. Values above\n this are clamped to ``high``.\n \"\"\")\n-\n- def __init__(self, palette=None, **kwargs):\n- if palette is not None: kwargs['palette'] = palette\n- super(LogColorMapper, self).__init__(**kwargs)\n", "issue": "Updating of image ColorMapper\nThe ability to change the low/high of a ColorMapper for an already existing image (adjusting contrast/brightness) through widget callbacks on a server is desired. 
I could imagine this being accomplished by the following code, which incidentally doesn't throw any errors as is, it just doesn't do anything ;)\n\n```\nimport numpy as np\nfrom bokeh.plotting import figure, curdoc\nfrom bokeh.layouts import row, layout\nfrom bokeh.models import Slider\nfrom bokeh.models.mappers import LinearColorMapper\nfrom bokeh.palettes import Greys9\n\ndef change_image_contrast(attr, old, new):\n fig_im.glyph.color_mapper.update(low=graph_min_slider.value, high=graph_max_slider.value)\n fig_im.trigger('glyph', fig_im.glyph, fig_im.glyph)\n\ngraph_min_slider = Slider(title=\"Min\", start=0, end=99, step=1, value=0)\ngraph_max_slider = Slider(title=\"Max\", start=1, end=100, step=1, value=100)\n\ngraph_min_slider.on_change('value', change_image_contrast)\ngraph_max_slider.on_change('value', change_image_contrast)\n\nfig = figure(plot_width=500, plot_height=500, x_range=(0, 10), y_range=(0, 10))\n\nfig_im = fig.image(image=[np.random.randint(0, 100, (10, 10), dtype='int16')], x=[0], y=[0], dw=[10], dh=[10],\n color_mapper=LinearColorMapper(low=0, high=100, palette=Greys9))\n\ncurdoc().add_root(layout([fig], [row([graph_min_slider, graph_max_slider])]))\n```\n\n", "before_files": [{"content": "\"\"\" Models for mapping values from one range or space to another.\n\n\"\"\"\nfrom __future__ import absolute_import\n\nfrom ..model import Model\nfrom ..core.properties import abstract\nfrom ..core.properties import Float, Color, Enum, Seq\nfrom ..core.enums import Palette\nfrom .. import palettes\n\n@abstract\nclass ColorMapper(Model):\n \"\"\" Base class for color mapper types. `ColorMapper`` is not\n generally useful to instantiate on its own.\n\n \"\"\"\n\nclass LinearColorMapper(ColorMapper):\n \"\"\" Map numbers in a range [*low*, *high*] linearly into a\n sequence of colors (a palette).\n\n For example, if the range is [0, 99] and the palette is\n ``['red', 'green', 'blue']``, the values would be mapped as\n follows::\n\n x < 0 : 'red' # values < low are clamped\n 0 >= x < 33 : 'red'\n 33 >= x < 66 : 'green'\n 66 >= x < 99 : 'blue'\n 99 >= x : 'blue' # values > high are clamped\n\n \"\"\"\n\n palette = Seq(Color, help=\"\"\"\n A sequence of colors to use as the target palette for mapping.\n\n This property can also be set as a ``String``, to the name of\n any of the palettes shown in :ref:`bokeh.palettes`.\n \"\"\").accepts(Enum(Palette), lambda pal: getattr(palettes, pal))\n\n low = Float(help=\"\"\"\n The minimum value of the range to map into the palette. Values below\n this are clamped to ``low``.\n \"\"\")\n\n high = Float(help=\"\"\"\n The maximum value of the range to map into the palette. 
Values above\n this are clamped to ``high``.\n \"\"\")\n\n # TODO: (jc) what is the color code for transparent?\n # TODO: (bev) better docstring\n reserve_color = Color(\"#ffffff\", help=\"\"\"\n Used by Abstract Rendering.\n \"\"\")\n\n # TODO: (bev) better docstring\n reserve_val = Float(default=None, help=\"\"\"\n Used by Abstract Rendering.\n \"\"\")\n\n def __init__(self, palette=None, **kwargs):\n if palette is not None: kwargs['palette'] = palette\n super(LinearColorMapper, self).__init__(**kwargs)\n\nclass LogColorMapper(ColorMapper):\n \"\"\" Map numbers in a range [*low*, *high*] into a\n sequence of colors (a palette) on a natural logarithm scale.\n\n For example, if the range is [0, 25] and the palette is\n ``['red', 'green', 'blue']``, the values would be mapped as\n follows::\n\n x < 0 : 'red' # values < low are clamped\n 0 >= x < 2.72 : 'red' # math.e ** 1\n 2.72 >= x < 7.39 : 'green' # math.e ** 2\n 7.39 >= x < 20.09 : 'blue' # math.e ** 3\n 20.09 >= x : 'blue' # values > high are clamped\n\n .. warning::\n The LogColorMapper only works for images with scalar values that are\n non-negative.\n\n \"\"\"\n\n palette = Seq(Color, help=\"\"\"\n A sequence of colors to use as the target palette for mapping.\n\n This property can also be set as a ``String``, to the name of\n any of the palettes shown in :ref:`bokeh.palettes`.\n \"\"\").accepts(Enum(Palette), lambda pal: getattr(palettes, pal))\n\n low = Float(help=\"\"\"\n The minimum value of the range to map into the palette. Values below\n this are clamped to ``low``.\n \"\"\")\n\n high = Float(help=\"\"\"\n The maximum value of the range to map into the palette. Values above\n this are clamped to ``high``.\n \"\"\")\n\n def __init__(self, palette=None, **kwargs):\n if palette is not None: kwargs['palette'] = palette\n super(LogColorMapper, self).__init__(**kwargs)\n", "path": "bokeh/models/mappers.py"}], "after_files": [{"content": "\"\"\" Models for mapping values from one range or space to another.\n\n\"\"\"\nfrom __future__ import absolute_import\n\nfrom ..model import Model\nfrom ..core.properties import abstract\nfrom ..core.properties import Float, Color, Enum, Seq\nfrom ..core.enums import Palette\nfrom .. import palettes\n\n@abstract\nclass ColorMapper(Model):\n \"\"\" Base class for color mapper types. `ColorMapper`` is not\n generally useful to instantiate on its own.\n\n \"\"\"\n\n palette = Seq(Color, help=\"\"\"\n A sequence of colors to use as the target palette for mapping.\n\n This property can also be set as a ``String``, to the name of\n any of the palettes shown in :ref:`bokeh.palettes`.\n \"\"\").accepts(Enum(Palette), lambda pal: getattr(palettes, pal))\n\n def __init__(self, palette=None, **kwargs):\n if palette is not None: kwargs['palette'] = palette\n super(ColorMapper, self).__init__(**kwargs)\n\nclass LinearColorMapper(ColorMapper):\n \"\"\" Map numbers in a range [*low*, *high*] linearly into a\n sequence of colors (a palette).\n\n For example, if the range is [0, 99] and the palette is\n ``['red', 'green', 'blue']``, the values would be mapped as\n follows::\n\n x < 0 : 'red' # values < low are clamped\n 0 >= x < 33 : 'red'\n 33 >= x < 66 : 'green'\n 66 >= x < 99 : 'blue'\n 99 >= x : 'blue' # values > high are clamped\n\n \"\"\"\n\n low = Float(help=\"\"\"\n The minimum value of the range to map into the palette. Values below\n this are clamped to ``low``.\n \"\"\")\n\n high = Float(help=\"\"\"\n The maximum value of the range to map into the palette. 
Values above\n this are clamped to ``high``.\n \"\"\")\n\n # TODO: (jc) what is the color code for transparent?\n # TODO: (bev) better docstring\n reserve_color = Color(\"#ffffff\", help=\"\"\"\n Used by Abstract Rendering.\n \"\"\")\n\n # TODO: (bev) better docstring\n reserve_val = Float(default=None, help=\"\"\"\n Used by Abstract Rendering.\n \"\"\")\n\nclass LogColorMapper(ColorMapper):\n \"\"\" Map numbers in a range [*low*, *high*] into a\n sequence of colors (a palette) on a natural logarithm scale.\n\n For example, if the range is [0, 25] and the palette is\n ``['red', 'green', 'blue']``, the values would be mapped as\n follows::\n\n x < 0 : 'red' # values < low are clamped\n 0 >= x < 2.72 : 'red' # math.e ** 1\n 2.72 >= x < 7.39 : 'green' # math.e ** 2\n 7.39 >= x < 20.09 : 'blue' # math.e ** 3\n 20.09 >= x : 'blue' # values > high are clamped\n\n .. warning::\n The LogColorMapper only works for images with scalar values that are\n non-negative.\n\n \"\"\"\n\n low = Float(help=\"\"\"\n The minimum value of the range to map into the palette. Values below\n this are clamped to ``low``.\n \"\"\")\n\n high = Float(help=\"\"\"\n The maximum value of the range to map into the palette. Values above\n this are clamped to ``high``.\n \"\"\")\n", "path": "bokeh/models/mappers.py"}]} | 1,818 | 670 |
gh_patches_debug_13867 | rasdani/github-patches | git_diff | litestar-org__litestar-2885 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Docs: Channels run_in_background.py example does not work
### Summary
Hi,
I just tried to run the channels example "run_in_background.py" from the [website](https://docs.litestar.dev/2/usage/channels.html) (which translates to [run_in_background.py](https://github.com/litestar-org/litestar/blob/main/docs/examples/channels/run_in_background.py)) and found that it didn't work for me.
I'm running Python 3.8.10 in a virtual environment with Litestar 2.4.1.
The application contains exactly the same source code that is provided in the example file and was run using `uvicorn app:app --reload`
Log output:
```
INFO: Started reloader process [219108] using WatchFiles
Process SpawnProcess-1:
Traceback (most recent call last):
File "/usr/lib/python3.8/multiprocessing/process.py", line 315, in _bootstrap
self.run()
File "/usr/lib/python3.8/multiprocessing/process.py", line 108, in run
self._target(*self._args, **self._kwargs)
File "/home/.../venv/lib/python3.8/site-packages/uvicorn/_subprocess.py", line 76, in subprocess_started
target(sockets=sockets)
File "/home/.../venv/lib/python3.8/site-packages/uvicorn/server.py", line 61, in run
return asyncio.run(self.serve(sockets=sockets))
File "/usr/lib/python3.8/asyncio/runners.py", line 44, in run
return loop.run_until_complete(main)
File "uvloop/loop.pyx", line 1517, in uvloop.loop.Loop.run_until_complete
File "/home/.../venv/lib/python3.8/site-packages/uvicorn/server.py", line 68, in serve
config.load()
File "/home/.../venv/lib/python3.8/site-packages/uvicorn/config.py", line 467, in load
self.loaded_app = import_from_string(self.app)
File "/home/.../venv/lib/python3.8/site-packages/uvicorn/importer.py", line 21, in import_from_string
module = importlib.import_module(module_str)
File "/usr/lib/python3.8/importlib/__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1014, in _gcd_import
File "<frozen importlib._bootstrap>", line 991, in _find_and_load
File "<frozen importlib._bootstrap>", line 975, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 671, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 848, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/.../source/app.py", line 18, in <module>
plugins=[ChannelsPlugin(backend=MemoryChannelsBackend())],
File "/home/.../venv/lib/python3.8/site-packages/litestar/channels/plugin.py", line 82, in __init__
raise ImproperlyConfiguredException("Must define either channels or set arbitrary_channels_allowed=True")
litestar.exceptions.http_exceptions.ImproperlyConfiguredException: 500: Must define either channels or set arbitrary_channels_allowed=True
```
This error is easy to resolve. I've changed
plugins=[ChannelsPlugin(backend=MemoryChannelsBackend())],
to
plugins=[ChannelsPlugin(backend=MemoryChannelsBackend(), channels=["general"])],
and the app started up successfully.
But there seems to be another problem. As soon as my browser connects to the websocket, I get the following warning:
```
INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit)
INFO: Started reloader process [219218] using WatchFiles
INFO: Started server process [219220]
INFO: Waiting for application startup.
INFO: Application startup complete.
INFO: ('127.0.0.1', 33682) - "WebSocket /ws" [accepted]
/home/...../source/app.py:10: RuntimeWarning: coroutine 'ChannelsPlugin.subscribe' was never awaited
async with channels.subscribe(["some_channel"]) as subscriber, subscriber.run_in_background(socket.send_text):
RuntimeWarning: Enable tracemalloc to get the object allocation traceback
INFO: connection open
INFO: connection closed
```
I'm not sure how to resolve this problem. Probably the documentation does not reflect the current state of Litestar? Could you check if the example is still valid? Thank you :-)
<!-- POLAR PLEDGE BADGE START -->
---
> [!NOTE]
> While we are open for sponsoring on [GitHub Sponsors](https://github.com/sponsors/litestar-org/) and
> [OpenCollective](https://opencollective.com/litestar), we also utilize [Polar.sh](https://polar.sh/) to engage in pledge-based sponsorship.
>
> Check out all issues funded or available for funding [on our Polar.sh Litestar dashboard](https://polar.sh/litestar-org)
> * If you would like to see an issue prioritized, make a pledge towards it!
> * We receive the pledge once the issue is completed & verified
> * This, along with engagement in the community, helps us know which features are a priority to our users.
<a href="https://polar.sh/litestar-org/litestar/issues/2813">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://polar.sh/api/github/litestar-org/litestar/issues/2813/pledge.svg?darkmode=1">
<img alt="Fund with Polar" src="https://polar.sh/api/github/litestar-org/litestar/issues/2813/pledge.svg">
</picture>
</a>
<!-- POLAR PLEDGE BADGE END -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/examples/channels/run_in_background.py`
Content:
```
1 from litestar import Litestar, WebSocket, websocket
2 from litestar.channels import ChannelsPlugin
3 from litestar.channels.backends.memory import MemoryChannelsBackend
4
5
6 @websocket("/ws")
7 async def handler(socket: WebSocket, channels: ChannelsPlugin) -> None:
8 await socket.accept()
9
10 async with channels.subscribe(["some_channel"]) as subscriber, subscriber.run_in_background(socket.send_text):
11 while True:
12 await socket.receive_text()
13 # do something with the message here
14
15
16 app = Litestar(
17 [handler],
18 plugins=[ChannelsPlugin(backend=MemoryChannelsBackend())],
19 )
20
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docs/examples/channels/run_in_background.py b/docs/examples/channels/run_in_background.py
--- a/docs/examples/channels/run_in_background.py
+++ b/docs/examples/channels/run_in_background.py
@@ -7,13 +7,13 @@
async def handler(socket: WebSocket, channels: ChannelsPlugin) -> None:
await socket.accept()
- async with channels.subscribe(["some_channel"]) as subscriber, subscriber.run_in_background(socket.send_text):
+ async with await channels.subscribe(["some_channel"]) as subscriber, subscriber.run_in_background(socket.send_text):
while True:
- await socket.receive_text()
- # do something with the message here
+ response = await socket.receive_text()
+ await subscriber.send(response)
app = Litestar(
[handler],
- plugins=[ChannelsPlugin(backend=MemoryChannelsBackend())],
+ plugins=[ChannelsPlugin(backend=MemoryChannelsBackend(), channels=["some_channel"])],
)
| {"golden_diff": "diff --git a/docs/examples/channels/run_in_background.py b/docs/examples/channels/run_in_background.py\n--- a/docs/examples/channels/run_in_background.py\n+++ b/docs/examples/channels/run_in_background.py\n@@ -7,13 +7,13 @@\n async def handler(socket: WebSocket, channels: ChannelsPlugin) -> None:\n await socket.accept()\n \n- async with channels.subscribe([\"some_channel\"]) as subscriber, subscriber.run_in_background(socket.send_text):\n+ async with await channels.subscribe([\"some_channel\"]) as subscriber, subscriber.run_in_background(socket.send_text):\n while True:\n- await socket.receive_text()\n- # do something with the message here\n+ response = await socket.receive_text()\n+ await subscriber.send(response)\n \n \n app = Litestar(\n [handler],\n- plugins=[ChannelsPlugin(backend=MemoryChannelsBackend())],\n+ plugins=[ChannelsPlugin(backend=MemoryChannelsBackend(), channels=[\"some_channel\"])],\n )\n", "issue": "Docs: Channels run_in_background.py example does not work\n### Summary\r\n\r\nHi,\r\n\r\ni just tried to run the channels example \"run_in_background.py\" from the [website](https://docs.litestar.dev/2/usage/channels.html) (which translates to [run_in_background.py](https://github.com/litestar-org/litestar/blob/main/docs/examples/channels/run_in_background.py)) and found that it didn't work for me. \r\n\r\nI'm running Python 3.8.10 in a virtual environment with Litestar 2.4.1.\r\n\r\nThe application contains exacly the same source code that is provided in the example file and was run using `uvicorn app:app --reload`\r\n\r\nLog output: \r\n\r\n```\r\nINFO: Started reloader process [219108] using WatchFiles\r\nProcess SpawnProcess-1:\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python3.8/multiprocessing/process.py\", line 315, in _bootstrap\r\n self.run()\r\n File \"/usr/lib/python3.8/multiprocessing/process.py\", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File \"/home/.../venv/lib/python3.8/site-packages/uvicorn/_subprocess.py\", line 76, in subprocess_started\r\n target(sockets=sockets)\r\n File \"/home/.../venv/lib/python3.8/site-packages/uvicorn/server.py\", line 61, in run\r\n return asyncio.run(self.serve(sockets=sockets))\r\n File \"/usr/lib/python3.8/asyncio/runners.py\", line 44, in run\r\n return loop.run_until_complete(main)\r\n File \"uvloop/loop.pyx\", line 1517, in uvloop.loop.Loop.run_until_complete\r\n File \"/home/.../venv/lib/python3.8/site-packages/uvicorn/server.py\", line 68, in serve\r\n config.load()\r\n File \"/home/.../venv/lib/python3.8/site-packages/uvicorn/config.py\", line 467, in load\r\n self.loaded_app = import_from_string(self.app)\r\n File \"/home/.../venv/lib/python3.8/site-packages/uvicorn/importer.py\", line 21, in import_from_string\r\n module = importlib.import_module(module_str)\r\n File \"/usr/lib/python3.8/importlib/__init__.py\", line 127, in import_module\r\n return _bootstrap._gcd_import(name[level:], package, level)\r\n File \"<frozen importlib._bootstrap>\", line 1014, in _gcd_import\r\n File \"<frozen importlib._bootstrap>\", line 991, in _find_and_load\r\n File \"<frozen importlib._bootstrap>\", line 975, in _find_and_load_unlocked\r\n File \"<frozen importlib._bootstrap>\", line 671, in _load_unlocked\r\n File \"<frozen importlib._bootstrap_external>\", line 848, in exec_module\r\n File \"<frozen importlib._bootstrap>\", line 219, in _call_with_frames_removed\r\n File \"/home/.../source/app.py\", line 18, in <module>\r\n 
plugins=[ChannelsPlugin(backend=MemoryChannelsBackend())],\r\n File \"/home/.../venv/lib/python3.8/site-packages/litestar/channels/plugin.py\", line 82, in __init__\r\n raise ImproperlyConfiguredException(\"Must define either channels or set arbitrary_channels_allowed=True\")\r\nlitestar.exceptions.http_exceptions.ImproperlyConfiguredException: 500: Must define either channels or set arbitrary_channels_allowed=True\r\n```\r\n\r\nThis error is easy to resolve. I've changed \r\n\r\n plugins=[ChannelsPlugin(backend=MemoryChannelsBackend())],\r\n\r\n to\r\n\r\n plugins=[ChannelsPlugin(backend=MemoryChannelsBackend(), channels=[\"general\"])],\r\n\r\nand the app started up successfully. \r\n\r\nBut there seems to be another problem. As soon as my browser connects to the websocket, I get the following warning:\r\n\r\n```\r\nINFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit)\r\nINFO: Started reloader process [219218] using WatchFiles\r\nINFO: Started server process [219220]\r\nINFO: Waiting for application startup.\r\nINFO: Application startup complete.\r\nINFO: ('127.0.0.1', 33682) - \"WebSocket /ws\" [accepted]\r\n/home/...../source/app.py:10: RuntimeWarning: coroutine 'ChannelsPlugin.subscribe' was never awaited\r\n async with channels.subscribe([\"some_channel\"]) as subscriber, subscriber.run_in_background(socket.send_text):\r\nRuntimeWarning: Enable tracemalloc to get the object allocation traceback\r\nINFO: connection open\r\nINFO: connection closed\r\n```\r\n\r\nI'm not sure how to resolve this problem. Probably the documentation does not refect the current state of Litestar? Could you check if the example is still valid? Thank you :-) \r\n\r\n\r\n\r\n<!-- POLAR PLEDGE BADGE START -->\r\n---\r\n> [!NOTE] \r\n> While we are open for sponsoring on [GitHub Sponsors](https://github.com/sponsors/litestar-org/) and \r\n> [OpenCollective](https://opencollective.com/litestar), we also utilize [Polar.sh](https://polar.sh/) to engage in pledge-based sponsorship.\r\n>\r\n> Check out all issues funded or available for funding [on our Polar.sh Litestar dashboard](https://polar.sh/litestar-org)\r\n> * If you would like to see an issue prioritized, make a pledge towards it!\r\n> * We receive the pledge once the issue is completed & verified\r\n> * This, along with engagement in the community, helps us know which features are a priority to our users.\r\n\r\n<a href=\"https://polar.sh/litestar-org/litestar/issues/2813\">\r\n<picture>\r\n <source media=\"(prefers-color-scheme: dark)\" srcset=\"https://polar.sh/api/github/litestar-org/litestar/issues/2813/pledge.svg?darkmode=1\">\r\n <img alt=\"Fund with Polar\" src=\"https://polar.sh/api/github/litestar-org/litestar/issues/2813/pledge.svg\">\r\n</picture>\r\n</a>\r\n<!-- POLAR PLEDGE BADGE END -->\r\n\n", "before_files": [{"content": "from litestar import Litestar, WebSocket, websocket\nfrom litestar.channels import ChannelsPlugin\nfrom litestar.channels.backends.memory import MemoryChannelsBackend\n\n\n@websocket(\"/ws\")\nasync def handler(socket: WebSocket, channels: ChannelsPlugin) -> None:\n await socket.accept()\n\n async with channels.subscribe([\"some_channel\"]) as subscriber, subscriber.run_in_background(socket.send_text):\n while True:\n await socket.receive_text()\n # do something with the message here\n\n\napp = Litestar(\n [handler],\n plugins=[ChannelsPlugin(backend=MemoryChannelsBackend())],\n)\n", "path": "docs/examples/channels/run_in_background.py"}], "after_files": [{"content": "from litestar import Litestar, 
WebSocket, websocket\nfrom litestar.channels import ChannelsPlugin\nfrom litestar.channels.backends.memory import MemoryChannelsBackend\n\n\n@websocket(\"/ws\")\nasync def handler(socket: WebSocket, channels: ChannelsPlugin) -> None:\n await socket.accept()\n\n async with await channels.subscribe([\"some_channel\"]) as subscriber, subscriber.run_in_background(socket.send_text):\n while True:\n response = await socket.receive_text()\n await subscriber.send(response)\n\n\napp = Litestar(\n [handler],\n plugins=[ChannelsPlugin(backend=MemoryChannelsBackend(), channels=[\"some_channel\"])],\n)\n", "path": "docs/examples/channels/run_in_background.py"}]} | 1,813 | 201 |
gh_patches_debug_30812 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-3311 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Spider xpo_logistics is broken
During the global build at 2021-10-20-14-42-48, spider **xpo_logistics** failed with **0 features** and **1 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-10-20-14-42-48/logs/xpo_logistics.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-10-20-14-42-48/output/xpo_logistics.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-10-20-14-42-48/output/xpo_logistics.geojson))
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/spiders/xpo_logistics.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 import scrapy
3 import re
4 import ast
5 from locations.items import GeojsonPointItem
6
7 class XPOLogisticsSpider(scrapy.Spider):
8 name = "xpo_logistics"
9 item_attributes = { 'brand': "XPO Logistics" }
10 allowed_domains = ["www.xpo.com"]
11 start_urls = (
12 'https://www.xpo.com/global-locations/',
13 )
14
15 def parse(self, response):
16 script = response.xpath('//script[contains(.,"globalLocationsArray")]').extract_first()
17 data = re.search(r'globalLocationsArray = (.*);', script).groups()[0]
18 data = ast.literal_eval(data)
19
20 for store in data:
21 yield GeojsonPointItem(
22 lat=float(store['latitude']),
23 lon=float(store['longitude'].replace(',','')),
24 phone=store['telephone'],
25 ref=store['office_name'],
26 addr_full=store['street'],
27 city=store['city'],
28 state=store['state'],
29 postcode=store['postal_code'],
30 country=store['country'],
31 name=store['office_name']
32 )
33
34
35
```
--- END FILES ---
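For context on the failure: the most likely cause is that the page at `https://www.xpo.com/global-locations/` no longer embeds a `globalLocationsArray = [...];` assignment that the regex can match, so `re.search(...)` returns `None` and the spider yields zero features. A minimal sketch of a more robust `parse`, assuming the data is instead exposed in a `<script id="globalLocations">` element (that element id is an assumption mirrored from the patch further below, not something verified against the live page), might look like:

```python
# Illustrative sketch, not the canonical fix: read the location list from a
# dedicated <script id="globalLocations"> element instead of regex-matching a
# "globalLocationsArray = ...;" JavaScript assignment.
import ast

import scrapy

from locations.items import GeojsonPointItem


class XPOLogisticsSpider(scrapy.Spider):
    name = "xpo_logistics"
    allowed_domains = ["www.xpo.com"]
    start_urls = ("https://www.xpo.com/global-locations/",)

    def parse(self, response):
        script = response.xpath('//script[@id="globalLocations"]/text()').extract_first()
        if not script:
            return  # markup changed again; yield nothing rather than crash
        for store in ast.literal_eval(script):
            yield GeojsonPointItem(
                lat=float(store["latitude"]),
                lon=float(store["longitude"].replace(",", "")),
                phone=store["telephone"],
                ref=f"{store['office_name']}-{store['postal_code']}",
                addr_full=store["street"],
                city=store["city"],
                state=store["state"],
                postcode=store["postal_code"],
                country=store["country"],
                name=store["office_name"],
            )
```

Guarding against a missing script element keeps the spider from raising when the markup changes again, and including the postal code in `ref` avoids duplicate references for offices sharing a name.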
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/locations/spiders/xpo_logistics.py b/locations/spiders/xpo_logistics.py
--- a/locations/spiders/xpo_logistics.py
+++ b/locations/spiders/xpo_logistics.py
@@ -1,28 +1,29 @@
# -*- coding: utf-8 -*-
-import scrapy
-import re
import ast
+
+import scrapy
+
from locations.items import GeojsonPointItem
+
class XPOLogisticsSpider(scrapy.Spider):
name = "xpo_logistics"
- item_attributes = { 'brand': "XPO Logistics" }
+ item_attributes = { 'brand': "XPO Logistics", 'brand_wikidata': 'Q8042415' }
allowed_domains = ["www.xpo.com"]
start_urls = (
'https://www.xpo.com/global-locations/',
)
def parse(self, response):
- script = response.xpath('//script[contains(.,"globalLocationsArray")]').extract_first()
- data = re.search(r'globalLocationsArray = (.*);', script).groups()[0]
- data = ast.literal_eval(data)
+ script = response.xpath('//script[@id="globalLocations"]/text()').extract_first()
+ data = ast.literal_eval(script)
for store in data:
yield GeojsonPointItem(
lat=float(store['latitude']),
lon=float(store['longitude'].replace(',','')),
phone=store['telephone'],
- ref=store['office_name'],
+ ref=f"{store['office_name']}-{store['postal_code']}",
addr_full=store['street'],
city=store['city'],
state=store['state'],
@@ -30,5 +31,3 @@
country=store['country'],
name=store['office_name']
)
-
-
| {"golden_diff": "diff --git a/locations/spiders/xpo_logistics.py b/locations/spiders/xpo_logistics.py\n--- a/locations/spiders/xpo_logistics.py\n+++ b/locations/spiders/xpo_logistics.py\n@@ -1,28 +1,29 @@\n # -*- coding: utf-8 -*-\n-import scrapy\n-import re\n import ast\n+\n+import scrapy\n+\n from locations.items import GeojsonPointItem\n \n+\n class XPOLogisticsSpider(scrapy.Spider):\n name = \"xpo_logistics\"\n- item_attributes = { 'brand': \"XPO Logistics\" }\n+ item_attributes = { 'brand': \"XPO Logistics\", 'brand_wikidata': 'Q8042415' }\n allowed_domains = [\"www.xpo.com\"]\n start_urls = (\n 'https://www.xpo.com/global-locations/',\n )\n \n def parse(self, response):\n- script = response.xpath('//script[contains(.,\"globalLocationsArray\")]').extract_first()\n- data = re.search(r'globalLocationsArray = (.*);', script).groups()[0]\n- data = ast.literal_eval(data)\n+ script = response.xpath('//script[@id=\"globalLocations\"]/text()').extract_first()\n+ data = ast.literal_eval(script)\n \n for store in data:\n yield GeojsonPointItem(\n lat=float(store['latitude']),\n lon=float(store['longitude'].replace(',','')),\n phone=store['telephone'],\n- ref=store['office_name'],\n+ ref=f\"{store['office_name']}-{store['postal_code']}\",\n addr_full=store['street'],\n city=store['city'],\n state=store['state'],\n@@ -30,5 +31,3 @@\n country=store['country'],\n name=store['office_name']\n )\n-\n-\n", "issue": "Spider xpo_logistics is broken\nDuring the global build at 2021-10-20-14-42-48, spider **xpo_logistics** failed with **0 features** and **1 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-10-20-14-42-48/logs/xpo_logistics.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-10-20-14-42-48/output/xpo_logistics.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-10-20-14-42-48/output/xpo_logistics.geojson))\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport scrapy\nimport re\nimport ast\nfrom locations.items import GeojsonPointItem\n\nclass XPOLogisticsSpider(scrapy.Spider):\n name = \"xpo_logistics\"\n item_attributes = { 'brand': \"XPO Logistics\" }\n allowed_domains = [\"www.xpo.com\"]\n start_urls = (\n 'https://www.xpo.com/global-locations/',\n )\n\n def parse(self, response):\n script = response.xpath('//script[contains(.,\"globalLocationsArray\")]').extract_first()\n data = re.search(r'globalLocationsArray = (.*);', script).groups()[0]\n data = ast.literal_eval(data)\n\n for store in data:\n yield GeojsonPointItem(\n lat=float(store['latitude']),\n lon=float(store['longitude'].replace(',','')),\n phone=store['telephone'],\n ref=store['office_name'],\n addr_full=store['street'],\n city=store['city'],\n state=store['state'],\n postcode=store['postal_code'],\n country=store['country'],\n name=store['office_name']\n )\n\n\n", "path": "locations/spiders/xpo_logistics.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nimport ast\n\nimport scrapy\n\nfrom locations.items import GeojsonPointItem\n\n\nclass XPOLogisticsSpider(scrapy.Spider):\n name = \"xpo_logistics\"\n item_attributes = { 'brand': \"XPO Logistics\", 'brand_wikidata': 'Q8042415' }\n allowed_domains = [\"www.xpo.com\"]\n start_urls = (\n 'https://www.xpo.com/global-locations/',\n )\n\n def parse(self, response):\n script = response.xpath('//script[@id=\"globalLocations\"]/text()').extract_first()\n data = ast.literal_eval(script)\n\n for store in data:\n yield GeojsonPointItem(\n lat=float(store['latitude']),\n 
lon=float(store['longitude'].replace(',','')),\n phone=store['telephone'],\n ref=f\"{store['office_name']}-{store['postal_code']}\",\n addr_full=store['street'],\n city=store['city'],\n state=store['state'],\n postcode=store['postal_code'],\n country=store['country'],\n name=store['office_name']\n )\n", "path": "locations/spiders/xpo_logistics.py"}]} | 754 | 393 |
gh_patches_debug_18968 | rasdani/github-patches | git_diff | litestar-org__litestar-1676 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
--- END ISSUE ---
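To make the request concrete: a file system backed by package data has no on-disk directory for pydantic's `DirectoryPath` to validate, which is exactly what breaks for zipped installs. Below is a rough sketch of such a protocol implementation using `importlib_resources`; the method names (`open`, `exists`) are assumptions about what the protocol requires, not taken from the library's actual `FileSystemProtocol` definition:

```python
# Hypothetical sketch: serve static files from package data (works even when the
# package is installed as a zip), where no real directory exists on disk.
import importlib_resources


class PackageDataFileSystem:
    def __init__(self, package: str, subdir: str = "static") -> None:
        # A Traversable rooted inside the package - not a filesystem path.
        self._root = importlib_resources.files(package) / subdir

    def open(self, path: str):  # assumed protocol method name
        return self._root.joinpath(path).open("rb")

    def exists(self, path: str) -> bool:  # assumed protocol method name
        target = self._root.joinpath(path)
        return target.is_file() or target.is_dir()
```

Because `self._root` may live inside a wheel or zip archive, any config field typed as `DirectoryPath` rejects it up front, which is why relaxing that validation (for example to `Path` or `str`) is what virtual filesystems need.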
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `litestar/contrib/sqlalchemy/types.py`
Content:
```
1 from __future__ import annotations
2
3 import uuid
4 from typing import TYPE_CHECKING, Any, cast
5
6 from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB
7 from sqlalchemy.dialects.postgresql import UUID as PG_UUID
8 from sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, TypeDecorator
9 from sqlalchemy.types import JSON as _JSON
10
11 if TYPE_CHECKING:
12 from sqlalchemy.engine import Dialect
13
14 BigIntIdentity = BigInteger().with_variant(Integer, "sqlite")
15 """Platform-independent BigInteger Primary Key.
16
17 User a Big Integer on engines that support it.
18
19 Uses Integer for sqlite since there is no
20
21 """
22
23
24 class GUID(TypeDecorator):
25 """Platform-independent GUID type.
26
27 Uses PostgreSQL's UUID type, otherwise uses
28 BINARY(16) or CHAR(32), storing as stringified hex values.
29
30 Will accept stringified UUIDs as a hexstring or an actual UUID
31
32 """
33
34 impl = BINARY(16)
35 cache_ok = True
36 python_type = type(uuid.UUID)
37
38 def __init__(self, binary: bool = True) -> None:
39 self.binary = binary
40
41 def load_dialect_impl(self, dialect: Dialect) -> Any:
42 if dialect.name == "postgresql":
43 return dialect.type_descriptor(PG_UUID())
44 if self.binary:
45 return dialect.type_descriptor(BINARY(16))
46 return dialect.type_descriptor(CHAR(32))
47
48 def process_bind_param(self, value: bytes | str | uuid.UUID | None, dialect: Dialect) -> bytes | str | None:
49 if value is None:
50 return value
51 if dialect.name == "postgresql":
52 return str(value)
53 value = self.to_uuid(value)
54 if value is None:
55 return value
56 return value.bytes if self.binary else value.hex
57
58 def process_result_value(self, value: bytes | str | uuid.UUID | None, dialect: Dialect) -> uuid.UUID | None:
59 if value is None:
60 return value
61 if isinstance(value, uuid.UUID):
62 return value
63 if self.binary:
64 return uuid.UUID(bytes=cast("bytes", value))
65 return uuid.UUID(hex=cast("str", value))
66
67 @staticmethod
68 def to_uuid(value: Any) -> uuid.UUID | None:
69 if isinstance(value, uuid.UUID) or value is None:
70 return value
71 try:
72 value = uuid.UUID(hex=value)
73 except (TypeError, ValueError):
74 value = uuid.UUID(bytes=value)
75 return cast("uuid.UUID | None", value)
76
77
78 class JSON(_JSON):
79 """Platform-independent JSON type.
80
81 Uses JSONB type for postgres, otherwise uses the generic JSON data type.
82 """
83
84 def load_dialect_impl(self, dialect: Dialect) -> Any:
85 if dialect.name == "postgresql":
86 return dialect.type_descriptor(PG_JSONB()) # type: ignore[no-untyped-call]
87 return dialect.type_descriptor(_JSON())
88
```
--- END FILES ---
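Worth noting alongside the custom `JSON(_JSON)` subclass above: SQLAlchemy already provides a declarative way to express "generic JSON, but JSONB on PostgreSQL" via `with_variant`, without overriding `load_dialect_impl`. A minimal sketch of that direction (which is also what the patch below adopts):

```python
# Sketch: a platform-independent JSON type expressed as a variant, rather than a
# subclass that switches dialects by hand.
from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB
from sqlalchemy.types import JSON as _JSON

JSON = _JSON().with_variant(PG_JSONB, "postgresql")
"""Generic JSON everywhere; JSONB when the bind dialect is postgresql."""
```

The same `with_variant` pattern is what `BigIntIdentity` in the file above already uses for its SQLite fallback, so the two type declarations end up consistent.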
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/litestar/contrib/sqlalchemy/types.py b/litestar/contrib/sqlalchemy/types.py
--- a/litestar/contrib/sqlalchemy/types.py
+++ b/litestar/contrib/sqlalchemy/types.py
@@ -35,7 +35,7 @@
cache_ok = True
python_type = type(uuid.UUID)
- def __init__(self, binary: bool = True) -> None:
+ def __init__(self, *args: Any, binary: bool = True, **kwargs: Any) -> None:
self.binary = binary
def load_dialect_impl(self, dialect: Dialect) -> Any:
@@ -75,13 +75,8 @@
return cast("uuid.UUID | None", value)
-class JSON(_JSON):
- """Platform-independent JSON type.
+JSON = _JSON().with_variant(PG_JSONB, "postgresql")
+"""Platform-independent JSON type.
Uses JSONB type for postgres, otherwise uses the generic JSON data type.
- """
-
- def load_dialect_impl(self, dialect: Dialect) -> Any:
- if dialect.name == "postgresql":
- return dialect.type_descriptor(PG_JSONB()) # type: ignore[no-untyped-call]
- return dialect.type_descriptor(_JSON())
+"""
| {"golden_diff": "diff --git a/litestar/contrib/sqlalchemy/types.py b/litestar/contrib/sqlalchemy/types.py\n--- a/litestar/contrib/sqlalchemy/types.py\n+++ b/litestar/contrib/sqlalchemy/types.py\n@@ -35,7 +35,7 @@\n cache_ok = True\n python_type = type(uuid.UUID)\n \n- def __init__(self, binary: bool = True) -> None:\n+ def __init__(self, *args: Any, binary: bool = True, **kwargs: Any) -> None:\n self.binary = binary\n \n def load_dialect_impl(self, dialect: Dialect) -> Any:\n@@ -75,13 +75,8 @@\n return cast(\"uuid.UUID | None\", value)\n \n \n-class JSON(_JSON):\n- \"\"\"Platform-independent JSON type.\n+JSON = _JSON().with_variant(PG_JSONB, \"postgresql\")\n+\"\"\"Platform-independent JSON type.\n \n Uses JSONB type for postgres, otherwise uses the generic JSON data type.\n- \"\"\"\n-\n- def load_dialect_impl(self, dialect: Dialect) -> Any:\n- if dialect.name == \"postgresql\":\n- return dialect.type_descriptor(PG_JSONB()) # type: ignore[no-untyped-call]\n- return dialect.type_descriptor(_JSON())\n+\"\"\"\n", "issue": "StaticFilesConfig and virtual directories\nI'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem. \r\n\r\nThis is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.\r\n\r\nhttps://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32\n", "before_files": [{"content": "from __future__ import annotations\n\nimport uuid\nfrom typing import TYPE_CHECKING, Any, cast\n\nfrom sqlalchemy.dialects.postgresql import JSONB as PG_JSONB\nfrom sqlalchemy.dialects.postgresql import UUID as PG_UUID\nfrom sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, TypeDecorator\nfrom sqlalchemy.types import JSON as _JSON\n\nif TYPE_CHECKING:\n from sqlalchemy.engine import Dialect\n\nBigIntIdentity = BigInteger().with_variant(Integer, \"sqlite\")\n\"\"\"Platform-independent BigInteger Primary Key.\n\nUser a Big Integer on engines that support it.\n\nUses Integer for sqlite since there is no\n\n\"\"\"\n\n\nclass GUID(TypeDecorator):\n \"\"\"Platform-independent GUID type.\n\n Uses PostgreSQL's UUID type, otherwise uses\n BINARY(16) or CHAR(32), storing as stringified hex values.\n\n Will accept stringified UUIDs as a hexstring or an actual UUID\n\n \"\"\"\n\n impl = BINARY(16)\n cache_ok = True\n python_type = type(uuid.UUID)\n\n def __init__(self, binary: bool = True) -> None:\n self.binary = binary\n\n def load_dialect_impl(self, dialect: Dialect) -> Any:\n if dialect.name == \"postgresql\":\n return dialect.type_descriptor(PG_UUID())\n if self.binary:\n return dialect.type_descriptor(BINARY(16))\n return dialect.type_descriptor(CHAR(32))\n\n def process_bind_param(self, value: bytes | str | uuid.UUID | None, dialect: Dialect) -> bytes | str | None:\n if value is None:\n return value\n if dialect.name == \"postgresql\":\n return str(value)\n value = self.to_uuid(value)\n if value is None:\n return value\n return value.bytes if self.binary else value.hex\n\n def process_result_value(self, value: bytes | str | uuid.UUID | None, dialect: Dialect) -> uuid.UUID | None:\n if value is None:\n return value\n if isinstance(value, uuid.UUID):\n return value\n if self.binary:\n 
return uuid.UUID(bytes=cast(\"bytes\", value))\n return uuid.UUID(hex=cast(\"str\", value))\n\n @staticmethod\n def to_uuid(value: Any) -> uuid.UUID | None:\n if isinstance(value, uuid.UUID) or value is None:\n return value\n try:\n value = uuid.UUID(hex=value)\n except (TypeError, ValueError):\n value = uuid.UUID(bytes=value)\n return cast(\"uuid.UUID | None\", value)\n\n\nclass JSON(_JSON):\n \"\"\"Platform-independent JSON type.\n\n Uses JSONB type for postgres, otherwise uses the generic JSON data type.\n \"\"\"\n\n def load_dialect_impl(self, dialect: Dialect) -> Any:\n if dialect.name == \"postgresql\":\n return dialect.type_descriptor(PG_JSONB()) # type: ignore[no-untyped-call]\n return dialect.type_descriptor(_JSON())\n", "path": "litestar/contrib/sqlalchemy/types.py"}], "after_files": [{"content": "from __future__ import annotations\n\nimport uuid\nfrom typing import TYPE_CHECKING, Any, cast\n\nfrom sqlalchemy.dialects.postgresql import JSONB as PG_JSONB\nfrom sqlalchemy.dialects.postgresql import UUID as PG_UUID\nfrom sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, TypeDecorator\nfrom sqlalchemy.types import JSON as _JSON\n\nif TYPE_CHECKING:\n from sqlalchemy.engine import Dialect\n\nBigIntIdentity = BigInteger().with_variant(Integer, \"sqlite\")\n\"\"\"Platform-independent BigInteger Primary Key.\n\nUser a Big Integer on engines that support it.\n\nUses Integer for sqlite since there is no\n\n\"\"\"\n\n\nclass GUID(TypeDecorator):\n \"\"\"Platform-independent GUID type.\n\n Uses PostgreSQL's UUID type, otherwise uses\n BINARY(16) or CHAR(32), storing as stringified hex values.\n\n Will accept stringified UUIDs as a hexstring or an actual UUID\n\n \"\"\"\n\n impl = BINARY(16)\n cache_ok = True\n python_type = type(uuid.UUID)\n\n def __init__(self, *args: Any, binary: bool = True, **kwargs: Any) -> None:\n self.binary = binary\n\n def load_dialect_impl(self, dialect: Dialect) -> Any:\n if dialect.name == \"postgresql\":\n return dialect.type_descriptor(PG_UUID())\n if self.binary:\n return dialect.type_descriptor(BINARY(16))\n return dialect.type_descriptor(CHAR(32))\n\n def process_bind_param(self, value: bytes | str | uuid.UUID | None, dialect: Dialect) -> bytes | str | None:\n if value is None:\n return value\n if dialect.name == \"postgresql\":\n return str(value)\n value = self.to_uuid(value)\n if value is None:\n return value\n return value.bytes if self.binary else value.hex\n\n def process_result_value(self, value: bytes | str | uuid.UUID | None, dialect: Dialect) -> uuid.UUID | None:\n if value is None:\n return value\n if isinstance(value, uuid.UUID):\n return value\n if self.binary:\n return uuid.UUID(bytes=cast(\"bytes\", value))\n return uuid.UUID(hex=cast(\"str\", value))\n\n @staticmethod\n def to_uuid(value: Any) -> uuid.UUID | None:\n if isinstance(value, uuid.UUID) or value is None:\n return value\n try:\n value = uuid.UUID(hex=value)\n except (TypeError, ValueError):\n value = uuid.UUID(bytes=value)\n return cast(\"uuid.UUID | None\", value)\n\n\nJSON = _JSON().with_variant(PG_JSONB, \"postgresql\")\n\"\"\"Platform-independent JSON type.\n\n Uses JSONB type for postgres, otherwise uses the generic JSON data type.\n\"\"\"\n", "path": "litestar/contrib/sqlalchemy/types.py"}]} | 1,226 | 282 |
gh_patches_debug_22809 | rasdani/github-patches | git_diff | mabel-dev__opteryx-1396 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
🪲 Incorrect error raised on type errors on JOINs
### Thank you for taking the time to report a problem with Opteryx.
_To help us to respond to your request we ask that you try to provide the below detail about the bug._
**Describe the bug** _A clear and specific description of what the bug is. What the error, incorrect or unexpected behaviour was._
**Expected behaviour** _A clear and concise description of what you expected to happen._
**Sample Code/Statement** _If you can, please submit the SQL statement or Python code snippet, or a representative example using the sample datasets._
~~~sql
~~~
**Additional context** _Add any other context about the problem here, for example what you have done to try to diagnose or workaround the problem._
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opteryx/operators/join_node.py`
Content:
```
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 # See the License for the specific language governing permissions and
11 # limitations under the License.
12
13 """
14 Join Node
15
16 This is a SQL Query Execution Plan Node.
17
18 This handles most of the join types as a wrapper for pyarrow's JOIN functions,
19 only CROSS JOINs are not handled here.
20
21 This is a faster implementation that the pyarrow_ops implementation, but hashes
22 the right table every call so carries a penalty.
23 """
24 from typing import Generator
25
26 import pyarrow
27
28 from opteryx.models import QueryProperties
29 from opteryx.operators import BasePlanNode
30
31
32 class JoinNode(BasePlanNode):
33 def __init__(self, properties: QueryProperties, **config):
34 super().__init__(properties=properties)
35 self._join_type = config["type"]
36 self._on = config.get("on")
37 self._using = config.get("using")
38
39 self._left_columns = config.get("left_columns")
40 self._left_relation = config.get("left_relation_names")
41
42 self._right_columns = config.get("right_columns")
43 self._right_relation = config.get("right_relation_names")
44
45 @property
46 def name(self): # pragma: no cover
47 return f"{self._join_type} Join"
48
49 @property
50 def config(self): # pragma: no cover
51 return ""
52
53 def execute(self) -> Generator:
54 left_node = self._producers[0] # type:ignore
55 right_node = self._producers[1] # type:ignore
56
57 right_table = pyarrow.concat_tables(right_node.execute(), mode="default")
58
59 for morsel in left_node.execute():
60 # in place until #1295 resolved
61 if not self._right_columns[0] in morsel.column_names:
62 self._right_columns, self._left_columns = self._left_columns, self._right_columns
63
64 try:
65 # do the join
66 new_morsel = morsel.join(
67 right_table,
68 keys=self._right_columns,
69 right_keys=self._left_columns,
70 join_type=self._join_type,
71 coalesce_keys=self._using is not None,
72 )
73 except pyarrow.ArrowInvalid as err: # pragma: no cover
74 last_token = str(err).split(" ")[-1]
75 column = None
76 for col in left_node.columns:
77 if last_token == col.identity:
78 column = col.name
79 break
80 for col in right_node.columns:
81 if last_token == col.identity:
82 column = col.name
83 break
84 if column:
85 raise pyarrow.ArrowInvalid(str(err).replace(last_token, f"'{column}'"))
86 raise err
87
88 yield new_morsel
89
```
Path: `opteryx/__version__.py`
Content:
```
1 __build__ = 226
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """
16 Store the version here so:
17 1) we don't load dependencies by storing it in __init__.py
18 2) we can import it in setup.py for the same reason
19 """
20 from enum import Enum # isort: skip
21
22
23 class VersionStatus(Enum):
24 ALPHA = "alpha"
25 BETA = "beta"
26 RELEASE = "release"
27
28
29 _major = 0
30 _minor = 12
31 _revision = 4
32 _status = VersionStatus.BETA
33
34 __version__ = f"{_major}.{_minor}.{_revision}" + (
35 f"-{_status.value}.{__build__}" if _status != VersionStatus.RELEASE else ""
36 )
37
```
--- END FILES ---
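For reference, the shape of the change is confined to the `except pyarrow.ArrowInvalid` block: rather than re-raising pyarrow's error (with or without the column name substituted in), raise Opteryx's own `UnsupportedSyntaxError` chained to the original. The fragment below is a sketch of that handler as it would sit inside `execute()`, not standalone code; `UnsupportedSyntaxError` is imported from `opteryx.exceptions` as in the patch further down:

```python
# Sketch of the reworked error handling inside JoinNode.execute().
from opteryx.exceptions import UnsupportedSyntaxError

try:
    new_morsel = morsel.join(
        right_table,
        keys=self._right_columns,
        right_keys=self._left_columns,
        join_type=self._join_type,
        coalesce_keys=self._using is not None,
    )
except pyarrow.ArrowInvalid as err:  # pragma: no cover
    # pyarrow names the offending column by its internal identity token;
    # map it back to the user-facing column name where possible.
    last_token = str(err).split(" ")[-1]
    column = None
    for col in left_node.columns:
        if last_token == col.identity:
            column = col.name
            break
    for col in right_node.columns:
        if last_token == col.identity:
            column = col.name
            break
    if column:
        raise UnsupportedSyntaxError(
            f"Unable to JOIN with unsupported column types in table, '{column}'."
        ) from err
    raise UnsupportedSyntaxError(
        "Unable to JOIN with unsupported column types in table."
    ) from err
```

Chaining with `from err` keeps the original Arrow message available for debugging while users see a consistent Opteryx error type instead of a leaked `pyarrow.ArrowInvalid`.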
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/opteryx/__version__.py b/opteryx/__version__.py
--- a/opteryx/__version__.py
+++ b/opteryx/__version__.py
@@ -1,4 +1,4 @@
-__build__ = 226
+__build__ = 228
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/opteryx/operators/join_node.py b/opteryx/operators/join_node.py
--- a/opteryx/operators/join_node.py
+++ b/opteryx/operators/join_node.py
@@ -25,6 +25,7 @@
import pyarrow
+from opteryx.exceptions import UnsupportedSyntaxError
from opteryx.models import QueryProperties
from opteryx.operators import BasePlanNode
@@ -82,7 +83,11 @@
column = col.name
break
if column:
- raise pyarrow.ArrowInvalid(str(err).replace(last_token, f"'{column}'"))
- raise err
+ raise UnsupportedSyntaxError(
+ f"Unable to JOIN with unsupported column types in table, '{column}'."
+ ) from err
+ raise UnsupportedSyntaxError(
+ "Unable to JOIN with unsupported column types in table."
+ ) from err
yield new_morsel
| {"golden_diff": "diff --git a/opteryx/__version__.py b/opteryx/__version__.py\n--- a/opteryx/__version__.py\n+++ b/opteryx/__version__.py\n@@ -1,4 +1,4 @@\n-__build__ = 226\n+__build__ = 228\n \n # Licensed under the Apache License, Version 2.0 (the \"License\");\n # you may not use this file except in compliance with the License.\ndiff --git a/opteryx/operators/join_node.py b/opteryx/operators/join_node.py\n--- a/opteryx/operators/join_node.py\n+++ b/opteryx/operators/join_node.py\n@@ -25,6 +25,7 @@\n \n import pyarrow\n \n+from opteryx.exceptions import UnsupportedSyntaxError\n from opteryx.models import QueryProperties\n from opteryx.operators import BasePlanNode\n \n@@ -82,7 +83,11 @@\n column = col.name\n break\n if column:\n- raise pyarrow.ArrowInvalid(str(err).replace(last_token, f\"'{column}'\"))\n- raise err\n+ raise UnsupportedSyntaxError(\n+ f\"Unable to JOIN with unsupported column types in table, '{column}'.\"\n+ ) from err\n+ raise UnsupportedSyntaxError(\n+ \"Unable to JOIN with unsupported column types in table.\"\n+ ) from err\n \n yield new_morsel\n", "issue": "\ud83e\udeb2 Incorrect error raised on type errors on JOINs\n### Thank you for taking the time to report a problem with Opteryx.\r\n_To help us to respond to your request we ask that you try to provide the below detail about the bug._\r\n\r\n**Describe the bug** _A clear and specific description of what the bug is. What the error, incorrect or unexpected behaviour was._\r\n\r\n\r\n**Expected behaviour** _A clear and concise description of what you expected to happen._\r\n\r\n\r\n**Sample Code/Statement** _If you can, please submit the SQL statement or Python code snippet, or a representative example using the sample datasets._\r\n\r\n~~~sql\r\n\r\n~~~\r\n\r\n**Additional context** _Add any other context about the problem here, for example what you have done to try to diagnose or workaround the problem._\r\n\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nJoin Node\n\nThis is a SQL Query Execution Plan Node.\n\nThis handles most of the join types as a wrapper for pyarrow's JOIN functions, \nonly CROSS JOINs are not handled here.\n\nThis is a faster implementation that the pyarrow_ops implementation, but hashes\nthe right table every call so carries a penalty.\n\"\"\"\nfrom typing import Generator\n\nimport pyarrow\n\nfrom opteryx.models import QueryProperties\nfrom opteryx.operators import BasePlanNode\n\n\nclass JoinNode(BasePlanNode):\n def __init__(self, properties: QueryProperties, **config):\n super().__init__(properties=properties)\n self._join_type = config[\"type\"]\n self._on = config.get(\"on\")\n self._using = config.get(\"using\")\n\n self._left_columns = config.get(\"left_columns\")\n self._left_relation = config.get(\"left_relation_names\")\n\n self._right_columns = config.get(\"right_columns\")\n self._right_relation = config.get(\"right_relation_names\")\n\n @property\n def name(self): # pragma: no cover\n return f\"{self._join_type} Join\"\n\n @property\n 
def config(self): # pragma: no cover\n return \"\"\n\n def execute(self) -> Generator:\n left_node = self._producers[0] # type:ignore\n right_node = self._producers[1] # type:ignore\n\n right_table = pyarrow.concat_tables(right_node.execute(), mode=\"default\")\n\n for morsel in left_node.execute():\n # in place until #1295 resolved\n if not self._right_columns[0] in morsel.column_names:\n self._right_columns, self._left_columns = self._left_columns, self._right_columns\n\n try:\n # do the join\n new_morsel = morsel.join(\n right_table,\n keys=self._right_columns,\n right_keys=self._left_columns,\n join_type=self._join_type,\n coalesce_keys=self._using is not None,\n )\n except pyarrow.ArrowInvalid as err: # pragma: no cover\n last_token = str(err).split(\" \")[-1]\n column = None\n for col in left_node.columns:\n if last_token == col.identity:\n column = col.name\n break\n for col in right_node.columns:\n if last_token == col.identity:\n column = col.name\n break\n if column:\n raise pyarrow.ArrowInvalid(str(err).replace(last_token, f\"'{column}'\"))\n raise err\n\n yield new_morsel\n", "path": "opteryx/operators/join_node.py"}, {"content": "__build__ = 226\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nStore the version here so:\n1) we don't load dependencies by storing it in __init__.py\n2) we can import it in setup.py for the same reason\n\"\"\"\nfrom enum import Enum # isort: skip\n\n\nclass VersionStatus(Enum):\n ALPHA = \"alpha\"\n BETA = \"beta\"\n RELEASE = \"release\"\n\n\n_major = 0\n_minor = 12\n_revision = 4\n_status = VersionStatus.BETA\n\n__version__ = f\"{_major}.{_minor}.{_revision}\" + (\n f\"-{_status.value}.{__build__}\" if _status != VersionStatus.RELEASE else \"\"\n)\n", "path": "opteryx/__version__.py"}], "after_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nJoin Node\n\nThis is a SQL Query Execution Plan Node.\n\nThis handles most of the join types as a wrapper for pyarrow's JOIN functions, \nonly CROSS JOINs are not handled here.\n\nThis is a faster implementation that the pyarrow_ops implementation, but hashes\nthe right table every call so carries a penalty.\n\"\"\"\nfrom typing import Generator\n\nimport pyarrow\n\nfrom opteryx.exceptions import UnsupportedSyntaxError\nfrom opteryx.models import QueryProperties\nfrom opteryx.operators import BasePlanNode\n\n\nclass JoinNode(BasePlanNode):\n def __init__(self, properties: QueryProperties, **config):\n super().__init__(properties=properties)\n self._join_type = 
config[\"type\"]\n self._on = config.get(\"on\")\n self._using = config.get(\"using\")\n\n self._left_columns = config.get(\"left_columns\")\n self._left_relation = config.get(\"left_relation_names\")\n\n self._right_columns = config.get(\"right_columns\")\n self._right_relation = config.get(\"right_relation_names\")\n\n @property\n def name(self): # pragma: no cover\n return f\"{self._join_type} Join\"\n\n @property\n def config(self): # pragma: no cover\n return \"\"\n\n def execute(self) -> Generator:\n left_node = self._producers[0] # type:ignore\n right_node = self._producers[1] # type:ignore\n\n right_table = pyarrow.concat_tables(right_node.execute(), mode=\"default\")\n\n for morsel in left_node.execute():\n # in place until #1295 resolved\n if not self._right_columns[0] in morsel.column_names:\n self._right_columns, self._left_columns = self._left_columns, self._right_columns\n\n try:\n # do the join\n new_morsel = morsel.join(\n right_table,\n keys=self._right_columns,\n right_keys=self._left_columns,\n join_type=self._join_type,\n coalesce_keys=self._using is not None,\n )\n except pyarrow.ArrowInvalid as err: # pragma: no cover\n last_token = str(err).split(\" \")[-1]\n column = None\n for col in left_node.columns:\n if last_token == col.identity:\n column = col.name\n break\n for col in right_node.columns:\n if last_token == col.identity:\n column = col.name\n break\n if column:\n raise UnsupportedSyntaxError(\n f\"Unable to JOIN with unsupported column types in table, '{column}'.\"\n ) from err\n raise UnsupportedSyntaxError(\n \"Unable to JOIN with unsupported column types in table.\"\n ) from err\n\n yield new_morsel\n", "path": "opteryx/operators/join_node.py"}, {"content": "__build__ = 228\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nStore the version here so:\n1) we don't load dependencies by storing it in __init__.py\n2) we can import it in setup.py for the same reason\n\"\"\"\nfrom enum import Enum # isort: skip\n\n\nclass VersionStatus(Enum):\n ALPHA = \"alpha\"\n BETA = \"beta\"\n RELEASE = \"release\"\n\n\n_major = 0\n_minor = 12\n_revision = 4\n_status = VersionStatus.BETA\n\n__version__ = f\"{_major}.{_minor}.{_revision}\" + (\n f\"-{_status.value}.{__build__}\" if _status != VersionStatus.RELEASE else \"\"\n)\n", "path": "opteryx/__version__.py"}]} | 1,643 | 309 |
gh_patches_debug_39387 | rasdani/github-patches | git_diff | uccser__cs-unplugged-1035 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Modify Job Badges printable to use one string per badge
Currently the way the strings are broken for translation doesn't support all languages (for example: Māori has a word after the emphasis of the role).
--- END ISSUE ---
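The underlying i18n point: concatenating `_("Hello, I'm a")` with a separately translated role name forces English word order onto every language. Keeping one complete sentence per badge gives translators full control. A minimal sketch of the label data, mirroring the patch shown later in this entry:

```python
# Sketch: one translatable string per badge, so the whole phrase can be reordered
# per language (e.g. Māori places a word after the role).
from django.utils.translation import ugettext_lazy as _

LABEL_DATA = {
    "programmer": _("Hello, I'm a Programmer"),
    "tester": _("Hello, I'm a Tester"),
    "bot": _("Hello, I'm a Bot"),
}
```

Each phrase is then drawn into a single text box per badge instead of two stacked "Hello, I'm a" / role boxes, which is why the resource generator also needs reworking.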
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `csunplugged/resources/generators/JobBadgesResourceGenerator.py`
Content:
```
1 """Class for Job Badges resource generator."""
2
3 from PIL import Image, ImageDraw
4 from resources.utils.BaseResourceGenerator import BaseResourceGenerator
5 from django.utils.translation import ugettext_lazy as _
6 from utils.TextBoxDrawer import TextBoxDrawer
7
8
9 class JobBadgesResourceGenerator(BaseResourceGenerator):
10 """Class for Job Badges resource generator."""
11
12 def data(self):
13 """Create data for a copy of the Job Badges resource.
14
15 Returns:
16 A dictionary of the one page for the resource.
17 """
18 path = "static/img/resources/job-badges/job-badges"
19 image_path = "{}.png".format(path)
20 svg_path = "{}.svg".format(path)
21 image = Image.open(image_path)
22
23 draw = ImageDraw.Draw(image)
24 textbox_drawer = TextBoxDrawer(image, draw, svg_path)
25
26 hello_ids = [
27 "programmer_hello1",
28 "programmer_hello2",
29 "tester_hello1",
30 "tester_hello2",
31 "bot_hello1",
32 "bot_hello2",
33 ]
34
35 for hello_id in hello_ids:
36 textbox_drawer.write_text_box(
37 hello_id,
38 _("Hello, I'm a"),
39 horiz_just="center"
40 )
41
42 for i in range(1, 3):
43 textbox_drawer.write_text_box(
44 "programmer{}".format(i),
45 _("Programmer"),
46 horiz_just="center"
47 )
48 textbox_drawer.write_text_box(
49 "tester{}".format(i),
50 _("Tester"),
51 horiz_just="center"
52 )
53 textbox_drawer.write_text_box(
54 "bot{}".format(i),
55 _("Bot"),
56 horiz_just="center"
57 )
58
59 return {"type": "image", "data": image}
60
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/csunplugged/resources/generators/JobBadgesResourceGenerator.py b/csunplugged/resources/generators/JobBadgesResourceGenerator.py
--- a/csunplugged/resources/generators/JobBadgesResourceGenerator.py
+++ b/csunplugged/resources/generators/JobBadgesResourceGenerator.py
@@ -1,9 +1,20 @@
"""Class for Job Badges resource generator."""
from PIL import Image, ImageDraw
-from resources.utils.BaseResourceGenerator import BaseResourceGenerator
+from utils.TextBoxDrawer import TextBox, TextBoxDrawer
from django.utils.translation import ugettext_lazy as _
-from utils.TextBoxDrawer import TextBoxDrawer
+from resources.utils.BaseResourceGenerator import BaseResourceGenerator
+from resources.utils.coords import calculate_box_vertices
+
+FONT_PATH = "static/fonts/PatrickHand-Regular.ttf"
+FONT_SIZE = 200
+TEXTBOX_WIDTH = 1000
+TEXTBOX_HEIGHT = 275
+LABEL_DATA = {
+ "programmer": _("Hello, I'm a Programmer"),
+ "tester": _("Hello, I'm a Tester"),
+ "bot": _("Hello, I'm a Bot"),
+}
class JobBadgesResourceGenerator(BaseResourceGenerator):
@@ -15,45 +26,38 @@
Returns:
A dictionary of the one page for the resource.
"""
- path = "static/img/resources/job-badges/job-badges"
- image_path = "{}.png".format(path)
- svg_path = "{}.svg".format(path)
+ image_path = "static/img/resources/job-badges/job-badges.png"
image = Image.open(image_path)
-
draw = ImageDraw.Draw(image)
- textbox_drawer = TextBoxDrawer(image, draw, svg_path)
-
- hello_ids = [
- "programmer_hello1",
- "programmer_hello2",
- "tester_hello1",
- "tester_hello2",
- "bot_hello1",
- "bot_hello2",
- ]
-
- for hello_id in hello_ids:
- textbox_drawer.write_text_box(
- hello_id,
- _("Hello, I'm a"),
- horiz_just="center"
- )
-
- for i in range(1, 3):
- textbox_drawer.write_text_box(
- "programmer{}".format(i),
- _("Programmer"),
- horiz_just="center"
- )
- textbox_drawer.write_text_box(
- "tester{}".format(i),
- _("Tester"),
- horiz_just="center"
- )
- textbox_drawer.write_text_box(
- "bot{}".format(i),
- _("Bot"),
- horiz_just="center"
- )
+ textbox_drawer = TextBoxDrawer(image, draw)
+
+ # coordinates of top left point of text box
+ top_left_x_coords = [50, 1200]
+ top_left_y_coord = 100
+
+ # Add text labels
+ for label, label_text in LABEL_DATA.items():
+ for top_left_x_coord in top_left_x_coords:
+ vertices = calculate_box_vertices(
+ (top_left_x_coord, top_left_y_coord),
+ TEXTBOX_WIDTH,
+ TEXTBOX_HEIGHT
+ )
+ box = TextBox(
+ vertices=vertices,
+ width=TEXTBOX_WIDTH,
+ height=TEXTBOX_HEIGHT,
+ font_path=FONT_PATH,
+ font_size=FONT_SIZE,
+ )
+ textbox_drawer.write_text_box(
+ box,
+ label_text,
+ horiz_just="center",
+ vert_just="center",
+ )
+
+ # increase y coord for next name tag down
+ top_left_y_coord += 675
return {"type": "image", "data": image}
| {"golden_diff": "diff --git a/csunplugged/resources/generators/JobBadgesResourceGenerator.py b/csunplugged/resources/generators/JobBadgesResourceGenerator.py\n--- a/csunplugged/resources/generators/JobBadgesResourceGenerator.py\n+++ b/csunplugged/resources/generators/JobBadgesResourceGenerator.py\n@@ -1,9 +1,20 @@\n \"\"\"Class for Job Badges resource generator.\"\"\"\n \n from PIL import Image, ImageDraw\n-from resources.utils.BaseResourceGenerator import BaseResourceGenerator\n+from utils.TextBoxDrawer import TextBox, TextBoxDrawer\n from django.utils.translation import ugettext_lazy as _\n-from utils.TextBoxDrawer import TextBoxDrawer\n+from resources.utils.BaseResourceGenerator import BaseResourceGenerator\n+from resources.utils.coords import calculate_box_vertices\n+\n+FONT_PATH = \"static/fonts/PatrickHand-Regular.ttf\"\n+FONT_SIZE = 200\n+TEXTBOX_WIDTH = 1000\n+TEXTBOX_HEIGHT = 275\n+LABEL_DATA = {\n+ \"programmer\": _(\"Hello, I'm a Programmer\"),\n+ \"tester\": _(\"Hello, I'm a Tester\"),\n+ \"bot\": _(\"Hello, I'm a Bot\"),\n+}\n \n \n class JobBadgesResourceGenerator(BaseResourceGenerator):\n@@ -15,45 +26,38 @@\n Returns:\n A dictionary of the one page for the resource.\n \"\"\"\n- path = \"static/img/resources/job-badges/job-badges\"\n- image_path = \"{}.png\".format(path)\n- svg_path = \"{}.svg\".format(path)\n+ image_path = \"static/img/resources/job-badges/job-badges.png\"\n image = Image.open(image_path)\n-\n draw = ImageDraw.Draw(image)\n- textbox_drawer = TextBoxDrawer(image, draw, svg_path)\n-\n- hello_ids = [\n- \"programmer_hello1\",\n- \"programmer_hello2\",\n- \"tester_hello1\",\n- \"tester_hello2\",\n- \"bot_hello1\",\n- \"bot_hello2\",\n- ]\n-\n- for hello_id in hello_ids:\n- textbox_drawer.write_text_box(\n- hello_id,\n- _(\"Hello, I'm a\"),\n- horiz_just=\"center\"\n- )\n-\n- for i in range(1, 3):\n- textbox_drawer.write_text_box(\n- \"programmer{}\".format(i),\n- _(\"Programmer\"),\n- horiz_just=\"center\"\n- )\n- textbox_drawer.write_text_box(\n- \"tester{}\".format(i),\n- _(\"Tester\"),\n- horiz_just=\"center\"\n- )\n- textbox_drawer.write_text_box(\n- \"bot{}\".format(i),\n- _(\"Bot\"),\n- horiz_just=\"center\"\n- )\n+ textbox_drawer = TextBoxDrawer(image, draw)\n+\n+ # coordinates of top left point of text box\n+ top_left_x_coords = [50, 1200]\n+ top_left_y_coord = 100\n+\n+ # Add text labels\n+ for label, label_text in LABEL_DATA.items():\n+ for top_left_x_coord in top_left_x_coords:\n+ vertices = calculate_box_vertices(\n+ (top_left_x_coord, top_left_y_coord),\n+ TEXTBOX_WIDTH,\n+ TEXTBOX_HEIGHT\n+ )\n+ box = TextBox(\n+ vertices=vertices,\n+ width=TEXTBOX_WIDTH,\n+ height=TEXTBOX_HEIGHT,\n+ font_path=FONT_PATH,\n+ font_size=FONT_SIZE,\n+ )\n+ textbox_drawer.write_text_box(\n+ box,\n+ label_text,\n+ horiz_just=\"center\",\n+ vert_just=\"center\",\n+ )\n+\n+ # increase y coord for next name tag down\n+ top_left_y_coord += 675\n \n return {\"type\": \"image\", \"data\": image}\n", "issue": "Modify Job Badges printable to use one string per badge\nCurrently the way the strings are broken for translation doesn't support all languages (for example: M\u0101ori has a word after the emphasis of the role).\n", "before_files": [{"content": "\"\"\"Class for Job Badges resource generator.\"\"\"\n\nfrom PIL import Image, ImageDraw\nfrom resources.utils.BaseResourceGenerator import BaseResourceGenerator\nfrom django.utils.translation import ugettext_lazy as _\nfrom utils.TextBoxDrawer import TextBoxDrawer\n\n\nclass JobBadgesResourceGenerator(BaseResourceGenerator):\n 
\"\"\"Class for Job Badges resource generator.\"\"\"\n\n def data(self):\n \"\"\"Create data for a copy of the Job Badges resource.\n\n Returns:\n A dictionary of the one page for the resource.\n \"\"\"\n path = \"static/img/resources/job-badges/job-badges\"\n image_path = \"{}.png\".format(path)\n svg_path = \"{}.svg\".format(path)\n image = Image.open(image_path)\n\n draw = ImageDraw.Draw(image)\n textbox_drawer = TextBoxDrawer(image, draw, svg_path)\n\n hello_ids = [\n \"programmer_hello1\",\n \"programmer_hello2\",\n \"tester_hello1\",\n \"tester_hello2\",\n \"bot_hello1\",\n \"bot_hello2\",\n ]\n\n for hello_id in hello_ids:\n textbox_drawer.write_text_box(\n hello_id,\n _(\"Hello, I'm a\"),\n horiz_just=\"center\"\n )\n\n for i in range(1, 3):\n textbox_drawer.write_text_box(\n \"programmer{}\".format(i),\n _(\"Programmer\"),\n horiz_just=\"center\"\n )\n textbox_drawer.write_text_box(\n \"tester{}\".format(i),\n _(\"Tester\"),\n horiz_just=\"center\"\n )\n textbox_drawer.write_text_box(\n \"bot{}\".format(i),\n _(\"Bot\"),\n horiz_just=\"center\"\n )\n\n return {\"type\": \"image\", \"data\": image}\n", "path": "csunplugged/resources/generators/JobBadgesResourceGenerator.py"}], "after_files": [{"content": "\"\"\"Class for Job Badges resource generator.\"\"\"\n\nfrom PIL import Image, ImageDraw\nfrom utils.TextBoxDrawer import TextBox, TextBoxDrawer\nfrom django.utils.translation import ugettext_lazy as _\nfrom resources.utils.BaseResourceGenerator import BaseResourceGenerator\nfrom resources.utils.coords import calculate_box_vertices\n\nFONT_PATH = \"static/fonts/PatrickHand-Regular.ttf\"\nFONT_SIZE = 200\nTEXTBOX_WIDTH = 1000\nTEXTBOX_HEIGHT = 275\nLABEL_DATA = {\n \"programmer\": _(\"Hello, I'm a Programmer\"),\n \"tester\": _(\"Hello, I'm a Tester\"),\n \"bot\": _(\"Hello, I'm a Bot\"),\n}\n\n\nclass JobBadgesResourceGenerator(BaseResourceGenerator):\n \"\"\"Class for Job Badges resource generator.\"\"\"\n\n def data(self):\n \"\"\"Create data for a copy of the Job Badges resource.\n\n Returns:\n A dictionary of the one page for the resource.\n \"\"\"\n image_path = \"static/img/resources/job-badges/job-badges.png\"\n image = Image.open(image_path)\n draw = ImageDraw.Draw(image)\n textbox_drawer = TextBoxDrawer(image, draw)\n\n # coordinates of top left point of text box\n top_left_x_coords = [50, 1200]\n top_left_y_coord = 100\n\n # Add text labels\n for label, label_text in LABEL_DATA.items():\n for top_left_x_coord in top_left_x_coords:\n vertices = calculate_box_vertices(\n (top_left_x_coord, top_left_y_coord),\n TEXTBOX_WIDTH,\n TEXTBOX_HEIGHT\n )\n box = TextBox(\n vertices=vertices,\n width=TEXTBOX_WIDTH,\n height=TEXTBOX_HEIGHT,\n font_path=FONT_PATH,\n font_size=FONT_SIZE,\n )\n textbox_drawer.write_text_box(\n box,\n label_text,\n horiz_just=\"center\",\n vert_just=\"center\",\n )\n\n # increase y coord for next name tag down\n top_left_y_coord += 675\n\n return {\"type\": \"image\", \"data\": image}\n", "path": "csunplugged/resources/generators/JobBadgesResourceGenerator.py"}]} | 793 | 843 |
gh_patches_debug_33522 | rasdani/github-patches | git_diff | getsentry__sentry-python-153 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
RQ: Job shutdown problems
RQ forks, so each job will have its own transport. This is not a great situation as we would have to block each job to send out pending events.
Right now this can mean that some events of a job go missing.
Idea would be to use `multiprocessing.Queue` inside of the transport, but unclear if we can pull that off without disrupting other usecases.
--- END ISSUE ---
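One concrete way to avoid losing events, sketched below: when the code is running inside the forked job process (RQ's "work horse"), close the client after the job finishes so the transport drains its queue before `os._exit` is called. This mirrors the patch further down; `old_perform_job` stands for the original `Worker.perform_job` captured during `setup_once`, so this is a fragment of the monkeypatch rather than standalone code:

```python
# Sketch of the patched perform_job with an explicit flush in the forked child.
def sentry_patched_perform_job(self, job, *args, **kwargs):
    hub = Hub.current
    rv = old_perform_job(self, job, *args, **kwargs)

    if self.is_horse:
        # We're inside the forked process and RQ is about to call os._exit.
        # Closing the client blocks until pending events are sent; it cannot
        # affect the parent worker or other jobs because this is a separate process.
        hub.client.close()

    return rv
```

This keeps the blocking cost confined to the child process at the end of each job, instead of changing the transport's behaviour for every other use case.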
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 """
4 Sentry-Python - Sentry SDK for Python
5 =====================================
6
7 **Sentry-Python is an SDK for Sentry.** Check out `GitHub
8 <https://github.com/getsentry/sentry-python>`_ to find out more.
9 """
10
11 from setuptools import setup, find_packages
12
13 setup(
14 name="sentry-sdk",
15 version="0.5.1",
16 author="Sentry Team and Contributors",
17 author_email="[email protected]",
18 url="https://github.com/getsentry/sentry-python",
19 description="Python client for Sentry (https://getsentry.com)",
20 long_description=__doc__,
21 packages=find_packages(exclude=("tests", "tests.*")),
22 zip_safe=False,
23 license="BSD",
24 install_requires=["urllib3", "certifi"],
25 extras_require={"flask": ["flask>=0.8", "blinker>=1.1"]},
26 classifiers=[
27 'Development Status :: 5 - Production/Stable',
28 'Environment :: Web Environment',
29 'Intended Audience :: Developers',
30 'License :: OSI Approved :: BSD License',
31 'Operating System :: OS Independent',
32 'Programming Language :: Python',
33 'Programming Language :: Python :: 2',
34 'Programming Language :: Python :: 2.7',
35 'Programming Language :: Python :: 3',
36 'Programming Language :: Python :: 3.4',
37 'Programming Language :: Python :: 3.5',
38 'Programming Language :: Python :: 3.6',
39 'Programming Language :: Python :: 3.7',
40 'Topic :: Software Development :: Libraries :: Python Modules',
41 ],
42 )
43
```
Path: `sentry_sdk/integrations/rq.py`
Content:
```
1 from __future__ import absolute_import
2
3 import weakref
4
5 from sentry_sdk.hub import Hub
6 from sentry_sdk.integrations import Integration
7 from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
8
9 from rq.timeouts import JobTimeoutException
10 from rq.worker import Worker
11
12
13 class RqIntegration(Integration):
14 identifier = "rq"
15
16 @staticmethod
17 def setup_once():
18
19 old_perform_job = Worker.perform_job
20
21 def sentry_patched_perform_job(self, job, *args, **kwargs):
22 hub = Hub.current
23 integration = hub.get_integration(RqIntegration)
24
25 if integration is None:
26 return old_perform_job(self, job, *args, **kwargs)
27
28 with hub.push_scope() as scope:
29 scope.add_event_processor(_make_event_processor(weakref.ref(job)))
30 return old_perform_job(self, job, *args, **kwargs)
31
32 Worker.perform_job = sentry_patched_perform_job
33
34 old_handle_exception = Worker.handle_exception
35
36 def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):
37 _capture_exception(exc_info)
38 return old_handle_exception(self, job, *exc_info, **kwargs)
39
40 Worker.handle_exception = sentry_patched_handle_exception
41
42
43 def _make_event_processor(weak_job):
44 def event_processor(event, hint):
45 job = weak_job()
46 if job is not None:
47 with capture_internal_exceptions():
48 if "transaction" not in event:
49 event["transaction"] = job.func_name
50
51 with capture_internal_exceptions():
52 extra = event.setdefault("extra", {})
53 extra["rq-job"] = {
54 "job_id": job.id,
55 "func": job.func_name,
56 "args": job.args,
57 "kwargs": job.kwargs,
58 "description": job.description,
59 }
60
61 if "exc_info" in hint:
62 with capture_internal_exceptions():
63 if issubclass(hint["exc_info"][0], JobTimeoutException):
64 event["fingerprint"] = ["rq", "JobTimeoutException", job.func_name]
65
66 return event
67
68 return event_processor
69
70
71 def _capture_exception(exc_info, **kwargs):
72 hub = Hub.current
73 if hub.get_integration(RqIntegration) is None:
74 return
75 event, hint = event_from_exception(
76 exc_info,
77 client_options=hub.client.options,
78 mechanism={"type": "rq", "handled": False},
79 )
80
81 hub.capture_event(event, hint=hint)
82
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py
--- a/sentry_sdk/integrations/rq.py
+++ b/sentry_sdk/integrations/rq.py
@@ -27,7 +27,18 @@
with hub.push_scope() as scope:
scope.add_event_processor(_make_event_processor(weakref.ref(job)))
- return old_perform_job(self, job, *args, **kwargs)
+ rv = old_perform_job(self, job, *args, **kwargs)
+
+ if self.is_horse:
+ # We're inside of a forked process and RQ is
+ # about to call `os._exit`. Make sure that our
+ # events get sent out.
+ #
+ # Closing the client should not affect other jobs since
+ # we're in a different process
+ hub.client.close()
+
+ return rv
Worker.perform_job = sentry_patched_perform_job
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -24,19 +24,19 @@
install_requires=["urllib3", "certifi"],
extras_require={"flask": ["flask>=0.8", "blinker>=1.1"]},
classifiers=[
- 'Development Status :: 5 - Production/Stable',
- 'Environment :: Web Environment',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: BSD License',
- 'Operating System :: OS Independent',
- 'Programming Language :: Python',
- 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.7',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.4',
- 'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6',
- 'Programming Language :: Python :: 3.7',
- 'Topic :: Software Development :: Libraries :: Python Modules',
+ "Development Status :: 5 - Production/Stable",
+ "Environment :: Web Environment",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: BSD License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Topic :: Software Development :: Libraries :: Python Modules",
],
)
| {"golden_diff": "diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py\n--- a/sentry_sdk/integrations/rq.py\n+++ b/sentry_sdk/integrations/rq.py\n@@ -27,7 +27,18 @@\n \n with hub.push_scope() as scope:\n scope.add_event_processor(_make_event_processor(weakref.ref(job)))\n- return old_perform_job(self, job, *args, **kwargs)\n+ rv = old_perform_job(self, job, *args, **kwargs)\n+\n+ if self.is_horse:\n+ # We're inside of a forked process and RQ is\n+ # about to call `os._exit`. Make sure that our\n+ # events get sent out.\n+ #\n+ # Closing the client should not affect other jobs since\n+ # we're in a different process\n+ hub.client.close()\n+\n+ return rv\n \n Worker.perform_job = sentry_patched_perform_job\n \ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -24,19 +24,19 @@\n install_requires=[\"urllib3\", \"certifi\"],\n extras_require={\"flask\": [\"flask>=0.8\", \"blinker>=1.1\"]},\n classifiers=[\n- 'Development Status :: 5 - Production/Stable',\n- 'Environment :: Web Environment',\n- 'Intended Audience :: Developers',\n- 'License :: OSI Approved :: BSD License',\n- 'Operating System :: OS Independent',\n- 'Programming Language :: Python',\n- 'Programming Language :: Python :: 2',\n- 'Programming Language :: Python :: 2.7',\n- 'Programming Language :: Python :: 3',\n- 'Programming Language :: Python :: 3.4',\n- 'Programming Language :: Python :: 3.5',\n- 'Programming Language :: Python :: 3.6',\n- 'Programming Language :: Python :: 3.7',\n- 'Topic :: Software Development :: Libraries :: Python Modules',\n+ \"Development Status :: 5 - Production/Stable\",\n+ \"Environment :: Web Environment\",\n+ \"Intended Audience :: Developers\",\n+ \"License :: OSI Approved :: BSD License\",\n+ \"Operating System :: OS Independent\",\n+ \"Programming Language :: Python\",\n+ \"Programming Language :: Python :: 2\",\n+ \"Programming Language :: Python :: 2.7\",\n+ \"Programming Language :: Python :: 3\",\n+ \"Programming Language :: Python :: 3.4\",\n+ \"Programming Language :: Python :: 3.5\",\n+ \"Programming Language :: Python :: 3.6\",\n+ \"Programming Language :: Python :: 3.7\",\n+ \"Topic :: Software Development :: Libraries :: Python Modules\",\n ],\n )\n", "issue": "RQ: Job shutdown problems\nRQ forks, so each job will have its own transport. 
This is not a great situation as we would have to block each job to send out pending events.\r\n\r\nRight now this can mean that some events of a job go missing.\r\n\r\nIdea would be to use `multiprocessing.Queue` inside of the transport, but unclear if we can pull that off without disrupting other usecases.\n", "before_files": [{"content": "#!/usr/bin/env python\n\n\"\"\"\nSentry-Python - Sentry SDK for Python\n=====================================\n\n**Sentry-Python is an SDK for Sentry.** Check out `GitHub\n<https://github.com/getsentry/sentry-python>`_ to find out more.\n\"\"\"\n\nfrom setuptools import setup, find_packages\n\nsetup(\n name=\"sentry-sdk\",\n version=\"0.5.1\",\n author=\"Sentry Team and Contributors\",\n author_email=\"[email protected]\",\n url=\"https://github.com/getsentry/sentry-python\",\n description=\"Python client for Sentry (https://getsentry.com)\",\n long_description=__doc__,\n packages=find_packages(exclude=(\"tests\", \"tests.*\")),\n zip_safe=False,\n license=\"BSD\",\n install_requires=[\"urllib3\", \"certifi\"],\n extras_require={\"flask\": [\"flask>=0.8\", \"blinker>=1.1\"]},\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n)\n", "path": "setup.py"}, {"content": "from __future__ import absolute_import\n\nimport weakref\n\nfrom sentry_sdk.hub import Hub\nfrom sentry_sdk.integrations import Integration\nfrom sentry_sdk.utils import capture_internal_exceptions, event_from_exception\n\nfrom rq.timeouts import JobTimeoutException\nfrom rq.worker import Worker\n\n\nclass RqIntegration(Integration):\n identifier = \"rq\"\n\n @staticmethod\n def setup_once():\n\n old_perform_job = Worker.perform_job\n\n def sentry_patched_perform_job(self, job, *args, **kwargs):\n hub = Hub.current\n integration = hub.get_integration(RqIntegration)\n\n if integration is None:\n return old_perform_job(self, job, *args, **kwargs)\n\n with hub.push_scope() as scope:\n scope.add_event_processor(_make_event_processor(weakref.ref(job)))\n return old_perform_job(self, job, *args, **kwargs)\n\n Worker.perform_job = sentry_patched_perform_job\n\n old_handle_exception = Worker.handle_exception\n\n def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):\n _capture_exception(exc_info)\n return old_handle_exception(self, job, *exc_info, **kwargs)\n\n Worker.handle_exception = sentry_patched_handle_exception\n\n\ndef _make_event_processor(weak_job):\n def event_processor(event, hint):\n job = weak_job()\n if job is not None:\n with capture_internal_exceptions():\n if \"transaction\" not in event:\n event[\"transaction\"] = job.func_name\n\n with capture_internal_exceptions():\n extra = event.setdefault(\"extra\", {})\n extra[\"rq-job\"] = {\n \"job_id\": job.id,\n \"func\": job.func_name,\n \"args\": job.args,\n \"kwargs\": job.kwargs,\n \"description\": job.description,\n }\n\n if \"exc_info\" in hint:\n with capture_internal_exceptions():\n if issubclass(hint[\"exc_info\"][0], JobTimeoutException):\n 
event[\"fingerprint\"] = [\"rq\", \"JobTimeoutException\", job.func_name]\n\n return event\n\n return event_processor\n\n\ndef _capture_exception(exc_info, **kwargs):\n hub = Hub.current\n if hub.get_integration(RqIntegration) is None:\n return\n event, hint = event_from_exception(\n exc_info,\n client_options=hub.client.options,\n mechanism={\"type\": \"rq\", \"handled\": False},\n )\n\n hub.capture_event(event, hint=hint)\n", "path": "sentry_sdk/integrations/rq.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\n\"\"\"\nSentry-Python - Sentry SDK for Python\n=====================================\n\n**Sentry-Python is an SDK for Sentry.** Check out `GitHub\n<https://github.com/getsentry/sentry-python>`_ to find out more.\n\"\"\"\n\nfrom setuptools import setup, find_packages\n\nsetup(\n name=\"sentry-sdk\",\n version=\"0.5.1\",\n author=\"Sentry Team and Contributors\",\n author_email=\"[email protected]\",\n url=\"https://github.com/getsentry/sentry-python\",\n description=\"Python client for Sentry (https://getsentry.com)\",\n long_description=__doc__,\n packages=find_packages(exclude=(\"tests\", \"tests.*\")),\n zip_safe=False,\n license=\"BSD\",\n install_requires=[\"urllib3\", \"certifi\"],\n extras_require={\"flask\": [\"flask>=0.8\", \"blinker>=1.1\"]},\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Web Environment\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n ],\n)\n", "path": "setup.py"}, {"content": "from __future__ import absolute_import\n\nimport weakref\n\nfrom sentry_sdk.hub import Hub\nfrom sentry_sdk.integrations import Integration\nfrom sentry_sdk.utils import capture_internal_exceptions, event_from_exception\n\nfrom rq.timeouts import JobTimeoutException\nfrom rq.worker import Worker\n\n\nclass RqIntegration(Integration):\n identifier = \"rq\"\n\n @staticmethod\n def setup_once():\n\n old_perform_job = Worker.perform_job\n\n def sentry_patched_perform_job(self, job, *args, **kwargs):\n hub = Hub.current\n integration = hub.get_integration(RqIntegration)\n\n if integration is None:\n return old_perform_job(self, job, *args, **kwargs)\n\n with hub.push_scope() as scope:\n scope.add_event_processor(_make_event_processor(weakref.ref(job)))\n rv = old_perform_job(self, job, *args, **kwargs)\n\n if self.is_horse:\n # We're inside of a forked process and RQ is\n # about to call `os._exit`. 
Make sure that our\n # events get sent out.\n #\n # Closing the client should not affect other jobs since\n # we're in a different process\n hub.client.close()\n\n return rv\n\n Worker.perform_job = sentry_patched_perform_job\n\n old_handle_exception = Worker.handle_exception\n\n def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):\n _capture_exception(exc_info)\n return old_handle_exception(self, job, *exc_info, **kwargs)\n\n Worker.handle_exception = sentry_patched_handle_exception\n\n\ndef _make_event_processor(weak_job):\n def event_processor(event, hint):\n job = weak_job()\n if job is not None:\n with capture_internal_exceptions():\n if \"transaction\" not in event:\n event[\"transaction\"] = job.func_name\n\n with capture_internal_exceptions():\n extra = event.setdefault(\"extra\", {})\n extra[\"rq-job\"] = {\n \"job_id\": job.id,\n \"func\": job.func_name,\n \"args\": job.args,\n \"kwargs\": job.kwargs,\n \"description\": job.description,\n }\n\n if \"exc_info\" in hint:\n with capture_internal_exceptions():\n if issubclass(hint[\"exc_info\"][0], JobTimeoutException):\n event[\"fingerprint\"] = [\"rq\", \"JobTimeoutException\", job.func_name]\n\n return event\n\n return event_processor\n\n\ndef _capture_exception(exc_info, **kwargs):\n hub = Hub.current\n if hub.get_integration(RqIntegration) is None:\n return\n event, hint = event_from_exception(\n exc_info,\n client_options=hub.client.options,\n mechanism={\"type\": \"rq\", \"handled\": False},\n )\n\n hub.capture_event(event, hint=hint)\n", "path": "sentry_sdk/integrations/rq.py"}]} | 1,480 | 613 |
gh_patches_debug_50239 | rasdani/github-patches | git_diff | sopel-irc__sopel-1419 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
User objects aren't updated on NICK events
````
<Reptarsaurus> Okay, I just tested....I am doing users = chans[chan].users.values() after
a nick change, and 'for user in users: print(user.nick)' is showing the
nick pre-change.
````
This report from Sopel's IRC channel told me something was off in the code that keeps track of users, and indeed something is. Nowhere in the `coretasks` handler for `NICK` events does Sopel update the user object's `nick` property:
https://github.com/sopel-irc/sopel/blob/d4c4c16f850b63776b156fc8c9d3812287b15882/sopel/coretasks.py#L275-L284
`Channel.rename_user()` doesn't touch it either:
https://github.com/sopel-irc/sopel/blob/d4c4c16f850b63776b156fc8c9d3812287b15882/sopel/tools/target.py#L76-L80
I can't find any place in the code where this property gets updated, ever, and that's a bug. Thanks to Reptarsaurus for stumbling onto this, and it should be fixed in the next Sopel version.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sopel/tools/target.py`
Content:
```
1 # coding=utf-8
2 from __future__ import unicode_literals, absolute_import, print_function, division
3
4 import functools
5 from sopel.tools import Identifier
6
7
8 @functools.total_ordering
9 class User(object):
10 """A representation of a user Sopel is aware of."""
11 def __init__(self, nick, user, host):
12 assert isinstance(nick, Identifier)
13 self.nick = nick
14 """The user's nickname."""
15 self.user = user
16 """The user's local username."""
17 self.host = host
18 """The user's hostname."""
19 self.channels = {}
20 """The channels the user is in.
21
22 This maps channel name ``Identifier``\\s to ``Channel`` objects."""
23 self.account = None
24 """The IRC services account of the user.
25
26 This relies on IRCv3 account tracking being enabled."""
27 self.away = None
28 """Whether the user is marked as away."""
29
30 hostmask = property(lambda self: '{}!{}@{}'.format(self.nick, self.user,
31 self.host))
32 """The user's full hostmask."""
33
34 def __eq__(self, other):
35 if not isinstance(other, User):
36 return NotImplemented
37 return self.nick == other.nick
38
39 def __lt__(self, other):
40 if not isinstance(other, User):
41 return NotImplemented
42 return self.nick < other.nick
43
44
45 @functools.total_ordering
46 class Channel(object):
47 """A representation of a channel Sopel is in."""
48 def __init__(self, name):
49 assert isinstance(name, Identifier)
50 self.name = name
51 """The name of the channel."""
52 self.users = {}
53 """The users in the channel.
54
55 This maps username ``Identifier``\\s to channel objects."""
56 self.privileges = {}
57 """The permissions of the users in the channel.
58
59 This maps username ``Identifier``s to bitwise integer values. This can
60 be compared to appropriate constants from ``sopel.module``."""
61 self.topic = ''
62 """The topic of the channel."""
63
64 def clear_user(self, nick):
65 user = self.users.pop(nick, None)
66 self.privileges.pop(nick, None)
67 if user is not None:
68 user.channels.pop(self.name, None)
69
70 def add_user(self, user):
71 assert isinstance(user, User)
72 self.users[user.nick] = user
73 self.privileges[user.nick] = 0
74 user.channels[self.name] = self
75
76 def rename_user(self, old, new):
77 if old in self.users:
78 self.users[new] = self.users.pop(old)
79 if old in self.privileges:
80 self.privileges[new] = self.privileges.pop(old)
81
82 def __eq__(self, other):
83 if not isinstance(other, Channel):
84 return NotImplemented
85 return self.name == other.name
86
87 def __lt__(self, other):
88 if not isinstance(other, Channel):
89 return NotImplemented
90 return self.name < other.name
91
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sopel/tools/target.py b/sopel/tools/target.py
--- a/sopel/tools/target.py
+++ b/sopel/tools/target.py
@@ -76,6 +76,7 @@
def rename_user(self, old, new):
if old in self.users:
self.users[new] = self.users.pop(old)
+ self.users[new].nick = new
if old in self.privileges:
self.privileges[new] = self.privileges.pop(old)
| {"golden_diff": "diff --git a/sopel/tools/target.py b/sopel/tools/target.py\n--- a/sopel/tools/target.py\n+++ b/sopel/tools/target.py\n@@ -76,6 +76,7 @@\n def rename_user(self, old, new):\n if old in self.users:\n self.users[new] = self.users.pop(old)\n+ self.users[new].nick = new\n if old in self.privileges:\n self.privileges[new] = self.privileges.pop(old)\n", "issue": "User objects aren't updated on NICK events\n````\r\n<Reptarsaurus> Okay, I just tested....I am doing users = chans[chan].users.values() after\r\n a nick change, and 'for user in users: print(user.nick)' is showing the\r\n nick pre-change.\r\n````\r\n\r\nThis report from Sopel's IRC channel told me something was off in the code that keeps track of users, and indeed something is. Nowhere in the `coretasks` handler for `NICK` events does Sopel update the user object's `nick` property:\r\n\r\nhttps://github.com/sopel-irc/sopel/blob/d4c4c16f850b63776b156fc8c9d3812287b15882/sopel/coretasks.py#L275-L284\r\n\r\n`Channel.rename_user()` doesn't touch it either:\r\n\r\nhttps://github.com/sopel-irc/sopel/blob/d4c4c16f850b63776b156fc8c9d3812287b15882/sopel/tools/target.py#L76-L80\r\n\r\nI can't find any place in the code where this property gets updated, ever, and that's a bug. Thanks to Reptarsaurus for stumbling onto this, and it should be fixed in the next Sopel version.\n", "before_files": [{"content": "# coding=utf-8\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nimport functools\nfrom sopel.tools import Identifier\n\n\[email protected]_ordering\nclass User(object):\n \"\"\"A representation of a user Sopel is aware of.\"\"\"\n def __init__(self, nick, user, host):\n assert isinstance(nick, Identifier)\n self.nick = nick\n \"\"\"The user's nickname.\"\"\"\n self.user = user\n \"\"\"The user's local username.\"\"\"\n self.host = host\n \"\"\"The user's hostname.\"\"\"\n self.channels = {}\n \"\"\"The channels the user is in.\n\n This maps channel name ``Identifier``\\\\s to ``Channel`` objects.\"\"\"\n self.account = None\n \"\"\"The IRC services account of the user.\n\n This relies on IRCv3 account tracking being enabled.\"\"\"\n self.away = None\n \"\"\"Whether the user is marked as away.\"\"\"\n\n hostmask = property(lambda self: '{}!{}@{}'.format(self.nick, self.user,\n self.host))\n \"\"\"The user's full hostmask.\"\"\"\n\n def __eq__(self, other):\n if not isinstance(other, User):\n return NotImplemented\n return self.nick == other.nick\n\n def __lt__(self, other):\n if not isinstance(other, User):\n return NotImplemented\n return self.nick < other.nick\n\n\[email protected]_ordering\nclass Channel(object):\n \"\"\"A representation of a channel Sopel is in.\"\"\"\n def __init__(self, name):\n assert isinstance(name, Identifier)\n self.name = name\n \"\"\"The name of the channel.\"\"\"\n self.users = {}\n \"\"\"The users in the channel.\n\n This maps username ``Identifier``\\\\s to channel objects.\"\"\"\n self.privileges = {}\n \"\"\"The permissions of the users in the channel.\n\n This maps username ``Identifier``s to bitwise integer values. 
This can\n be compared to appropriate constants from ``sopel.module``.\"\"\"\n self.topic = ''\n \"\"\"The topic of the channel.\"\"\"\n\n def clear_user(self, nick):\n user = self.users.pop(nick, None)\n self.privileges.pop(nick, None)\n if user is not None:\n user.channels.pop(self.name, None)\n\n def add_user(self, user):\n assert isinstance(user, User)\n self.users[user.nick] = user\n self.privileges[user.nick] = 0\n user.channels[self.name] = self\n\n def rename_user(self, old, new):\n if old in self.users:\n self.users[new] = self.users.pop(old)\n if old in self.privileges:\n self.privileges[new] = self.privileges.pop(old)\n\n def __eq__(self, other):\n if not isinstance(other, Channel):\n return NotImplemented\n return self.name == other.name\n\n def __lt__(self, other):\n if not isinstance(other, Channel):\n return NotImplemented\n return self.name < other.name\n", "path": "sopel/tools/target.py"}], "after_files": [{"content": "# coding=utf-8\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nimport functools\nfrom sopel.tools import Identifier\n\n\[email protected]_ordering\nclass User(object):\n \"\"\"A representation of a user Sopel is aware of.\"\"\"\n def __init__(self, nick, user, host):\n assert isinstance(nick, Identifier)\n self.nick = nick\n \"\"\"The user's nickname.\"\"\"\n self.user = user\n \"\"\"The user's local username.\"\"\"\n self.host = host\n \"\"\"The user's hostname.\"\"\"\n self.channels = {}\n \"\"\"The channels the user is in.\n\n This maps channel name ``Identifier``\\\\s to ``Channel`` objects.\"\"\"\n self.account = None\n \"\"\"The IRC services account of the user.\n\n This relies on IRCv3 account tracking being enabled.\"\"\"\n self.away = None\n \"\"\"Whether the user is marked as away.\"\"\"\n\n hostmask = property(lambda self: '{}!{}@{}'.format(self.nick, self.user,\n self.host))\n \"\"\"The user's full hostmask.\"\"\"\n\n def __eq__(self, other):\n if not isinstance(other, User):\n return NotImplemented\n return self.nick == other.nick\n\n def __lt__(self, other):\n if not isinstance(other, User):\n return NotImplemented\n return self.nick < other.nick\n\n\[email protected]_ordering\nclass Channel(object):\n \"\"\"A representation of a channel Sopel is in.\"\"\"\n def __init__(self, name):\n assert isinstance(name, Identifier)\n self.name = name\n \"\"\"The name of the channel.\"\"\"\n self.users = {}\n \"\"\"The users in the channel.\n\n This maps username ``Identifier``\\\\s to channel objects.\"\"\"\n self.privileges = {}\n \"\"\"The permissions of the users in the channel.\n\n This maps username ``Identifier``s to bitwise integer values. 
This can\n be compared to appropriate constants from ``sopel.module``.\"\"\"\n self.topic = ''\n \"\"\"The topic of the channel.\"\"\"\n\n def clear_user(self, nick):\n user = self.users.pop(nick, None)\n self.privileges.pop(nick, None)\n if user is not None:\n user.channels.pop(self.name, None)\n\n def add_user(self, user):\n assert isinstance(user, User)\n self.users[user.nick] = user\n self.privileges[user.nick] = 0\n user.channels[self.name] = self\n\n def rename_user(self, old, new):\n if old in self.users:\n self.users[new] = self.users.pop(old)\n self.users[new].nick = new\n if old in self.privileges:\n self.privileges[new] = self.privileges.pop(old)\n\n def __eq__(self, other):\n if not isinstance(other, Channel):\n return NotImplemented\n return self.name == other.name\n\n def __lt__(self, other):\n if not isinstance(other, Channel):\n return NotImplemented\n return self.name < other.name\n", "path": "sopel/tools/target.py"}]} | 1,389 | 107 |
gh_patches_debug_18934 | rasdani/github-patches | git_diff | readthedocs__readthedocs.org-8781 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Use `django-storages`'s manifest files class
`django-storages` already implemented the manifest files class (see https://github.com/jschneier/django-storages/issues/733#issuecomment-989005340) so we can use it directly and remove our override for this in:
https://github.com/readthedocs/readthedocs.org/blob/933c316f5228423f9f871b3da858a01491f79925/readthedocs/storage/s3_storage.py#L55-L61
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `readthedocs/storage/s3_storage.py`
Content:
```
1 """
2 AWS S3 Storage backends.
3
4 We override the backends provided by django-storages to add some small pieces
5 that we need to make our project to work as we want. For example, using
6 ManifestFilesMixin for static files and OverrideHostnameMixin to make it work
7 in our Docker Development environment.
8 """
9
10 # Disable abstract method because we are not overriding all the methods
11 # pylint: disable=abstract-method
12 from django.conf import settings
13 from django.contrib.staticfiles.storage import ManifestFilesMixin
14 from django.core.exceptions import ImproperlyConfigured
15 from storages.backends.s3boto3 import S3Boto3Storage
16
17 from readthedocs.builds.storage import BuildMediaStorageMixin
18
19 from .mixins import OverrideHostnameMixin, S3PrivateBucketMixin
20
21
22 class S3BuildMediaStorage(BuildMediaStorageMixin, OverrideHostnameMixin, S3Boto3Storage):
23
24 """An AWS S3 Storage backend for build artifacts."""
25
26 bucket_name = getattr(settings, 'S3_MEDIA_STORAGE_BUCKET', None)
27 override_hostname = getattr(settings, 'S3_MEDIA_STORAGE_OVERRIDE_HOSTNAME', None)
28
29 def __init__(self, *args, **kwargs):
30 super().__init__(*args, **kwargs)
31
32 if not self.bucket_name:
33 raise ImproperlyConfigured(
34 'AWS S3 not configured correctly. '
35 'Ensure S3_MEDIA_STORAGE_BUCKET is defined.',
36 )
37
38
39 class S3BuildCommandsStorage(S3PrivateBucketMixin, S3Boto3Storage):
40
41 """An AWS S3 Storage backend for build commands."""
42
43 bucket_name = getattr(settings, 'S3_BUILD_COMMANDS_STORAGE_BUCKET', None)
44
45 def __init__(self, *args, **kwargs):
46 super().__init__(*args, **kwargs)
47
48 if not self.bucket_name:
49 raise ImproperlyConfigured(
50 'AWS S3 not configured correctly. '
51 'Ensure S3_BUILD_COMMANDS_STORAGE_BUCKET is defined.',
52 )
53
54
55 class S3StaticStorage(OverrideHostnameMixin, ManifestFilesMixin, S3Boto3Storage):
56
57 """
58 An AWS S3 Storage backend for static media.
59
60 * Uses Django's ManifestFilesMixin to have unique file paths (eg. core.a6f5e2c.css)
61 """
62
63 bucket_name = getattr(settings, 'S3_STATIC_STORAGE_BUCKET', None)
64 override_hostname = getattr(settings, 'S3_STATIC_STORAGE_OVERRIDE_HOSTNAME', None)
65
66 def __init__(self, *args, **kwargs):
67 super().__init__(*args, **kwargs)
68
69 if not self.bucket_name:
70 raise ImproperlyConfigured(
71 'AWS S3 not configured correctly. '
72 'Ensure S3_STATIC_STORAGE_BUCKET is defined.',
73 )
74
75 self.bucket_acl = 'public-read'
76 self.default_acl = 'public-read'
77 self.querystring_auth = False
78
79
80 class S3BuildEnvironmentStorage(S3PrivateBucketMixin, BuildMediaStorageMixin, S3Boto3Storage):
81
82 bucket_name = getattr(settings, 'S3_BUILD_ENVIRONMENT_STORAGE_BUCKET', None)
83
84 def __init__(self, *args, **kwargs):
85 super().__init__(*args, **kwargs)
86
87 if not self.bucket_name:
88 raise ImproperlyConfigured(
89 'AWS S3 not configured correctly. '
90 'Ensure S3_BUILD_ENVIRONMENT_STORAGE_BUCKET is defined.',
91 )
92
93
94 class S3BuildToolsStorage(S3PrivateBucketMixin, BuildMediaStorageMixin, S3Boto3Storage):
95
96 bucket_name = getattr(settings, 'S3_BUILD_TOOLS_STORAGE_BUCKET', None)
97
98 def __init__(self, *args, **kwargs):
99 super().__init__(*args, **kwargs)
100
101 if not self.bucket_name:
102 raise ImproperlyConfigured(
103 'AWS S3 not configured correctly. '
104 'Ensure S3_BUILD_TOOLS_STORAGE_BUCKET is defined.',
105 )
106
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/readthedocs/storage/s3_storage.py b/readthedocs/storage/s3_storage.py
--- a/readthedocs/storage/s3_storage.py
+++ b/readthedocs/storage/s3_storage.py
@@ -10,9 +10,8 @@
# Disable abstract method because we are not overriding all the methods
# pylint: disable=abstract-method
from django.conf import settings
-from django.contrib.staticfiles.storage import ManifestFilesMixin
from django.core.exceptions import ImproperlyConfigured
-from storages.backends.s3boto3 import S3Boto3Storage
+from storages.backends.s3boto3 import S3Boto3Storage, S3ManifestStaticStorage
from readthedocs.builds.storage import BuildMediaStorageMixin
@@ -52,7 +51,11 @@
)
-class S3StaticStorage(OverrideHostnameMixin, ManifestFilesMixin, S3Boto3Storage):
+class S3StaticStorage(
+ OverrideHostnameMixin,
+ S3ManifestStaticStorage,
+ S3Boto3Storage
+): # pylint: disable=too-many-ancestors
"""
An AWS S3 Storage backend for static media.
| {"golden_diff": "diff --git a/readthedocs/storage/s3_storage.py b/readthedocs/storage/s3_storage.py\n--- a/readthedocs/storage/s3_storage.py\n+++ b/readthedocs/storage/s3_storage.py\n@@ -10,9 +10,8 @@\n # Disable abstract method because we are not overriding all the methods\n # pylint: disable=abstract-method\n from django.conf import settings\n-from django.contrib.staticfiles.storage import ManifestFilesMixin\n from django.core.exceptions import ImproperlyConfigured\n-from storages.backends.s3boto3 import S3Boto3Storage\n+from storages.backends.s3boto3 import S3Boto3Storage, S3ManifestStaticStorage\n \n from readthedocs.builds.storage import BuildMediaStorageMixin\n \n@@ -52,7 +51,11 @@\n )\n \n \n-class S3StaticStorage(OverrideHostnameMixin, ManifestFilesMixin, S3Boto3Storage):\n+class S3StaticStorage(\n+ OverrideHostnameMixin,\n+ S3ManifestStaticStorage,\n+ S3Boto3Storage\n+): # pylint: disable=too-many-ancestors\n \n \"\"\"\n An AWS S3 Storage backend for static media.\n", "issue": "Use `django-storages`'s manifest files class\n`django-storages` already implemented the manifest files class (see https://github.com/jschneier/django-storages/issues/733#issuecomment-989005340) so we can use it directly and remove our override for this in:\r\n\r\nhttps://github.com/readthedocs/readthedocs.org/blob/933c316f5228423f9f871b3da858a01491f79925/readthedocs/storage/s3_storage.py#L55-L61\n", "before_files": [{"content": "\"\"\"\nAWS S3 Storage backends.\n\nWe override the backends provided by django-storages to add some small pieces\nthat we need to make our project to work as we want. For example, using\nManifestFilesMixin for static files and OverrideHostnameMixin to make it work\nin our Docker Development environment.\n\"\"\"\n\n# Disable abstract method because we are not overriding all the methods\n# pylint: disable=abstract-method\nfrom django.conf import settings\nfrom django.contrib.staticfiles.storage import ManifestFilesMixin\nfrom django.core.exceptions import ImproperlyConfigured\nfrom storages.backends.s3boto3 import S3Boto3Storage\n\nfrom readthedocs.builds.storage import BuildMediaStorageMixin\n\nfrom .mixins import OverrideHostnameMixin, S3PrivateBucketMixin\n\n\nclass S3BuildMediaStorage(BuildMediaStorageMixin, OverrideHostnameMixin, S3Boto3Storage):\n\n \"\"\"An AWS S3 Storage backend for build artifacts.\"\"\"\n\n bucket_name = getattr(settings, 'S3_MEDIA_STORAGE_BUCKET', None)\n override_hostname = getattr(settings, 'S3_MEDIA_STORAGE_OVERRIDE_HOSTNAME', None)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n if not self.bucket_name:\n raise ImproperlyConfigured(\n 'AWS S3 not configured correctly. '\n 'Ensure S3_MEDIA_STORAGE_BUCKET is defined.',\n )\n\n\nclass S3BuildCommandsStorage(S3PrivateBucketMixin, S3Boto3Storage):\n\n \"\"\"An AWS S3 Storage backend for build commands.\"\"\"\n\n bucket_name = getattr(settings, 'S3_BUILD_COMMANDS_STORAGE_BUCKET', None)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n if not self.bucket_name:\n raise ImproperlyConfigured(\n 'AWS S3 not configured correctly. '\n 'Ensure S3_BUILD_COMMANDS_STORAGE_BUCKET is defined.',\n )\n\n\nclass S3StaticStorage(OverrideHostnameMixin, ManifestFilesMixin, S3Boto3Storage):\n\n \"\"\"\n An AWS S3 Storage backend for static media.\n\n * Uses Django's ManifestFilesMixin to have unique file paths (eg. 
core.a6f5e2c.css)\n \"\"\"\n\n bucket_name = getattr(settings, 'S3_STATIC_STORAGE_BUCKET', None)\n override_hostname = getattr(settings, 'S3_STATIC_STORAGE_OVERRIDE_HOSTNAME', None)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n if not self.bucket_name:\n raise ImproperlyConfigured(\n 'AWS S3 not configured correctly. '\n 'Ensure S3_STATIC_STORAGE_BUCKET is defined.',\n )\n\n self.bucket_acl = 'public-read'\n self.default_acl = 'public-read'\n self.querystring_auth = False\n\n\nclass S3BuildEnvironmentStorage(S3PrivateBucketMixin, BuildMediaStorageMixin, S3Boto3Storage):\n\n bucket_name = getattr(settings, 'S3_BUILD_ENVIRONMENT_STORAGE_BUCKET', None)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n if not self.bucket_name:\n raise ImproperlyConfigured(\n 'AWS S3 not configured correctly. '\n 'Ensure S3_BUILD_ENVIRONMENT_STORAGE_BUCKET is defined.',\n )\n\n\nclass S3BuildToolsStorage(S3PrivateBucketMixin, BuildMediaStorageMixin, S3Boto3Storage):\n\n bucket_name = getattr(settings, 'S3_BUILD_TOOLS_STORAGE_BUCKET', None)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n if not self.bucket_name:\n raise ImproperlyConfigured(\n 'AWS S3 not configured correctly. '\n 'Ensure S3_BUILD_TOOLS_STORAGE_BUCKET is defined.',\n )\n", "path": "readthedocs/storage/s3_storage.py"}], "after_files": [{"content": "\"\"\"\nAWS S3 Storage backends.\n\nWe override the backends provided by django-storages to add some small pieces\nthat we need to make our project to work as we want. For example, using\nManifestFilesMixin for static files and OverrideHostnameMixin to make it work\nin our Docker Development environment.\n\"\"\"\n\n# Disable abstract method because we are not overriding all the methods\n# pylint: disable=abstract-method\nfrom django.conf import settings\nfrom django.core.exceptions import ImproperlyConfigured\nfrom storages.backends.s3boto3 import S3Boto3Storage, S3ManifestStaticStorage\n\nfrom readthedocs.builds.storage import BuildMediaStorageMixin\n\nfrom .mixins import OverrideHostnameMixin, S3PrivateBucketMixin\n\n\nclass S3BuildMediaStorage(BuildMediaStorageMixin, OverrideHostnameMixin, S3Boto3Storage):\n\n \"\"\"An AWS S3 Storage backend for build artifacts.\"\"\"\n\n bucket_name = getattr(settings, 'S3_MEDIA_STORAGE_BUCKET', None)\n override_hostname = getattr(settings, 'S3_MEDIA_STORAGE_OVERRIDE_HOSTNAME', None)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n if not self.bucket_name:\n raise ImproperlyConfigured(\n 'AWS S3 not configured correctly. '\n 'Ensure S3_MEDIA_STORAGE_BUCKET is defined.',\n )\n\n\nclass S3BuildCommandsStorage(S3PrivateBucketMixin, S3Boto3Storage):\n\n \"\"\"An AWS S3 Storage backend for build commands.\"\"\"\n\n bucket_name = getattr(settings, 'S3_BUILD_COMMANDS_STORAGE_BUCKET', None)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n if not self.bucket_name:\n raise ImproperlyConfigured(\n 'AWS S3 not configured correctly. '\n 'Ensure S3_BUILD_COMMANDS_STORAGE_BUCKET is defined.',\n )\n\n\nclass S3StaticStorage(\n OverrideHostnameMixin,\n S3ManifestStaticStorage,\n S3Boto3Storage\n): # pylint: disable=too-many-ancestors\n\n \"\"\"\n An AWS S3 Storage backend for static media.\n\n * Uses Django's ManifestFilesMixin to have unique file paths (eg. 
core.a6f5e2c.css)\n \"\"\"\n\n bucket_name = getattr(settings, 'S3_STATIC_STORAGE_BUCKET', None)\n override_hostname = getattr(settings, 'S3_STATIC_STORAGE_OVERRIDE_HOSTNAME', None)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n if not self.bucket_name:\n raise ImproperlyConfigured(\n 'AWS S3 not configured correctly. '\n 'Ensure S3_STATIC_STORAGE_BUCKET is defined.',\n )\n\n self.bucket_acl = 'public-read'\n self.default_acl = 'public-read'\n self.querystring_auth = False\n\n\nclass S3BuildEnvironmentStorage(S3PrivateBucketMixin, BuildMediaStorageMixin, S3Boto3Storage):\n\n bucket_name = getattr(settings, 'S3_BUILD_ENVIRONMENT_STORAGE_BUCKET', None)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n if not self.bucket_name:\n raise ImproperlyConfigured(\n 'AWS S3 not configured correctly. '\n 'Ensure S3_BUILD_ENVIRONMENT_STORAGE_BUCKET is defined.',\n )\n\n\nclass S3BuildToolsStorage(S3PrivateBucketMixin, BuildMediaStorageMixin, S3Boto3Storage):\n\n bucket_name = getattr(settings, 'S3_BUILD_TOOLS_STORAGE_BUCKET', None)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n if not self.bucket_name:\n raise ImproperlyConfigured(\n 'AWS S3 not configured correctly. '\n 'Ensure S3_BUILD_TOOLS_STORAGE_BUCKET is defined.',\n )\n", "path": "readthedocs/storage/s3_storage.py"}]} | 1,440 | 254 |
gh_patches_debug_38362 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-3623 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Spider argos is broken
During the global build at 2021-08-25-14-42-15, spider **argos** failed with **0 features** and **0 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-08-25-14-42-15/logs/argos.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-08-25-14-42-15/output/argos.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-08-25-14-42-15/output/argos.geojson))
Spider argos is broken
During the global build at 2021-05-26-14-42-23, spider **argos** failed with **0 features** and **0 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/argos.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/argos.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/argos.geojson))
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/spiders/argos.py`
Content:
```
1 import scrapy
2 import re
3 import json
4 from locations.items import GeojsonPointItem
5
6
7 class ArgosSpider(scrapy.Spider):
8
9 name = "argos"
10 item_attributes = {"brand": "Argos"}
11 allowed_domains = ["www.argos.co.uk"]
12 download_delay = 0.5
13 start_urls = ("http://www.argos.co.uk/stores/",)
14
15 def parse_stores(self, response):
16 data = re.findall(r"window.INITIAL_STATE =[^<]+", response.text)
17 json_data = json.loads(data[0].replace("window.INITIAL_STATE =", ""))
18 properties = {
19 "addr_full": json_data["store"]["store"]["address"],
20 "phone": json_data["store"]["store"]["tel"],
21 "city": json_data["store"]["store"]["town"],
22 "state": "",
23 "postcode": json_data["store"]["store"]["postcode"],
24 "ref": json_data["store"]["store"]["id"],
25 "website": response.url,
26 "lat": float(json_data["store"]["store"]["lat"]),
27 "lon": float(json_data["store"]["store"]["lng"]),
28 }
29
30 open_hours = ""
31 for item in json_data["store"]["store"]["storeTimes"]:
32 open_hours = open_hours + item["date"][:2] + " " + item["time"] + " ;"
33 if open_hours:
34 properties["opening_hours"] = open_hours
35
36 yield GeojsonPointItem(**properties)
37
38 def parse(self, response):
39 urls = response.xpath('//div/div[@class="azsl-panel"]/ul/li/a/@href').extract()
40 for path in urls:
41 yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)
42
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/locations/spiders/argos.py b/locations/spiders/argos.py
--- a/locations/spiders/argos.py
+++ b/locations/spiders/argos.py
@@ -1,41 +1,58 @@
-import scrapy
import re
import json
-from locations.items import GeojsonPointItem
+from locations.items import GeojsonPointItem
+from locations.hours import OpeningHours
+from scrapy.spiders import SitemapSpider
-class ArgosSpider(scrapy.Spider):
+class ArgosSpider(SitemapSpider):
name = "argos"
- item_attributes = {"brand": "Argos"}
+ item_attributes = {"brand": "Argos", "brand_wikidata": "Q4789707"}
allowed_domains = ["www.argos.co.uk"]
download_delay = 0.5
- start_urls = ("http://www.argos.co.uk/stores/",)
+ sitemap_urls = [
+ "https://www.argos.co.uk/stores_sitemap.xml",
+ ]
+ sitemap_rules = [
+ (
+ r"https://www.argos.co.uk/stores/([\d]+)-([\w-]+)",
+ "parse",
+ ),
+ ]
+ custom_settings = {
+ "USER_AGENT": "Mozilla/5.0 (X11; Linux x86_64; rv:99.0) Gecko/20100101 Firefox/99.0"
+ }
- def parse_stores(self, response):
+ def parse(self, response):
data = re.findall(r"window.INITIAL_STATE =[^<]+", response.text)
json_data = json.loads(data[0].replace("window.INITIAL_STATE =", ""))
properties = {
- "addr_full": json_data["store"]["store"]["address"],
+ "street_address": json_data["store"]["store"]["address"],
"phone": json_data["store"]["store"]["tel"],
"city": json_data["store"]["store"]["town"],
- "state": "",
+ "name": json_data["store"]["store"]["name"],
"postcode": json_data["store"]["store"]["postcode"],
"ref": json_data["store"]["store"]["id"],
"website": response.url,
"lat": float(json_data["store"]["store"]["lat"]),
"lon": float(json_data["store"]["store"]["lng"]),
+ "extras": {
+ "store_type": json_data["store"]["store"]["type"],
+ },
}
- open_hours = ""
+ oh = OpeningHours()
for item in json_data["store"]["store"]["storeTimes"]:
- open_hours = open_hours + item["date"][:2] + " " + item["time"] + " ;"
- if open_hours:
- properties["opening_hours"] = open_hours
+ open_time, close_time = item["time"].split(" - ")
+ if (
+ open_time
+ and not open_time.isspace()
+ and close_time
+ and not close_time.isspace()
+ ):
+ oh.add_range(item["date"][:2], open_time, close_time)
- yield GeojsonPointItem(**properties)
+ properties["opening_hours"] = oh.as_opening_hours()
- def parse(self, response):
- urls = response.xpath('//div/div[@class="azsl-panel"]/ul/li/a/@href').extract()
- for path in urls:
- yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)
+ yield GeojsonPointItem(**properties)
| {"golden_diff": "diff --git a/locations/spiders/argos.py b/locations/spiders/argos.py\n--- a/locations/spiders/argos.py\n+++ b/locations/spiders/argos.py\n@@ -1,41 +1,58 @@\n-import scrapy\n import re\n import json\n-from locations.items import GeojsonPointItem\n \n+from locations.items import GeojsonPointItem\n+from locations.hours import OpeningHours\n+from scrapy.spiders import SitemapSpider\n \n-class ArgosSpider(scrapy.Spider):\n \n+class ArgosSpider(SitemapSpider):\n name = \"argos\"\n- item_attributes = {\"brand\": \"Argos\"}\n+ item_attributes = {\"brand\": \"Argos\", \"brand_wikidata\": \"Q4789707\"}\n allowed_domains = [\"www.argos.co.uk\"]\n download_delay = 0.5\n- start_urls = (\"http://www.argos.co.uk/stores/\",)\n+ sitemap_urls = [\n+ \"https://www.argos.co.uk/stores_sitemap.xml\",\n+ ]\n+ sitemap_rules = [\n+ (\n+ r\"https://www.argos.co.uk/stores/([\\d]+)-([\\w-]+)\",\n+ \"parse\",\n+ ),\n+ ]\n+ custom_settings = {\n+ \"USER_AGENT\": \"Mozilla/5.0 (X11; Linux x86_64; rv:99.0) Gecko/20100101 Firefox/99.0\"\n+ }\n \n- def parse_stores(self, response):\n+ def parse(self, response):\n data = re.findall(r\"window.INITIAL_STATE =[^<]+\", response.text)\n json_data = json.loads(data[0].replace(\"window.INITIAL_STATE =\", \"\"))\n properties = {\n- \"addr_full\": json_data[\"store\"][\"store\"][\"address\"],\n+ \"street_address\": json_data[\"store\"][\"store\"][\"address\"],\n \"phone\": json_data[\"store\"][\"store\"][\"tel\"],\n \"city\": json_data[\"store\"][\"store\"][\"town\"],\n- \"state\": \"\",\n+ \"name\": json_data[\"store\"][\"store\"][\"name\"],\n \"postcode\": json_data[\"store\"][\"store\"][\"postcode\"],\n \"ref\": json_data[\"store\"][\"store\"][\"id\"],\n \"website\": response.url,\n \"lat\": float(json_data[\"store\"][\"store\"][\"lat\"]),\n \"lon\": float(json_data[\"store\"][\"store\"][\"lng\"]),\n+ \"extras\": {\n+ \"store_type\": json_data[\"store\"][\"store\"][\"type\"],\n+ },\n }\n \n- open_hours = \"\"\n+ oh = OpeningHours()\n for item in json_data[\"store\"][\"store\"][\"storeTimes\"]:\n- open_hours = open_hours + item[\"date\"][:2] + \" \" + item[\"time\"] + \" ;\"\n- if open_hours:\n- properties[\"opening_hours\"] = open_hours\n+ open_time, close_time = item[\"time\"].split(\" - \")\n+ if (\n+ open_time\n+ and not open_time.isspace()\n+ and close_time\n+ and not close_time.isspace()\n+ ):\n+ oh.add_range(item[\"date\"][:2], open_time, close_time)\n \n- yield GeojsonPointItem(**properties)\n+ properties[\"opening_hours\"] = oh.as_opening_hours()\n \n- def parse(self, response):\n- urls = response.xpath('//div/div[@class=\"azsl-panel\"]/ul/li/a/@href').extract()\n- for path in urls:\n- yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)\n+ yield GeojsonPointItem(**properties)\n", "issue": "Spider argos is broken\nDuring the global build at 2021-08-25-14-42-15, spider **argos** failed with **0 features** and **0 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-08-25-14-42-15/logs/argos.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-08-25-14-42-15/output/argos.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-08-25-14-42-15/output/argos.geojson))\nSpider argos is broken\nDuring the global build at 2021-05-26-14-42-23, spider **argos** failed with **0 features** and **0 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/argos.log) and [the 
output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/argos.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/argos.geojson))\n", "before_files": [{"content": "import scrapy\nimport re\nimport json\nfrom locations.items import GeojsonPointItem\n\n\nclass ArgosSpider(scrapy.Spider):\n\n name = \"argos\"\n item_attributes = {\"brand\": \"Argos\"}\n allowed_domains = [\"www.argos.co.uk\"]\n download_delay = 0.5\n start_urls = (\"http://www.argos.co.uk/stores/\",)\n\n def parse_stores(self, response):\n data = re.findall(r\"window.INITIAL_STATE =[^<]+\", response.text)\n json_data = json.loads(data[0].replace(\"window.INITIAL_STATE =\", \"\"))\n properties = {\n \"addr_full\": json_data[\"store\"][\"store\"][\"address\"],\n \"phone\": json_data[\"store\"][\"store\"][\"tel\"],\n \"city\": json_data[\"store\"][\"store\"][\"town\"],\n \"state\": \"\",\n \"postcode\": json_data[\"store\"][\"store\"][\"postcode\"],\n \"ref\": json_data[\"store\"][\"store\"][\"id\"],\n \"website\": response.url,\n \"lat\": float(json_data[\"store\"][\"store\"][\"lat\"]),\n \"lon\": float(json_data[\"store\"][\"store\"][\"lng\"]),\n }\n\n open_hours = \"\"\n for item in json_data[\"store\"][\"store\"][\"storeTimes\"]:\n open_hours = open_hours + item[\"date\"][:2] + \" \" + item[\"time\"] + \" ;\"\n if open_hours:\n properties[\"opening_hours\"] = open_hours\n\n yield GeojsonPointItem(**properties)\n\n def parse(self, response):\n urls = response.xpath('//div/div[@class=\"azsl-panel\"]/ul/li/a/@href').extract()\n for path in urls:\n yield scrapy.Request(response.urljoin(path), callback=self.parse_stores)\n", "path": "locations/spiders/argos.py"}], "after_files": [{"content": "import re\nimport json\n\nfrom locations.items import GeojsonPointItem\nfrom locations.hours import OpeningHours\nfrom scrapy.spiders import SitemapSpider\n\n\nclass ArgosSpider(SitemapSpider):\n name = \"argos\"\n item_attributes = {\"brand\": \"Argos\", \"brand_wikidata\": \"Q4789707\"}\n allowed_domains = [\"www.argos.co.uk\"]\n download_delay = 0.5\n sitemap_urls = [\n \"https://www.argos.co.uk/stores_sitemap.xml\",\n ]\n sitemap_rules = [\n (\n r\"https://www.argos.co.uk/stores/([\\d]+)-([\\w-]+)\",\n \"parse\",\n ),\n ]\n custom_settings = {\n \"USER_AGENT\": \"Mozilla/5.0 (X11; Linux x86_64; rv:99.0) Gecko/20100101 Firefox/99.0\"\n }\n\n def parse(self, response):\n data = re.findall(r\"window.INITIAL_STATE =[^<]+\", response.text)\n json_data = json.loads(data[0].replace(\"window.INITIAL_STATE =\", \"\"))\n properties = {\n \"street_address\": json_data[\"store\"][\"store\"][\"address\"],\n \"phone\": json_data[\"store\"][\"store\"][\"tel\"],\n \"city\": json_data[\"store\"][\"store\"][\"town\"],\n \"name\": json_data[\"store\"][\"store\"][\"name\"],\n \"postcode\": json_data[\"store\"][\"store\"][\"postcode\"],\n \"ref\": json_data[\"store\"][\"store\"][\"id\"],\n \"website\": response.url,\n \"lat\": float(json_data[\"store\"][\"store\"][\"lat\"]),\n \"lon\": float(json_data[\"store\"][\"store\"][\"lng\"]),\n \"extras\": {\n \"store_type\": json_data[\"store\"][\"store\"][\"type\"],\n },\n }\n\n oh = OpeningHours()\n for item in json_data[\"store\"][\"store\"][\"storeTimes\"]:\n open_time, close_time = item[\"time\"].split(\" - \")\n if (\n open_time\n and not open_time.isspace()\n and close_time\n and not close_time.isspace()\n ):\n oh.add_range(item[\"date\"][:2], open_time, close_time)\n\n properties[\"opening_hours\"] = 
oh.as_opening_hours()\n\n yield GeojsonPointItem(**properties)\n", "path": "locations/spiders/argos.py"}]} | 1,070 | 790 |
gh_patches_debug_41926 | rasdani/github-patches | git_diff | Kinto__kinto-914 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Permissions endpoint does not expose perms specified in settings
Ex:
- Set `kinto.bucket_write_principals = system.Authenticated`
- `GET /permissions` should mention every buckets
Related #892 #350
Permissions endpoint does not expose perms specified in settings
Ex:
- Set `kinto.bucket_write_principals = system.Authenticated`
- `GET /permissions` should mention every buckets
Related #892 #350
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kinto/views/permissions.py`
Content:
```
1 import colander
2 from pyramid.security import NO_PERMISSION_REQUIRED, Authenticated
3
4 from kinto.authorization import PERMISSIONS_INHERITANCE_TREE
5 from kinto.core import utils as core_utils, resource
6 from kinto.core.storage.memory import extract_record_set
7
8
9 class PermissionsModel(object):
10 id_field = 'id'
11 modified_field = 'last_modified'
12 deleted_field = 'deleted'
13
14 def __init__(self, request):
15 self.request = request
16
17 def get_records(self, filters=None, sorting=None, pagination_rules=None,
18 limit=None, include_deleted=False, parent_id=None):
19 # Invert the permissions inheritance tree.
20 perms_descending_tree = {}
21 for on_resource, tree in PERMISSIONS_INHERITANCE_TREE.items():
22 for obtained_perm, obtained_from in tree.items():
23 for from_resource, perms in obtained_from.items():
24 for perm in perms:
25 perms_descending_tree.setdefault(from_resource, {})\
26 .setdefault(perm, {})\
27 .setdefault(on_resource, set())\
28 .add(obtained_perm)
29
30 # Obtain current principals.
31 principals = self.request.effective_principals
32 if Authenticated in principals:
33 # Since this view does not require any permission (can be used to
34 # obtain public users permissions), we have to add the prefixed
35 # userid among the principals
36 # (see :mod:`kinto.core.authentication`)
37 userid = self.request.prefixed_userid
38 principals.append(userid)
39
40 # Query every possible permission of the current user from backend.
41 backend = self.request.registry.permission
42 perms_by_object_uri = backend.get_accessible_objects(principals)
43
44 entries = []
45 for object_uri, perms in perms_by_object_uri.items():
46 try:
47 # Obtain associated resource from object URI
48 resource_name, matchdict = core_utils.view_lookup(self.request,
49 object_uri)
50 except ValueError:
51 # Skip permissions entries that are not linked to an object URI
52 continue
53
54 # For consistency with event payloads, prefix id with resource name
55 matchdict[resource_name + '_id'] = matchdict.get('id')
56
57 # Expand implicit permissions using descending tree.
58 permissions = set(perms)
59 for perm in perms:
60 obtained = perms_descending_tree[resource_name][perm]
61 # Related to same resource only and not every sub-objects.
62 # (e.g "bucket:write" gives "bucket:read" but not "group:read")
63 permissions |= obtained[resource_name]
64
65 entry = dict(uri=object_uri,
66 resource_name=resource_name,
67 permissions=list(permissions),
68 **matchdict)
69 entries.append(entry)
70
71 return extract_record_set(entries, filters=filters, sorting=sorting,
72 pagination_rules=pagination_rules,
73 limit=limit)
74
75
76 class PermissionsSchema(resource.ResourceSchema):
77 uri = colander.SchemaNode(colander.String())
78 resource_name = colander.SchemaNode(colander.String())
79 permissions = colander.Sequence(colander.SchemaNode(colander.String()))
80 bucket_id = colander.SchemaNode(colander.String())
81 collection_id = colander.SchemaNode(colander.String(),
82 missing=colander.drop)
83 group_id = colander.SchemaNode(colander.String(),
84 missing=colander.drop)
85 record_id = colander.SchemaNode(colander.String(),
86 missing=colander.drop)
87
88 class Options:
89 preserve_unknown = False
90
91
92 @resource.register(name='permissions',
93 description='List of user permissions',
94 collection_path='/permissions',
95 record_path=None,
96 collection_methods=('GET',),
97 permission=NO_PERMISSION_REQUIRED)
98 class Permissions(resource.ShareableResource):
99
100 schema = PermissionsSchema
101
102 def __init__(self, request, context=None):
103 super(Permissions, self).__init__(request, context)
104 self.model = PermissionsModel(request)
105
106 def _extract_sorting(self, limit):
107 # Permissions entries are not stored with timestamp, so do not
108 # force it.
109 result = super(Permissions, self)._extract_sorting(limit)
110 without_last_modified = [s for s in result
111 if s.field != self.model.modified_field]
112 return without_last_modified
113
114 def _extract_filters(self, queryparams=None):
115 result = super(Permissions, self)._extract_filters(queryparams)
116 without_last_modified = [s for s in result
117 if s.field != self.model.modified_field]
118 return without_last_modified
119
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kinto/views/permissions.py b/kinto/views/permissions.py
--- a/kinto/views/permissions.py
+++ b/kinto/views/permissions.py
@@ -1,11 +1,51 @@
import colander
from pyramid.security import NO_PERMISSION_REQUIRED, Authenticated
+from pyramid.settings import aslist
from kinto.authorization import PERMISSIONS_INHERITANCE_TREE
from kinto.core import utils as core_utils, resource
from kinto.core.storage.memory import extract_record_set
+def allowed_from_settings(settings, principals):
+ """Returns every permissions allowed from settings for the current user.
+ :param settings dict: app settings
+ :param principals list: list of principals of current user
+ :rtype: dict
+
+ Result example::
+
+ {
+ "bucket": {"write", "collection:create"},
+ "collection": {"read"}
+ }
+
+ XXX: This helper will be useful for Kinto/kinto#894
+ """
+ perms_settings = {k: aslist(v) for k, v in settings.items()
+ if k.endswith('_principals')}
+ from_settings = {}
+ for key, allowed_principals in perms_settings.items():
+ resource_name, permission, _ = key.split('_')
+ # Keep the known permissions only.
+ if resource_name not in PERMISSIONS_INHERITANCE_TREE.keys():
+ continue
+ # Keep the permissions of the current user only.
+ if not bool(set(principals) & set(allowed_principals)):
+ continue
+ # ``collection_create_principals`` means ``collection:create`` in bucket.
+ if permission == 'create':
+ permission = '%s:%s' % (resource_name, permission)
+ resource_name = { # resource parents.
+ 'bucket': '',
+ 'collection': 'bucket',
+ 'group': 'bucket',
+ 'record': 'collection'}[resource_name]
+ # Store them in a convenient way.
+ from_settings.setdefault(resource_name, set()).add(permission)
+ return from_settings
+
+
class PermissionsModel(object):
id_field = 'id'
modified_field = 'last_modified'
@@ -41,10 +81,35 @@
backend = self.request.registry.permission
perms_by_object_uri = backend.get_accessible_objects(principals)
+ # Check settings for every allowed resources.
+ from_settings = allowed_from_settings(self.request.registry.settings, principals)
+
+ # Expand permissions obtained from backend with the object URIs that
+ # correspond to permissions allowed from settings.
+ allowed_resources = {'bucket', 'collection', 'group'} & set(from_settings.keys())
+ if allowed_resources:
+ storage = self.request.registry.storage
+ every_bucket, _ = storage.get_all(parent_id='', collection_id='bucket')
+ for bucket in every_bucket:
+ bucket_uri = '/buckets/{id}'.format(**bucket)
+ for res in allowed_resources:
+ resource_perms = from_settings[res]
+ # Bucket is always fetched.
+ if res == 'bucket':
+ perms_by_object_uri.setdefault(bucket_uri, []).extend(resource_perms)
+ continue
+ # Fetch bucket collections and groups.
+ # XXX: wrong approach: query in a loop!
+ every_subobjects, _ = storage.get_all(parent_id=bucket_uri,
+ collection_id=res)
+ for subobject in every_subobjects:
+ subobj_uri = bucket_uri + '/{0}s/{1}'.format(res, subobject['id'])
+ perms_by_object_uri.setdefault(subobj_uri, []).extend(resource_perms)
+
entries = []
for object_uri, perms in perms_by_object_uri.items():
try:
- # Obtain associated resource from object URI
+ # Obtain associated res from object URI
resource_name, matchdict = core_utils.view_lookup(self.request,
object_uri)
except ValueError:
| {"golden_diff": "diff --git a/kinto/views/permissions.py b/kinto/views/permissions.py\n--- a/kinto/views/permissions.py\n+++ b/kinto/views/permissions.py\n@@ -1,11 +1,51 @@\n import colander\n from pyramid.security import NO_PERMISSION_REQUIRED, Authenticated\n+from pyramid.settings import aslist\n \n from kinto.authorization import PERMISSIONS_INHERITANCE_TREE\n from kinto.core import utils as core_utils, resource\n from kinto.core.storage.memory import extract_record_set\n \n \n+def allowed_from_settings(settings, principals):\n+ \"\"\"Returns every permissions allowed from settings for the current user.\n+ :param settings dict: app settings\n+ :param principals list: list of principals of current user\n+ :rtype: dict\n+\n+ Result example::\n+\n+ {\n+ \"bucket\": {\"write\", \"collection:create\"},\n+ \"collection\": {\"read\"}\n+ }\n+\n+ XXX: This helper will be useful for Kinto/kinto#894\n+ \"\"\"\n+ perms_settings = {k: aslist(v) for k, v in settings.items()\n+ if k.endswith('_principals')}\n+ from_settings = {}\n+ for key, allowed_principals in perms_settings.items():\n+ resource_name, permission, _ = key.split('_')\n+ # Keep the known permissions only.\n+ if resource_name not in PERMISSIONS_INHERITANCE_TREE.keys():\n+ continue\n+ # Keep the permissions of the current user only.\n+ if not bool(set(principals) & set(allowed_principals)):\n+ continue\n+ # ``collection_create_principals`` means ``collection:create`` in bucket.\n+ if permission == 'create':\n+ permission = '%s:%s' % (resource_name, permission)\n+ resource_name = { # resource parents.\n+ 'bucket': '',\n+ 'collection': 'bucket',\n+ 'group': 'bucket',\n+ 'record': 'collection'}[resource_name]\n+ # Store them in a convenient way.\n+ from_settings.setdefault(resource_name, set()).add(permission)\n+ return from_settings\n+\n+\n class PermissionsModel(object):\n id_field = 'id'\n modified_field = 'last_modified'\n@@ -41,10 +81,35 @@\n backend = self.request.registry.permission\n perms_by_object_uri = backend.get_accessible_objects(principals)\n \n+ # Check settings for every allowed resources.\n+ from_settings = allowed_from_settings(self.request.registry.settings, principals)\n+\n+ # Expand permissions obtained from backend with the object URIs that\n+ # correspond to permissions allowed from settings.\n+ allowed_resources = {'bucket', 'collection', 'group'} & set(from_settings.keys())\n+ if allowed_resources:\n+ storage = self.request.registry.storage\n+ every_bucket, _ = storage.get_all(parent_id='', collection_id='bucket')\n+ for bucket in every_bucket:\n+ bucket_uri = '/buckets/{id}'.format(**bucket)\n+ for res in allowed_resources:\n+ resource_perms = from_settings[res]\n+ # Bucket is always fetched.\n+ if res == 'bucket':\n+ perms_by_object_uri.setdefault(bucket_uri, []).extend(resource_perms)\n+ continue\n+ # Fetch bucket collections and groups.\n+ # XXX: wrong approach: query in a loop!\n+ every_subobjects, _ = storage.get_all(parent_id=bucket_uri,\n+ collection_id=res)\n+ for subobject in every_subobjects:\n+ subobj_uri = bucket_uri + '/{0}s/{1}'.format(res, subobject['id'])\n+ perms_by_object_uri.setdefault(subobj_uri, []).extend(resource_perms)\n+\n entries = []\n for object_uri, perms in perms_by_object_uri.items():\n try:\n- # Obtain associated resource from object URI\n+ # Obtain associated res from object URI\n resource_name, matchdict = core_utils.view_lookup(self.request,\n object_uri)\n except ValueError:\n", "issue": "Permissions endpoint does not expose perms specified in settings\nEx:\r\n- Set 
`kinto.bucket_write_principals = system.Authenticated`\r\n- `GET /permissions` should mention every buckets\r\n\r\nRelated #892 #350 \nPermissions endpoint does not expose perms specified in settings\nEx:\r\n- Set `kinto.bucket_write_principals = system.Authenticated`\r\n- `GET /permissions` should mention every buckets\r\n\r\nRelated #892 #350 \n", "before_files": [{"content": "import colander\nfrom pyramid.security import NO_PERMISSION_REQUIRED, Authenticated\n\nfrom kinto.authorization import PERMISSIONS_INHERITANCE_TREE\nfrom kinto.core import utils as core_utils, resource\nfrom kinto.core.storage.memory import extract_record_set\n\n\nclass PermissionsModel(object):\n id_field = 'id'\n modified_field = 'last_modified'\n deleted_field = 'deleted'\n\n def __init__(self, request):\n self.request = request\n\n def get_records(self, filters=None, sorting=None, pagination_rules=None,\n limit=None, include_deleted=False, parent_id=None):\n # Invert the permissions inheritance tree.\n perms_descending_tree = {}\n for on_resource, tree in PERMISSIONS_INHERITANCE_TREE.items():\n for obtained_perm, obtained_from in tree.items():\n for from_resource, perms in obtained_from.items():\n for perm in perms:\n perms_descending_tree.setdefault(from_resource, {})\\\n .setdefault(perm, {})\\\n .setdefault(on_resource, set())\\\n .add(obtained_perm)\n\n # Obtain current principals.\n principals = self.request.effective_principals\n if Authenticated in principals:\n # Since this view does not require any permission (can be used to\n # obtain public users permissions), we have to add the prefixed\n # userid among the principals\n # (see :mod:`kinto.core.authentication`)\n userid = self.request.prefixed_userid\n principals.append(userid)\n\n # Query every possible permission of the current user from backend.\n backend = self.request.registry.permission\n perms_by_object_uri = backend.get_accessible_objects(principals)\n\n entries = []\n for object_uri, perms in perms_by_object_uri.items():\n try:\n # Obtain associated resource from object URI\n resource_name, matchdict = core_utils.view_lookup(self.request,\n object_uri)\n except ValueError:\n # Skip permissions entries that are not linked to an object URI\n continue\n\n # For consistency with event payloads, prefix id with resource name\n matchdict[resource_name + '_id'] = matchdict.get('id')\n\n # Expand implicit permissions using descending tree.\n permissions = set(perms)\n for perm in perms:\n obtained = perms_descending_tree[resource_name][perm]\n # Related to same resource only and not every sub-objects.\n # (e.g \"bucket:write\" gives \"bucket:read\" but not \"group:read\")\n permissions |= obtained[resource_name]\n\n entry = dict(uri=object_uri,\n resource_name=resource_name,\n permissions=list(permissions),\n **matchdict)\n entries.append(entry)\n\n return extract_record_set(entries, filters=filters, sorting=sorting,\n pagination_rules=pagination_rules,\n limit=limit)\n\n\nclass PermissionsSchema(resource.ResourceSchema):\n uri = colander.SchemaNode(colander.String())\n resource_name = colander.SchemaNode(colander.String())\n permissions = colander.Sequence(colander.SchemaNode(colander.String()))\n bucket_id = colander.SchemaNode(colander.String())\n collection_id = colander.SchemaNode(colander.String(),\n missing=colander.drop)\n group_id = colander.SchemaNode(colander.String(),\n missing=colander.drop)\n record_id = colander.SchemaNode(colander.String(),\n missing=colander.drop)\n\n class Options:\n preserve_unknown = False\n\n\[email 
protected](name='permissions',\n description='List of user permissions',\n collection_path='/permissions',\n record_path=None,\n collection_methods=('GET',),\n permission=NO_PERMISSION_REQUIRED)\nclass Permissions(resource.ShareableResource):\n\n schema = PermissionsSchema\n\n def __init__(self, request, context=None):\n super(Permissions, self).__init__(request, context)\n self.model = PermissionsModel(request)\n\n def _extract_sorting(self, limit):\n # Permissions entries are not stored with timestamp, so do not\n # force it.\n result = super(Permissions, self)._extract_sorting(limit)\n without_last_modified = [s for s in result\n if s.field != self.model.modified_field]\n return without_last_modified\n\n def _extract_filters(self, queryparams=None):\n result = super(Permissions, self)._extract_filters(queryparams)\n without_last_modified = [s for s in result\n if s.field != self.model.modified_field]\n return without_last_modified\n", "path": "kinto/views/permissions.py"}], "after_files": [{"content": "import colander\nfrom pyramid.security import NO_PERMISSION_REQUIRED, Authenticated\nfrom pyramid.settings import aslist\n\nfrom kinto.authorization import PERMISSIONS_INHERITANCE_TREE\nfrom kinto.core import utils as core_utils, resource\nfrom kinto.core.storage.memory import extract_record_set\n\n\ndef allowed_from_settings(settings, principals):\n \"\"\"Returns every permissions allowed from settings for the current user.\n :param settings dict: app settings\n :param principals list: list of principals of current user\n :rtype: dict\n\n Result example::\n\n {\n \"bucket\": {\"write\", \"collection:create\"},\n \"collection\": {\"read\"}\n }\n\n XXX: This helper will be useful for Kinto/kinto#894\n \"\"\"\n perms_settings = {k: aslist(v) for k, v in settings.items()\n if k.endswith('_principals')}\n from_settings = {}\n for key, allowed_principals in perms_settings.items():\n resource_name, permission, _ = key.split('_')\n # Keep the known permissions only.\n if resource_name not in PERMISSIONS_INHERITANCE_TREE.keys():\n continue\n # Keep the permissions of the current user only.\n if not bool(set(principals) & set(allowed_principals)):\n continue\n # ``collection_create_principals`` means ``collection:create`` in bucket.\n if permission == 'create':\n permission = '%s:%s' % (resource_name, permission)\n resource_name = { # resource parents.\n 'bucket': '',\n 'collection': 'bucket',\n 'group': 'bucket',\n 'record': 'collection'}[resource_name]\n # Store them in a convenient way.\n from_settings.setdefault(resource_name, set()).add(permission)\n return from_settings\n\n\nclass PermissionsModel(object):\n id_field = 'id'\n modified_field = 'last_modified'\n deleted_field = 'deleted'\n\n def __init__(self, request):\n self.request = request\n\n def get_records(self, filters=None, sorting=None, pagination_rules=None,\n limit=None, include_deleted=False, parent_id=None):\n # Invert the permissions inheritance tree.\n perms_descending_tree = {}\n for on_resource, tree in PERMISSIONS_INHERITANCE_TREE.items():\n for obtained_perm, obtained_from in tree.items():\n for from_resource, perms in obtained_from.items():\n for perm in perms:\n perms_descending_tree.setdefault(from_resource, {})\\\n .setdefault(perm, {})\\\n .setdefault(on_resource, set())\\\n .add(obtained_perm)\n\n # Obtain current principals.\n principals = self.request.effective_principals\n if Authenticated in principals:\n # Since this view does not require any permission (can be used to\n # obtain public users permissions), we 
have to add the prefixed\n # userid among the principals\n # (see :mod:`kinto.core.authentication`)\n userid = self.request.prefixed_userid\n principals.append(userid)\n\n # Query every possible permission of the current user from backend.\n backend = self.request.registry.permission\n perms_by_object_uri = backend.get_accessible_objects(principals)\n\n # Check settings for every allowed resources.\n from_settings = allowed_from_settings(self.request.registry.settings, principals)\n\n # Expand permissions obtained from backend with the object URIs that\n # correspond to permissions allowed from settings.\n allowed_resources = {'bucket', 'collection', 'group'} & set(from_settings.keys())\n if allowed_resources:\n storage = self.request.registry.storage\n every_bucket, _ = storage.get_all(parent_id='', collection_id='bucket')\n for bucket in every_bucket:\n bucket_uri = '/buckets/{id}'.format(**bucket)\n for res in allowed_resources:\n resource_perms = from_settings[res]\n # Bucket is always fetched.\n if res == 'bucket':\n perms_by_object_uri.setdefault(bucket_uri, []).extend(resource_perms)\n continue\n # Fetch bucket collections and groups.\n # XXX: wrong approach: query in a loop!\n every_subobjects, _ = storage.get_all(parent_id=bucket_uri,\n collection_id=res)\n for subobject in every_subobjects:\n subobj_uri = bucket_uri + '/{0}s/{1}'.format(res, subobject['id'])\n perms_by_object_uri.setdefault(subobj_uri, []).extend(resource_perms)\n\n entries = []\n for object_uri, perms in perms_by_object_uri.items():\n try:\n # Obtain associated res from object URI\n resource_name, matchdict = core_utils.view_lookup(self.request,\n object_uri)\n except ValueError:\n # Skip permissions entries that are not linked to an object URI\n continue\n\n # For consistency with event payloads, prefix id with resource name\n matchdict[resource_name + '_id'] = matchdict.get('id')\n\n # Expand implicit permissions using descending tree.\n permissions = set(perms)\n for perm in perms:\n obtained = perms_descending_tree[resource_name][perm]\n # Related to same resource only and not every sub-objects.\n # (e.g \"bucket:write\" gives \"bucket:read\" but not \"group:read\")\n permissions |= obtained[resource_name]\n\n entry = dict(uri=object_uri,\n resource_name=resource_name,\n permissions=list(permissions),\n **matchdict)\n entries.append(entry)\n\n return extract_record_set(entries, filters=filters, sorting=sorting,\n pagination_rules=pagination_rules,\n limit=limit)\n\n\nclass PermissionsSchema(resource.ResourceSchema):\n uri = colander.SchemaNode(colander.String())\n resource_name = colander.SchemaNode(colander.String())\n permissions = colander.Sequence(colander.SchemaNode(colander.String()))\n bucket_id = colander.SchemaNode(colander.String())\n collection_id = colander.SchemaNode(colander.String(),\n missing=colander.drop)\n group_id = colander.SchemaNode(colander.String(),\n missing=colander.drop)\n record_id = colander.SchemaNode(colander.String(),\n missing=colander.drop)\n\n class Options:\n preserve_unknown = False\n\n\[email protected](name='permissions',\n description='List of user permissions',\n collection_path='/permissions',\n record_path=None,\n collection_methods=('GET',),\n permission=NO_PERMISSION_REQUIRED)\nclass Permissions(resource.ShareableResource):\n\n schema = PermissionsSchema\n\n def __init__(self, request, context=None):\n super(Permissions, self).__init__(request, context)\n self.model = PermissionsModel(request)\n\n def _extract_sorting(self, limit):\n # Permissions entries are not 
stored with timestamp, so do not\n # force it.\n result = super(Permissions, self)._extract_sorting(limit)\n without_last_modified = [s for s in result\n if s.field != self.model.modified_field]\n return without_last_modified\n\n def _extract_filters(self, queryparams=None):\n result = super(Permissions, self)._extract_filters(queryparams)\n without_last_modified = [s for s in result\n if s.field != self.model.modified_field]\n return without_last_modified\n", "path": "kinto/views/permissions.py"}]} | 1,539 | 855 |
gh_patches_debug_8452 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-8361 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Problems with Spiders built on YextSpider
I've noticed a few of the spiders whose results I was using in my OSM tool have recently stopped returning any results, including five_guys_de_es_fr_gb.py, matalan_gb.py, and three_gb.py.
A common feature of these spiders is that they use the YextSpider class. Checking some other spiders that are also based on this class reveals they've also stopped working. The spider stats suggest 404 and 403 errors are preventing the data from being retrieved.
If this is a general problem affecting multiple spiders, would someone be able to take a look and see if it can be fixed?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/spiders/lane_bryant_us.py`
Content:
```
1 from locations.storefinders.yext import YextSpider
2
3
4 class LaneBryantUSSpider(YextSpider):
5 name = "lane_bryant_us"
6 item_attributes = {"brand": "Lane Bryant", "brand_wikidata": "Q6485350"}
7 api_key = "ccfe136898dfe236489c4f9fb0b91ded"
8 api_version = "20220511"
9
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/locations/spiders/lane_bryant_us.py b/locations/spiders/lane_bryant_us.py
--- a/locations/spiders/lane_bryant_us.py
+++ b/locations/spiders/lane_bryant_us.py
@@ -1,8 +1,9 @@
-from locations.storefinders.yext import YextSpider
+from locations.categories import Categories
+from locations.storefinders.yext_answers import YextAnswersSpider
-class LaneBryantUSSpider(YextSpider):
+class LaneBryantUSSpider(YextAnswersSpider):
name = "lane_bryant_us"
- item_attributes = {"brand": "Lane Bryant", "brand_wikidata": "Q6485350"}
+ item_attributes = {"brand": "Lane Bryant", "brand_wikidata": "Q6485350", "extras": Categories.SHOP_CLOTHES.value}
api_key = "ccfe136898dfe236489c4f9fb0b91ded"
- api_version = "20220511"
+ experience_key = "lane-bryant-locator"
| {"golden_diff": "diff --git a/locations/spiders/lane_bryant_us.py b/locations/spiders/lane_bryant_us.py\n--- a/locations/spiders/lane_bryant_us.py\n+++ b/locations/spiders/lane_bryant_us.py\n@@ -1,8 +1,9 @@\n-from locations.storefinders.yext import YextSpider\n+from locations.categories import Categories\n+from locations.storefinders.yext_answers import YextAnswersSpider\n \n \n-class LaneBryantUSSpider(YextSpider):\n+class LaneBryantUSSpider(YextAnswersSpider):\n name = \"lane_bryant_us\"\n- item_attributes = {\"brand\": \"Lane Bryant\", \"brand_wikidata\": \"Q6485350\"}\n+ item_attributes = {\"brand\": \"Lane Bryant\", \"brand_wikidata\": \"Q6485350\", \"extras\": Categories.SHOP_CLOTHES.value}\n api_key = \"ccfe136898dfe236489c4f9fb0b91ded\"\n- api_version = \"20220511\"\n+ experience_key = \"lane-bryant-locator\"\n", "issue": "Problems with Spiders built on YextSpider\nI've noticed a few of the spiders whose results I was using in my OSM tool have recently stopped returning any results, including five_guys_de_es_fr_gb.py , matalan_gb.py , and three_gb.py .\r\n\r\nA common feature of these spiders is that they use the YextSpider class. Checking some other spiders that are also based on this class, reveals they've also stopped working. The spider stats suggest 404 and 403 errors are preventing the data being retrieved.\r\n\r\nIf this is a general problem affecting multiple spiders, would someone be able to take a look and see if it can be fixed?\n", "before_files": [{"content": "from locations.storefinders.yext import YextSpider\n\n\nclass LaneBryantUSSpider(YextSpider):\n name = \"lane_bryant_us\"\n item_attributes = {\"brand\": \"Lane Bryant\", \"brand_wikidata\": \"Q6485350\"}\n api_key = \"ccfe136898dfe236489c4f9fb0b91ded\"\n api_version = \"20220511\"\n", "path": "locations/spiders/lane_bryant_us.py"}], "after_files": [{"content": "from locations.categories import Categories\nfrom locations.storefinders.yext_answers import YextAnswersSpider\n\n\nclass LaneBryantUSSpider(YextAnswersSpider):\n name = \"lane_bryant_us\"\n item_attributes = {\"brand\": \"Lane Bryant\", \"brand_wikidata\": \"Q6485350\", \"extras\": Categories.SHOP_CLOTHES.value}\n api_key = \"ccfe136898dfe236489c4f9fb0b91ded\"\n experience_key = \"lane-bryant-locator\"\n", "path": "locations/spiders/lane_bryant_us.py"}]} | 524 | 267 |
gh_patches_debug_24496 | rasdani/github-patches | git_diff | optuna__optuna-3443 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Improve multi-objective optimization tutorial regarding `Study.best_trials`
### What is an issue?
## Background
https://github.com/optuna/optuna/issues/3397 added a note about `study.best_trials` to the [reuse the best trial tutorial](https://optuna.readthedocs.io/en/latest/tutorial/20_recipes/010_reuse_best_trial.html). During the discussion, we found that the [multi-objective optimization tutorial](https://optuna.readthedocs.io/en/latest/tutorial/20_recipes/002_multi_objective.html) does not fetch the trials on the Pareto front (i.e., `Study.best_trials`) while it visualizes the Pareto front.
## Description
I think readers can understand how to use the trials on the Pareto front more easily if we mention `Study.best_trials` in the tutorial page. We may have some possible scenarios. For example,
1. Show the Pareto front as can be seen in the current tutorial
2. Explain how to get the trials on the Pareto front by using `Study.best_trials`
3. Demonstrate how to get the desired trial from the Pareto front, i.e., the trial whose accuracy is highest, and show the parameter values (or objective values); a sketch of this step follows below
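A minimal sketch of scenario 3, assuming the study from the existing tutorial (`directions=["minimize", "maximize"]` with FLOPS and accuracy as the objectives); variable names are illustrative only:

```python
# Assumes `study` is the multi-objective study from the tutorial, where
# trial.values[0] is FLOPS (minimized) and trial.values[1] is accuracy (maximized).
print(f"Number of trials on the Pareto front: {len(study.best_trials)}")

# Pick the Pareto-optimal trial with the highest accuracy (second objective).
trial_with_highest_accuracy = max(study.best_trials, key=lambda t: t.values[1])
print("Trial with highest accuracy:")
print(f"  number: {trial_with_highest_accuracy.number}")
print(f"  params: {trial_with_highest_accuracy.params}")
print(f"  values: {trial_with_highest_accuracy.values}")
```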

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tutorial/20_recipes/002_multi_objective.py`
Content:
```
1 """
2 .. _multi_objective:
3
4 Multi-objective Optimization with Optuna
5 ========================================
6
7 This tutorial showcases Optuna's multi-objective optimization feature by
8 optimizing the validation accuracy of Fashion MNIST dataset and the FLOPS of the model implemented in PyTorch.
9
10 We use `thop <https://github.com/Lyken17/pytorch-OpCounter>`_ to measure FLOPS.
11 """
12
13 import thop
14 import torch
15 import torch.nn as nn
16 import torch.nn.functional as F
17 import torchvision
18
19 import optuna
20
21
22 DEVICE = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
23 DIR = ".."
24 BATCHSIZE = 128
25 N_TRAIN_EXAMPLES = BATCHSIZE * 30
26 N_VALID_EXAMPLES = BATCHSIZE * 10
27
28
29 def define_model(trial):
30 n_layers = trial.suggest_int("n_layers", 1, 3)
31 layers = []
32
33 in_features = 28 * 28
34 for i in range(n_layers):
35 out_features = trial.suggest_int("n_units_l{}".format(i), 4, 128)
36 layers.append(nn.Linear(in_features, out_features))
37 layers.append(nn.ReLU())
38 p = trial.suggest_float("dropout_{}".format(i), 0.2, 0.5)
39 layers.append(nn.Dropout(p))
40
41 in_features = out_features
42
43 layers.append(nn.Linear(in_features, 10))
44 layers.append(nn.LogSoftmax(dim=1))
45
46 return nn.Sequential(*layers)
47
48
49 # Defines training and evaluation.
50 def train_model(model, optimizer, train_loader):
51 model.train()
52 for batch_idx, (data, target) in enumerate(train_loader):
53 data, target = data.view(-1, 28 * 28).to(DEVICE), target.to(DEVICE)
54 optimizer.zero_grad()
55 F.nll_loss(model(data), target).backward()
56 optimizer.step()
57
58
59 def eval_model(model, valid_loader):
60 model.eval()
61 correct = 0
62 with torch.no_grad():
63 for batch_idx, (data, target) in enumerate(valid_loader):
64 data, target = data.view(-1, 28 * 28).to(DEVICE), target.to(DEVICE)
65 pred = model(data).argmax(dim=1, keepdim=True)
66 correct += pred.eq(target.view_as(pred)).sum().item()
67
68 accuracy = correct / N_VALID_EXAMPLES
69
70 flops, _ = thop.profile(model, inputs=(torch.randn(1, 28 * 28).to(DEVICE),), verbose=False)
71 return flops, accuracy
72
73
74 ###################################################################################################
75 # Define multi-objective objective function.
76 # Objectives are FLOPS and accuracy.
77 def objective(trial):
78 train_dataset = torchvision.datasets.FashionMNIST(
79 DIR, train=True, download=True, transform=torchvision.transforms.ToTensor()
80 )
81 train_loader = torch.utils.data.DataLoader(
82 torch.utils.data.Subset(train_dataset, list(range(N_TRAIN_EXAMPLES))),
83 batch_size=BATCHSIZE,
84 shuffle=True,
85 )
86
87 val_dataset = torchvision.datasets.FashionMNIST(
88 DIR, train=False, transform=torchvision.transforms.ToTensor()
89 )
90 val_loader = torch.utils.data.DataLoader(
91 torch.utils.data.Subset(val_dataset, list(range(N_VALID_EXAMPLES))),
92 batch_size=BATCHSIZE,
93 shuffle=True,
94 )
95 model = define_model(trial).to(DEVICE)
96
97 optimizer = torch.optim.Adam(
98 model.parameters(), trial.suggest_float("lr", 1e-5, 1e-1, log=True)
99 )
100
101 for epoch in range(10):
102 train_model(model, optimizer, train_loader)
103 flops, accuracy = eval_model(model, val_loader)
104 return flops, accuracy
105
106
107 ###################################################################################################
108 # Run multi-objective optimization
109 # --------------------------------
110 #
111 # If your optimization problem is multi-objective,
112 # Optuna assumes that you will specify the optimization direction for each objective.
113 # Specifically, in this example, we want to minimize the FLOPS (we want a faster model)
114 # and maximize the accuracy. So we set ``directions`` to ``["minimize", "maximize"]``.
115 study = optuna.create_study(directions=["minimize", "maximize"])
116 study.optimize(objective, n_trials=30, timeout=300)
117
118 print("Number of finished trials: ", len(study.trials))
119
120
121 ###################################################################################################
122 # Check trials on pareto front visually
123 optuna.visualization.plot_pareto_front(study, target_names=["FLOPS", "accuracy"])
124
125
126 ###################################################################################################
127 # Learn which hyperparameters are affecting the flops most with hyperparameter importance.
128 optuna.visualization.plot_param_importances(
129 study, target=lambda t: t.values[0], target_name="flops"
130 )
131
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/tutorial/20_recipes/002_multi_objective.py b/tutorial/20_recipes/002_multi_objective.py
--- a/tutorial/20_recipes/002_multi_objective.py
+++ b/tutorial/20_recipes/002_multi_objective.py
@@ -119,10 +119,23 @@
###################################################################################################
-# Check trials on pareto front visually
+# Check trials on Pareto front visually.
optuna.visualization.plot_pareto_front(study, target_names=["FLOPS", "accuracy"])
+###################################################################################################
+# Fetch the list of trials on the Pareto front with :attr:`~optuna.study.Study.best_trials`.
+#
+# For example, the following code shows the number of trials on the Pareto front and picks the trial with the highest accuracy.
+
+print(f"Number of trials on the Pareto front: {len(study.best_trials)}")
+
+trial_with_highest_accuracy = max(study.best_trials, key=lambda t: t.values[1])
+print(f"Trial with highest accuracy: ")
+print(f"\tnumber: {trial_with_highest_accuracy.number}")
+print(f"\tparams: {trial_with_highest_accuracy.params}")
+print(f"\tvalues: {trial_with_highest_accuracy.values}")
+
###################################################################################################
# Learn which hyperparameters are affecting the flops most with hyperparameter importance.
optuna.visualization.plot_param_importances(
| {"golden_diff": "diff --git a/tutorial/20_recipes/002_multi_objective.py b/tutorial/20_recipes/002_multi_objective.py\n--- a/tutorial/20_recipes/002_multi_objective.py\n+++ b/tutorial/20_recipes/002_multi_objective.py\n@@ -119,10 +119,23 @@\n \n \n ###################################################################################################\n-# Check trials on pareto front visually\n+# Check trials on Pareto front visually.\n optuna.visualization.plot_pareto_front(study, target_names=[\"FLOPS\", \"accuracy\"])\n \n \n+###################################################################################################\n+# Fetch the list of trials on the Pareto front with :attr:`~optuna.study.Study.best_trials`.\n+#\n+# For example, the following code shows the number of trials on the Pareto front and picks the trial with the highest accuracy.\n+\n+print(f\"Number of trials on the Pareto front: {len(study.best_trials)}\")\n+\n+trial_with_highest_accuracy = max(study.best_trials, key=lambda t: t.values[1])\n+print(f\"Trial with highest accuracy: \")\n+print(f\"\\tnumber: {trial_with_highest_accuracy.number}\")\n+print(f\"\\tparams: {trial_with_highest_accuracy.params}\")\n+print(f\"\\tvalues: {trial_with_highest_accuracy.values}\")\n+\n ###################################################################################################\n # Learn which hyperparameters are affecting the flops most with hyperparameter importance.\n optuna.visualization.plot_param_importances(\n", "issue": "Improve multi-objective optimization tutorial regarding `Study.best_trials`\n### What is an issue?\n\n## Background\r\n\r\nhttps://github.com/optuna/optuna/issues/3397 added a note about `study.best_trials` to the [reuse the best trial tutorial](https://optuna.readthedocs.io/en/latest/tutorial/20_recipes/010_reuse_best_trial.html). During the discussion, we found that the [multi-objective optimization tutorial](https://optuna.readthedocs.io/en/latest/tutorial/20_recipes/002_multi_objective.html) does not fetch the trials on the Pareto front (i.e., `Study.best_trials`) while it visualize the Pareto front.\r\n\r\n## Description\r\n\r\nI think readers can understand how to use the trials on Pareto front more easily if we mention `Study.best_trials` in the tutorial page. We may have some possible scenarios. For example,\r\n\r\n1. Show the Pareto front as can be seen in the current tutorial\r\n2. Explain how to get the trials on the Pareto front by using `Study.best_trials`\r\n3. Demonstrate how to get the desired trial from the Pareto front i.e., the trial whose accuracy is highest, and show the parameter values (or objective values)\r\n\r\n\r\n\n", "before_files": [{"content": "\"\"\"\n.. 
_multi_objective:\n\nMulti-objective Optimization with Optuna\n========================================\n\nThis tutorial showcases Optuna's multi-objective optimization feature by\noptimizing the validation accuracy of Fashion MNIST dataset and the FLOPS of the model implemented in PyTorch.\n\nWe use `thop <https://github.com/Lyken17/pytorch-OpCounter>`_ to measure FLOPS.\n\"\"\"\n\nimport thop\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torchvision\n\nimport optuna\n\n\nDEVICE = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\nDIR = \"..\"\nBATCHSIZE = 128\nN_TRAIN_EXAMPLES = BATCHSIZE * 30\nN_VALID_EXAMPLES = BATCHSIZE * 10\n\n\ndef define_model(trial):\n n_layers = trial.suggest_int(\"n_layers\", 1, 3)\n layers = []\n\n in_features = 28 * 28\n for i in range(n_layers):\n out_features = trial.suggest_int(\"n_units_l{}\".format(i), 4, 128)\n layers.append(nn.Linear(in_features, out_features))\n layers.append(nn.ReLU())\n p = trial.suggest_float(\"dropout_{}\".format(i), 0.2, 0.5)\n layers.append(nn.Dropout(p))\n\n in_features = out_features\n\n layers.append(nn.Linear(in_features, 10))\n layers.append(nn.LogSoftmax(dim=1))\n\n return nn.Sequential(*layers)\n\n\n# Defines training and evaluation.\ndef train_model(model, optimizer, train_loader):\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n data, target = data.view(-1, 28 * 28).to(DEVICE), target.to(DEVICE)\n optimizer.zero_grad()\n F.nll_loss(model(data), target).backward()\n optimizer.step()\n\n\ndef eval_model(model, valid_loader):\n model.eval()\n correct = 0\n with torch.no_grad():\n for batch_idx, (data, target) in enumerate(valid_loader):\n data, target = data.view(-1, 28 * 28).to(DEVICE), target.to(DEVICE)\n pred = model(data).argmax(dim=1, keepdim=True)\n correct += pred.eq(target.view_as(pred)).sum().item()\n\n accuracy = correct / N_VALID_EXAMPLES\n\n flops, _ = thop.profile(model, inputs=(torch.randn(1, 28 * 28).to(DEVICE),), verbose=False)\n return flops, accuracy\n\n\n###################################################################################################\n# Define multi-objective objective function.\n# Objectives are FLOPS and accuracy.\ndef objective(trial):\n train_dataset = torchvision.datasets.FashionMNIST(\n DIR, train=True, download=True, transform=torchvision.transforms.ToTensor()\n )\n train_loader = torch.utils.data.DataLoader(\n torch.utils.data.Subset(train_dataset, list(range(N_TRAIN_EXAMPLES))),\n batch_size=BATCHSIZE,\n shuffle=True,\n )\n\n val_dataset = torchvision.datasets.FashionMNIST(\n DIR, train=False, transform=torchvision.transforms.ToTensor()\n )\n val_loader = torch.utils.data.DataLoader(\n torch.utils.data.Subset(val_dataset, list(range(N_VALID_EXAMPLES))),\n batch_size=BATCHSIZE,\n shuffle=True,\n )\n model = define_model(trial).to(DEVICE)\n\n optimizer = torch.optim.Adam(\n model.parameters(), trial.suggest_float(\"lr\", 1e-5, 1e-1, log=True)\n )\n\n for epoch in range(10):\n train_model(model, optimizer, train_loader)\n flops, accuracy = eval_model(model, val_loader)\n return flops, accuracy\n\n\n###################################################################################################\n# Run multi-objective optimization\n# --------------------------------\n#\n# If your optimization problem is multi-objective,\n# Optuna assumes that you will specify the optimization direction for each objective.\n# Specifically, in this example, we want to minimize the FLOPS (we want a faster 
model)\n# and maximize the accuracy. So we set ``directions`` to ``[\"minimize\", \"maximize\"]``.\nstudy = optuna.create_study(directions=[\"minimize\", \"maximize\"])\nstudy.optimize(objective, n_trials=30, timeout=300)\n\nprint(\"Number of finished trials: \", len(study.trials))\n\n\n###################################################################################################\n# Check trials on pareto front visually\noptuna.visualization.plot_pareto_front(study, target_names=[\"FLOPS\", \"accuracy\"])\n\n\n###################################################################################################\n# Learn which hyperparameters are affecting the flops most with hyperparameter importance.\noptuna.visualization.plot_param_importances(\n study, target=lambda t: t.values[0], target_name=\"flops\"\n)\n", "path": "tutorial/20_recipes/002_multi_objective.py"}], "after_files": [{"content": "\"\"\"\n.. _multi_objective:\n\nMulti-objective Optimization with Optuna\n========================================\n\nThis tutorial showcases Optuna's multi-objective optimization feature by\noptimizing the validation accuracy of Fashion MNIST dataset and the FLOPS of the model implemented in PyTorch.\n\nWe use `thop <https://github.com/Lyken17/pytorch-OpCounter>`_ to measure FLOPS.\n\"\"\"\n\nimport thop\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torchvision\n\nimport optuna\n\n\nDEVICE = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\nDIR = \"..\"\nBATCHSIZE = 128\nN_TRAIN_EXAMPLES = BATCHSIZE * 30\nN_VALID_EXAMPLES = BATCHSIZE * 10\n\n\ndef define_model(trial):\n n_layers = trial.suggest_int(\"n_layers\", 1, 3)\n layers = []\n\n in_features = 28 * 28\n for i in range(n_layers):\n out_features = trial.suggest_int(\"n_units_l{}\".format(i), 4, 128)\n layers.append(nn.Linear(in_features, out_features))\n layers.append(nn.ReLU())\n p = trial.suggest_float(\"dropout_{}\".format(i), 0.2, 0.5)\n layers.append(nn.Dropout(p))\n\n in_features = out_features\n\n layers.append(nn.Linear(in_features, 10))\n layers.append(nn.LogSoftmax(dim=1))\n\n return nn.Sequential(*layers)\n\n\n# Defines training and evaluation.\ndef train_model(model, optimizer, train_loader):\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n data, target = data.view(-1, 28 * 28).to(DEVICE), target.to(DEVICE)\n optimizer.zero_grad()\n F.nll_loss(model(data), target).backward()\n optimizer.step()\n\n\ndef eval_model(model, valid_loader):\n model.eval()\n correct = 0\n with torch.no_grad():\n for batch_idx, (data, target) in enumerate(valid_loader):\n data, target = data.view(-1, 28 * 28).to(DEVICE), target.to(DEVICE)\n pred = model(data).argmax(dim=1, keepdim=True)\n correct += pred.eq(target.view_as(pred)).sum().item()\n\n accuracy = correct / N_VALID_EXAMPLES\n\n flops, _ = thop.profile(model, inputs=(torch.randn(1, 28 * 28).to(DEVICE),), verbose=False)\n return flops, accuracy\n\n\n###################################################################################################\n# Define multi-objective objective function.\n# Objectives are FLOPS and accuracy.\ndef objective(trial):\n train_dataset = torchvision.datasets.FashionMNIST(\n DIR, train=True, download=True, transform=torchvision.transforms.ToTensor()\n )\n train_loader = torch.utils.data.DataLoader(\n torch.utils.data.Subset(train_dataset, list(range(N_TRAIN_EXAMPLES))),\n batch_size=BATCHSIZE,\n shuffle=True,\n )\n\n val_dataset = torchvision.datasets.FashionMNIST(\n DIR, 
train=False, transform=torchvision.transforms.ToTensor()\n )\n val_loader = torch.utils.data.DataLoader(\n torch.utils.data.Subset(val_dataset, list(range(N_VALID_EXAMPLES))),\n batch_size=BATCHSIZE,\n shuffle=True,\n )\n model = define_model(trial).to(DEVICE)\n\n optimizer = torch.optim.Adam(\n model.parameters(), trial.suggest_float(\"lr\", 1e-5, 1e-1, log=True)\n )\n\n for epoch in range(10):\n train_model(model, optimizer, train_loader)\n flops, accuracy = eval_model(model, val_loader)\n return flops, accuracy\n\n\n###################################################################################################\n# Run multi-objective optimization\n# --------------------------------\n#\n# If your optimization problem is multi-objective,\n# Optuna assumes that you will specify the optimization direction for each objective.\n# Specifically, in this example, we want to minimize the FLOPS (we want a faster model)\n# and maximize the accuracy. So we set ``directions`` to ``[\"minimize\", \"maximize\"]``.\nstudy = optuna.create_study(directions=[\"minimize\", \"maximize\"])\nstudy.optimize(objective, n_trials=30, timeout=300)\n\nprint(\"Number of finished trials: \", len(study.trials))\n\n\n###################################################################################################\n# Check trials on Pareto front visually.\noptuna.visualization.plot_pareto_front(study, target_names=[\"FLOPS\", \"accuracy\"])\n\n\n###################################################################################################\n# Fetch the list of trials on the Pareto front with :attr:`~optuna.study.Study.best_trials`.\n#\n# For example, the following code shows the number of trials on the Pareto front and picks the trial with the highest accuracy.\n\nprint(f\"Number of trials on the Pareto front: {len(study.best_trials)}\")\n\ntrial_with_highest_accuracy = max(study.best_trials, key=lambda t: t.values[1])\nprint(f\"Trial with highest accuracy: \")\nprint(f\"\\tnumber: {trial_with_highest_accuracy.number}\")\nprint(f\"\\tparams: {trial_with_highest_accuracy.params}\")\nprint(f\"\\tvalues: {trial_with_highest_accuracy.values}\")\n\n###################################################################################################\n# Learn which hyperparameters are affecting the flops most with hyperparameter importance.\noptuna.visualization.plot_param_importances(\n study, target=lambda t: t.values[0], target_name=\"flops\"\n)\n", "path": "tutorial/20_recipes/002_multi_objective.py"}]} | 1,924 | 312 |
gh_patches_debug_66 | rasdani/github-patches | git_diff | streamlit__streamlit-2342 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Clicking on a text field in the sidebar (on mobile) causes the sidebar to close.
# Summary
When the window is too narrow, clicking on a text input in the sidebar causes the sidebar to disappear, making it impossible to type in text. [[video example](https://drive.google.com/file/d/1KetCBECPsg3UAiESZCRdm6W9M_zIrNkc/view?usp=sharing)]
# Steps to reproduce
1. Put a text input in the sidebar.
2. Make the Streamlit app window narrow.
3. Click the text input in the sidebar.
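A minimal app that reproduces these steps (a sketch only; the widget label and page content are arbitrary):

```python
# Sketch of a repro app: any text input placed in the sidebar triggers the issue
# once the window is narrow enough for the sidebar to overlay the page.
import streamlit as st

name = st.sidebar.text_input("Type here")
st.write("You typed:", name)
```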
## Expected behavior:
Ideally, the sidebar would stay open and the text input box would have focus so you could type something in.
In fact, this _does_ happen when the window is a bit wider. [[video example](https://drive.google.com/file/d/1fObxQWIjkL_5VBJY_niltG489Ki8tRB8/view?usp=sharing)]
## Actual behavior:
Clicking on a text input in the sidebar causes the sidebar to disappear, making it impossible to type in text. [[video example](https://drive.google.com/file/d/1KetCBECPsg3UAiESZCRdm6W9M_zIrNkc/view?usp=sharing)]
## Is this a regression?
Unknown
# Debug info
- Streamlit version: `Streamlit, version 0.68.0`
- Python version: `Python 3.8.5`
- Using Conda? PipEnv? PyEnv? Pex? `pipenv, version 2020.8.13`
- OS version:
```
Distributor ID: Ubuntu
Description: Ubuntu 20.04.1 LTS
Release: 20.04
Codename: focal
```
- Browser version: `Safari on iPadOS 14`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `e2e/scripts/st_sidebar.py`
Content:
```
1 # Copyright 2018-2020 Streamlit Inc.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import streamlit as st
16 from datetime import datetime
17 from datetime import date
18
19 w1 = st.sidebar.date_input("Label 1", date(1970, 1, 1))
20 st.write("Value 1:", w1)
21
22 w2 = st.sidebar.date_input("Label 2", datetime(2019, 7, 6, 21, 15))
23 st.write("Value 2:", w2)
24
25 x = st.sidebar.text("overwrite me")
26 x.text("overwritten")
27
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/e2e/scripts/st_sidebar.py b/e2e/scripts/st_sidebar.py
--- a/e2e/scripts/st_sidebar.py
+++ b/e2e/scripts/st_sidebar.py
@@ -24,3 +24,5 @@
x = st.sidebar.text("overwrite me")
x.text("overwritten")
+
+y = st.sidebar.text_input("type here")
| {"golden_diff": "diff --git a/e2e/scripts/st_sidebar.py b/e2e/scripts/st_sidebar.py\n--- a/e2e/scripts/st_sidebar.py\n+++ b/e2e/scripts/st_sidebar.py\n@@ -24,3 +24,5 @@\n \n x = st.sidebar.text(\"overwrite me\")\n x.text(\"overwritten\")\n+\n+y = st.sidebar.text_input(\"type here\")\n", "issue": "Clicking on a text field in the sidebar (on mobile) causes the sidebar to close.\n# Summary\r\n\r\nWhen the window is too narrow, clicking on a text input in the sidebar causes the sidebar to disappear, making it impossible to type in text. [[video example](https://drive.google.com/file/d/1KetCBECPsg3UAiESZCRdm6W9M_zIrNkc/view?usp=sharing)]\r\n\r\n# Steps to reproduce\r\n\r\n1. Put a text input in the sidebar.\r\n2. Make the Streamlit app window narrow.\r\n3. Click the text input in the sidebar.\r\n\r\n## Expected behavior:\r\n\r\nIdeally, the sidebar would stay open and the text input box would have focus and you coudl type something in.\r\n\r\nIn fact, this _does_ happen when the window is a bit wider. [[video example](https://drive.google.com/file/d/1fObxQWIjkL_5VBJY_niltG489Ki8tRB8/view?usp=sharing)]\r\n\r\n## Actual behavior:\r\n\r\nClicking on a text input in the sidebar causes the sidebar to disappear, making it impossible to type in text. [[video example](https://drive.google.com/file/d/1KetCBECPsg3UAiESZCRdm6W9M_zIrNkc/view?usp=sharing)]\r\n\r\n## Is this a regression?\r\n\r\nUnkown\r\n\r\n# Debug info\r\n\r\n- Streamlit version: `Streamlit, version 0.68.0`\r\n- Python version: `Python 3.8.5`\r\n- Using Conda? PipEnv? PyEnv? Pex? `pipenv, version 2020.8.13`\r\n- OS version: \r\n```\r\nDistributor ID: Ubuntu\r\nDescription: Ubuntu 20.04.1 LTS\r\nRelease: 20.04\r\nCodename: focal\r\n```\r\n- Browser version: `Safari on iPadOS 14`\n", "before_files": [{"content": "# Copyright 2018-2020 Streamlit Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport streamlit as st\nfrom datetime import datetime\nfrom datetime import date\n\nw1 = st.sidebar.date_input(\"Label 1\", date(1970, 1, 1))\nst.write(\"Value 1:\", w1)\n\nw2 = st.sidebar.date_input(\"Label 2\", datetime(2019, 7, 6, 21, 15))\nst.write(\"Value 2:\", w2)\n\nx = st.sidebar.text(\"overwrite me\")\nx.text(\"overwritten\")\n", "path": "e2e/scripts/st_sidebar.py"}], "after_files": [{"content": "# Copyright 2018-2020 Streamlit Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport streamlit as st\nfrom datetime import datetime\nfrom datetime import date\n\nw1 = st.sidebar.date_input(\"Label 1\", date(1970, 1, 
1))\nst.write(\"Value 1:\", w1)\n\nw2 = st.sidebar.date_input(\"Label 2\", datetime(2019, 7, 6, 21, 15))\nst.write(\"Value 2:\", w2)\n\nx = st.sidebar.text(\"overwrite me\")\nx.text(\"overwritten\")\n\ny = st.sidebar.text_input(\"type here\")\n", "path": "e2e/scripts/st_sidebar.py"}]} | 973 | 79 |
gh_patches_debug_24062 | rasdani/github-patches | git_diff | rucio__rucio-4948 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
SRM to HTTPS optional compatibility
Motivation
----------
https://github.com/rucio/rucio/blob/882b662a24c75d515080606be635959566b17f14/lib/rucio/common/constants.py#L42
This line defines a `scheme_map` describing protocol compatibility in Rucio. We need an additional compatibility between `srm` and `https` (and vice versa) that is only optionally activated, based on a config parameter.
Modification
------------
Check if an option
```
[transfers]
srm_https_compatibility = True
```
is set and add it to the scheme_map.
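One possible shape for this check (a sketch only; it assumes `config_get` from `rucio.common.config` and mirrors the existing `SCHEME_MAP` entries in `lib/rucio/common/constants.py`):

```python
from rucio.common.config import config_get

# Optionally extend the scheme map so srm and https count as compatible protocols.
if config_get('transfers', 'srm_https_compatibility', raise_exception=False, default=False):
    SCHEME_MAP['srm'].append('https')
    SCHEME_MAP['https'].append('srm')
```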
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lib/rucio/common/constants.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # Copyright 2013-2021 CERN
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15 #
16 # Authors:
17 # - Vincent Garonne <[email protected]>, 2013-2015
18 # - Cedric Serfon <[email protected]>, 2015-2021
19 # - Martin Barisits <[email protected]>, 2017
20 # - Mario Lassnig <[email protected]>, 2018-2021
21 # - Hannes Hansen <[email protected]>, 2018
22 # - Sahan Dilshan <[email protected]>, 2021
23 # - David Población Criado <[email protected]>, 2021
24 # - Rakshita Varadarajan <[email protected]>, 2021
25
26 from collections import namedtuple
27 from enum import Enum
28
29 """
30 Constants.
31
32 """
33
34 RESERVED_KEYS = ['scope', 'name', 'account', 'did_type', 'is_open', 'monotonic', 'obsolete', 'complete',
35 'availability', 'suppressed', 'bytes', 'length', 'md5', 'adler32', 'rule_evaluation_action',
36 'rule_evaluation_required', 'expired_at', 'deleted_at', 'created_at', 'updated_at']
37 # collection_keys =
38 # file_keys =
39
40 KEY_TYPES = ['ALL', 'COLLECTION', 'FILE', 'DERIVED']
41 # all(container, dataset, file), collection(dataset or container), file, derived(compute from file for collection)
42
43 SCHEME_MAP = {'srm': ['srm', 'gsiftp'],
44 'gsiftp': ['srm', 'gsiftp'],
45 'https': ['https', 'davs', 's3', 'srm+https'],
46 'davs': ['https', 'davs', 's3', 'srm+https'],
47 'root': ['root'],
48 's3': ['https', 'davs', 's3', 'srm+https'],
49 'srm+https': ['https', 'davs', 's3', 'srm+https'],
50 'scp': ['scp'],
51 'rsync': ['rsync'],
52 'rclone': ['rclone']}
53
54 SUPPORTED_PROTOCOLS = ['gsiftp', 'srm', 'root', 'davs', 'http', 'https', 'file', 's3', 's3+rucio', 's3+https', 'storm', 'srm+https', 'scp', 'rsync', 'rclone']
55
56 FTS_STATE = namedtuple('FTS_STATE', ['SUBMITTED', 'READY', 'ACTIVE', 'FAILED', 'FINISHED', 'FINISHEDDIRTY', 'NOT_USED',
57 'CANCELED'])('SUBMITTED', 'READY', 'ACTIVE', 'FAILED', 'FINISHED', 'FINISHEDDIRTY',
58 'NOT_USED', 'CANCELED')
59
60 FTS_COMPLETE_STATE = namedtuple('FTS_COMPLETE_STATE', ['OK', 'ERROR'])('Ok', 'Error')
61
62
63 class ReplicaState(Enum):
64 # From rucio.db.sqla.constants, update that file at the same time than this
65 AVAILABLE = 'A'
66 UNAVAILABLE = 'U'
67 COPYING = 'C'
68 BEING_DELETED = 'B'
69 BAD = 'D'
70 TEMPORARY_UNAVAILABLE = 'T'
71
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/lib/rucio/common/constants.py b/lib/rucio/common/constants.py
--- a/lib/rucio/common/constants.py
+++ b/lib/rucio/common/constants.py
@@ -21,11 +21,15 @@
# - Hannes Hansen <[email protected]>, 2018
# - Sahan Dilshan <[email protected]>, 2021
# - David Población Criado <[email protected]>, 2021
+# - Radu Carpa <[email protected]>, 2021
# - Rakshita Varadarajan <[email protected]>, 2021
+# - Joel Dierkes <[email protected]>, 2021
from collections import namedtuple
from enum import Enum
+from rucio.common.config import config_get
+
"""
Constants.
@@ -50,6 +54,9 @@
'scp': ['scp'],
'rsync': ['rsync'],
'rclone': ['rclone']}
+if config_get('transfers', 'srm_https_compatibility', raise_exception=False, default=False):
+ SCHEME_MAP['srm'].append('https')
+ SCHEME_MAP['https'].append('srm')
SUPPORTED_PROTOCOLS = ['gsiftp', 'srm', 'root', 'davs', 'http', 'https', 'file', 's3', 's3+rucio', 's3+https', 'storm', 'srm+https', 'scp', 'rsync', 'rclone']
| {"golden_diff": "diff --git a/lib/rucio/common/constants.py b/lib/rucio/common/constants.py\n--- a/lib/rucio/common/constants.py\n+++ b/lib/rucio/common/constants.py\n@@ -21,11 +21,15 @@\n # - Hannes Hansen <[email protected]>, 2018\n # - Sahan Dilshan <[email protected]>, 2021\n # - David Poblaci\u00f3n Criado <[email protected]>, 2021\n+# - Radu Carpa <[email protected]>, 2021\n # - Rakshita Varadarajan <[email protected]>, 2021\n+# - Joel Dierkes <[email protected]>, 2021\n \n from collections import namedtuple\n from enum import Enum\n \n+from rucio.common.config import config_get\n+\n \"\"\"\n Constants.\n \n@@ -50,6 +54,9 @@\n 'scp': ['scp'],\n 'rsync': ['rsync'],\n 'rclone': ['rclone']}\n+if config_get('transfers', 'srm_https_compatibility', raise_exception=False, default=False):\n+ SCHEME_MAP['srm'].append('https')\n+ SCHEME_MAP['https'].append('srm')\n \n SUPPORTED_PROTOCOLS = ['gsiftp', 'srm', 'root', 'davs', 'http', 'https', 'file', 's3', 's3+rucio', 's3+https', 'storm', 'srm+https', 'scp', 'rsync', 'rclone']\n", "issue": "SRM to HTTPS optional compatibility\nMotivation\r\n----------\r\nhttps://github.com/rucio/rucio/blob/882b662a24c75d515080606be635959566b17f14/lib/rucio/common/constants.py#L42\r\nDefines a scheme_map between protocol compatibility in Rucio. We need an additional compatibility between `srm` and `https` (and vice-versa) which is only optionally (based on a config parameter) activated.\r\n\r\nModification\r\n------------\r\nCheck if an option\r\n```\r\n[transfers]\r\nsrm_https_compatibility = True\r\n```\r\nis set and add it to the scheme_map.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright 2013-2021 CERN\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# Authors:\n# - Vincent Garonne <[email protected]>, 2013-2015\n# - Cedric Serfon <[email protected]>, 2015-2021\n# - Martin Barisits <[email protected]>, 2017\n# - Mario Lassnig <[email protected]>, 2018-2021\n# - Hannes Hansen <[email protected]>, 2018\n# - Sahan Dilshan <[email protected]>, 2021\n# - David Poblaci\u00f3n Criado <[email protected]>, 2021\n# - Rakshita Varadarajan <[email protected]>, 2021\n\nfrom collections import namedtuple\nfrom enum import Enum\n\n\"\"\"\nConstants.\n\n\"\"\"\n\nRESERVED_KEYS = ['scope', 'name', 'account', 'did_type', 'is_open', 'monotonic', 'obsolete', 'complete',\n 'availability', 'suppressed', 'bytes', 'length', 'md5', 'adler32', 'rule_evaluation_action',\n 'rule_evaluation_required', 'expired_at', 'deleted_at', 'created_at', 'updated_at']\n# collection_keys =\n# file_keys =\n\nKEY_TYPES = ['ALL', 'COLLECTION', 'FILE', 'DERIVED']\n# all(container, dataset, file), collection(dataset or container), file, derived(compute from file for collection)\n\nSCHEME_MAP = {'srm': ['srm', 'gsiftp'],\n 'gsiftp': ['srm', 'gsiftp'],\n 'https': ['https', 'davs', 's3', 'srm+https'],\n 'davs': ['https', 'davs', 's3', 'srm+https'],\n 'root': ['root'],\n 's3': ['https', 'davs', 's3', 'srm+https'],\n 'srm+https': ['https', 'davs', 's3', 'srm+https'],\n 'scp': 
['scp'],\n 'rsync': ['rsync'],\n 'rclone': ['rclone']}\n\nSUPPORTED_PROTOCOLS = ['gsiftp', 'srm', 'root', 'davs', 'http', 'https', 'file', 's3', 's3+rucio', 's3+https', 'storm', 'srm+https', 'scp', 'rsync', 'rclone']\n\nFTS_STATE = namedtuple('FTS_STATE', ['SUBMITTED', 'READY', 'ACTIVE', 'FAILED', 'FINISHED', 'FINISHEDDIRTY', 'NOT_USED',\n 'CANCELED'])('SUBMITTED', 'READY', 'ACTIVE', 'FAILED', 'FINISHED', 'FINISHEDDIRTY',\n 'NOT_USED', 'CANCELED')\n\nFTS_COMPLETE_STATE = namedtuple('FTS_COMPLETE_STATE', ['OK', 'ERROR'])('Ok', 'Error')\n\n\nclass ReplicaState(Enum):\n # From rucio.db.sqla.constants, update that file at the same time than this\n AVAILABLE = 'A'\n UNAVAILABLE = 'U'\n COPYING = 'C'\n BEING_DELETED = 'B'\n BAD = 'D'\n TEMPORARY_UNAVAILABLE = 'T'\n", "path": "lib/rucio/common/constants.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright 2013-2021 CERN\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# Authors:\n# - Vincent Garonne <[email protected]>, 2013-2015\n# - Cedric Serfon <[email protected]>, 2015-2021\n# - Martin Barisits <[email protected]>, 2017\n# - Mario Lassnig <[email protected]>, 2018-2021\n# - Hannes Hansen <[email protected]>, 2018\n# - Sahan Dilshan <[email protected]>, 2021\n# - David Poblaci\u00f3n Criado <[email protected]>, 2021\n# - Radu Carpa <[email protected]>, 2021\n# - Rakshita Varadarajan <[email protected]>, 2021\n# - Joel Dierkes <[email protected]>, 2021\n\nfrom collections import namedtuple\nfrom enum import Enum\n\nfrom rucio.common.config import config_get\n\n\"\"\"\nConstants.\n\n\"\"\"\n\nRESERVED_KEYS = ['scope', 'name', 'account', 'did_type', 'is_open', 'monotonic', 'obsolete', 'complete',\n 'availability', 'suppressed', 'bytes', 'length', 'md5', 'adler32', 'rule_evaluation_action',\n 'rule_evaluation_required', 'expired_at', 'deleted_at', 'created_at', 'updated_at']\n# collection_keys =\n# file_keys =\n\nKEY_TYPES = ['ALL', 'COLLECTION', 'FILE', 'DERIVED']\n# all(container, dataset, file), collection(dataset or container), file, derived(compute from file for collection)\n\nSCHEME_MAP = {'srm': ['srm', 'gsiftp'],\n 'gsiftp': ['srm', 'gsiftp'],\n 'https': ['https', 'davs', 's3', 'srm+https'],\n 'davs': ['https', 'davs', 's3', 'srm+https'],\n 'root': ['root'],\n 's3': ['https', 'davs', 's3', 'srm+https'],\n 'srm+https': ['https', 'davs', 's3', 'srm+https'],\n 'scp': ['scp'],\n 'rsync': ['rsync'],\n 'rclone': ['rclone']}\nif config_get('transfers', 'srm_https_compatibility', raise_exception=False, default=False):\n SCHEME_MAP['srm'].append('https')\n SCHEME_MAP['https'].append('srm')\n\nSUPPORTED_PROTOCOLS = ['gsiftp', 'srm', 'root', 'davs', 'http', 'https', 'file', 's3', 's3+rucio', 's3+https', 'storm', 'srm+https', 'scp', 'rsync', 'rclone']\n\nFTS_STATE = namedtuple('FTS_STATE', ['SUBMITTED', 'READY', 'ACTIVE', 'FAILED', 'FINISHED', 'FINISHEDDIRTY', 'NOT_USED',\n 'CANCELED'])('SUBMITTED', 'READY', 'ACTIVE', 'FAILED', 'FINISHED', 'FINISHEDDIRTY',\n 'NOT_USED', 'CANCELED')\n\nFTS_COMPLETE_STATE = 
namedtuple('FTS_COMPLETE_STATE', ['OK', 'ERROR'])('Ok', 'Error')\n\n\nclass ReplicaState(Enum):\n # From rucio.db.sqla.constants, update that file at the same time than this\n AVAILABLE = 'A'\n UNAVAILABLE = 'U'\n COPYING = 'C'\n BEING_DELETED = 'B'\n BAD = 'D'\n TEMPORARY_UNAVAILABLE = 'T'\n", "path": "lib/rucio/common/constants.py"}]} | 1,447 | 383 |
gh_patches_debug_38615 | rasdani/github-patches | git_diff | OCHA-DAP__hdx-ckan-1670 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Change feedback button to link to http://docs.hdx.rwlabs.org/get-involved/
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ckanext-hdx_theme/ckanext/hdx_theme/version.py`
Content:
```
1 hdx_version = 'v0.4.6'
2
```
Path: `ckanext-sitemap/setup.py`
Content:
```
1 from setuptools import setup, find_packages
2 import sys
3 import os
4
5 version = '0.1'
6
7 setup(
8 name='ckanext-sitemap',
9 version=version,
10 description="Sitemap extension for CKAN",
11 long_description="""\
12 """,
13 classifiers=[
14 ], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
15 keywords='',
16 author='Aleksi Suomalainen',
17 author_email='[email protected]',
18 url='https://github.com/locusf/ckanext-sitemap',
19 license='AGPL',
20 packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
21 namespace_packages=['ckanext', 'ckanext.sitemap'],
22 include_package_data=True,
23 zip_safe=False,
24 install_requires=[
25 # -*- Extra requirements: -*-
26 ],
27 setup_requires=[
28 'nose'
29 ],
30 entry_points="""
31 [ckan.plugins]
32 # Add plugins here, eg
33 sitemap=ckanext.sitemap.plugin:SitemapPlugin
34 """,
35 )
36
```
Path: `ckanext-sitemap/ckanext/sitemap/controller.py`
Content:
```
1 '''
2 Controller for sitemap
3 '''
4 import logging
5
6 from ckan.lib.base import BaseController
7 from ckan.model import Session, Package
8 from ckan.lib.helpers import url_for
9 from lxml import etree
10 from pylons import config, response
11 from pylons.decorators.cache import beaker_cache
12
13 SITEMAP_NS = "http://www.sitemaps.org/schemas/sitemap/0.9"
14
15 log = logging.getLogger(__file__)
16
17 class SitemapController(BaseController):
18
19 @beaker_cache(expire=3600*24, type="dbm", invalidate_on_startup=True)
20 def _render_sitemap(self, page):
21 root = etree.Element("urlset", nsmap={None: SITEMAP_NS})
22 #pkgs = Session.query(Package).all()
23 pkgs = Session.query(Package).filter(Package.private == 0).offset(int(page)*25).limit(25)
24 for pkg in pkgs:
25 url = etree.SubElement(root, 'url')
26 loc = etree.SubElement(url, 'loc')
27 pkg_url = url_for(controller='package', action="read", id = pkg.name)
28 loc.text = config.get('ckan.site_url') + pkg_url
29 lastmod = etree.SubElement(url, 'lastmod')
30 lastmod.text = pkg.latest_related_revision.timestamp.strftime('%Y-%m-%d')
31 for res in pkg.resources:
32 url = etree.SubElement(root, 'url')
33 loc = etree.SubElement(url, 'loc')
34 loc.text = config.get('ckan.site_url') + url_for(controller="package", action="resource_read", id = pkg.name, resource_id = res.id)
35 lastmod = etree.SubElement(url, 'lastmod')
36 lastmod.text = res.created.strftime('%Y-%m-%d')
37 response.headers['Content-type'] = 'text/xml'
38 return etree.tostring(root, pretty_print=True)
39
40 def view(self):
41 #Sitemap Index
42 root = etree.Element("sitemapindex", nsmap={None: SITEMAP_NS})
43 pkgs = Session.query(Package).count()
44 count = pkgs/25
45 for i in range(1,count):
46 sitemap = etree.SubElement(root, 'sitemap')
47 loc = etree.SubElement(sitemap, 'loc')
48 loc.text = config.get('ckan.site_url') + url_for(controller="ckanext.sitemap.controller:SitemapController", action="index", page=i)
49 response.headers['Content-type'] = 'text/xml'
50 return etree.tostring(root, pretty_print=True)
51 #.limit() and .offset()
52 #return self._render_sitemap()
53
54 def index(self, page):
55 return self._render_sitemap(page)
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/ckanext-hdx_theme/ckanext/hdx_theme/version.py b/ckanext-hdx_theme/ckanext/hdx_theme/version.py
--- a/ckanext-hdx_theme/ckanext/hdx_theme/version.py
+++ b/ckanext-hdx_theme/ckanext/hdx_theme/version.py
@@ -1 +1 @@
-hdx_version = 'v0.4.5'
+hdx_version = 'v0.4.6'
diff --git a/ckanext-sitemap/ckanext/sitemap/controller.py b/ckanext-sitemap/ckanext/sitemap/controller.py
--- a/ckanext-sitemap/ckanext/sitemap/controller.py
+++ b/ckanext-sitemap/ckanext/sitemap/controller.py
@@ -20,7 +20,7 @@
def _render_sitemap(self, page):
root = etree.Element("urlset", nsmap={None: SITEMAP_NS})
#pkgs = Session.query(Package).all()
- pkgs = Session.query(Package).filter(Package.private == 0).offset(int(page)*25).limit(25)
+ pkgs = Session.query(Package).filter(Package.private == False).offset(int(page)*25).limit(25)
for pkg in pkgs:
url = etree.SubElement(root, 'url')
loc = etree.SubElement(url, 'loc')
@@ -40,7 +40,7 @@
def view(self):
#Sitemap Index
root = etree.Element("sitemapindex", nsmap={None: SITEMAP_NS})
- pkgs = Session.query(Package).count()
+ pkgs = Session.query(Package).filter(Package.private == False).count()
count = pkgs/25
for i in range(1,count):
sitemap = etree.SubElement(root, 'sitemap')
diff --git a/ckanext-sitemap/setup.py b/ckanext-sitemap/setup.py
--- a/ckanext-sitemap/setup.py
+++ b/ckanext-sitemap/setup.py
@@ -1,32 +1,33 @@
from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
version = '0.1'
setup(
- name='ckanext-sitemap',
- version=version,
- description="Sitemap extension for CKAN",
- long_description="""\
+ name='ckanext-sitemap',
+ version=version,
+ description="Sitemap extension for CKAN",
+ long_description="""\
""",
- classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
- keywords='',
- author='Aleksi Suomalainen',
- author_email='[email protected]',
- url='https://github.com/locusf/ckanext-sitemap',
- license='AGPL',
- packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
- namespace_packages=['ckanext', 'ckanext.sitemap'],
- include_package_data=True,
- zip_safe=False,
- install_requires=[
- # -*- Extra requirements: -*-
- ],
- setup_requires=[
- 'nose'
- ],
- entry_points=\
- """
+ classifiers=[
+ ], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='',
+ author='Aleksi Suomalainen',
+ author_email='[email protected]',
+ url='https://github.com/locusf/ckanext-sitemap',
+ license='AGPL',
+ packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
+ namespace_packages=['ckanext', 'ckanext.sitemap'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=[
+ # -*- Extra requirements: -*-
+ ],
+ setup_requires=[
+ 'nose'
+ ],
+ entry_points="""
[ckan.plugins]
# Add plugins here, eg
sitemap=ckanext.sitemap.plugin:SitemapPlugin
| {"golden_diff": "diff --git a/ckanext-hdx_theme/ckanext/hdx_theme/version.py b/ckanext-hdx_theme/ckanext/hdx_theme/version.py\n--- a/ckanext-hdx_theme/ckanext/hdx_theme/version.py\n+++ b/ckanext-hdx_theme/ckanext/hdx_theme/version.py\n@@ -1 +1 @@\n-hdx_version = 'v0.4.5'\n+hdx_version = 'v0.4.6'\ndiff --git a/ckanext-sitemap/ckanext/sitemap/controller.py b/ckanext-sitemap/ckanext/sitemap/controller.py\n--- a/ckanext-sitemap/ckanext/sitemap/controller.py\n+++ b/ckanext-sitemap/ckanext/sitemap/controller.py\n@@ -20,7 +20,7 @@\n def _render_sitemap(self, page):\n root = etree.Element(\"urlset\", nsmap={None: SITEMAP_NS})\n #pkgs = Session.query(Package).all()\n- pkgs = Session.query(Package).filter(Package.private == 0).offset(int(page)*25).limit(25)\n+ pkgs = Session.query(Package).filter(Package.private == False).offset(int(page)*25).limit(25)\n for pkg in pkgs:\n url = etree.SubElement(root, 'url')\n loc = etree.SubElement(url, 'loc')\n@@ -40,7 +40,7 @@\n def view(self):\n #Sitemap Index\n root = etree.Element(\"sitemapindex\", nsmap={None: SITEMAP_NS})\n- pkgs = Session.query(Package).count()\n+ pkgs = Session.query(Package).filter(Package.private == False).count()\n count = pkgs/25\n for i in range(1,count):\n sitemap = etree.SubElement(root, 'sitemap')\ndiff --git a/ckanext-sitemap/setup.py b/ckanext-sitemap/setup.py\n--- a/ckanext-sitemap/setup.py\n+++ b/ckanext-sitemap/setup.py\n@@ -1,32 +1,33 @@\n from setuptools import setup, find_packages\n-import sys, os\n+import sys\n+import os\n \n version = '0.1'\n \n setup(\n-\tname='ckanext-sitemap',\n-\tversion=version,\n-\tdescription=\"Sitemap extension for CKAN\",\n-\tlong_description=\"\"\"\\\n+ name='ckanext-sitemap',\n+ version=version,\n+ description=\"Sitemap extension for CKAN\",\n+ long_description=\"\"\"\\\n \t\"\"\",\n-\tclassifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers\n-\tkeywords='',\n-\tauthor='Aleksi Suomalainen',\n-\tauthor_email='[email protected]',\n-\turl='https://github.com/locusf/ckanext-sitemap',\n-\tlicense='AGPL',\n-\tpackages=find_packages(exclude=['ez_setup', 'examples', 'tests']),\n-\tnamespace_packages=['ckanext', 'ckanext.sitemap'],\n-\tinclude_package_data=True,\n-\tzip_safe=False,\n-\tinstall_requires=[\n-\t\t# -*- Extra requirements: -*-\n-\t],\n-\tsetup_requires=[\n-\t\t'nose'\n-\t],\n-\tentry_points=\\\n-\t\"\"\"\n+ classifiers=[\n+ ], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers\n+ keywords='',\n+ author='Aleksi Suomalainen',\n+ author_email='[email protected]',\n+ url='https://github.com/locusf/ckanext-sitemap',\n+ license='AGPL',\n+ packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),\n+ namespace_packages=['ckanext', 'ckanext.sitemap'],\n+ include_package_data=True,\n+ zip_safe=False,\n+ install_requires=[\n+ # -*- Extra requirements: -*-\n+ ],\n+ setup_requires=[\n+ 'nose'\n+ ],\n+ entry_points=\"\"\"\n [ckan.plugins]\n \t# Add plugins here, eg\n \tsitemap=ckanext.sitemap.plugin:SitemapPlugin\n", "issue": "Change feedback button to link to http://docs.hdx.rwlabs.org/get-involved/\n\n", "before_files": [{"content": "hdx_version = 'v0.4.6'\n", "path": "ckanext-hdx_theme/ckanext/hdx_theme/version.py"}, {"content": "from setuptools import setup, find_packages\nimport sys\nimport os\n\nversion = '0.1'\n\nsetup(\n name='ckanext-sitemap',\n version=version,\n description=\"Sitemap extension for CKAN\",\n long_description=\"\"\"\\\n\t\"\"\",\n classifiers=[\n ], # Get strings from 
http://pypi.python.org/pypi?%3Aaction=list_classifiers\n keywords='',\n author='Aleksi Suomalainen',\n author_email='[email protected]',\n url='https://github.com/locusf/ckanext-sitemap',\n license='AGPL',\n packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),\n namespace_packages=['ckanext', 'ckanext.sitemap'],\n include_package_data=True,\n zip_safe=False,\n install_requires=[\n # -*- Extra requirements: -*-\n ],\n setup_requires=[\n 'nose'\n ],\n entry_points=\"\"\"\n [ckan.plugins]\n\t# Add plugins here, eg\n\tsitemap=ckanext.sitemap.plugin:SitemapPlugin\n\t\"\"\",\n)\n", "path": "ckanext-sitemap/setup.py"}, {"content": "'''\nController for sitemap\n'''\nimport logging\n\nfrom ckan.lib.base import BaseController\nfrom ckan.model import Session, Package\nfrom ckan.lib.helpers import url_for\nfrom lxml import etree\nfrom pylons import config, response\nfrom pylons.decorators.cache import beaker_cache\n\nSITEMAP_NS = \"http://www.sitemaps.org/schemas/sitemap/0.9\"\n\nlog = logging.getLogger(__file__)\n\nclass SitemapController(BaseController):\n\n @beaker_cache(expire=3600*24, type=\"dbm\", invalidate_on_startup=True)\n def _render_sitemap(self, page):\n root = etree.Element(\"urlset\", nsmap={None: SITEMAP_NS})\n #pkgs = Session.query(Package).all()\n pkgs = Session.query(Package).filter(Package.private == 0).offset(int(page)*25).limit(25)\n for pkg in pkgs:\n url = etree.SubElement(root, 'url')\n loc = etree.SubElement(url, 'loc')\n pkg_url = url_for(controller='package', action=\"read\", id = pkg.name)\n loc.text = config.get('ckan.site_url') + pkg_url\n lastmod = etree.SubElement(url, 'lastmod')\n lastmod.text = pkg.latest_related_revision.timestamp.strftime('%Y-%m-%d')\n for res in pkg.resources:\n url = etree.SubElement(root, 'url')\n loc = etree.SubElement(url, 'loc')\n loc.text = config.get('ckan.site_url') + url_for(controller=\"package\", action=\"resource_read\", id = pkg.name, resource_id = res.id)\n lastmod = etree.SubElement(url, 'lastmod')\n lastmod.text = res.created.strftime('%Y-%m-%d')\n response.headers['Content-type'] = 'text/xml'\n return etree.tostring(root, pretty_print=True)\n\n def view(self):\n #Sitemap Index\n root = etree.Element(\"sitemapindex\", nsmap={None: SITEMAP_NS})\n pkgs = Session.query(Package).count()\n count = pkgs/25\n for i in range(1,count):\n sitemap = etree.SubElement(root, 'sitemap')\n loc = etree.SubElement(sitemap, 'loc')\n loc.text = config.get('ckan.site_url') + url_for(controller=\"ckanext.sitemap.controller:SitemapController\", action=\"index\", page=i)\n response.headers['Content-type'] = 'text/xml'\n return etree.tostring(root, pretty_print=True)\n #.limit() and .offset()\n #return self._render_sitemap()\n\n def index(self, page):\n return self._render_sitemap(page)", "path": "ckanext-sitemap/ckanext/sitemap/controller.py"}], "after_files": [{"content": "hdx_version = 'v0.4.6'\n", "path": "ckanext-hdx_theme/ckanext/hdx_theme/version.py"}, {"content": "from setuptools import setup, find_packages\nimport sys\nimport os\n\nversion = '0.1'\n\nsetup(\n name='ckanext-sitemap',\n version=version,\n description=\"Sitemap extension for CKAN\",\n long_description=\"\"\"\\\n\t\"\"\",\n classifiers=[\n ], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers\n keywords='',\n author='Aleksi Suomalainen',\n author_email='[email protected]',\n url='https://github.com/locusf/ckanext-sitemap',\n license='AGPL',\n packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),\n namespace_packages=['ckanext', 
'ckanext.sitemap'],\n include_package_data=True,\n zip_safe=False,\n install_requires=[\n # -*- Extra requirements: -*-\n ],\n setup_requires=[\n 'nose'\n ],\n entry_points=\"\"\"\n [ckan.plugins]\n\t# Add plugins here, eg\n\tsitemap=ckanext.sitemap.plugin:SitemapPlugin\n\t\"\"\",\n)\n", "path": "ckanext-sitemap/setup.py"}, {"content": "'''\nController for sitemap\n'''\nimport logging\n\nfrom ckan.lib.base import BaseController\nfrom ckan.model import Session, Package\nfrom ckan.lib.helpers import url_for\nfrom lxml import etree\nfrom pylons import config, response\nfrom pylons.decorators.cache import beaker_cache\n\nSITEMAP_NS = \"http://www.sitemaps.org/schemas/sitemap/0.9\"\n\nlog = logging.getLogger(__file__)\n\nclass SitemapController(BaseController):\n\n @beaker_cache(expire=3600*24, type=\"dbm\", invalidate_on_startup=True)\n def _render_sitemap(self, page):\n root = etree.Element(\"urlset\", nsmap={None: SITEMAP_NS})\n #pkgs = Session.query(Package).all()\n pkgs = Session.query(Package).filter(Package.private == False).offset(int(page)*25).limit(25)\n for pkg in pkgs:\n url = etree.SubElement(root, 'url')\n loc = etree.SubElement(url, 'loc')\n pkg_url = url_for(controller='package', action=\"read\", id = pkg.name)\n loc.text = config.get('ckan.site_url') + pkg_url\n lastmod = etree.SubElement(url, 'lastmod')\n lastmod.text = pkg.latest_related_revision.timestamp.strftime('%Y-%m-%d')\n for res in pkg.resources:\n url = etree.SubElement(root, 'url')\n loc = etree.SubElement(url, 'loc')\n loc.text = config.get('ckan.site_url') + url_for(controller=\"package\", action=\"resource_read\", id = pkg.name, resource_id = res.id)\n lastmod = etree.SubElement(url, 'lastmod')\n lastmod.text = res.created.strftime('%Y-%m-%d')\n response.headers['Content-type'] = 'text/xml'\n return etree.tostring(root, pretty_print=True)\n\n def view(self):\n #Sitemap Index\n root = etree.Element(\"sitemapindex\", nsmap={None: SITEMAP_NS})\n pkgs = Session.query(Package).filter(Package.private == False).count()\n count = pkgs/25\n for i in range(1,count):\n sitemap = etree.SubElement(root, 'sitemap')\n loc = etree.SubElement(sitemap, 'loc')\n loc.text = config.get('ckan.site_url') + url_for(controller=\"ckanext.sitemap.controller:SitemapController\", action=\"index\", page=i)\n response.headers['Content-type'] = 'text/xml'\n return etree.tostring(root, pretty_print=True)\n #.limit() and .offset()\n #return self._render_sitemap()\n\n def index(self, page):\n return self._render_sitemap(page)", "path": "ckanext-sitemap/ckanext/sitemap/controller.py"}]} | 1,321 | 927 |
gh_patches_debug_40278 | rasdani/github-patches | git_diff | networkx__networkx-3362 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Closeness centrality assumes symmetric distance when "distance" parameter is used
At the beginning of `nx.closeness_centrality` code there is this logic:
```python
if distance is not None:
# use Dijkstra's algorithm with specified attribute as edge weight
path_length = functools.partial(nx.single_source_dijkstra_path_length,
weight=distance)
else: # handle either directed or undirected
if G.is_directed() and not reverse:
path_length = nx.single_target_shortest_path_length
else:
path_length = nx.single_source_shortest_path_length
```
This means that if `distance` parameter is passed then the direction of the edges is ignored, and the `reverse` parameter has no effect.
Is this a design decision? I have directed networks with non-symmetric distances and am interested in both inbound and outbound centralities. It looks to me this would be a simple change (checking `reverse` and using `nx.single_target_dijkstra_path_length` if needed). Am willing to make a PR if this is deemed an appropriate change.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `networkx/algorithms/centrality/closeness.py`
Content:
```
1 # Copyright (C) 2004-2018 by
2 # Aric Hagberg <[email protected]>
3 # Dan Schult <[email protected]>
4 # Pieter Swart <[email protected]>
5 # All rights reserved.
6 # BSD license.
7 #
8 # Authors: Aric Hagberg <[email protected]>
9 # Pieter Swart <[email protected]>
10 # Sasha Gutfraind <[email protected]>
11 # Dan Schult <[email protected]>
12 """
13 Closeness centrality measures.
14 """
15 import functools
16 import networkx as nx
17
18 __all__ = ['closeness_centrality']
19
20
21 def closeness_centrality(G, u=None, distance=None,
22 wf_improved=True, reverse=False):
23 r"""Compute closeness centrality for nodes.
24
25 Closeness centrality [1]_ of a node `u` is the reciprocal of the
26 average shortest path distance to `u` over all `n-1` reachable nodes.
27
28 .. math::
29
30 C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},
31
32 where `d(v, u)` is the shortest-path distance between `v` and `u`,
33 and `n` is the number of nodes that can reach `u`.
34
35 Notice that higher values of closeness indicate higher centrality.
36
37 Wasserman and Faust propose an improved formula for graphs with
38 more than one connected component. The result is "a ratio of the
39 fraction of actors in the group who are reachable, to the average
40 distance" from the reachable actors [2]_. You might think this
41 scale factor is inverted but it is not. As is, nodes from small
42 components receive a smaller closeness value. Letting `N` denote
43 the number of nodes in the graph,
44
45 .. math::
46
47 C_{WF}(u) = \frac{n-1}{N-1} \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},
48
49 Parameters
50 ----------
51 G : graph
52 A NetworkX graph
53
54 u : node, optional
55 Return only the value for node u
56
57 distance : edge attribute key, optional (default=None)
58 Use the specified edge attribute as the edge distance in shortest
59 path calculations
60
61 wf_improved : bool, optional (default=True)
62 If True, scale by the fraction of nodes reachable. This gives the
63 Wasserman and Faust improved formula. For single component graphs
64 it is the same as the original formula.
65
66 reverse : bool, optional (default=False)
67 If True and G is a digraph, reverse the edges of G, using successors
68 instead of predecessors.
69
70 Returns
71 -------
72 nodes : dictionary
73 Dictionary of nodes with closeness centrality as the value.
74
75 See Also
76 --------
77 betweenness_centrality, load_centrality, eigenvector_centrality,
78 degree_centrality
79
80 Notes
81 -----
82 The closeness centrality is normalized to `(n-1)/(|G|-1)` where
83 `n` is the number of nodes in the connected part of graph
84 containing the node. If the graph is not completely connected,
85 this algorithm computes the closeness centrality for each
86 connected part separately scaled by that parts size.
87
88 If the 'distance' keyword is set to an edge attribute key then the
89 shortest-path length will be computed using Dijkstra's algorithm with
90 that edge attribute as the edge weight.
91
92 References
93 ----------
94 .. [1] Linton C. Freeman: Centrality in networks: I.
95 Conceptual clarification. Social Networks 1:215-239, 1979.
96 http://leonidzhukov.ru/hse/2013/socialnetworks/papers/freeman79-centrality.pdf
97 .. [2] pg. 201 of Wasserman, S. and Faust, K.,
98 Social Network Analysis: Methods and Applications, 1994,
99 Cambridge University Press.
100 """
101 if distance is not None:
102 # use Dijkstra's algorithm with specified attribute as edge weight
103 path_length = functools.partial(nx.single_source_dijkstra_path_length,
104 weight=distance)
105 else: # handle either directed or undirected
106 if G.is_directed() and not reverse:
107 path_length = nx.single_target_shortest_path_length
108 else:
109 path_length = nx.single_source_shortest_path_length
110
111 if u is None:
112 nodes = G.nodes()
113 else:
114 nodes = [u]
115 closeness_centrality = {}
116 for n in nodes:
117 sp = dict(path_length(G, n))
118 totsp = sum(sp.values())
119 if totsp > 0.0 and len(G) > 1:
120 closeness_centrality[n] = (len(sp) - 1.0) / totsp
121 # normalize to number of nodes-1 in connected part
122 if wf_improved:
123 s = (len(sp) - 1.0) / (len(G) - 1)
124 closeness_centrality[n] *= s
125 else:
126 closeness_centrality[n] = 0.0
127 if u is not None:
128 return closeness_centrality[u]
129 else:
130 return closeness_centrality
131
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/networkx/algorithms/centrality/closeness.py b/networkx/algorithms/centrality/closeness.py
--- a/networkx/algorithms/centrality/closeness.py
+++ b/networkx/algorithms/centrality/closeness.py
@@ -18,8 +18,7 @@
__all__ = ['closeness_centrality']
-def closeness_centrality(G, u=None, distance=None,
- wf_improved=True, reverse=False):
+def closeness_centrality(G, u=None, distance=None, wf_improved=True):
r"""Compute closeness centrality for nodes.
Closeness centrality [1]_ of a node `u` is the reciprocal of the
@@ -30,7 +29,9 @@
C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},
where `d(v, u)` is the shortest-path distance between `v` and `u`,
- and `n` is the number of nodes that can reach `u`.
+ and `n` is the number of nodes that can reach `u`. Notice that the
+ closeness distance function computes the incoming distance to `u`
+ for directed graphs. To use outward distance, act on `G.reverse()`.
Notice that higher values of closeness indicate higher centrality.
@@ -63,10 +64,6 @@
Wasserman and Faust improved formula. For single component graphs
it is the same as the original formula.
- reverse : bool, optional (default=False)
- If True and G is a digraph, reverse the edges of G, using successors
- instead of predecessors.
-
Returns
-------
nodes : dictionary
@@ -89,6 +86,10 @@
shortest-path length will be computed using Dijkstra's algorithm with
that edge attribute as the edge weight.
+ In NetworkX 2.2 and earlier a bug caused Dijkstra's algorithm to use the
+ outward distance rather than the inward distance. If you use a 'distance'
+ keyword and a DiGraph, your results will change between v2.2 and v2.3.
+
References
----------
.. [1] Linton C. Freeman: Centrality in networks: I.
@@ -98,18 +99,18 @@
Social Network Analysis: Methods and Applications, 1994,
Cambridge University Press.
"""
+ if G.is_directed():
+ G = G.reverse() # create a reversed graph view
+
if distance is not None:
# use Dijkstra's algorithm with specified attribute as edge weight
path_length = functools.partial(nx.single_source_dijkstra_path_length,
weight=distance)
- else: # handle either directed or undirected
- if G.is_directed() and not reverse:
- path_length = nx.single_target_shortest_path_length
- else:
- path_length = nx.single_source_shortest_path_length
+ else:
+ path_length = nx.single_source_shortest_path_length
if u is None:
- nodes = G.nodes()
+ nodes = G.nodes
else:
nodes = [u]
closeness_centrality = {}
| {"golden_diff": "diff --git a/networkx/algorithms/centrality/closeness.py b/networkx/algorithms/centrality/closeness.py\n--- a/networkx/algorithms/centrality/closeness.py\n+++ b/networkx/algorithms/centrality/closeness.py\n@@ -18,8 +18,7 @@\n __all__ = ['closeness_centrality']\n \n \n-def closeness_centrality(G, u=None, distance=None,\n- wf_improved=True, reverse=False):\n+def closeness_centrality(G, u=None, distance=None, wf_improved=True):\n r\"\"\"Compute closeness centrality for nodes.\n \n Closeness centrality [1]_ of a node `u` is the reciprocal of the\n@@ -30,7 +29,9 @@\n C(u) = \\frac{n - 1}{\\sum_{v=1}^{n-1} d(v, u)},\n \n where `d(v, u)` is the shortest-path distance between `v` and `u`,\n- and `n` is the number of nodes that can reach `u`.\n+ and `n` is the number of nodes that can reach `u`. Notice that the\n+ closeness distance function computes the incoming distance to `u`\n+ for directed graphs. To use outward distance, act on `G.reverse()`.\n \n Notice that higher values of closeness indicate higher centrality.\n \n@@ -63,10 +64,6 @@\n Wasserman and Faust improved formula. For single component graphs\n it is the same as the original formula. \n \n- reverse : bool, optional (default=False)\n- If True and G is a digraph, reverse the edges of G, using successors\n- instead of predecessors.\n-\n Returns\n -------\n nodes : dictionary\n@@ -89,6 +86,10 @@\n shortest-path length will be computed using Dijkstra's algorithm with\n that edge attribute as the edge weight.\n \n+ In NetworkX 2.2 and earlier a bug caused Dijkstra's algorithm to use the\n+ outward distance rather than the inward distance. If you use a 'distance'\n+ keyword and a DiGraph, your results will change between v2.2 and v2.3.\n+\n References\n ----------\n .. [1] Linton C. Freeman: Centrality in networks: I.\n@@ -98,18 +99,18 @@\n Social Network Analysis: Methods and Applications, 1994,\n Cambridge University Press.\n \"\"\"\n+ if G.is_directed():\n+ G = G.reverse() # create a reversed graph view\n+\n if distance is not None:\n # use Dijkstra's algorithm with specified attribute as edge weight\n path_length = functools.partial(nx.single_source_dijkstra_path_length,\n weight=distance)\n- else: # handle either directed or undirected\n- if G.is_directed() and not reverse:\n- path_length = nx.single_target_shortest_path_length\n- else:\n- path_length = nx.single_source_shortest_path_length\n+ else:\n+ path_length = nx.single_source_shortest_path_length\n \n if u is None:\n- nodes = G.nodes()\n+ nodes = G.nodes\n else:\n nodes = [u]\n closeness_centrality = {}\n", "issue": "Closeness centrality assumes symmetric distance when \"distance\" parameter is used\nAt the beginning of `nx.closeness_centrality` code there is this logic:\r\n\r\n```python\r\n if distance is not None:\r\n # use Dijkstra's algorithm with specified attribute as edge weight\r\n path_length = functools.partial(nx.single_source_dijkstra_path_length,\r\n weight=distance)\r\n else: # handle either directed or undirected\r\n if G.is_directed() and not reverse:\r\n path_length = nx.single_target_shortest_path_length\r\n else:\r\n path_length = nx.single_source_shortest_path_length\r\n```\r\n\r\nThis means that if `distance` parameter is passed then the direction of the edges is ignored, and the `reverse` parameter has no effect.\r\n\r\nIs this a design decision? I have directed networks with non-symmetric distances and am interested in both inbound and outbound centralities. 
It looks to me this would be a simple change (checking `reverse` and using `nx.single_target_dijkstra_path_length` if needed). Am willing to make a PR if this is deemed an appropriate change.\n", "before_files": [{"content": "# Copyright (C) 2004-2018 by\n# Aric Hagberg <[email protected]>\n# Dan Schult <[email protected]>\n# Pieter Swart <[email protected]>\n# All rights reserved.\n# BSD license.\n#\n# Authors: Aric Hagberg <[email protected]>\n# Pieter Swart <[email protected]>\n# Sasha Gutfraind <[email protected]>\n# Dan Schult <[email protected]>\n\"\"\"\nCloseness centrality measures.\n\"\"\"\nimport functools\nimport networkx as nx\n\n__all__ = ['closeness_centrality']\n\n\ndef closeness_centrality(G, u=None, distance=None,\n wf_improved=True, reverse=False):\n r\"\"\"Compute closeness centrality for nodes.\n\n Closeness centrality [1]_ of a node `u` is the reciprocal of the\n average shortest path distance to `u` over all `n-1` reachable nodes.\n\n .. math::\n\n C(u) = \\frac{n - 1}{\\sum_{v=1}^{n-1} d(v, u)},\n\n where `d(v, u)` is the shortest-path distance between `v` and `u`,\n and `n` is the number of nodes that can reach `u`.\n\n Notice that higher values of closeness indicate higher centrality.\n\n Wasserman and Faust propose an improved formula for graphs with\n more than one connected component. The result is \"a ratio of the\n fraction of actors in the group who are reachable, to the average\n distance\" from the reachable actors [2]_. You might think this\n scale factor is inverted but it is not. As is, nodes from small\n components receive a smaller closeness value. Letting `N` denote\n the number of nodes in the graph,\n\n .. math::\n\n C_{WF}(u) = \\frac{n-1}{N-1} \\frac{n - 1}{\\sum_{v=1}^{n-1} d(v, u)},\n\n Parameters\n ----------\n G : graph\n A NetworkX graph\n\n u : node, optional\n Return only the value for node u\n\n distance : edge attribute key, optional (default=None)\n Use the specified edge attribute as the edge distance in shortest\n path calculations\n\n wf_improved : bool, optional (default=True)\n If True, scale by the fraction of nodes reachable. This gives the\n Wasserman and Faust improved formula. For single component graphs\n it is the same as the original formula. \n\n reverse : bool, optional (default=False)\n If True and G is a digraph, reverse the edges of G, using successors\n instead of predecessors.\n\n Returns\n -------\n nodes : dictionary\n Dictionary of nodes with closeness centrality as the value.\n\n See Also\n --------\n betweenness_centrality, load_centrality, eigenvector_centrality,\n degree_centrality\n\n Notes\n -----\n The closeness centrality is normalized to `(n-1)/(|G|-1)` where\n `n` is the number of nodes in the connected part of graph\n containing the node. If the graph is not completely connected,\n this algorithm computes the closeness centrality for each\n connected part separately scaled by that parts size.\n\n If the 'distance' keyword is set to an edge attribute key then the\n shortest-path length will be computed using Dijkstra's algorithm with\n that edge attribute as the edge weight.\n\n References\n ----------\n .. [1] Linton C. Freeman: Centrality in networks: I.\n Conceptual clarification. Social Networks 1:215-239, 1979.\n http://leonidzhukov.ru/hse/2013/socialnetworks/papers/freeman79-centrality.pdf\n .. [2] pg. 201 of Wasserman, S. 
and Faust, K.,\n Social Network Analysis: Methods and Applications, 1994,\n Cambridge University Press.\n \"\"\"\n if distance is not None:\n # use Dijkstra's algorithm with specified attribute as edge weight\n path_length = functools.partial(nx.single_source_dijkstra_path_length,\n weight=distance)\n else: # handle either directed or undirected\n if G.is_directed() and not reverse:\n path_length = nx.single_target_shortest_path_length\n else:\n path_length = nx.single_source_shortest_path_length\n\n if u is None:\n nodes = G.nodes()\n else:\n nodes = [u]\n closeness_centrality = {}\n for n in nodes:\n sp = dict(path_length(G, n))\n totsp = sum(sp.values())\n if totsp > 0.0 and len(G) > 1:\n closeness_centrality[n] = (len(sp) - 1.0) / totsp\n # normalize to number of nodes-1 in connected part\n if wf_improved:\n s = (len(sp) - 1.0) / (len(G) - 1)\n closeness_centrality[n] *= s\n else:\n closeness_centrality[n] = 0.0\n if u is not None:\n return closeness_centrality[u]\n else:\n return closeness_centrality\n", "path": "networkx/algorithms/centrality/closeness.py"}], "after_files": [{"content": "# Copyright (C) 2004-2018 by\n# Aric Hagberg <[email protected]>\n# Dan Schult <[email protected]>\n# Pieter Swart <[email protected]>\n# All rights reserved.\n# BSD license.\n#\n# Authors: Aric Hagberg <[email protected]>\n# Pieter Swart <[email protected]>\n# Sasha Gutfraind <[email protected]>\n# Dan Schult <[email protected]>\n\"\"\"\nCloseness centrality measures.\n\"\"\"\nimport functools\nimport networkx as nx\n\n__all__ = ['closeness_centrality']\n\n\ndef closeness_centrality(G, u=None, distance=None, wf_improved=True):\n r\"\"\"Compute closeness centrality for nodes.\n\n Closeness centrality [1]_ of a node `u` is the reciprocal of the\n average shortest path distance to `u` over all `n-1` reachable nodes.\n\n .. math::\n\n C(u) = \\frac{n - 1}{\\sum_{v=1}^{n-1} d(v, u)},\n\n where `d(v, u)` is the shortest-path distance between `v` and `u`,\n and `n` is the number of nodes that can reach `u`. Notice that the\n closeness distance function computes the incoming distance to `u`\n for directed graphs. To use outward distance, act on `G.reverse()`.\n\n Notice that higher values of closeness indicate higher centrality.\n\n Wasserman and Faust propose an improved formula for graphs with\n more than one connected component. The result is \"a ratio of the\n fraction of actors in the group who are reachable, to the average\n distance\" from the reachable actors [2]_. You might think this\n scale factor is inverted but it is not. As is, nodes from small\n components receive a smaller closeness value. Letting `N` denote\n the number of nodes in the graph,\n\n .. math::\n\n C_{WF}(u) = \\frac{n-1}{N-1} \\frac{n - 1}{\\sum_{v=1}^{n-1} d(v, u)},\n\n Parameters\n ----------\n G : graph\n A NetworkX graph\n\n u : node, optional\n Return only the value for node u\n\n distance : edge attribute key, optional (default=None)\n Use the specified edge attribute as the edge distance in shortest\n path calculations\n\n wf_improved : bool, optional (default=True)\n If True, scale by the fraction of nodes reachable. This gives the\n Wasserman and Faust improved formula. For single component graphs\n it is the same as the original formula. 
\n\n Returns\n -------\n nodes : dictionary\n Dictionary of nodes with closeness centrality as the value.\n\n See Also\n --------\n betweenness_centrality, load_centrality, eigenvector_centrality,\n degree_centrality\n\n Notes\n -----\n The closeness centrality is normalized to `(n-1)/(|G|-1)` where\n `n` is the number of nodes in the connected part of graph\n containing the node. If the graph is not completely connected,\n this algorithm computes the closeness centrality for each\n connected part separately scaled by that parts size.\n\n If the 'distance' keyword is set to an edge attribute key then the\n shortest-path length will be computed using Dijkstra's algorithm with\n that edge attribute as the edge weight.\n\n In NetworkX 2.2 and earlier a bug caused Dijkstra's algorithm to use the\n outward distance rather than the inward distance. If you use a 'distance'\n keyword and a DiGraph, your results will change between v2.2 and v2.3.\n\n References\n ----------\n .. [1] Linton C. Freeman: Centrality in networks: I.\n Conceptual clarification. Social Networks 1:215-239, 1979.\n http://leonidzhukov.ru/hse/2013/socialnetworks/papers/freeman79-centrality.pdf\n .. [2] pg. 201 of Wasserman, S. and Faust, K.,\n Social Network Analysis: Methods and Applications, 1994,\n Cambridge University Press.\n \"\"\"\n if G.is_directed():\n G = G.reverse() # create a reversed graph view\n\n if distance is not None:\n # use Dijkstra's algorithm with specified attribute as edge weight\n path_length = functools.partial(nx.single_source_dijkstra_path_length,\n weight=distance)\n else:\n path_length = nx.single_source_shortest_path_length\n\n if u is None:\n nodes = G.nodes\n else:\n nodes = [u]\n closeness_centrality = {}\n for n in nodes:\n sp = dict(path_length(G, n))\n totsp = sum(sp.values())\n if totsp > 0.0 and len(G) > 1:\n closeness_centrality[n] = (len(sp) - 1.0) / totsp\n # normalize to number of nodes-1 in connected part\n if wf_improved:\n s = (len(sp) - 1.0) / (len(G) - 1)\n closeness_centrality[n] *= s\n else:\n closeness_centrality[n] = 0.0\n if u is not None:\n return closeness_centrality[u]\n else:\n return closeness_centrality\n", "path": "networkx/algorithms/centrality/closeness.py"}]} | 1,996 | 730 |
gh_patches_debug_61332 | rasdani/github-patches | git_diff | nerfstudio-project__nerfstudio-1453 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error while doing ns-export poisson for depth-nerfacto trained model on polycam data
Hi all,
I have been trying to run ns-export poisson on a depth-nerfacto-trained model on Polycam data and get the following error:
In depth_nerfacto.py, line 73, in get_outputs, the line
outputs["directions_norm"] = ray_bundle.metadata["directions_norm"]
raises:
TypeError: 'NoneType' object is not subscriptable
Reference:
https://discord.com/channels/1025504971606724712/1073830375559806976/1074782026781175919
Can you please help? Thanks in advance.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nerfstudio/models/depth_nerfacto.py`
Content:
```
1 # Copyright 2022 The Nerfstudio Team. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """
16 Nerfacto augmented with depth supervision.
17 """
18
19 from __future__ import annotations
20
21 from dataclasses import dataclass, field
22 from typing import Dict, Tuple, Type
23
24 import torch
25
26 from nerfstudio.cameras.rays import RayBundle
27 from nerfstudio.model_components.losses import DepthLossType, depth_loss
28 from nerfstudio.models.nerfacto import NerfactoModel, NerfactoModelConfig
29 from nerfstudio.utils import colormaps
30
31
32 @dataclass
33 class DepthNerfactoModelConfig(NerfactoModelConfig):
34 """Additional parameters for depth supervision."""
35
36 _target: Type = field(default_factory=lambda: DepthNerfactoModel)
37 depth_loss_mult: float = 1e-3
38 """Lambda of the depth loss."""
39 is_euclidean_depth: bool = False
40 """Whether input depth maps are Euclidean distances (or z-distances)."""
41 depth_sigma: float = 0.01
42 """Uncertainty around depth values in meters (defaults to 1cm)."""
43 should_decay_sigma: bool = False
44 """Whether to exponentially decay sigma."""
45 starting_depth_sigma: float = 0.2
46 """Starting uncertainty around depth values in meters (defaults to 0.2m)."""
47 sigma_decay_rate: float = 0.99985
48 """Rate of exponential decay."""
49 depth_loss_type: DepthLossType = DepthLossType.DS_NERF
50 """Depth loss type."""
51
52
53 class DepthNerfactoModel(NerfactoModel):
54 """Depth loss augmented nerfacto model.
55
56 Args:
57 config: Nerfacto configuration to instantiate model
58 """
59
60 config: DepthNerfactoModelConfig
61
62 def populate_modules(self):
63 """Set the fields and modules."""
64 super().populate_modules()
65
66 if self.config.should_decay_sigma:
67 self.depth_sigma = torch.tensor([self.config.starting_depth_sigma])
68 else:
69 self.depth_sigma = torch.tensor([self.config.depth_sigma])
70
71 def get_outputs(self, ray_bundle: RayBundle):
72 outputs = super().get_outputs(ray_bundle)
73 outputs["directions_norm"] = ray_bundle.metadata["directions_norm"]
74 return outputs
75
76 def get_metrics_dict(self, outputs, batch):
77 metrics_dict = super().get_metrics_dict(outputs, batch)
78 if self.training:
79 metrics_dict["depth_loss"] = 0.0
80 sigma = self._get_sigma().to(self.device)
81 termination_depth = batch["depth_image"].to(self.device)
82 for i in range(len(outputs["weights_list"])):
83 metrics_dict["depth_loss"] += depth_loss(
84 weights=outputs["weights_list"][i],
85 ray_samples=outputs["ray_samples_list"][i],
86 termination_depth=termination_depth,
87 predicted_depth=outputs["depth"],
88 sigma=sigma,
89 directions_norm=outputs["directions_norm"],
90 is_euclidean=self.config.is_euclidean_depth,
91 depth_loss_type=self.config.depth_loss_type,
92 ) / len(outputs["weights_list"])
93
94 return metrics_dict
95
96 def get_loss_dict(self, outputs, batch, metrics_dict=None):
97 loss_dict = super().get_loss_dict(outputs, batch, metrics_dict)
98 if self.training:
99 assert metrics_dict is not None and "depth_loss" in metrics_dict
100 loss_dict["depth_loss"] = self.config.depth_loss_mult * metrics_dict["depth_loss"]
101
102 return loss_dict
103
104 def get_image_metrics_and_images(
105 self, outputs: Dict[str, torch.Tensor], batch: Dict[str, torch.Tensor]
106 ) -> Tuple[Dict[str, float], Dict[str, torch.Tensor]]:
107 """Appends ground truth depth to the depth image."""
108 metrics, images = super().get_image_metrics_and_images(outputs, batch)
109 ground_truth_depth = batch["depth_image"]
110 if not self.config.is_euclidean_depth:
111 ground_truth_depth = ground_truth_depth * outputs["directions_norm"]
112
113 ground_truth_depth_colormap = colormaps.apply_depth_colormap(ground_truth_depth)
114 predicted_depth_colormap = colormaps.apply_depth_colormap(
115 outputs["depth"],
116 accumulation=outputs["accumulation"],
117 near_plane=torch.min(ground_truth_depth),
118 far_plane=torch.max(ground_truth_depth),
119 )
120 images["depth"] = torch.cat([ground_truth_depth_colormap, predicted_depth_colormap], dim=1)
121 depth_mask = ground_truth_depth > 0
122 metrics["depth_mse"] = torch.nn.functional.mse_loss(
123 outputs["depth"][depth_mask], ground_truth_depth[depth_mask]
124 )
125 return metrics, images
126
127 def _get_sigma(self):
128 if not self.config.should_decay_sigma:
129 return self.depth_sigma
130
131 self.depth_sigma = torch.maximum( # pylint: disable=attribute-defined-outside-init
132 self.config.sigma_decay_rate * self.depth_sigma, torch.tensor([self.config.depth_sigma])
133 )
134 return self.depth_sigma
135
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nerfstudio/models/depth_nerfacto.py b/nerfstudio/models/depth_nerfacto.py
--- a/nerfstudio/models/depth_nerfacto.py
+++ b/nerfstudio/models/depth_nerfacto.py
@@ -70,7 +70,8 @@
def get_outputs(self, ray_bundle: RayBundle):
outputs = super().get_outputs(ray_bundle)
- outputs["directions_norm"] = ray_bundle.metadata["directions_norm"]
+ if ray_bundle.metadata is not None and "directions_norm" in ray_bundle.metadata:
+ outputs["directions_norm"] = ray_bundle.metadata["directions_norm"]
return outputs
def get_metrics_dict(self, outputs, batch):
| {"golden_diff": "diff --git a/nerfstudio/models/depth_nerfacto.py b/nerfstudio/models/depth_nerfacto.py\n--- a/nerfstudio/models/depth_nerfacto.py\n+++ b/nerfstudio/models/depth_nerfacto.py\n@@ -70,7 +70,8 @@\n \n def get_outputs(self, ray_bundle: RayBundle):\n outputs = super().get_outputs(ray_bundle)\n- outputs[\"directions_norm\"] = ray_bundle.metadata[\"directions_norm\"]\n+ if ray_bundle.metadata is not None and \"directions_norm\" in ray_bundle.metadata:\n+ outputs[\"directions_norm\"] = ray_bundle.metadata[\"directions_norm\"]\n return outputs\n \n def get_metrics_dict(self, outputs, batch):\n", "issue": "Error while doing ns-export poisson for depth-nerfacto trained model on polycam data\nHi all,\n\nI have been trying to do ns-export poisson on depth-nerfacto trained model on polycam data and get the following error: \n\nIn depth_nerfacto.py line 73 in get_outputs\n\n\noutputs[\"directions_norm\"] = ray_bundle.metadata[\"directions_norm\"] as the following:\n\nTypeError: 'NoneType' object is not subscriptable\n\nReference: \n\nhttps://discord.com/channels/1025504971606724712/1073830375559806976/1074782026781175919\n\n\nCan you please help. Thanks in advance.\n\n", "before_files": [{"content": "# Copyright 2022 The Nerfstudio Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nNerfacto augmented with depth supervision.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom dataclasses import dataclass, field\nfrom typing import Dict, Tuple, Type\n\nimport torch\n\nfrom nerfstudio.cameras.rays import RayBundle\nfrom nerfstudio.model_components.losses import DepthLossType, depth_loss\nfrom nerfstudio.models.nerfacto import NerfactoModel, NerfactoModelConfig\nfrom nerfstudio.utils import colormaps\n\n\n@dataclass\nclass DepthNerfactoModelConfig(NerfactoModelConfig):\n \"\"\"Additional parameters for depth supervision.\"\"\"\n\n _target: Type = field(default_factory=lambda: DepthNerfactoModel)\n depth_loss_mult: float = 1e-3\n \"\"\"Lambda of the depth loss.\"\"\"\n is_euclidean_depth: bool = False\n \"\"\"Whether input depth maps are Euclidean distances (or z-distances).\"\"\"\n depth_sigma: float = 0.01\n \"\"\"Uncertainty around depth values in meters (defaults to 1cm).\"\"\"\n should_decay_sigma: bool = False\n \"\"\"Whether to exponentially decay sigma.\"\"\"\n starting_depth_sigma: float = 0.2\n \"\"\"Starting uncertainty around depth values in meters (defaults to 0.2m).\"\"\"\n sigma_decay_rate: float = 0.99985\n \"\"\"Rate of exponential decay.\"\"\"\n depth_loss_type: DepthLossType = DepthLossType.DS_NERF\n \"\"\"Depth loss type.\"\"\"\n\n\nclass DepthNerfactoModel(NerfactoModel):\n \"\"\"Depth loss augmented nerfacto model.\n\n Args:\n config: Nerfacto configuration to instantiate model\n \"\"\"\n\n config: DepthNerfactoModelConfig\n\n def populate_modules(self):\n \"\"\"Set the fields and modules.\"\"\"\n super().populate_modules()\n\n if self.config.should_decay_sigma:\n self.depth_sigma = torch.tensor([self.config.starting_depth_sigma])\n 
else:\n self.depth_sigma = torch.tensor([self.config.depth_sigma])\n\n def get_outputs(self, ray_bundle: RayBundle):\n outputs = super().get_outputs(ray_bundle)\n outputs[\"directions_norm\"] = ray_bundle.metadata[\"directions_norm\"]\n return outputs\n\n def get_metrics_dict(self, outputs, batch):\n metrics_dict = super().get_metrics_dict(outputs, batch)\n if self.training:\n metrics_dict[\"depth_loss\"] = 0.0\n sigma = self._get_sigma().to(self.device)\n termination_depth = batch[\"depth_image\"].to(self.device)\n for i in range(len(outputs[\"weights_list\"])):\n metrics_dict[\"depth_loss\"] += depth_loss(\n weights=outputs[\"weights_list\"][i],\n ray_samples=outputs[\"ray_samples_list\"][i],\n termination_depth=termination_depth,\n predicted_depth=outputs[\"depth\"],\n sigma=sigma,\n directions_norm=outputs[\"directions_norm\"],\n is_euclidean=self.config.is_euclidean_depth,\n depth_loss_type=self.config.depth_loss_type,\n ) / len(outputs[\"weights_list\"])\n\n return metrics_dict\n\n def get_loss_dict(self, outputs, batch, metrics_dict=None):\n loss_dict = super().get_loss_dict(outputs, batch, metrics_dict)\n if self.training:\n assert metrics_dict is not None and \"depth_loss\" in metrics_dict\n loss_dict[\"depth_loss\"] = self.config.depth_loss_mult * metrics_dict[\"depth_loss\"]\n\n return loss_dict\n\n def get_image_metrics_and_images(\n self, outputs: Dict[str, torch.Tensor], batch: Dict[str, torch.Tensor]\n ) -> Tuple[Dict[str, float], Dict[str, torch.Tensor]]:\n \"\"\"Appends ground truth depth to the depth image.\"\"\"\n metrics, images = super().get_image_metrics_and_images(outputs, batch)\n ground_truth_depth = batch[\"depth_image\"]\n if not self.config.is_euclidean_depth:\n ground_truth_depth = ground_truth_depth * outputs[\"directions_norm\"]\n\n ground_truth_depth_colormap = colormaps.apply_depth_colormap(ground_truth_depth)\n predicted_depth_colormap = colormaps.apply_depth_colormap(\n outputs[\"depth\"],\n accumulation=outputs[\"accumulation\"],\n near_plane=torch.min(ground_truth_depth),\n far_plane=torch.max(ground_truth_depth),\n )\n images[\"depth\"] = torch.cat([ground_truth_depth_colormap, predicted_depth_colormap], dim=1)\n depth_mask = ground_truth_depth > 0\n metrics[\"depth_mse\"] = torch.nn.functional.mse_loss(\n outputs[\"depth\"][depth_mask], ground_truth_depth[depth_mask]\n )\n return metrics, images\n\n def _get_sigma(self):\n if not self.config.should_decay_sigma:\n return self.depth_sigma\n\n self.depth_sigma = torch.maximum( # pylint: disable=attribute-defined-outside-init\n self.config.sigma_decay_rate * self.depth_sigma, torch.tensor([self.config.depth_sigma])\n )\n return self.depth_sigma\n", "path": "nerfstudio/models/depth_nerfacto.py"}], "after_files": [{"content": "# Copyright 2022 The Nerfstudio Team. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nNerfacto augmented with depth supervision.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom dataclasses import dataclass, field\nfrom typing import Dict, Tuple, Type\n\nimport torch\n\nfrom nerfstudio.cameras.rays import RayBundle\nfrom nerfstudio.model_components.losses import DepthLossType, depth_loss\nfrom nerfstudio.models.nerfacto import NerfactoModel, NerfactoModelConfig\nfrom nerfstudio.utils import colormaps\n\n\n@dataclass\nclass DepthNerfactoModelConfig(NerfactoModelConfig):\n \"\"\"Additional parameters for depth supervision.\"\"\"\n\n _target: Type = field(default_factory=lambda: DepthNerfactoModel)\n depth_loss_mult: float = 1e-3\n \"\"\"Lambda of the depth loss.\"\"\"\n is_euclidean_depth: bool = False\n \"\"\"Whether input depth maps are Euclidean distances (or z-distances).\"\"\"\n depth_sigma: float = 0.01\n \"\"\"Uncertainty around depth values in meters (defaults to 1cm).\"\"\"\n should_decay_sigma: bool = False\n \"\"\"Whether to exponentially decay sigma.\"\"\"\n starting_depth_sigma: float = 0.2\n \"\"\"Starting uncertainty around depth values in meters (defaults to 0.2m).\"\"\"\n sigma_decay_rate: float = 0.99985\n \"\"\"Rate of exponential decay.\"\"\"\n depth_loss_type: DepthLossType = DepthLossType.DS_NERF\n \"\"\"Depth loss type.\"\"\"\n\n\nclass DepthNerfactoModel(NerfactoModel):\n \"\"\"Depth loss augmented nerfacto model.\n\n Args:\n config: Nerfacto configuration to instantiate model\n \"\"\"\n\n config: DepthNerfactoModelConfig\n\n def populate_modules(self):\n \"\"\"Set the fields and modules.\"\"\"\n super().populate_modules()\n\n if self.config.should_decay_sigma:\n self.depth_sigma = torch.tensor([self.config.starting_depth_sigma])\n else:\n self.depth_sigma = torch.tensor([self.config.depth_sigma])\n\n def get_outputs(self, ray_bundle: RayBundle):\n outputs = super().get_outputs(ray_bundle)\n if ray_bundle.metadata is not None and \"directions_norm\" in ray_bundle.metadata:\n outputs[\"directions_norm\"] = ray_bundle.metadata[\"directions_norm\"]\n return outputs\n\n def get_metrics_dict(self, outputs, batch):\n metrics_dict = super().get_metrics_dict(outputs, batch)\n if self.training:\n metrics_dict[\"depth_loss\"] = 0.0\n sigma = self._get_sigma().to(self.device)\n termination_depth = batch[\"depth_image\"].to(self.device)\n for i in range(len(outputs[\"weights_list\"])):\n metrics_dict[\"depth_loss\"] += depth_loss(\n weights=outputs[\"weights_list\"][i],\n ray_samples=outputs[\"ray_samples_list\"][i],\n termination_depth=termination_depth,\n predicted_depth=outputs[\"depth\"],\n sigma=sigma,\n directions_norm=outputs[\"directions_norm\"],\n is_euclidean=self.config.is_euclidean_depth,\n depth_loss_type=self.config.depth_loss_type,\n ) / len(outputs[\"weights_list\"])\n\n return metrics_dict\n\n def get_loss_dict(self, outputs, batch, metrics_dict=None):\n loss_dict = super().get_loss_dict(outputs, batch, metrics_dict)\n if self.training:\n assert metrics_dict is 
not None and \"depth_loss\" in metrics_dict\n loss_dict[\"depth_loss\"] = self.config.depth_loss_mult * metrics_dict[\"depth_loss\"]\n\n return loss_dict\n\n def get_image_metrics_and_images(\n self, outputs: Dict[str, torch.Tensor], batch: Dict[str, torch.Tensor]\n ) -> Tuple[Dict[str, float], Dict[str, torch.Tensor]]:\n \"\"\"Appends ground truth depth to the depth image.\"\"\"\n metrics, images = super().get_image_metrics_and_images(outputs, batch)\n ground_truth_depth = batch[\"depth_image\"]\n if not self.config.is_euclidean_depth:\n ground_truth_depth = ground_truth_depth * outputs[\"directions_norm\"]\n\n ground_truth_depth_colormap = colormaps.apply_depth_colormap(ground_truth_depth)\n predicted_depth_colormap = colormaps.apply_depth_colormap(\n outputs[\"depth\"],\n accumulation=outputs[\"accumulation\"],\n near_plane=torch.min(ground_truth_depth),\n far_plane=torch.max(ground_truth_depth),\n )\n images[\"depth\"] = torch.cat([ground_truth_depth_colormap, predicted_depth_colormap], dim=1)\n depth_mask = ground_truth_depth > 0\n metrics[\"depth_mse\"] = torch.nn.functional.mse_loss(\n outputs[\"depth\"][depth_mask], ground_truth_depth[depth_mask]\n )\n return metrics, images\n\n def _get_sigma(self):\n if not self.config.should_decay_sigma:\n return self.depth_sigma\n\n self.depth_sigma = torch.maximum( # pylint: disable=attribute-defined-outside-init\n self.config.sigma_decay_rate * self.depth_sigma, torch.tensor([self.config.depth_sigma])\n )\n return self.depth_sigma\n", "path": "nerfstudio/models/depth_nerfacto.py"}]} | 1,941 | 164 |
gh_patches_debug_2813 | rasdani/github-patches | git_diff | cupy__cupy-4452 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Triangular Solve not working for complex case
I have discovered that `solve_triangular` does not work if the input is complex but the matrix is real:
```
solve_triangular(
cholesky,
rhs,
lower=True,
)
```
produces a real-valued result if `cholesky` is real but `rhs` is complex.
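A minimal reproduction along these lines shows the dtype being dropped (shapes and values are illustrative):
```python
import cupy as cp
from cupyx.scipy.linalg import solve_triangular

# real lower-triangular factor, complex right-hand side
chol = cp.tril(cp.random.rand(4, 4) + 4 * cp.eye(4))   # float64
rhs = cp.random.rand(4) + 1j * cp.random.rand(4)       # complex128

x = solve_triangular(chol, rhs, lower=True)
print(x.dtype)  # float64 -- the imaginary part of rhs is silently discarded,
                # whereas scipy.linalg.solve_triangular promotes to complex128 here
```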
```
intelpython3 -c 'import cupy; cupy.show_config()'
OS : Linux-5.9.6-1-MANJARO-x86_64-with-arch-Manjaro-Linux
CuPy Version : 9.0.0a1
NumPy Version : 1.16.2+intel.0
SciPy Version : 1.5.0
Cython Build Version : 0.29.21
Cython Runtime Version : 0.29.6
CUDA Root : /opt/cuda
CUDA Build Version : 11010
CUDA Driver Version : 11010
CUDA Runtime Version : 11010
cuBLAS Version : 11300
cuFFT Version : 10300
cuRAND Version : 10202
cuSOLVER Version : (11, 0, 1)
cuSPARSE Version : 11300
NVRTC Version : (11, 1)
Thrust Version : 100910
CUB Build Version : 100910
cuDNN Build Version : None
cuDNN Version : None
NCCL Build Version : 2708
NCCL Runtime Version : 2708
cuTENSOR Version : None
Device 0 Name : GeForce RTX 3090
Device 0 Compute Capability : 86
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cupyx/scipy/linalg/solve_triangular.py`
Content:
```
1 import numpy
2
3 import cupy
4 from cupy.cuda import cublas
5 from cupy.cuda import device
6 from cupy.linalg import _util
7
8
9 def solve_triangular(a, b, trans=0, lower=False, unit_diagonal=False,
10 overwrite_b=False, check_finite=False):
11 """Solve the equation a x = b for x, assuming a is a triangular matrix.
12
13 Args:
14 a (cupy.ndarray): The matrix with dimension ``(M, M)``.
15 b (cupy.ndarray): The matrix with dimension ``(M,)`` or
16 ``(M, N)``.
17 lower (bool): Use only data contained in the lower triangle of ``a``.
18 Default is to use upper triangle.
19 trans (0, 1, 2, 'N', 'T' or 'C'): Type of system to solve:
20
21 - *'0'* or *'N'* -- :math:`a x = b`
22 - *'1'* or *'T'* -- :math:`a^T x = b`
23 - *'2'* or *'C'* -- :math:`a^H x = b`
24
25 unit_diagonal (bool): If ``True``, diagonal elements of ``a`` are
26 assumed to be 1 and will not be referenced.
27 overwrite_b (bool): Allow overwriting data in b (may enhance
28 performance)
29 check_finite (bool): Whether to check that the input matrices contain
30 only finite numbers. Disabling may give a performance gain, but may
31 result in problems (crashes, non-termination) if the inputs do
32 contain infinities or NaNs.
33
34 Returns:
35 cupy.ndarray:
36 The matrix with dimension ``(M,)`` or ``(M, N)``.
37
38 .. seealso:: :func:`scipy.linalg.solve_triangular`
39 """
40
41 _util._assert_cupy_array(a, b)
42
43 if len(a.shape) != 2 or a.shape[0] != a.shape[1]:
44 raise ValueError('expected square matrix')
45 if len(a) != len(b):
46 raise ValueError('incompatible dimensions')
47
48 # Cast to float32 or float64
49 if a.dtype.char in 'fd':
50 dtype = a.dtype
51 else:
52 dtype = numpy.promote_types(a.dtype.char, 'f')
53
54 a = cupy.array(a, dtype=dtype, order='F', copy=False)
55 b = cupy.array(b, dtype=dtype, order='F', copy=(not overwrite_b))
56
57 if check_finite:
58 if a.dtype.kind == 'f' and not cupy.isfinite(a).all():
59 raise ValueError(
60 'array must not contain infs or NaNs')
61 if b.dtype.kind == 'f' and not cupy.isfinite(b).all():
62 raise ValueError(
63 'array must not contain infs or NaNs')
64
65 m, n = (b.size, 1) if b.ndim == 1 else b.shape
66 cublas_handle = device.get_cublas_handle()
67
68 if dtype == 'f':
69 trsm = cublas.strsm
70 else: # dtype == 'd'
71 trsm = cublas.dtrsm
72 one = numpy.array(1, dtype=dtype)
73
74 if lower:
75 uplo = cublas.CUBLAS_FILL_MODE_LOWER
76 else:
77 uplo = cublas.CUBLAS_FILL_MODE_UPPER
78
79 if trans == 'N':
80 trans = cublas.CUBLAS_OP_N
81 elif trans == 'T':
82 trans = cublas.CUBLAS_OP_T
83 elif trans == 'C':
84 trans = cublas.CUBLAS_OP_C
85
86 if unit_diagonal:
87 diag = cublas.CUBLAS_DIAG_UNIT
88 else:
89 diag = cublas.CUBLAS_DIAG_NON_UNIT
90
91 trsm(
92 cublas_handle, cublas.CUBLAS_SIDE_LEFT, uplo,
93 trans, diag,
94 m, n, one.ctypes.data, a.data.ptr, m, b.data.ptr, m)
95 return b
96
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/cupyx/scipy/linalg/solve_triangular.py b/cupyx/scipy/linalg/solve_triangular.py
--- a/cupyx/scipy/linalg/solve_triangular.py
+++ b/cupyx/scipy/linalg/solve_triangular.py
@@ -67,8 +67,12 @@
if dtype == 'f':
trsm = cublas.strsm
- else: # dtype == 'd'
+ elif dtype == 'd':
trsm = cublas.dtrsm
+ elif dtype == 'F':
+ trsm = cublas.ctrsm
+ else: # dtype == 'D'
+ trsm = cublas.ztrsm
one = numpy.array(1, dtype=dtype)
if lower:
| {"golden_diff": "diff --git a/cupyx/scipy/linalg/solve_triangular.py b/cupyx/scipy/linalg/solve_triangular.py\n--- a/cupyx/scipy/linalg/solve_triangular.py\n+++ b/cupyx/scipy/linalg/solve_triangular.py\n@@ -67,8 +67,12 @@\n \n if dtype == 'f':\n trsm = cublas.strsm\n- else: # dtype == 'd'\n+ elif dtype == 'd':\n trsm = cublas.dtrsm\n+ elif dtype == 'F':\n+ trsm = cublas.ctrsm\n+ else: # dtype == 'D'\n+ trsm = cublas.ztrsm\n one = numpy.array(1, dtype=dtype)\n \n if lower:\n", "issue": "Triangular Solve not working for complex case\nI have discovered, that `solve_triangular` does not work, if the input is complex, but the matrix is real:\r\n```\r\nsolve_triangular(\r\n cholesky,\r\n rhs,\r\n lower=True,\r\n)\r\n```\r\nproduces a real valued result, if `cholesky` is real but `rhs` is complex.\r\n\r\n```\r\nintelpython3 -c 'import cupy; cupy.show_config()'\r\nOS : Linux-5.9.6-1-MANJARO-x86_64-with-arch-Manjaro-Linux\r\nCuPy Version : 9.0.0a1\r\nNumPy Version : 1.16.2+intel.0\r\nSciPy Version : 1.5.0\r\nCython Build Version : 0.29.21\r\nCython Runtime Version : 0.29.6\r\nCUDA Root : /opt/cuda\r\nCUDA Build Version : 11010\r\nCUDA Driver Version : 11010\r\nCUDA Runtime Version : 11010\r\ncuBLAS Version : 11300\r\ncuFFT Version : 10300\r\ncuRAND Version : 10202\r\ncuSOLVER Version : (11, 0, 1)\r\ncuSPARSE Version : 11300\r\nNVRTC Version : (11, 1)\r\nThrust Version : 100910\r\nCUB Build Version : 100910\r\ncuDNN Build Version : None\r\ncuDNN Version : None\r\nNCCL Build Version : 2708\r\nNCCL Runtime Version : 2708\r\ncuTENSOR Version : None\r\nDevice 0 Name : GeForce RTX 3090\r\nDevice 0 Compute Capability : 86\r\n```\r\n\r\n\n", "before_files": [{"content": "import numpy\n\nimport cupy\nfrom cupy.cuda import cublas\nfrom cupy.cuda import device\nfrom cupy.linalg import _util\n\n\ndef solve_triangular(a, b, trans=0, lower=False, unit_diagonal=False,\n overwrite_b=False, check_finite=False):\n \"\"\"Solve the equation a x = b for x, assuming a is a triangular matrix.\n\n Args:\n a (cupy.ndarray): The matrix with dimension ``(M, M)``.\n b (cupy.ndarray): The matrix with dimension ``(M,)`` or\n ``(M, N)``.\n lower (bool): Use only data contained in the lower triangle of ``a``.\n Default is to use upper triangle.\n trans (0, 1, 2, 'N', 'T' or 'C'): Type of system to solve:\n\n - *'0'* or *'N'* -- :math:`a x = b`\n - *'1'* or *'T'* -- :math:`a^T x = b`\n - *'2'* or *'C'* -- :math:`a^H x = b`\n\n unit_diagonal (bool): If ``True``, diagonal elements of ``a`` are\n assumed to be 1 and will not be referenced.\n overwrite_b (bool): Allow overwriting data in b (may enhance\n performance)\n check_finite (bool): Whether to check that the input matrices contain\n only finite numbers. Disabling may give a performance gain, but may\n result in problems (crashes, non-termination) if the inputs do\n contain infinities or NaNs.\n\n Returns:\n cupy.ndarray:\n The matrix with dimension ``(M,)`` or ``(M, N)``.\n\n .. 
seealso:: :func:`scipy.linalg.solve_triangular`\n \"\"\"\n\n _util._assert_cupy_array(a, b)\n\n if len(a.shape) != 2 or a.shape[0] != a.shape[1]:\n raise ValueError('expected square matrix')\n if len(a) != len(b):\n raise ValueError('incompatible dimensions')\n\n # Cast to float32 or float64\n if a.dtype.char in 'fd':\n dtype = a.dtype\n else:\n dtype = numpy.promote_types(a.dtype.char, 'f')\n\n a = cupy.array(a, dtype=dtype, order='F', copy=False)\n b = cupy.array(b, dtype=dtype, order='F', copy=(not overwrite_b))\n\n if check_finite:\n if a.dtype.kind == 'f' and not cupy.isfinite(a).all():\n raise ValueError(\n 'array must not contain infs or NaNs')\n if b.dtype.kind == 'f' and not cupy.isfinite(b).all():\n raise ValueError(\n 'array must not contain infs or NaNs')\n\n m, n = (b.size, 1) if b.ndim == 1 else b.shape\n cublas_handle = device.get_cublas_handle()\n\n if dtype == 'f':\n trsm = cublas.strsm\n else: # dtype == 'd'\n trsm = cublas.dtrsm\n one = numpy.array(1, dtype=dtype)\n\n if lower:\n uplo = cublas.CUBLAS_FILL_MODE_LOWER\n else:\n uplo = cublas.CUBLAS_FILL_MODE_UPPER\n\n if trans == 'N':\n trans = cublas.CUBLAS_OP_N\n elif trans == 'T':\n trans = cublas.CUBLAS_OP_T\n elif trans == 'C':\n trans = cublas.CUBLAS_OP_C\n\n if unit_diagonal:\n diag = cublas.CUBLAS_DIAG_UNIT\n else:\n diag = cublas.CUBLAS_DIAG_NON_UNIT\n\n trsm(\n cublas_handle, cublas.CUBLAS_SIDE_LEFT, uplo,\n trans, diag,\n m, n, one.ctypes.data, a.data.ptr, m, b.data.ptr, m)\n return b\n", "path": "cupyx/scipy/linalg/solve_triangular.py"}], "after_files": [{"content": "import numpy\n\nimport cupy\nfrom cupy.cuda import cublas\nfrom cupy.cuda import device\nfrom cupy.linalg import _util\n\n\ndef solve_triangular(a, b, trans=0, lower=False, unit_diagonal=False,\n overwrite_b=False, check_finite=False):\n \"\"\"Solve the equation a x = b for x, assuming a is a triangular matrix.\n\n Args:\n a (cupy.ndarray): The matrix with dimension ``(M, M)``.\n b (cupy.ndarray): The matrix with dimension ``(M,)`` or\n ``(M, N)``.\n lower (bool): Use only data contained in the lower triangle of ``a``.\n Default is to use upper triangle.\n trans (0, 1, 2, 'N', 'T' or 'C'): Type of system to solve:\n\n - *'0'* or *'N'* -- :math:`a x = b`\n - *'1'* or *'T'* -- :math:`a^T x = b`\n - *'2'* or *'C'* -- :math:`a^H x = b`\n\n unit_diagonal (bool): If ``True``, diagonal elements of ``a`` are\n assumed to be 1 and will not be referenced.\n overwrite_b (bool): Allow overwriting data in b (may enhance\n performance)\n check_finite (bool): Whether to check that the input matrices contain\n only finite numbers. Disabling may give a performance gain, but may\n result in problems (crashes, non-termination) if the inputs do\n contain infinities or NaNs.\n\n Returns:\n cupy.ndarray:\n The matrix with dimension ``(M,)`` or ``(M, N)``.\n\n .. 
seealso:: :func:`scipy.linalg.solve_triangular`\n \"\"\"\n\n _util._assert_cupy_array(a, b)\n\n if len(a.shape) != 2 or a.shape[0] != a.shape[1]:\n raise ValueError('expected square matrix')\n if len(a) != len(b):\n raise ValueError('incompatible dimensions')\n\n # Cast to float32 or float64\n if a.dtype.char in 'fd':\n dtype = a.dtype\n else:\n dtype = numpy.promote_types(a.dtype.char, 'f')\n\n a = cupy.array(a, dtype=dtype, order='F', copy=False)\n b = cupy.array(b, dtype=dtype, order='F', copy=(not overwrite_b))\n\n if check_finite:\n if a.dtype.kind == 'f' and not cupy.isfinite(a).all():\n raise ValueError(\n 'array must not contain infs or NaNs')\n if b.dtype.kind == 'f' and not cupy.isfinite(b).all():\n raise ValueError(\n 'array must not contain infs or NaNs')\n\n m, n = (b.size, 1) if b.ndim == 1 else b.shape\n cublas_handle = device.get_cublas_handle()\n\n if dtype == 'f':\n trsm = cublas.strsm\n elif dtype == 'd':\n trsm = cublas.dtrsm\n elif dtype == 'F':\n trsm = cublas.ctrsm\n else: # dtype == 'D'\n trsm = cublas.ztrsm\n one = numpy.array(1, dtype=dtype)\n\n if lower:\n uplo = cublas.CUBLAS_FILL_MODE_LOWER\n else:\n uplo = cublas.CUBLAS_FILL_MODE_UPPER\n\n if trans == 'N':\n trans = cublas.CUBLAS_OP_N\n elif trans == 'T':\n trans = cublas.CUBLAS_OP_T\n elif trans == 'C':\n trans = cublas.CUBLAS_OP_C\n\n if unit_diagonal:\n diag = cublas.CUBLAS_DIAG_UNIT\n else:\n diag = cublas.CUBLAS_DIAG_NON_UNIT\n\n trsm(\n cublas_handle, cublas.CUBLAS_SIDE_LEFT, uplo,\n trans, diag,\n m, n, one.ctypes.data, a.data.ptr, m, b.data.ptr, m)\n return b\n", "path": "cupyx/scipy/linalg/solve_triangular.py"}]} | 1,764 | 177 |
gh_patches_debug_23575 | rasdani/github-patches | git_diff | optuna__optuna-1876 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Improve ThresholdPruner example
The variable `n_trial_step` in [Threshold Pruner](https://optuna.readthedocs.io/en/stable/reference/generated/optuna.pruners.ThresholdPruner.html) is not used.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `optuna/pruners/_threshold.py`
Content:
```
1 import math
2 from typing import Any
3 from typing import Optional
4
5 import optuna
6 from optuna.pruners import BasePruner
7 from optuna.pruners._percentile import _is_first_in_interval_step
8
9
10 def _check_value(value: Any) -> float:
11 try:
12 # For convenience, we allow users to report a value that can be cast to `float`.
13 value = float(value)
14 except (TypeError, ValueError):
15 message = "The `value` argument is of type '{}' but supposed to be a float.".format(
16 type(value).__name__
17 )
18 raise TypeError(message) from None
19
20 return value
21
22
23 class ThresholdPruner(BasePruner):
24 """Pruner to detect outlying metrics of the trials.
25
26 Prune if a metric exceeds upper threshold,
27 falls behind lower threshold or reaches ``nan``.
28
29 Example:
30 .. testcode::
31
32 from optuna import create_study
33 from optuna.pruners import ThresholdPruner
34 from optuna import TrialPruned
35
36
37 def objective_for_upper(trial):
38 for step, y in enumerate(ys_for_upper):
39 trial.report(y, step)
40
41 if trial.should_prune():
42 raise TrialPruned()
43 return ys_for_upper[-1]
44
45
46 def objective_for_lower(trial):
47 for step, y in enumerate(ys_for_lower):
48 trial.report(y, step)
49
50 if trial.should_prune():
51 raise TrialPruned()
52 return ys_for_lower[-1]
53
54
55 ys_for_upper = [0.0, 0.1, 0.2, 0.5, 1.2]
56 ys_for_lower = [100.0, 90.0, 0.1, 0.0, -1]
57 n_trial_step = 5
58
59 study = create_study(pruner=ThresholdPruner(upper=1.0))
60 study.optimize(objective_for_upper, n_trials=10)
61
62 study = create_study(pruner=ThresholdPruner(lower=0.0))
63 study.optimize(objective_for_lower, n_trials=10)
64
65 Args
66 lower:
67 A minimum value which determines whether pruner prunes or not.
68 If an intermediate value is smaller than lower, it prunes.
69 upper:
70 A maximum value which determines whether pruner prunes or not.
71 If an intermediate value is larger than upper, it prunes.
72 n_warmup_steps:
73 Pruning is disabled until the trial exceeds the given number of step.
74 interval_steps:
75 Interval in number of steps between the pruning checks, offset by the warmup steps.
76 If no value has been reported at the time of a pruning check, that particular check
77 will be postponed until a value is reported. Value must be at least 1.
78
79 """
80
81 def __init__(
82 self,
83 lower: Optional[float] = None,
84 upper: Optional[float] = None,
85 n_warmup_steps: int = 0,
86 interval_steps: int = 1,
87 ) -> None:
88
89 if lower is None and upper is None:
90 raise TypeError("Either lower or upper must be specified.")
91 if lower is not None:
92 lower = _check_value(lower)
93 if upper is not None:
94 upper = _check_value(upper)
95
96 lower = lower if lower is not None else -float("inf")
97 upper = upper if upper is not None else float("inf")
98
99 if lower > upper:
100 raise ValueError("lower should be smaller than upper.")
101 if n_warmup_steps < 0:
102 raise ValueError(
103 "Number of warmup steps cannot be negative but got {}.".format(n_warmup_steps)
104 )
105 if interval_steps < 1:
106 raise ValueError(
107 "Pruning interval steps must be at least 1 but got {}.".format(interval_steps)
108 )
109
110 self._lower = lower
111 self._upper = upper
112 self._n_warmup_steps = n_warmup_steps
113 self._interval_steps = interval_steps
114
115 def prune(self, study: "optuna.study.Study", trial: "optuna.trial.FrozenTrial") -> bool:
116
117 step = trial.last_step
118 if step is None:
119 return False
120
121 n_warmup_steps = self._n_warmup_steps
122 if step < n_warmup_steps:
123 return False
124
125 if not _is_first_in_interval_step(
126 step, trial.intermediate_values.keys(), n_warmup_steps, self._interval_steps
127 ):
128 return False
129
130 latest_value = trial.intermediate_values[step]
131 if math.isnan(latest_value):
132 return True
133
134 if latest_value < self._lower:
135 return True
136
137 if latest_value > self._upper:
138 return True
139
140 return False
141
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/optuna/pruners/_threshold.py b/optuna/pruners/_threshold.py
--- a/optuna/pruners/_threshold.py
+++ b/optuna/pruners/_threshold.py
@@ -54,7 +54,6 @@
ys_for_upper = [0.0, 0.1, 0.2, 0.5, 1.2]
ys_for_lower = [100.0, 90.0, 0.1, 0.0, -1]
- n_trial_step = 5
study = create_study(pruner=ThresholdPruner(upper=1.0))
study.optimize(objective_for_upper, n_trials=10)
@@ -70,7 +69,7 @@
A maximum value which determines whether pruner prunes or not.
If an intermediate value is larger than upper, it prunes.
n_warmup_steps:
- Pruning is disabled until the trial exceeds the given number of step.
+ Pruning is disabled if the step is less than the given number of warmup steps.
interval_steps:
Interval in number of steps between the pruning checks, offset by the warmup steps.
If no value has been reported at the time of a pruning check, that particular check
| {"golden_diff": "diff --git a/optuna/pruners/_threshold.py b/optuna/pruners/_threshold.py\n--- a/optuna/pruners/_threshold.py\n+++ b/optuna/pruners/_threshold.py\n@@ -54,7 +54,6 @@\n \n ys_for_upper = [0.0, 0.1, 0.2, 0.5, 1.2]\n ys_for_lower = [100.0, 90.0, 0.1, 0.0, -1]\n- n_trial_step = 5\n \n study = create_study(pruner=ThresholdPruner(upper=1.0))\n study.optimize(objective_for_upper, n_trials=10)\n@@ -70,7 +69,7 @@\n A maximum value which determines whether pruner prunes or not.\n If an intermediate value is larger than upper, it prunes.\n n_warmup_steps:\n- Pruning is disabled until the trial exceeds the given number of step.\n+ Pruning is disabled if the step is less than the given number of warmup steps.\n interval_steps:\n Interval in number of steps between the pruning checks, offset by the warmup steps.\n If no value has been reported at the time of a pruning check, that particular check\n", "issue": "Improve ThresholdPruner example\nThe variable `n_trial_step` in [Threshold Pruner](https://optuna.readthedocs.io/en/stable/reference/generated/optuna.pruners.ThresholdPruner.html) is not used.\r\n\nImprove ThresholdPruner example\nThe variable `n_trial_step` in [Threshold Pruner](https://optuna.readthedocs.io/en/stable/reference/generated/optuna.pruners.ThresholdPruner.html) is not used.\r\n\n", "before_files": [{"content": "import math\nfrom typing import Any\nfrom typing import Optional\n\nimport optuna\nfrom optuna.pruners import BasePruner\nfrom optuna.pruners._percentile import _is_first_in_interval_step\n\n\ndef _check_value(value: Any) -> float:\n try:\n # For convenience, we allow users to report a value that can be cast to `float`.\n value = float(value)\n except (TypeError, ValueError):\n message = \"The `value` argument is of type '{}' but supposed to be a float.\".format(\n type(value).__name__\n )\n raise TypeError(message) from None\n\n return value\n\n\nclass ThresholdPruner(BasePruner):\n \"\"\"Pruner to detect outlying metrics of the trials.\n\n Prune if a metric exceeds upper threshold,\n falls behind lower threshold or reaches ``nan``.\n\n Example:\n .. testcode::\n\n from optuna import create_study\n from optuna.pruners import ThresholdPruner\n from optuna import TrialPruned\n\n\n def objective_for_upper(trial):\n for step, y in enumerate(ys_for_upper):\n trial.report(y, step)\n\n if trial.should_prune():\n raise TrialPruned()\n return ys_for_upper[-1]\n\n\n def objective_for_lower(trial):\n for step, y in enumerate(ys_for_lower):\n trial.report(y, step)\n\n if trial.should_prune():\n raise TrialPruned()\n return ys_for_lower[-1]\n\n\n ys_for_upper = [0.0, 0.1, 0.2, 0.5, 1.2]\n ys_for_lower = [100.0, 90.0, 0.1, 0.0, -1]\n n_trial_step = 5\n\n study = create_study(pruner=ThresholdPruner(upper=1.0))\n study.optimize(objective_for_upper, n_trials=10)\n\n study = create_study(pruner=ThresholdPruner(lower=0.0))\n study.optimize(objective_for_lower, n_trials=10)\n\n Args\n lower:\n A minimum value which determines whether pruner prunes or not.\n If an intermediate value is smaller than lower, it prunes.\n upper:\n A maximum value which determines whether pruner prunes or not.\n If an intermediate value is larger than upper, it prunes.\n n_warmup_steps:\n Pruning is disabled until the trial exceeds the given number of step.\n interval_steps:\n Interval in number of steps between the pruning checks, offset by the warmup steps.\n If no value has been reported at the time of a pruning check, that particular check\n will be postponed until a value is reported. 
Value must be at least 1.\n\n \"\"\"\n\n def __init__(\n self,\n lower: Optional[float] = None,\n upper: Optional[float] = None,\n n_warmup_steps: int = 0,\n interval_steps: int = 1,\n ) -> None:\n\n if lower is None and upper is None:\n raise TypeError(\"Either lower or upper must be specified.\")\n if lower is not None:\n lower = _check_value(lower)\n if upper is not None:\n upper = _check_value(upper)\n\n lower = lower if lower is not None else -float(\"inf\")\n upper = upper if upper is not None else float(\"inf\")\n\n if lower > upper:\n raise ValueError(\"lower should be smaller than upper.\")\n if n_warmup_steps < 0:\n raise ValueError(\n \"Number of warmup steps cannot be negative but got {}.\".format(n_warmup_steps)\n )\n if interval_steps < 1:\n raise ValueError(\n \"Pruning interval steps must be at least 1 but got {}.\".format(interval_steps)\n )\n\n self._lower = lower\n self._upper = upper\n self._n_warmup_steps = n_warmup_steps\n self._interval_steps = interval_steps\n\n def prune(self, study: \"optuna.study.Study\", trial: \"optuna.trial.FrozenTrial\") -> bool:\n\n step = trial.last_step\n if step is None:\n return False\n\n n_warmup_steps = self._n_warmup_steps\n if step < n_warmup_steps:\n return False\n\n if not _is_first_in_interval_step(\n step, trial.intermediate_values.keys(), n_warmup_steps, self._interval_steps\n ):\n return False\n\n latest_value = trial.intermediate_values[step]\n if math.isnan(latest_value):\n return True\n\n if latest_value < self._lower:\n return True\n\n if latest_value > self._upper:\n return True\n\n return False\n", "path": "optuna/pruners/_threshold.py"}], "after_files": [{"content": "import math\nfrom typing import Any\nfrom typing import Optional\n\nimport optuna\nfrom optuna.pruners import BasePruner\nfrom optuna.pruners._percentile import _is_first_in_interval_step\n\n\ndef _check_value(value: Any) -> float:\n try:\n # For convenience, we allow users to report a value that can be cast to `float`.\n value = float(value)\n except (TypeError, ValueError):\n message = \"The `value` argument is of type '{}' but supposed to be a float.\".format(\n type(value).__name__\n )\n raise TypeError(message) from None\n\n return value\n\n\nclass ThresholdPruner(BasePruner):\n \"\"\"Pruner to detect outlying metrics of the trials.\n\n Prune if a metric exceeds upper threshold,\n falls behind lower threshold or reaches ``nan``.\n\n Example:\n .. 
testcode::\n\n from optuna import create_study\n from optuna.pruners import ThresholdPruner\n from optuna import TrialPruned\n\n\n def objective_for_upper(trial):\n for step, y in enumerate(ys_for_upper):\n trial.report(y, step)\n\n if trial.should_prune():\n raise TrialPruned()\n return ys_for_upper[-1]\n\n\n def objective_for_lower(trial):\n for step, y in enumerate(ys_for_lower):\n trial.report(y, step)\n\n if trial.should_prune():\n raise TrialPruned()\n return ys_for_lower[-1]\n\n\n ys_for_upper = [0.0, 0.1, 0.2, 0.5, 1.2]\n ys_for_lower = [100.0, 90.0, 0.1, 0.0, -1]\n\n study = create_study(pruner=ThresholdPruner(upper=1.0))\n study.optimize(objective_for_upper, n_trials=10)\n\n study = create_study(pruner=ThresholdPruner(lower=0.0))\n study.optimize(objective_for_lower, n_trials=10)\n\n Args\n lower:\n A minimum value which determines whether pruner prunes or not.\n If an intermediate value is smaller than lower, it prunes.\n upper:\n A maximum value which determines whether pruner prunes or not.\n If an intermediate value is larger than upper, it prunes.\n n_warmup_steps:\n Pruning is disabled if the step is less than the given number of warmup steps.\n interval_steps:\n Interval in number of steps between the pruning checks, offset by the warmup steps.\n If no value has been reported at the time of a pruning check, that particular check\n will be postponed until a value is reported. Value must be at least 1.\n\n \"\"\"\n\n def __init__(\n self,\n lower: Optional[float] = None,\n upper: Optional[float] = None,\n n_warmup_steps: int = 0,\n interval_steps: int = 1,\n ) -> None:\n\n if lower is None and upper is None:\n raise TypeError(\"Either lower or upper must be specified.\")\n if lower is not None:\n lower = _check_value(lower)\n if upper is not None:\n upper = _check_value(upper)\n\n lower = lower if lower is not None else -float(\"inf\")\n upper = upper if upper is not None else float(\"inf\")\n\n if lower > upper:\n raise ValueError(\"lower should be smaller than upper.\")\n if n_warmup_steps < 0:\n raise ValueError(\n \"Number of warmup steps cannot be negative but got {}.\".format(n_warmup_steps)\n )\n if interval_steps < 1:\n raise ValueError(\n \"Pruning interval steps must be at least 1 but got {}.\".format(interval_steps)\n )\n\n self._lower = lower\n self._upper = upper\n self._n_warmup_steps = n_warmup_steps\n self._interval_steps = interval_steps\n\n def prune(self, study: \"optuna.study.Study\", trial: \"optuna.trial.FrozenTrial\") -> bool:\n\n step = trial.last_step\n if step is None:\n return False\n\n n_warmup_steps = self._n_warmup_steps\n if step < n_warmup_steps:\n return False\n\n if not _is_first_in_interval_step(\n step, trial.intermediate_values.keys(), n_warmup_steps, self._interval_steps\n ):\n return False\n\n latest_value = trial.intermediate_values[step]\n if math.isnan(latest_value):\n return True\n\n if latest_value < self._lower:\n return True\n\n if latest_value > self._upper:\n return True\n\n return False\n", "path": "optuna/pruners/_threshold.py"}]} | 1,723 | 283 |
gh_patches_debug_17595 | rasdani/github-patches | git_diff | secdev__scapy-137 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Unable to Accurately Set Time Fields in NTP Layer
I'm trying to modify the time fields in the NTP layer, and not having any luck. I've tried setting packet.recv to an integer value and a string representation of time, but no matter what I try, when I send the packet, this field is always set to the default 0. Is there any example code that I just missed that would show me how to do this? Thank you in advance.
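Concretely, the attempts look like the following (timestamp values and destination are placeholders); in both cases the field on the wire still shows the default 0:
```python
from scapy.all import IP, UDP, send
from scapy.layers.ntp import NTP
import time

pkt = IP(dst="192.0.2.1") / UDP(sport=123, dport=123) / NTP()

# attempt 1: an integer value (seconds, offset to the 1900 NTP era)
pkt[NTP].recv = int(time.time()) + 2208988800

# attempt 2: a string representation of the time, in the default
# time.strptime() format (overrides attempt 1; both shown for completeness)
pkt[NTP].recv = time.ctime()

send(pkt)
```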
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scapy/layers/ntp.py`
Content:
```
1 ## This file is part of Scapy
2 ## See http://www.secdev.org/projects/scapy for more informations
3 ## Copyright (C) Philippe Biondi <[email protected]>
4 ## This program is published under a GPLv2 license
5
6 """
7 NTP (Network Time Protocol).
8 """
9
10 import time
11 from scapy.packet import *
12 from scapy.fields import *
13 from scapy.layers.inet import UDP
14
15
16 # seconds between 01-01-1900 and 01-01-1970
17 _NTP_BASETIME = 2208988800
18
19 class TimeStampField(FixedPointField):
20 def __init__(self, name, default):
21 FixedPointField.__init__(self, name, default, 64, 32)
22
23 def i2repr(self, pkt, val):
24 if val is None:
25 return "--"
26 val = self.i2h(pkt,val)
27 if val < _NTP_BASETIME:
28 return val
29 return time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(val-_NTP_BASETIME))
30
31 def any2i(self, pkt, val):
32 if type(val) is str:
33 return int(time.mktime(time.strptime(val))) + _NTP_BASETIME + 3600 # XXX
34 return FixedPointField.any2i(self,pkt,val)
35
36 def i2m(self, pkt, val):
37 if val is None:
38 val = FixedPointField.any2i(self, pkt, time.time()+_NTP_BASETIME)
39 return FixedPointField.i2m(self, pkt, val)
40
41
42
43 class NTP(Packet):
44 # RFC 1769
45 name = "NTP"
46 fields_desc = [
47 BitEnumField('leap', 0, 2,
48 { 0: 'nowarning',
49 1: 'longminute',
50 2: 'shortminute',
51 3: 'notsync'}),
52 BitField('version', 3, 3),
53 BitEnumField('mode', 3, 3,
54 { 0: 'reserved',
55 1: 'sym_active',
56 2: 'sym_passive',
57 3: 'client',
58 4: 'server',
59 5: 'broadcast',
60 6: 'control',
61 7: 'private'}),
62 BitField('stratum', 2, 8),
63 BitField('poll', 0xa, 8), ### XXX : it's a signed int
64 BitField('precision', 0, 8), ### XXX : it's a signed int
65 FixedPointField('delay', 0, size=32, frac_bits=16),
66 FixedPointField('dispersion', 0, size=32, frac_bits=16),
67 IPField('id', "127.0.0.1"),
68 TimeStampField('ref', 0),
69 TimeStampField('orig', None), # None means current time
70 TimeStampField('recv', 0),
71 TimeStampField('sent', None)
72 ]
73 def mysummary(self):
74 return self.sprintf("NTP v%ir,NTP.version%, %NTP.mode%")
75
76
77 bind_layers( UDP, NTP, dport=123, sport=123)
78
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scapy/layers/ntp.py b/scapy/layers/ntp.py
--- a/scapy/layers/ntp.py
+++ b/scapy/layers/ntp.py
@@ -8,6 +8,7 @@
"""
import time
+import datetime
from scapy.packet import *
from scapy.fields import *
from scapy.layers.inet import UDP
@@ -29,9 +30,11 @@
return time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(val-_NTP_BASETIME))
def any2i(self, pkt, val):
- if type(val) is str:
- return int(time.mktime(time.strptime(val))) + _NTP_BASETIME + 3600 # XXX
- return FixedPointField.any2i(self,pkt,val)
+ if isinstance(val, basestring):
+ val = int(time.mktime(time.strptime(val))) + _NTP_BASETIME
+ elif isinstance(val, datetime.datetime):
+ val = int(val.strftime("%s")) + _NTP_BASETIME
+ return FixedPointField.any2i(self, pkt, val)
def i2m(self, pkt, val):
if val is None:
| {"golden_diff": "diff --git a/scapy/layers/ntp.py b/scapy/layers/ntp.py\n--- a/scapy/layers/ntp.py\n+++ b/scapy/layers/ntp.py\n@@ -8,6 +8,7 @@\n \"\"\"\n \n import time\n+import datetime\n from scapy.packet import *\n from scapy.fields import *\n from scapy.layers.inet import UDP\n@@ -29,9 +30,11 @@\n return time.strftime(\"%a, %d %b %Y %H:%M:%S +0000\", time.gmtime(val-_NTP_BASETIME))\n \n def any2i(self, pkt, val):\n- if type(val) is str:\n- return int(time.mktime(time.strptime(val))) + _NTP_BASETIME + 3600 # XXX\n- return FixedPointField.any2i(self,pkt,val)\n+ if isinstance(val, basestring):\n+ val = int(time.mktime(time.strptime(val))) + _NTP_BASETIME\n+ elif isinstance(val, datetime.datetime):\n+ val = int(val.strftime(\"%s\")) + _NTP_BASETIME\n+ return FixedPointField.any2i(self, pkt, val)\n \n def i2m(self, pkt, val):\n if val is None:\n", "issue": "Unable to Accurately Set Time Fields in NTP Layer\nI'm trying to modify the time fields in the NTP layer, and not having any luck. I've tried setting packet.recv to an integer value and a string representation of time, but no matter what I try, when I send the packet, this field is always set to the default 0. Is there any example code that I just missed that would show me how to do this? Thank you in advance.\n\n", "before_files": [{"content": "## This file is part of Scapy\n## See http://www.secdev.org/projects/scapy for more informations\n## Copyright (C) Philippe Biondi <[email protected]>\n## This program is published under a GPLv2 license\n\n\"\"\"\nNTP (Network Time Protocol).\n\"\"\"\n\nimport time\nfrom scapy.packet import *\nfrom scapy.fields import *\nfrom scapy.layers.inet import UDP\n\n\n# seconds between 01-01-1900 and 01-01-1970\n_NTP_BASETIME = 2208988800\n\nclass TimeStampField(FixedPointField):\n def __init__(self, name, default):\n FixedPointField.__init__(self, name, default, 64, 32)\n\n def i2repr(self, pkt, val):\n if val is None:\n return \"--\"\n val = self.i2h(pkt,val)\n if val < _NTP_BASETIME:\n return val\n return time.strftime(\"%a, %d %b %Y %H:%M:%S +0000\", time.gmtime(val-_NTP_BASETIME))\n\n def any2i(self, pkt, val):\n if type(val) is str:\n return int(time.mktime(time.strptime(val))) + _NTP_BASETIME + 3600 # XXX\n return FixedPointField.any2i(self,pkt,val)\n \n def i2m(self, pkt, val):\n if val is None:\n val = FixedPointField.any2i(self, pkt, time.time()+_NTP_BASETIME)\n return FixedPointField.i2m(self, pkt, val)\n \n\n\nclass NTP(Packet):\n # RFC 1769\n name = \"NTP\"\n fields_desc = [ \n BitEnumField('leap', 0, 2,\n { 0: 'nowarning',\n 1: 'longminute',\n 2: 'shortminute',\n 3: 'notsync'}),\n BitField('version', 3, 3),\n BitEnumField('mode', 3, 3,\n { 0: 'reserved',\n 1: 'sym_active',\n 2: 'sym_passive',\n 3: 'client',\n 4: 'server',\n 5: 'broadcast',\n 6: 'control',\n 7: 'private'}),\n BitField('stratum', 2, 8),\n BitField('poll', 0xa, 8), ### XXX : it's a signed int\n BitField('precision', 0, 8), ### XXX : it's a signed int\n FixedPointField('delay', 0, size=32, frac_bits=16),\n FixedPointField('dispersion', 0, size=32, frac_bits=16),\n IPField('id', \"127.0.0.1\"),\n TimeStampField('ref', 0),\n TimeStampField('orig', None), # None means current time\n TimeStampField('recv', 0),\n TimeStampField('sent', None) \n ]\n def mysummary(self):\n return self.sprintf(\"NTP v%ir,NTP.version%, %NTP.mode%\")\n\n\nbind_layers( UDP, NTP, dport=123, sport=123)\n", "path": "scapy/layers/ntp.py"}], "after_files": [{"content": "## This file is part of Scapy\n## See http://www.secdev.org/projects/scapy for more 
informations\n## Copyright (C) Philippe Biondi <[email protected]>\n## This program is published under a GPLv2 license\n\n\"\"\"\nNTP (Network Time Protocol).\n\"\"\"\n\nimport time\nimport datetime\nfrom scapy.packet import *\nfrom scapy.fields import *\nfrom scapy.layers.inet import UDP\n\n\n# seconds between 01-01-1900 and 01-01-1970\n_NTP_BASETIME = 2208988800\n\nclass TimeStampField(FixedPointField):\n def __init__(self, name, default):\n FixedPointField.__init__(self, name, default, 64, 32)\n\n def i2repr(self, pkt, val):\n if val is None:\n return \"--\"\n val = self.i2h(pkt,val)\n if val < _NTP_BASETIME:\n return val\n return time.strftime(\"%a, %d %b %Y %H:%M:%S +0000\", time.gmtime(val-_NTP_BASETIME))\n\n def any2i(self, pkt, val):\n if isinstance(val, basestring):\n val = int(time.mktime(time.strptime(val))) + _NTP_BASETIME\n elif isinstance(val, datetime.datetime):\n val = int(val.strftime(\"%s\")) + _NTP_BASETIME\n return FixedPointField.any2i(self, pkt, val)\n \n def i2m(self, pkt, val):\n if val is None:\n val = FixedPointField.any2i(self, pkt, time.time()+_NTP_BASETIME)\n return FixedPointField.i2m(self, pkt, val)\n \n\n\nclass NTP(Packet):\n # RFC 1769\n name = \"NTP\"\n fields_desc = [ \n BitEnumField('leap', 0, 2,\n { 0: 'nowarning',\n 1: 'longminute',\n 2: 'shortminute',\n 3: 'notsync'}),\n BitField('version', 3, 3),\n BitEnumField('mode', 3, 3,\n { 0: 'reserved',\n 1: 'sym_active',\n 2: 'sym_passive',\n 3: 'client',\n 4: 'server',\n 5: 'broadcast',\n 6: 'control',\n 7: 'private'}),\n BitField('stratum', 2, 8),\n BitField('poll', 0xa, 8), ### XXX : it's a signed int\n BitField('precision', 0, 8), ### XXX : it's a signed int\n FixedPointField('delay', 0, size=32, frac_bits=16),\n FixedPointField('dispersion', 0, size=32, frac_bits=16),\n IPField('id', \"127.0.0.1\"),\n TimeStampField('ref', 0),\n TimeStampField('orig', None), # None means current time\n TimeStampField('recv', 0),\n TimeStampField('sent', None) \n ]\n def mysummary(self):\n return self.sprintf(\"NTP v%ir,NTP.version%, %NTP.mode%\")\n\n\nbind_layers( UDP, NTP, dport=123, sport=123)\n", "path": "scapy/layers/ntp.py"}]} | 1,253 | 276 |
gh_patches_debug_43800 | rasdani/github-patches | git_diff | microsoft__AzureTRE-593 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Task] API to use Managed Identity to authenticate against Cosmos DB
**Is your feature request related to a problem?**
Currently the API uses an API key `STATE_STORE_KEY` to authenticate against Cosmos DB. The API uses Managed Identity to authenticate against Azure Service Bus.
**Describe the solution you'd like**
Using the same simplified access pattern to authenticate from the API to Azure resources.
Note: looks like Cosmos data-plane doesn't really support managed identity (in Python at least) but there might be a way to do this:
https://github.com/MicrosoftDocs/azure-docs/issues/29762
https://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/tutorial-linux-vm-access-cosmos-db
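The pattern those links describe keeps the key on the Cosmos data plane but fetches it at runtime through the management plane with the managed identity; roughly (names and IDs below are placeholders):
```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.cosmosdb import CosmosDBManagementClient
from azure.cosmos import CosmosClient

credential = DefaultAzureCredential(managed_identity_client_id="<msi-client-id>")
mgmt = CosmosDBManagementClient(credential, subscription_id="<subscription-id>")

# read the account keys via the control plane (requires an appropriate RBAC role on the account)
keys = mgmt.database_accounts.list_keys(
    resource_group_name="<tre-resource-group>",
    account_name="<cosmos-account-name>",
)

# then use the key as before on the data plane
client = CosmosClient("<state-store-endpoint>", keys.primary_master_key)
```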
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `management_api_app/core/config.py`
Content:
```
1 from starlette.config import Config
2
3
4 config = Config(".env")
5
6 # API settings
7 API_PREFIX = "/api"
8 PROJECT_NAME: str = config("PROJECT_NAME", default="Azure TRE API")
9 DEBUG: bool = config("DEBUG", cast=bool, default=False)
10 VERSION = "0.0.0"
11
12 # Resource Info
13 RESOURCE_LOCATION: str = config("RESOURCE_LOCATION", default="")
14 TRE_ID: str = config("TRE_ID", default="")
15
16 # State store configuration
17 STATE_STORE_ENDPOINT: str = config("STATE_STORE_ENDPOINT", default="") # Cosmos DB endpoint
18 STATE_STORE_KEY: str = config("STATE_STORE_KEY", default="") # Cosmos DB access key
19 STATE_STORE_DATABASE = "AzureTRE"
20 STATE_STORE_RESOURCES_CONTAINER = "Resources"
21 STATE_STORE_RESOURCE_TEMPLATES_CONTAINER = "ResourceTemplates"
22
23 # Service bus configuration
24 SERVICE_BUS_FULLY_QUALIFIED_NAMESPACE: str = config("SERVICE_BUS_FULLY_QUALIFIED_NAMESPACE", default="")
25 SERVICE_BUS_RESOURCE_REQUEST_QUEUE: str = config("SERVICE_BUS_RESOURCE_REQUEST_QUEUE", default="")
26 SERVICE_BUS_DEPLOYMENT_STATUS_UPDATE_QUEUE: str = config("SERVICE_BUS_DEPLOYMENT_STATUS_UPDATE_QUEUE", default="")
27
28 # Managed identity configuration
29 MANAGED_IDENTITY_CLIENT_ID: str = config("MANAGED_IDENTITY_CLIENT_ID", default="")
30
31 # Logging and monitoring
32 APP_INSIGHTS_INSTRUMENTATION_KEY: str = config("APPINSIGHTS_INSTRUMENTATIONKEY", default="")
33
34 # Authentication
35 API_CLIENT_ID: str = config("API_CLIENT_ID", default="")
36 API_CLIENT_SECRET: str = config("API_CLIENT_SECRET", default="")
37 SWAGGER_UI_CLIENT_ID: str = config("SWAGGER_UI_CLIENT_ID", default="")
38 AAD_TENANT_ID: str = config("AAD_TENANT_ID", default="")
39
40 AAD_INSTANCE: str = config("AAD_INSTANCE", default="https://login.microsoftonline.com")
41 API_AUDIENCE: str = config("API_AUDIENCE", default=API_CLIENT_ID)
42
```
Path: `management_api_app/services/health_checker.py`
Content:
```
1 from azure.core import exceptions
2 from azure.cosmos import CosmosClient
3
4 from core.config import STATE_STORE_ENDPOINT, STATE_STORE_KEY
5 from models.schemas.status import StatusEnum
6 from resources import strings
7
8
9 def create_state_store_status() -> (StatusEnum, str):
10 status = StatusEnum.ok
11 message = ""
12 try:
13 client = CosmosClient(STATE_STORE_ENDPOINT, STATE_STORE_KEY) # noqa: F841 - flake 8 client is not used
14 except exceptions.ServiceRequestError:
15 status = StatusEnum.not_ok
16 message = strings.STATE_STORE_ENDPOINT_NOT_RESPONDING
17 except: # noqa: E722 flake8 - no bare excepts
18 status = StatusEnum.not_ok
19 message = strings.UNSPECIFIED_ERROR
20 return status, message
21
```
Path: `management_api_app/api/dependencies/database.py`
Content:
```
1 import logging
2 from typing import Callable, Type
3
4 from azure.cosmos import CosmosClient
5 from fastapi import Depends, FastAPI, HTTPException
6 from starlette.requests import Request
7 from starlette.status import HTTP_503_SERVICE_UNAVAILABLE
8
9 from core import config
10 from db.errors import UnableToAccessDatabase
11 from db.repositories.base import BaseRepository
12 from resources import strings
13
14
15 def connect_to_db() -> CosmosClient:
16 logging.debug(f"Connecting to {config.STATE_STORE_ENDPOINT}")
17
18 try:
19 if config.DEBUG:
20 # ignore TLS(setup is pain) when on dev container and connecting to cosmosdb on windows host.
21 cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, config.STATE_STORE_KEY,
22 connection_verify=False)
23 else:
24 cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, config.STATE_STORE_KEY)
25 logging.debug("Connection established")
26 return cosmos_client
27 except Exception as e:
28 logging.debug(f"Connection to state store could not be established: {e}")
29
30
31 def get_db_client(app: FastAPI) -> CosmosClient:
32 if not app.state.cosmos_client:
33 app.state.cosmos_client = connect_to_db()
34 return app.state.cosmos_client
35
36
37 def get_db_client_from_request(request: Request) -> CosmosClient:
38 return get_db_client(request.app)
39
40
41 def get_repository(repo_type: Type[BaseRepository]) -> Callable[[CosmosClient], BaseRepository]:
42 def _get_repo(client: CosmosClient = Depends(get_db_client_from_request)) -> BaseRepository:
43 try:
44 return repo_type(client)
45 except UnableToAccessDatabase:
46 raise HTTPException(status_code=HTTP_503_SERVICE_UNAVAILABLE, detail=strings.STATE_STORE_ENDPOINT_NOT_RESPONDING)
47
48 return _get_repo
49
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/management_api_app/api/dependencies/database.py b/management_api_app/api/dependencies/database.py
--- a/management_api_app/api/dependencies/database.py
+++ b/management_api_app/api/dependencies/database.py
@@ -2,6 +2,8 @@
from typing import Callable, Type
from azure.cosmos import CosmosClient
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.cosmosdb import CosmosDBManagementClient
from fastapi import Depends, FastAPI, HTTPException
from starlette.requests import Request
from starlette.status import HTTP_503_SERVICE_UNAVAILABLE
@@ -16,18 +18,30 @@
logging.debug(f"Connecting to {config.STATE_STORE_ENDPOINT}")
try:
+ primary_master_key = get_store_key()
if config.DEBUG:
# ignore TLS(setup is pain) when on dev container and connecting to cosmosdb on windows host.
- cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, config.STATE_STORE_KEY,
- connection_verify=False)
+ cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key, connection_verify=False)
else:
- cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, config.STATE_STORE_KEY)
+ cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key)
logging.debug("Connection established")
return cosmos_client
except Exception as e:
logging.debug(f"Connection to state store could not be established: {e}")
+def get_store_key() -> str:
+ if config.STATE_STORE_KEY:
+ primary_master_key = config.STATE_STORE_KEY
+ else:
+ credential = DefaultAzureCredential(managed_identity_client_id=config.MANAGED_IDENTITY_CLIENT_ID, exclude_shared_token_cache_credential=True)
+ cosmosdb_client = CosmosDBManagementClient(credential, subscription_id=config.SUBSCRIPTION_ID)
+ database_keys = cosmosdb_client.database_accounts.list_keys(resource_group_name=config.RESOURCE_GROUP_NAME, account_name=config.COSMOSDB_ACCOUNT_NAME)
+ primary_master_key = database_keys.primary_master_key
+
+ return primary_master_key
+
+
def get_db_client(app: FastAPI) -> CosmosClient:
if not app.state.cosmos_client:
app.state.cosmos_client = connect_to_db()
diff --git a/management_api_app/core/config.py b/management_api_app/core/config.py
--- a/management_api_app/core/config.py
+++ b/management_api_app/core/config.py
@@ -16,9 +16,13 @@
# State store configuration
STATE_STORE_ENDPOINT: str = config("STATE_STORE_ENDPOINT", default="") # Cosmos DB endpoint
STATE_STORE_KEY: str = config("STATE_STORE_KEY", default="") # Cosmos DB access key
+COSMOSDB_ACCOUNT_NAME: str = config("COSMOSDB_ACCOUNT_NAME", default="") # Cosmos DB account name
STATE_STORE_DATABASE = "AzureTRE"
STATE_STORE_RESOURCES_CONTAINER = "Resources"
STATE_STORE_RESOURCE_TEMPLATES_CONTAINER = "ResourceTemplates"
+SUBSCRIPTION_ID: str = config("SUBSCRIPTION_ID", default="")
+RESOURCE_GROUP_NAME: str = config("RESOURCE_GROUP_NAME", default="")
+
# Service bus configuration
SERVICE_BUS_FULLY_QUALIFIED_NAMESPACE: str = config("SERVICE_BUS_FULLY_QUALIFIED_NAMESPACE", default="")
diff --git a/management_api_app/services/health_checker.py b/management_api_app/services/health_checker.py
--- a/management_api_app/services/health_checker.py
+++ b/management_api_app/services/health_checker.py
@@ -1,7 +1,8 @@
from azure.core import exceptions
from azure.cosmos import CosmosClient
-from core.config import STATE_STORE_ENDPOINT, STATE_STORE_KEY
+from api.dependencies.database import get_store_key
+from core import config
from models.schemas.status import StatusEnum
from resources import strings
@@ -10,7 +11,8 @@
status = StatusEnum.ok
message = ""
try:
- client = CosmosClient(STATE_STORE_ENDPOINT, STATE_STORE_KEY) # noqa: F841 - flake 8 client is not used
+ primary_master_key = get_store_key()
+ client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key) # noqa: F841 - flake 8 client is not used
except exceptions.ServiceRequestError:
status = StatusEnum.not_ok
message = strings.STATE_STORE_ENDPOINT_NOT_RESPONDING
| {"golden_diff": "diff --git a/management_api_app/api/dependencies/database.py b/management_api_app/api/dependencies/database.py\n--- a/management_api_app/api/dependencies/database.py\n+++ b/management_api_app/api/dependencies/database.py\n@@ -2,6 +2,8 @@\n from typing import Callable, Type\n \n from azure.cosmos import CosmosClient\n+from azure.identity import DefaultAzureCredential\n+from azure.mgmt.cosmosdb import CosmosDBManagementClient\n from fastapi import Depends, FastAPI, HTTPException\n from starlette.requests import Request\n from starlette.status import HTTP_503_SERVICE_UNAVAILABLE\n@@ -16,18 +18,30 @@\n logging.debug(f\"Connecting to {config.STATE_STORE_ENDPOINT}\")\n \n try:\n+ primary_master_key = get_store_key()\n if config.DEBUG:\n # ignore TLS(setup is pain) when on dev container and connecting to cosmosdb on windows host.\n- cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, config.STATE_STORE_KEY,\n- connection_verify=False)\n+ cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key, connection_verify=False)\n else:\n- cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, config.STATE_STORE_KEY)\n+ cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key)\n logging.debug(\"Connection established\")\n return cosmos_client\n except Exception as e:\n logging.debug(f\"Connection to state store could not be established: {e}\")\n \n \n+def get_store_key() -> str:\n+ if config.STATE_STORE_KEY:\n+ primary_master_key = config.STATE_STORE_KEY\n+ else:\n+ credential = DefaultAzureCredential(managed_identity_client_id=config.MANAGED_IDENTITY_CLIENT_ID, exclude_shared_token_cache_credential=True)\n+ cosmosdb_client = CosmosDBManagementClient(credential, subscription_id=config.SUBSCRIPTION_ID)\n+ database_keys = cosmosdb_client.database_accounts.list_keys(resource_group_name=config.RESOURCE_GROUP_NAME, account_name=config.COSMOSDB_ACCOUNT_NAME)\n+ primary_master_key = database_keys.primary_master_key\n+\n+ return primary_master_key\n+\n+\n def get_db_client(app: FastAPI) -> CosmosClient:\n if not app.state.cosmos_client:\n app.state.cosmos_client = connect_to_db()\ndiff --git a/management_api_app/core/config.py b/management_api_app/core/config.py\n--- a/management_api_app/core/config.py\n+++ b/management_api_app/core/config.py\n@@ -16,9 +16,13 @@\n # State store configuration\n STATE_STORE_ENDPOINT: str = config(\"STATE_STORE_ENDPOINT\", default=\"\") # Cosmos DB endpoint\n STATE_STORE_KEY: str = config(\"STATE_STORE_KEY\", default=\"\") # Cosmos DB access key\n+COSMOSDB_ACCOUNT_NAME: str = config(\"COSMOSDB_ACCOUNT_NAME\", default=\"\") # Cosmos DB account name\n STATE_STORE_DATABASE = \"AzureTRE\"\n STATE_STORE_RESOURCES_CONTAINER = \"Resources\"\n STATE_STORE_RESOURCE_TEMPLATES_CONTAINER = \"ResourceTemplates\"\n+SUBSCRIPTION_ID: str = config(\"SUBSCRIPTION_ID\", default=\"\")\n+RESOURCE_GROUP_NAME: str = config(\"RESOURCE_GROUP_NAME\", default=\"\")\n+\n \n # Service bus configuration\n SERVICE_BUS_FULLY_QUALIFIED_NAMESPACE: str = config(\"SERVICE_BUS_FULLY_QUALIFIED_NAMESPACE\", default=\"\")\ndiff --git a/management_api_app/services/health_checker.py b/management_api_app/services/health_checker.py\n--- a/management_api_app/services/health_checker.py\n+++ b/management_api_app/services/health_checker.py\n@@ -1,7 +1,8 @@\n from azure.core import exceptions\n from azure.cosmos import CosmosClient\n \n-from core.config import STATE_STORE_ENDPOINT, STATE_STORE_KEY\n+from api.dependencies.database import get_store_key\n+from core 
import config\n from models.schemas.status import StatusEnum\n from resources import strings\n \n@@ -10,7 +11,8 @@\n status = StatusEnum.ok\n message = \"\"\n try:\n- client = CosmosClient(STATE_STORE_ENDPOINT, STATE_STORE_KEY) # noqa: F841 - flake 8 client is not used\n+ primary_master_key = get_store_key()\n+ client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key) # noqa: F841 - flake 8 client is not used\n except exceptions.ServiceRequestError:\n status = StatusEnum.not_ok\n message = strings.STATE_STORE_ENDPOINT_NOT_RESPONDING\n", "issue": "[Task] API to use Managed Identity to authenticate against Cosmos DB\n**Is your feature request related to a problem?\r\n\r\nCurrently the API use an API key `STATE_STORE_KEY` to authenticate against Cosmos DB. The API uses Managed Identity to authenticate against Azure Service Bus.\r\n\r\n**Describe the solution you'd like\r\n\r\nUsing the same simplified access pattern to authenticate from the API to Azure resource.\r\n\r\nNote: looks like Cosmos data-plane doesn't really support managed identity (in Python at least) but there might be a way to do this:\r\nhttps://github.com/MicrosoftDocs/azure-docs/issues/29762\r\nhttps://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/tutorial-linux-vm-access-cosmos-db\r\n\n", "before_files": [{"content": "from starlette.config import Config\n\n\nconfig = Config(\".env\")\n\n# API settings\nAPI_PREFIX = \"/api\"\nPROJECT_NAME: str = config(\"PROJECT_NAME\", default=\"Azure TRE API\")\nDEBUG: bool = config(\"DEBUG\", cast=bool, default=False)\nVERSION = \"0.0.0\"\n\n# Resource Info\nRESOURCE_LOCATION: str = config(\"RESOURCE_LOCATION\", default=\"\")\nTRE_ID: str = config(\"TRE_ID\", default=\"\")\n\n# State store configuration\nSTATE_STORE_ENDPOINT: str = config(\"STATE_STORE_ENDPOINT\", default=\"\") # Cosmos DB endpoint\nSTATE_STORE_KEY: str = config(\"STATE_STORE_KEY\", default=\"\") # Cosmos DB access key\nSTATE_STORE_DATABASE = \"AzureTRE\"\nSTATE_STORE_RESOURCES_CONTAINER = \"Resources\"\nSTATE_STORE_RESOURCE_TEMPLATES_CONTAINER = \"ResourceTemplates\"\n\n# Service bus configuration\nSERVICE_BUS_FULLY_QUALIFIED_NAMESPACE: str = config(\"SERVICE_BUS_FULLY_QUALIFIED_NAMESPACE\", default=\"\")\nSERVICE_BUS_RESOURCE_REQUEST_QUEUE: str = config(\"SERVICE_BUS_RESOURCE_REQUEST_QUEUE\", default=\"\")\nSERVICE_BUS_DEPLOYMENT_STATUS_UPDATE_QUEUE: str = config(\"SERVICE_BUS_DEPLOYMENT_STATUS_UPDATE_QUEUE\", default=\"\")\n\n# Managed identity configuration\nMANAGED_IDENTITY_CLIENT_ID: str = config(\"MANAGED_IDENTITY_CLIENT_ID\", default=\"\")\n\n# Logging and monitoring\nAPP_INSIGHTS_INSTRUMENTATION_KEY: str = config(\"APPINSIGHTS_INSTRUMENTATIONKEY\", default=\"\")\n\n# Authentication\nAPI_CLIENT_ID: str = config(\"API_CLIENT_ID\", default=\"\")\nAPI_CLIENT_SECRET: str = config(\"API_CLIENT_SECRET\", default=\"\")\nSWAGGER_UI_CLIENT_ID: str = config(\"SWAGGER_UI_CLIENT_ID\", default=\"\")\nAAD_TENANT_ID: str = config(\"AAD_TENANT_ID\", default=\"\")\n\nAAD_INSTANCE: str = config(\"AAD_INSTANCE\", default=\"https://login.microsoftonline.com\")\nAPI_AUDIENCE: str = config(\"API_AUDIENCE\", default=API_CLIENT_ID)\n", "path": "management_api_app/core/config.py"}, {"content": "from azure.core import exceptions\nfrom azure.cosmos import CosmosClient\n\nfrom core.config import STATE_STORE_ENDPOINT, STATE_STORE_KEY\nfrom models.schemas.status import StatusEnum\nfrom resources import strings\n\n\ndef create_state_store_status() -> (StatusEnum, str):\n status = StatusEnum.ok\n 
message = \"\"\n try:\n client = CosmosClient(STATE_STORE_ENDPOINT, STATE_STORE_KEY) # noqa: F841 - flake 8 client is not used\n except exceptions.ServiceRequestError:\n status = StatusEnum.not_ok\n message = strings.STATE_STORE_ENDPOINT_NOT_RESPONDING\n except: # noqa: E722 flake8 - no bare excepts\n status = StatusEnum.not_ok\n message = strings.UNSPECIFIED_ERROR\n return status, message\n", "path": "management_api_app/services/health_checker.py"}, {"content": "import logging\nfrom typing import Callable, Type\n\nfrom azure.cosmos import CosmosClient\nfrom fastapi import Depends, FastAPI, HTTPException\nfrom starlette.requests import Request\nfrom starlette.status import HTTP_503_SERVICE_UNAVAILABLE\n\nfrom core import config\nfrom db.errors import UnableToAccessDatabase\nfrom db.repositories.base import BaseRepository\nfrom resources import strings\n\n\ndef connect_to_db() -> CosmosClient:\n logging.debug(f\"Connecting to {config.STATE_STORE_ENDPOINT}\")\n\n try:\n if config.DEBUG:\n # ignore TLS(setup is pain) when on dev container and connecting to cosmosdb on windows host.\n cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, config.STATE_STORE_KEY,\n connection_verify=False)\n else:\n cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, config.STATE_STORE_KEY)\n logging.debug(\"Connection established\")\n return cosmos_client\n except Exception as e:\n logging.debug(f\"Connection to state store could not be established: {e}\")\n\n\ndef get_db_client(app: FastAPI) -> CosmosClient:\n if not app.state.cosmos_client:\n app.state.cosmos_client = connect_to_db()\n return app.state.cosmos_client\n\n\ndef get_db_client_from_request(request: Request) -> CosmosClient:\n return get_db_client(request.app)\n\n\ndef get_repository(repo_type: Type[BaseRepository]) -> Callable[[CosmosClient], BaseRepository]:\n def _get_repo(client: CosmosClient = Depends(get_db_client_from_request)) -> BaseRepository:\n try:\n return repo_type(client)\n except UnableToAccessDatabase:\n raise HTTPException(status_code=HTTP_503_SERVICE_UNAVAILABLE, detail=strings.STATE_STORE_ENDPOINT_NOT_RESPONDING)\n\n return _get_repo\n", "path": "management_api_app/api/dependencies/database.py"}], "after_files": [{"content": "from starlette.config import Config\n\n\nconfig = Config(\".env\")\n\n# API settings\nAPI_PREFIX = \"/api\"\nPROJECT_NAME: str = config(\"PROJECT_NAME\", default=\"Azure TRE API\")\nDEBUG: bool = config(\"DEBUG\", cast=bool, default=False)\nVERSION = \"0.0.0\"\n\n# Resource Info\nRESOURCE_LOCATION: str = config(\"RESOURCE_LOCATION\", default=\"\")\nTRE_ID: str = config(\"TRE_ID\", default=\"\")\n\n# State store configuration\nSTATE_STORE_ENDPOINT: str = config(\"STATE_STORE_ENDPOINT\", default=\"\") # Cosmos DB endpoint\nSTATE_STORE_KEY: str = config(\"STATE_STORE_KEY\", default=\"\") # Cosmos DB access key\nCOSMOSDB_ACCOUNT_NAME: str = config(\"COSMOSDB_ACCOUNT_NAME\", default=\"\") # Cosmos DB account name\nSTATE_STORE_DATABASE = \"AzureTRE\"\nSTATE_STORE_RESOURCES_CONTAINER = \"Resources\"\nSTATE_STORE_RESOURCE_TEMPLATES_CONTAINER = \"ResourceTemplates\"\nSUBSCRIPTION_ID: str = config(\"SUBSCRIPTION_ID\", default=\"\")\nRESOURCE_GROUP_NAME: str = config(\"RESOURCE_GROUP_NAME\", default=\"\")\n\n\n# Service bus configuration\nSERVICE_BUS_FULLY_QUALIFIED_NAMESPACE: str = config(\"SERVICE_BUS_FULLY_QUALIFIED_NAMESPACE\", default=\"\")\nSERVICE_BUS_RESOURCE_REQUEST_QUEUE: str = config(\"SERVICE_BUS_RESOURCE_REQUEST_QUEUE\", default=\"\")\nSERVICE_BUS_DEPLOYMENT_STATUS_UPDATE_QUEUE: str = 
config(\"SERVICE_BUS_DEPLOYMENT_STATUS_UPDATE_QUEUE\", default=\"\")\n\n# Managed identity configuration\nMANAGED_IDENTITY_CLIENT_ID: str = config(\"MANAGED_IDENTITY_CLIENT_ID\", default=\"\")\n\n# Logging and monitoring\nAPP_INSIGHTS_INSTRUMENTATION_KEY: str = config(\"APPINSIGHTS_INSTRUMENTATIONKEY\", default=\"\")\n\n# Authentication\nAPI_CLIENT_ID: str = config(\"API_CLIENT_ID\", default=\"\")\nAPI_CLIENT_SECRET: str = config(\"API_CLIENT_SECRET\", default=\"\")\nSWAGGER_UI_CLIENT_ID: str = config(\"SWAGGER_UI_CLIENT_ID\", default=\"\")\nAAD_TENANT_ID: str = config(\"AAD_TENANT_ID\", default=\"\")\n\nAAD_INSTANCE: str = config(\"AAD_INSTANCE\", default=\"https://login.microsoftonline.com\")\nAPI_AUDIENCE: str = config(\"API_AUDIENCE\", default=API_CLIENT_ID)\n", "path": "management_api_app/core/config.py"}, {"content": "from azure.core import exceptions\nfrom azure.cosmos import CosmosClient\n\nfrom api.dependencies.database import get_store_key\nfrom core import config\nfrom models.schemas.status import StatusEnum\nfrom resources import strings\n\n\ndef create_state_store_status() -> (StatusEnum, str):\n status = StatusEnum.ok\n message = \"\"\n try:\n primary_master_key = get_store_key()\n client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key) # noqa: F841 - flake 8 client is not used\n except exceptions.ServiceRequestError:\n status = StatusEnum.not_ok\n message = strings.STATE_STORE_ENDPOINT_NOT_RESPONDING\n except: # noqa: E722 flake8 - no bare excepts\n status = StatusEnum.not_ok\n message = strings.UNSPECIFIED_ERROR\n return status, message\n", "path": "management_api_app/services/health_checker.py"}, {"content": "import logging\nfrom typing import Callable, Type\n\nfrom azure.cosmos import CosmosClient\nfrom azure.identity import DefaultAzureCredential\nfrom azure.mgmt.cosmosdb import CosmosDBManagementClient\nfrom fastapi import Depends, FastAPI, HTTPException\nfrom starlette.requests import Request\nfrom starlette.status import HTTP_503_SERVICE_UNAVAILABLE\n\nfrom core import config\nfrom db.errors import UnableToAccessDatabase\nfrom db.repositories.base import BaseRepository\nfrom resources import strings\n\n\ndef connect_to_db() -> CosmosClient:\n logging.debug(f\"Connecting to {config.STATE_STORE_ENDPOINT}\")\n\n try:\n primary_master_key = get_store_key()\n if config.DEBUG:\n # ignore TLS(setup is pain) when on dev container and connecting to cosmosdb on windows host.\n cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key, connection_verify=False)\n else:\n cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key)\n logging.debug(\"Connection established\")\n return cosmos_client\n except Exception as e:\n logging.debug(f\"Connection to state store could not be established: {e}\")\n\n\ndef get_store_key() -> str:\n if config.STATE_STORE_KEY:\n primary_master_key = config.STATE_STORE_KEY\n else:\n credential = DefaultAzureCredential(managed_identity_client_id=config.MANAGED_IDENTITY_CLIENT_ID, exclude_shared_token_cache_credential=True)\n cosmosdb_client = CosmosDBManagementClient(credential, subscription_id=config.SUBSCRIPTION_ID)\n database_keys = cosmosdb_client.database_accounts.list_keys(resource_group_name=config.RESOURCE_GROUP_NAME, account_name=config.COSMOSDB_ACCOUNT_NAME)\n primary_master_key = database_keys.primary_master_key\n\n return primary_master_key\n\n\ndef get_db_client(app: FastAPI) -> CosmosClient:\n if not app.state.cosmos_client:\n app.state.cosmos_client = connect_to_db()\n return 
app.state.cosmos_client\n\n\ndef get_db_client_from_request(request: Request) -> CosmosClient:\n return get_db_client(request.app)\n\n\ndef get_repository(repo_type: Type[BaseRepository]) -> Callable[[CosmosClient], BaseRepository]:\n def _get_repo(client: CosmosClient = Depends(get_db_client_from_request)) -> BaseRepository:\n try:\n return repo_type(client)\n except UnableToAccessDatabase:\n raise HTTPException(status_code=HTTP_503_SERVICE_UNAVAILABLE, detail=strings.STATE_STORE_ENDPOINT_NOT_RESPONDING)\n\n return _get_repo\n", "path": "management_api_app/api/dependencies/database.py"}]} | 1,606 | 942 |
gh_patches_debug_2366 | rasdani/github-patches | git_diff | ydataai__ydata-profiling-1540 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
AttributeError: 'FileDataContext' object has no attribute 'create_expectation_suite'
### Current Behaviour
AttributeError: 'FileDataContext' object has no attribute 'create_expectation_suite'
[Related to this issue](https://docs.greatexpectations.io/docs/changelog/#01549): "[MAINTENANCE] Change all instances of create_expectation_suite to add_expectation_suite in tests, docs, and source code ([#7117](https://github.com/great-expectations/great_expectations/pull/7117))"
DataContext objects no longer has a method 'create_expectation_suite'.
### Expected Behaviour
[Great expectation test suite](https://docs.profiling.ydata.ai/4.6/integrations/great_expectations/#creating-expectation-suites-with-ydata-profiling): "ydata-profiling provides a simple to_expectation_suite() method that returns a Great Expectations ExpectationSuite object which contains a set of Expectations."
### Data Description
[Dataset can be used from the documentation](https://docs.profiling.ydata.ai/4.6/integrations/great_expectations/#about-great-expectations)
### Code that reproduces the bug
```Python
profile = ProfileReport(
df,
title='Pandas Profiling Report',
type_schema=type_schema,
minimal=True,
lazy=False
)
data_context = ge.data_context.DataContext(
context_root_dir='./gx/'
)
suite = profile.to_expectation_suite(
data_context=data_context
)
```
### pandas-profiling version
-
### Dependencies
```Text
-
```
### OS
_No response_
### Checklist
- [X] There is not yet another bug report for this issue in the [issue tracker](https://github.com/ydataai/pandas-profiling/issues)
- [X] The problem is reproducible from this bug report. [This guide](http://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports) can help to craft a minimal bug report.
- [X] The issue has not been resolved by the entries listed under [Common Issues](https://pandas-profiling.ydata.ai/docs/master/pages/support_contrib/common_issues.html).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/ydata_profiling/expectations_report.py`
Content:
```
1 from typing import Any, Optional
2
3 import pandas as pd
4 from visions import VisionsTypeset
5
6 from ydata_profiling.config import Settings
7 from ydata_profiling.model import BaseDescription, expectation_algorithms
8 from ydata_profiling.model.handler import Handler
9 from ydata_profiling.utils.dataframe import slugify
10
11
12 class ExpectationHandler(Handler):
13 """Default handler"""
14
15 def __init__(self, typeset: VisionsTypeset, *args, **kwargs):
16 mapping = {
17 "Unsupported": [expectation_algorithms.generic_expectations],
18 "Text": [expectation_algorithms.categorical_expectations],
19 "Categorical": [expectation_algorithms.categorical_expectations],
20 "Boolean": [expectation_algorithms.categorical_expectations],
21 "Numeric": [expectation_algorithms.numeric_expectations],
22 "URL": [expectation_algorithms.url_expectations],
23 "File": [expectation_algorithms.file_expectations],
24 "Path": [expectation_algorithms.path_expectations],
25 "DateTime": [expectation_algorithms.datetime_expectations],
26 "Image": [expectation_algorithms.image_expectations],
27 }
28 super().__init__(mapping, typeset, *args, **kwargs)
29
30
31 class ExpectationsReport:
32 config: Settings
33 df: Optional[pd.DataFrame] = None
34
35 @property
36 def typeset(self) -> Optional[VisionsTypeset]:
37 return None
38
39 def to_expectation_suite(
40 self,
41 suite_name: Optional[str] = None,
42 data_context: Optional[Any] = None,
43 save_suite: bool = True,
44 run_validation: bool = True,
45 build_data_docs: bool = True,
46 handler: Optional[Handler] = None,
47 ) -> Any:
48 """
49 All parameters default to True to make it easier to access the full functionality of Great Expectations out of
50 the box.
51 Args:
52 suite_name: The name of your expectation suite
53 data_context: A user-specified data context
54 save_suite: Boolean to determine whether to save the suite to .json as part of the method
55 run_validation: Boolean to determine whether to run validation as part of the method
56 build_data_docs: Boolean to determine whether to build data docs, save the .html file, and open data docs in
57 your browser
58 handler: The handler to use for building expectation
59
60 Returns:
61 An ExpectationSuite
62 """
63 try:
64 import great_expectations as ge
65 except ImportError as ex:
66 raise ImportError(
67 "Please install great expectations before using the expectation functionality"
68 ) from ex
69
70 # Use report title if suite is empty
71 if suite_name is None:
72 suite_name = slugify(self.config.title)
73
74 # Use the default handler if none
75 if handler is None:
76 handler = ExpectationHandler(self.typeset)
77
78 # Obtain the ge context and create the expectation suite
79 if not data_context:
80 data_context = ge.data_context.DataContext()
81
82 suite = data_context.create_expectation_suite(
83 suite_name, overwrite_existing=True
84 )
85
86 # Instantiate an in-memory pandas dataset
87 batch = ge.dataset.PandasDataset(self.df, expectation_suite=suite)
88
89 # Obtain the profiling summary
90 summary: BaseDescription = self.get_description() # type: ignore
91
92 # Dispatch to expectations per semantic variable type
93 for name, variable_summary in summary.variables.items():
94 handler.handle(variable_summary["type"], name, variable_summary, batch)
95
96 # We don't actually update the suite object on the batch in place, so need
97 # to get the populated suite from the batch
98 suite = batch.get_expectation_suite()
99
100 validation_result_identifier = None
101 if run_validation:
102 batch = ge.dataset.PandasDataset(self.df, expectation_suite=suite)
103
104 results = data_context.run_validation_operator(
105 "action_list_operator", assets_to_validate=[batch]
106 )
107 validation_result_identifier = results.list_validation_result_identifiers()[
108 0
109 ]
110
111 # Write expectations and open data docs
112 if save_suite or build_data_docs:
113 data_context.save_expectation_suite(suite)
114
115 if build_data_docs:
116 data_context.build_data_docs()
117 data_context.open_data_docs(validation_result_identifier)
118
119 return batch.get_expectation_suite()
120
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/ydata_profiling/expectations_report.py b/src/ydata_profiling/expectations_report.py
--- a/src/ydata_profiling/expectations_report.py
+++ b/src/ydata_profiling/expectations_report.py
@@ -79,7 +79,7 @@
if not data_context:
data_context = ge.data_context.DataContext()
- suite = data_context.create_expectation_suite(
+ suite = data_context.add_expectation_suite(
suite_name, overwrite_existing=True
)
| {"golden_diff": "diff --git a/src/ydata_profiling/expectations_report.py b/src/ydata_profiling/expectations_report.py\n--- a/src/ydata_profiling/expectations_report.py\n+++ b/src/ydata_profiling/expectations_report.py\n@@ -79,7 +79,7 @@\n if not data_context:\n data_context = ge.data_context.DataContext()\n \n- suite = data_context.create_expectation_suite(\n+ suite = data_context.add_expectation_suite(\n suite_name, overwrite_existing=True\n )\n", "issue": "AttributeError: 'FileDataContext' object has no attribute 'create_expectation_suite'\n### Current Behaviour\n\nAttributeError: 'FileDataContext' object has no attribute 'create_expectation_suite'\r\n\r\n[Related to this issue](https://docs.greatexpectations.io/docs/changelog/#01549): \"[MAINTENANCE] Change all instances of create_expectation_suite to add_expectation_suite in tests, docs, and source code ([#7117](https://github.com/great-expectations/great_expectations/pull/7117))\" \r\n\r\nDataContext objects no longer has a method 'create_expectation_suite'.\n\n### Expected Behaviour\n\n[Great expectation test suite](https://docs.profiling.ydata.ai/4.6/integrations/great_expectations/#creating-expectation-suites-with-ydata-profiling): \"ydata-profiling provides a simple to_expectation_suite() method that returns a Great Expectations ExpectationSuite object which contains a set of Expectations.\"\n\n### Data Description\n\n[Dataset can be used from the documentation](https://docs.profiling.ydata.ai/4.6/integrations/great_expectations/#about-great-expectations)\n\n### Code that reproduces the bug\n\n```Python\nprofile = ProfileReport(\r\n df, \r\n title='Pandas Profiling Report', \r\n type_schema=type_schema,\r\n minimal=True, \r\n lazy=False\r\n)\r\ndata_context = ge.data_context.DataContext(\r\n context_root_dir='./gx/'\r\n)\r\nsuite = profile.to_expectation_suite(\r\n data_context=data_context\r\n)\n```\n\n\n### pandas-profiling version\n\n-\n\n### Dependencies\n\n```Text\n-\n```\n\n\n### OS\n\n_No response_\n\n### Checklist\n\n- [X] There is not yet another bug report for this issue in the [issue tracker](https://github.com/ydataai/pandas-profiling/issues)\n- [X] The problem is reproducible from this bug report. 
[This guide](http://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports) can help to craft a minimal bug report.\n- [X] The issue has not been resolved by the entries listed under [Common Issues](https://pandas-profiling.ydata.ai/docs/master/pages/support_contrib/common_issues.html).\n", "before_files": [{"content": "from typing import Any, Optional\n\nimport pandas as pd\nfrom visions import VisionsTypeset\n\nfrom ydata_profiling.config import Settings\nfrom ydata_profiling.model import BaseDescription, expectation_algorithms\nfrom ydata_profiling.model.handler import Handler\nfrom ydata_profiling.utils.dataframe import slugify\n\n\nclass ExpectationHandler(Handler):\n \"\"\"Default handler\"\"\"\n\n def __init__(self, typeset: VisionsTypeset, *args, **kwargs):\n mapping = {\n \"Unsupported\": [expectation_algorithms.generic_expectations],\n \"Text\": [expectation_algorithms.categorical_expectations],\n \"Categorical\": [expectation_algorithms.categorical_expectations],\n \"Boolean\": [expectation_algorithms.categorical_expectations],\n \"Numeric\": [expectation_algorithms.numeric_expectations],\n \"URL\": [expectation_algorithms.url_expectations],\n \"File\": [expectation_algorithms.file_expectations],\n \"Path\": [expectation_algorithms.path_expectations],\n \"DateTime\": [expectation_algorithms.datetime_expectations],\n \"Image\": [expectation_algorithms.image_expectations],\n }\n super().__init__(mapping, typeset, *args, **kwargs)\n\n\nclass ExpectationsReport:\n config: Settings\n df: Optional[pd.DataFrame] = None\n\n @property\n def typeset(self) -> Optional[VisionsTypeset]:\n return None\n\n def to_expectation_suite(\n self,\n suite_name: Optional[str] = None,\n data_context: Optional[Any] = None,\n save_suite: bool = True,\n run_validation: bool = True,\n build_data_docs: bool = True,\n handler: Optional[Handler] = None,\n ) -> Any:\n \"\"\"\n All parameters default to True to make it easier to access the full functionality of Great Expectations out of\n the box.\n Args:\n suite_name: The name of your expectation suite\n data_context: A user-specified data context\n save_suite: Boolean to determine whether to save the suite to .json as part of the method\n run_validation: Boolean to determine whether to run validation as part of the method\n build_data_docs: Boolean to determine whether to build data docs, save the .html file, and open data docs in\n your browser\n handler: The handler to use for building expectation\n\n Returns:\n An ExpectationSuite\n \"\"\"\n try:\n import great_expectations as ge\n except ImportError as ex:\n raise ImportError(\n \"Please install great expectations before using the expectation functionality\"\n ) from ex\n\n # Use report title if suite is empty\n if suite_name is None:\n suite_name = slugify(self.config.title)\n\n # Use the default handler if none\n if handler is None:\n handler = ExpectationHandler(self.typeset)\n\n # Obtain the ge context and create the expectation suite\n if not data_context:\n data_context = ge.data_context.DataContext()\n\n suite = data_context.create_expectation_suite(\n suite_name, overwrite_existing=True\n )\n\n # Instantiate an in-memory pandas dataset\n batch = ge.dataset.PandasDataset(self.df, expectation_suite=suite)\n\n # Obtain the profiling summary\n summary: BaseDescription = self.get_description() # type: ignore\n\n # Dispatch to expectations per semantic variable type\n for name, variable_summary in summary.variables.items():\n handler.handle(variable_summary[\"type\"], name, variable_summary, 
batch)\n\n # We don't actually update the suite object on the batch in place, so need\n # to get the populated suite from the batch\n suite = batch.get_expectation_suite()\n\n validation_result_identifier = None\n if run_validation:\n batch = ge.dataset.PandasDataset(self.df, expectation_suite=suite)\n\n results = data_context.run_validation_operator(\n \"action_list_operator\", assets_to_validate=[batch]\n )\n validation_result_identifier = results.list_validation_result_identifiers()[\n 0\n ]\n\n # Write expectations and open data docs\n if save_suite or build_data_docs:\n data_context.save_expectation_suite(suite)\n\n if build_data_docs:\n data_context.build_data_docs()\n data_context.open_data_docs(validation_result_identifier)\n\n return batch.get_expectation_suite()\n", "path": "src/ydata_profiling/expectations_report.py"}], "after_files": [{"content": "from typing import Any, Optional\n\nimport pandas as pd\nfrom visions import VisionsTypeset\n\nfrom ydata_profiling.config import Settings\nfrom ydata_profiling.model import BaseDescription, expectation_algorithms\nfrom ydata_profiling.model.handler import Handler\nfrom ydata_profiling.utils.dataframe import slugify\n\n\nclass ExpectationHandler(Handler):\n \"\"\"Default handler\"\"\"\n\n def __init__(self, typeset: VisionsTypeset, *args, **kwargs):\n mapping = {\n \"Unsupported\": [expectation_algorithms.generic_expectations],\n \"Text\": [expectation_algorithms.categorical_expectations],\n \"Categorical\": [expectation_algorithms.categorical_expectations],\n \"Boolean\": [expectation_algorithms.categorical_expectations],\n \"Numeric\": [expectation_algorithms.numeric_expectations],\n \"URL\": [expectation_algorithms.url_expectations],\n \"File\": [expectation_algorithms.file_expectations],\n \"Path\": [expectation_algorithms.path_expectations],\n \"DateTime\": [expectation_algorithms.datetime_expectations],\n \"Image\": [expectation_algorithms.image_expectations],\n }\n super().__init__(mapping, typeset, *args, **kwargs)\n\n\nclass ExpectationsReport:\n config: Settings\n df: Optional[pd.DataFrame] = None\n\n @property\n def typeset(self) -> Optional[VisionsTypeset]:\n return None\n\n def to_expectation_suite(\n self,\n suite_name: Optional[str] = None,\n data_context: Optional[Any] = None,\n save_suite: bool = True,\n run_validation: bool = True,\n build_data_docs: bool = True,\n handler: Optional[Handler] = None,\n ) -> Any:\n \"\"\"\n All parameters default to True to make it easier to access the full functionality of Great Expectations out of\n the box.\n Args:\n suite_name: The name of your expectation suite\n data_context: A user-specified data context\n save_suite: Boolean to determine whether to save the suite to .json as part of the method\n run_validation: Boolean to determine whether to run validation as part of the method\n build_data_docs: Boolean to determine whether to build data docs, save the .html file, and open data docs in\n your browser\n handler: The handler to use for building expectation\n\n Returns:\n An ExpectationSuite\n \"\"\"\n try:\n import great_expectations as ge\n except ImportError as ex:\n raise ImportError(\n \"Please install great expectations before using the expectation functionality\"\n ) from ex\n\n # Use report title if suite is empty\n if suite_name is None:\n suite_name = slugify(self.config.title)\n\n # Use the default handler if none\n if handler is None:\n handler = ExpectationHandler(self.typeset)\n\n # Obtain the ge context and create the expectation suite\n if not data_context:\n 
data_context = ge.data_context.DataContext()\n\n suite = data_context.add_expectation_suite(\n suite_name, overwrite_existing=True\n )\n\n # Instantiate an in-memory pandas dataset\n batch = ge.dataset.PandasDataset(self.df, expectation_suite=suite)\n\n # Obtain the profiling summary\n summary: BaseDescription = self.get_description() # type: ignore\n\n # Dispatch to expectations per semantic variable type\n for name, variable_summary in summary.variables.items():\n handler.handle(variable_summary[\"type\"], name, variable_summary, batch)\n\n # We don't actually update the suite object on the batch in place, so need\n # to get the populated suite from the batch\n suite = batch.get_expectation_suite()\n\n validation_result_identifier = None\n if run_validation:\n batch = ge.dataset.PandasDataset(self.df, expectation_suite=suite)\n\n results = data_context.run_validation_operator(\n \"action_list_operator\", assets_to_validate=[batch]\n )\n validation_result_identifier = results.list_validation_result_identifiers()[\n 0\n ]\n\n # Write expectations and open data docs\n if save_suite or build_data_docs:\n data_context.save_expectation_suite(suite)\n\n if build_data_docs:\n data_context.build_data_docs()\n data_context.open_data_docs(validation_result_identifier)\n\n return batch.get_expectation_suite()\n", "path": "src/ydata_profiling/expectations_report.py"}]} | 1,915 | 112 |
gh_patches_debug_22349 | rasdani/github-patches | git_diff | yt-project__yt-2502 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
repr for particle unions
The `__repr__` for particle unions should include the contents of `sub_types`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `yt/data_objects/unions.py`
Content:
```
1 from yt.funcs import ensure_list
2
3 class Union(object):
4 def __init__(self, name, sub_types):
5 self.name = name
6 self.sub_types = ensure_list(sub_types)
7
8 def __iter__(self):
9 for st in self.sub_types:
10 yield st
11
12 class MeshUnion(Union):
13 def __init__(self, name, sub_types):
14 super(MeshUnion, self).__init__(name, sub_types)
15
```
Path: `yt/data_objects/particle_unions.py`
Content:
```
1 from .unions import Union
2
3 class ParticleUnion(Union):
4 def __init__(self, name, sub_types):
5 super(ParticleUnion, self).__init__(name, sub_types)
6
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/yt/data_objects/particle_unions.py b/yt/data_objects/particle_unions.py
--- a/yt/data_objects/particle_unions.py
+++ b/yt/data_objects/particle_unions.py
@@ -1,5 +1,6 @@
from .unions import Union
class ParticleUnion(Union):
+ _union_type = "particle"
def __init__(self, name, sub_types):
super(ParticleUnion, self).__init__(name, sub_types)
diff --git a/yt/data_objects/unions.py b/yt/data_objects/unions.py
--- a/yt/data_objects/unions.py
+++ b/yt/data_objects/unions.py
@@ -1,6 +1,7 @@
from yt.funcs import ensure_list
class Union(object):
+ _union_type = ""
def __init__(self, name, sub_types):
self.name = name
self.sub_types = ensure_list(sub_types)
@@ -9,6 +10,11 @@
for st in self.sub_types:
yield st
+ def __repr__(self):
+ return "{} Union: '{}' composed of: {}".format(
+ self._union_type.capitalize(), self.name, self.sub_types)
+
class MeshUnion(Union):
+ _union_type = "mesh"
def __init__(self, name, sub_types):
super(MeshUnion, self).__init__(name, sub_types)
| {"golden_diff": "diff --git a/yt/data_objects/particle_unions.py b/yt/data_objects/particle_unions.py\n--- a/yt/data_objects/particle_unions.py\n+++ b/yt/data_objects/particle_unions.py\n@@ -1,5 +1,6 @@\n from .unions import Union\n \n class ParticleUnion(Union):\n+ _union_type = \"particle\"\n def __init__(self, name, sub_types):\n super(ParticleUnion, self).__init__(name, sub_types)\ndiff --git a/yt/data_objects/unions.py b/yt/data_objects/unions.py\n--- a/yt/data_objects/unions.py\n+++ b/yt/data_objects/unions.py\n@@ -1,6 +1,7 @@\n from yt.funcs import ensure_list\n \n class Union(object):\n+ _union_type = \"\"\n def __init__(self, name, sub_types):\n self.name = name\n self.sub_types = ensure_list(sub_types)\n@@ -9,6 +10,11 @@\n for st in self.sub_types:\n yield st\n \n+ def __repr__(self):\n+ return \"{} Union: '{}' composed of: {}\".format(\n+ self._union_type.capitalize(), self.name, self.sub_types)\n+\n class MeshUnion(Union):\n+ _union_type = \"mesh\"\n def __init__(self, name, sub_types):\n super(MeshUnion, self).__init__(name, sub_types)\n", "issue": "repr for particle unions\nThe `__repr__` for particle unions should include the contents of `sub_types`\n", "before_files": [{"content": "from yt.funcs import ensure_list\n\nclass Union(object):\n def __init__(self, name, sub_types):\n self.name = name\n self.sub_types = ensure_list(sub_types)\n\n def __iter__(self):\n for st in self.sub_types:\n yield st\n\nclass MeshUnion(Union):\n def __init__(self, name, sub_types):\n super(MeshUnion, self).__init__(name, sub_types)\n", "path": "yt/data_objects/unions.py"}, {"content": "from .unions import Union\n\nclass ParticleUnion(Union):\n def __init__(self, name, sub_types):\n super(ParticleUnion, self).__init__(name, sub_types)\n", "path": "yt/data_objects/particle_unions.py"}], "after_files": [{"content": "from yt.funcs import ensure_list\n\nclass Union(object):\n _union_type = \"\"\n def __init__(self, name, sub_types):\n self.name = name\n self.sub_types = ensure_list(sub_types)\n\n def __iter__(self):\n for st in self.sub_types:\n yield st\n\n def __repr__(self):\n return \"{} Union: '{}' composed of: {}\".format(\n self._union_type.capitalize(), self.name, self.sub_types)\n\nclass MeshUnion(Union):\n _union_type = \"mesh\"\n def __init__(self, name, sub_types):\n super(MeshUnion, self).__init__(name, sub_types)\n", "path": "yt/data_objects/unions.py"}, {"content": "from .unions import Union\n\nclass ParticleUnion(Union):\n _union_type = \"particle\"\n def __init__(self, name, sub_types):\n super(ParticleUnion, self).__init__(name, sub_types)\n", "path": "yt/data_objects/particle_unions.py"}]} | 465 | 307 |
gh_patches_debug_22500 | rasdani/github-patches | git_diff | zulip__zulip-28236 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`manage.py change_user_role --help` should self-document the set of roles it supports
Right now, the only way it self-documents this is when you try an invalid role:
```
manage.py change_user_role: error: argument <new_role>: invalid choice: 'is_billing_administrator' (choose from 'owner', 'admin', 'moderator', 'member', 'guest', 'can_forge_sender', 'can_create_users', 'is_billing_admin')
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `zerver/management/commands/change_user_role.py`
Content:
```
1 from argparse import ArgumentParser
2 from typing import Any
3
4 from django.core.management.base import CommandError
5 from typing_extensions import override
6
7 from zerver.actions.users import (
8 do_change_can_create_users,
9 do_change_can_forge_sender,
10 do_change_is_billing_admin,
11 do_change_user_role,
12 )
13 from zerver.lib.management import ZulipBaseCommand
14 from zerver.models import UserProfile
15
16
17 class Command(ZulipBaseCommand):
18 help = """Change role of an existing user in their (own) Realm.
19
20 ONLY perform this on customer request from an authorized person.
21 """
22
23 @override
24 def add_arguments(self, parser: ArgumentParser) -> None:
25 parser.add_argument("email", metavar="<email>", help="email of user to change role")
26 parser.add_argument(
27 "new_role",
28 metavar="<new_role>",
29 choices=[
30 "owner",
31 "admin",
32 "moderator",
33 "member",
34 "guest",
35 "can_forge_sender",
36 "can_create_users",
37 "is_billing_admin",
38 ],
39 help="new role of the user",
40 )
41 parser.add_argument(
42 "--revoke",
43 dest="grant",
44 action="store_false",
45 help="Remove can_forge_sender or can_create_users permission.",
46 )
47 self.add_realm_args(parser, required=True)
48
49 @override
50 def handle(self, *args: Any, **options: Any) -> None:
51 email = options["email"]
52 realm = self.get_realm(options)
53
54 user = self.get_user(email, realm)
55
56 user_role_map = {
57 "owner": UserProfile.ROLE_REALM_OWNER,
58 "admin": UserProfile.ROLE_REALM_ADMINISTRATOR,
59 "moderator": UserProfile.ROLE_MODERATOR,
60 "member": UserProfile.ROLE_MEMBER,
61 "guest": UserProfile.ROLE_GUEST,
62 }
63
64 if options["new_role"] not in ["can_forge_sender", "can_create_users", "is_billing_admin"]:
65 new_role = user_role_map[options["new_role"]]
66 if not options["grant"]:
67 raise CommandError(
68 "Revoke not supported with this permission; please specify new role."
69 )
70 if new_role == user.role:
71 raise CommandError("User already has this role.")
72 old_role_name = UserProfile.ROLE_ID_TO_NAME_MAP[user.role]
73 do_change_user_role(user, new_role, acting_user=None)
74 new_role_name = UserProfile.ROLE_ID_TO_NAME_MAP[user.role]
75 print(
76 f"Role for {user.delivery_email} changed from {old_role_name} to {new_role_name}."
77 )
78 return
79
80 if options["new_role"] == "can_forge_sender":
81 if user.can_forge_sender and options["grant"]:
82 raise CommandError("User can already forge messages for this realm.")
83 elif not user.can_forge_sender and not options["grant"]:
84 raise CommandError("User can't forge messages for this realm.")
85 do_change_can_forge_sender(user, options["grant"])
86
87 granted_text = "have" if options["grant"] else "not have"
88 print(
89 f"{user.delivery_email} changed to {granted_text} {options['new_role']} permission."
90 )
91 elif options["new_role"] == "can_create_users":
92 if user.can_create_users and options["grant"]:
93 raise CommandError("User can already create users for this realm.")
94 elif not user.can_create_users and not options["grant"]:
95 raise CommandError("User can't create users for this realm.")
96 do_change_can_create_users(user, options["grant"])
97 else:
98 assert options["new_role"] == "is_billing_admin"
99 if user.is_billing_admin and options["grant"]:
100 raise CommandError("User already is a billing admin for this realm.")
101 elif not user.is_billing_admin and not options["grant"]:
102 raise CommandError("User is not a billing admin for this realm.")
103
104 do_change_is_billing_admin(user, options["grant"])
105
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/zerver/management/commands/change_user_role.py b/zerver/management/commands/change_user_role.py
--- a/zerver/management/commands/change_user_role.py
+++ b/zerver/management/commands/change_user_role.py
@@ -13,6 +13,17 @@
from zerver.lib.management import ZulipBaseCommand
from zerver.models import UserProfile
+ROLE_CHOICES = [
+ "owner",
+ "admin",
+ "moderator",
+ "member",
+ "guest",
+ "can_forge_sender",
+ "can_create_users",
+ "is_billing_admin",
+]
+
class Command(ZulipBaseCommand):
help = """Change role of an existing user in their (own) Realm.
@@ -26,17 +37,8 @@
parser.add_argument(
"new_role",
metavar="<new_role>",
- choices=[
- "owner",
- "admin",
- "moderator",
- "member",
- "guest",
- "can_forge_sender",
- "can_create_users",
- "is_billing_admin",
- ],
- help="new role of the user",
+ choices=ROLE_CHOICES,
+ help="new role of the user; choose from " + ", ".join(ROLE_CHOICES),
)
parser.add_argument(
"--revoke",
| {"golden_diff": "diff --git a/zerver/management/commands/change_user_role.py b/zerver/management/commands/change_user_role.py\n--- a/zerver/management/commands/change_user_role.py\n+++ b/zerver/management/commands/change_user_role.py\n@@ -13,6 +13,17 @@\n from zerver.lib.management import ZulipBaseCommand\n from zerver.models import UserProfile\n \n+ROLE_CHOICES = [\n+ \"owner\",\n+ \"admin\",\n+ \"moderator\",\n+ \"member\",\n+ \"guest\",\n+ \"can_forge_sender\",\n+ \"can_create_users\",\n+ \"is_billing_admin\",\n+]\n+\n \n class Command(ZulipBaseCommand):\n help = \"\"\"Change role of an existing user in their (own) Realm.\n@@ -26,17 +37,8 @@\n parser.add_argument(\n \"new_role\",\n metavar=\"<new_role>\",\n- choices=[\n- \"owner\",\n- \"admin\",\n- \"moderator\",\n- \"member\",\n- \"guest\",\n- \"can_forge_sender\",\n- \"can_create_users\",\n- \"is_billing_admin\",\n- ],\n- help=\"new role of the user\",\n+ choices=ROLE_CHOICES,\n+ help=\"new role of the user; choose from \" + \", \".join(ROLE_CHOICES),\n )\n parser.add_argument(\n \"--revoke\",\n", "issue": "`manage.py change_user_role --help` should self-document the set of roles it supports\nRight now, the only way it self-documents this is when you try an invalid role:\r\n\r\n```\r\nmanage.py change_user_role: error: argument <new_role>: invalid choice: 'is_billing_administrator' (choose from 'owner', 'admin', 'moderator', 'member', 'guest', 'can_forge_sender', 'can_create_users', 'is_billing_admin')\r\n```\n", "before_files": [{"content": "from argparse import ArgumentParser\nfrom typing import Any\n\nfrom django.core.management.base import CommandError\nfrom typing_extensions import override\n\nfrom zerver.actions.users import (\n do_change_can_create_users,\n do_change_can_forge_sender,\n do_change_is_billing_admin,\n do_change_user_role,\n)\nfrom zerver.lib.management import ZulipBaseCommand\nfrom zerver.models import UserProfile\n\n\nclass Command(ZulipBaseCommand):\n help = \"\"\"Change role of an existing user in their (own) Realm.\n\nONLY perform this on customer request from an authorized person.\n\"\"\"\n\n @override\n def add_arguments(self, parser: ArgumentParser) -> None:\n parser.add_argument(\"email\", metavar=\"<email>\", help=\"email of user to change role\")\n parser.add_argument(\n \"new_role\",\n metavar=\"<new_role>\",\n choices=[\n \"owner\",\n \"admin\",\n \"moderator\",\n \"member\",\n \"guest\",\n \"can_forge_sender\",\n \"can_create_users\",\n \"is_billing_admin\",\n ],\n help=\"new role of the user\",\n )\n parser.add_argument(\n \"--revoke\",\n dest=\"grant\",\n action=\"store_false\",\n help=\"Remove can_forge_sender or can_create_users permission.\",\n )\n self.add_realm_args(parser, required=True)\n\n @override\n def handle(self, *args: Any, **options: Any) -> None:\n email = options[\"email\"]\n realm = self.get_realm(options)\n\n user = self.get_user(email, realm)\n\n user_role_map = {\n \"owner\": UserProfile.ROLE_REALM_OWNER,\n \"admin\": UserProfile.ROLE_REALM_ADMINISTRATOR,\n \"moderator\": UserProfile.ROLE_MODERATOR,\n \"member\": UserProfile.ROLE_MEMBER,\n \"guest\": UserProfile.ROLE_GUEST,\n }\n\n if options[\"new_role\"] not in [\"can_forge_sender\", \"can_create_users\", \"is_billing_admin\"]:\n new_role = user_role_map[options[\"new_role\"]]\n if not options[\"grant\"]:\n raise CommandError(\n \"Revoke not supported with this permission; please specify new role.\"\n )\n if new_role == user.role:\n raise CommandError(\"User already has this role.\")\n old_role_name = 
UserProfile.ROLE_ID_TO_NAME_MAP[user.role]\n do_change_user_role(user, new_role, acting_user=None)\n new_role_name = UserProfile.ROLE_ID_TO_NAME_MAP[user.role]\n print(\n f\"Role for {user.delivery_email} changed from {old_role_name} to {new_role_name}.\"\n )\n return\n\n if options[\"new_role\"] == \"can_forge_sender\":\n if user.can_forge_sender and options[\"grant\"]:\n raise CommandError(\"User can already forge messages for this realm.\")\n elif not user.can_forge_sender and not options[\"grant\"]:\n raise CommandError(\"User can't forge messages for this realm.\")\n do_change_can_forge_sender(user, options[\"grant\"])\n\n granted_text = \"have\" if options[\"grant\"] else \"not have\"\n print(\n f\"{user.delivery_email} changed to {granted_text} {options['new_role']} permission.\"\n )\n elif options[\"new_role\"] == \"can_create_users\":\n if user.can_create_users and options[\"grant\"]:\n raise CommandError(\"User can already create users for this realm.\")\n elif not user.can_create_users and not options[\"grant\"]:\n raise CommandError(\"User can't create users for this realm.\")\n do_change_can_create_users(user, options[\"grant\"])\n else:\n assert options[\"new_role\"] == \"is_billing_admin\"\n if user.is_billing_admin and options[\"grant\"]:\n raise CommandError(\"User already is a billing admin for this realm.\")\n elif not user.is_billing_admin and not options[\"grant\"]:\n raise CommandError(\"User is not a billing admin for this realm.\")\n\n do_change_is_billing_admin(user, options[\"grant\"])\n", "path": "zerver/management/commands/change_user_role.py"}], "after_files": [{"content": "from argparse import ArgumentParser\nfrom typing import Any\n\nfrom django.core.management.base import CommandError\nfrom typing_extensions import override\n\nfrom zerver.actions.users import (\n do_change_can_create_users,\n do_change_can_forge_sender,\n do_change_is_billing_admin,\n do_change_user_role,\n)\nfrom zerver.lib.management import ZulipBaseCommand\nfrom zerver.models import UserProfile\n\nROLE_CHOICES = [\n \"owner\",\n \"admin\",\n \"moderator\",\n \"member\",\n \"guest\",\n \"can_forge_sender\",\n \"can_create_users\",\n \"is_billing_admin\",\n]\n\n\nclass Command(ZulipBaseCommand):\n help = \"\"\"Change role of an existing user in their (own) Realm.\n\nONLY perform this on customer request from an authorized person.\n\"\"\"\n\n @override\n def add_arguments(self, parser: ArgumentParser) -> None:\n parser.add_argument(\"email\", metavar=\"<email>\", help=\"email of user to change role\")\n parser.add_argument(\n \"new_role\",\n metavar=\"<new_role>\",\n choices=ROLE_CHOICES,\n help=\"new role of the user; choose from \" + \", \".join(ROLE_CHOICES),\n )\n parser.add_argument(\n \"--revoke\",\n dest=\"grant\",\n action=\"store_false\",\n help=\"Remove can_forge_sender or can_create_users permission.\",\n )\n self.add_realm_args(parser, required=True)\n\n @override\n def handle(self, *args: Any, **options: Any) -> None:\n email = options[\"email\"]\n realm = self.get_realm(options)\n\n user = self.get_user(email, realm)\n\n user_role_map = {\n \"owner\": UserProfile.ROLE_REALM_OWNER,\n \"admin\": UserProfile.ROLE_REALM_ADMINISTRATOR,\n \"moderator\": UserProfile.ROLE_MODERATOR,\n \"member\": UserProfile.ROLE_MEMBER,\n \"guest\": UserProfile.ROLE_GUEST,\n }\n\n if options[\"new_role\"] not in [\"can_forge_sender\", \"can_create_users\", \"is_billing_admin\"]:\n new_role = user_role_map[options[\"new_role\"]]\n if not options[\"grant\"]:\n raise CommandError(\n \"Revoke not 
supported with this permission; please specify new role.\"\n )\n if new_role == user.role:\n raise CommandError(\"User already has this role.\")\n old_role_name = UserProfile.ROLE_ID_TO_NAME_MAP[user.role]\n do_change_user_role(user, new_role, acting_user=None)\n new_role_name = UserProfile.ROLE_ID_TO_NAME_MAP[user.role]\n print(\n f\"Role for {user.delivery_email} changed from {old_role_name} to {new_role_name}.\"\n )\n return\n\n if options[\"new_role\"] == \"can_forge_sender\":\n if user.can_forge_sender and options[\"grant\"]:\n raise CommandError(\"User can already forge messages for this realm.\")\n elif not user.can_forge_sender and not options[\"grant\"]:\n raise CommandError(\"User can't forge messages for this realm.\")\n do_change_can_forge_sender(user, options[\"grant\"])\n\n granted_text = \"have\" if options[\"grant\"] else \"not have\"\n print(\n f\"{user.delivery_email} changed to {granted_text} {options['new_role']} permission.\"\n )\n elif options[\"new_role\"] == \"can_create_users\":\n if user.can_create_users and options[\"grant\"]:\n raise CommandError(\"User can already create users for this realm.\")\n elif not user.can_create_users and not options[\"grant\"]:\n raise CommandError(\"User can't create users for this realm.\")\n do_change_can_create_users(user, options[\"grant\"])\n else:\n assert options[\"new_role\"] == \"is_billing_admin\"\n if user.is_billing_admin and options[\"grant\"]:\n raise CommandError(\"User already is a billing admin for this realm.\")\n elif not user.is_billing_admin and not options[\"grant\"]:\n raise CommandError(\"User is not a billing admin for this realm.\")\n\n do_change_is_billing_admin(user, options[\"grant\"])\n", "path": "zerver/management/commands/change_user_role.py"}]} | 1,416 | 300 |
gh_patches_debug_618 | rasdani/github-patches | git_diff | Bitmessage__PyBitmessage-1387 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Better logging
Using the built-in python logging module I've made various log levels possible and made the creation of a log file a matter of changing the configuration in debug.py. The python logging module is thread-safe so we can safely replace all `print` calls with calls to `logger`. I only replaced some of them mainly to test the configuration (and there are a lot of `print` calls)
There are some commits in my merge that mention translation files, I'm working on that but didn't mean to include them in this merge. I deleted them but the commit history is already there.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `checkdeps.py`
Content:
```
1 #!/usr/bin/env python2
2 """
3 Check dependendies and give recommendations about how to satisfy them
4
5 Limitations:
6
7 * Does not detect whether packages are already installed. Solving this requires writing more of a configuration
8 management system. Or we could switch to an existing one.
9 * Not fully PEP508 compliant. Not slightly. It makes bold assumptions about the simplicity of the contents of
10 EXTRAS_REQUIRE. This is fine because most developers do, too.
11 """
12
13 import os
14 import sys
15 from distutils.errors import CompileError
16 try:
17 from setuptools.dist import Distribution
18 from setuptools.extension import Extension
19 from setuptools.command.build_ext import build_ext
20 HAVE_SETUPTOOLS = True
21 # another import from setuptools is in setup.py
22 from setup import EXTRAS_REQUIRE
23 except ImportError:
24 HAVE_SETUPTOOLS = False
25 EXTRAS_REQUIRE = []
26
27 from importlib import import_module
28
29 from src.depends import detectOS, PACKAGES, PACKAGE_MANAGER
30
31
32 COMPILING = {
33 "Debian": "build-essential libssl-dev",
34 "Ubuntu": "build-essential libssl-dev",
35 "Fedora": "gcc-c++ redhat-rpm-config python-devel openssl-devel",
36 "openSUSE": "gcc-c++ libopenssl-devel python-devel",
37 "optional": False,
38 }
39
40 # OS-specific dependencies for optional components listed in EXTRAS_REQUIRE
41 EXTRAS_REQUIRE_DEPS = {
42 # The values from setup.EXTRAS_REQUIRE
43 'python_prctl': {
44 # The packages needed for this requirement, by OS
45 "OpenBSD": [""],
46 "FreeBSD": [""],
47 "Debian": ["libcap-dev python-prctl"],
48 "Ubuntu": ["libcap-dev python-prctl"],
49 "Ubuntu 12": ["libcap-dev python-prctl"],
50 "openSUSE": [""],
51 "Fedora": ["prctl"],
52 "Guix": [""],
53 "Gentoo": ["dev-python/python-prctl"],
54 },
55 }
56
57
58 def detectPrereqs(missing=True):
59 available = []
60 for module in PACKAGES:
61 try:
62 import_module(module)
63 if not missing:
64 available.append(module)
65 except ImportError:
66 if missing:
67 available.append(module)
68 return available
69
70
71 def prereqToPackages():
72 if not detectPrereqs():
73 return
74 print("%s %s" % (
75 PACKAGE_MANAGER[detectOS()], " ".join(
76 PACKAGES[x][detectOS()] for x in detectPrereqs())))
77
78
79 def compilerToPackages():
80 if not detectOS() in COMPILING:
81 return
82 print("%s %s" % (
83 PACKAGE_MANAGER[detectOS.result], COMPILING[detectOS.result]))
84
85
86 def testCompiler():
87 if not HAVE_SETUPTOOLS:
88 # silent, we can't test without setuptools
89 return True
90
91 bitmsghash = Extension(
92 'bitmsghash',
93 sources=['src/bitmsghash/bitmsghash.cpp'],
94 libraries=['pthread', 'crypto'],
95 )
96
97 dist = Distribution()
98 dist.ext_modules = [bitmsghash]
99 cmd = build_ext(dist)
100 cmd.initialize_options()
101 cmd.finalize_options()
102 cmd.force = True
103 try:
104 cmd.run()
105 except CompileError:
106 return False
107 else:
108 fullPath = os.path.join(cmd.build_lib, cmd.get_ext_filename("bitmsghash"))
109 return os.path.isfile(fullPath)
110
111
112 prereqs = detectPrereqs()
113 compiler = testCompiler()
114
115 if (not compiler or prereqs) and detectOS() in PACKAGE_MANAGER:
116 print(
117 "It looks like you're using %s. "
118 "It is highly recommended to use the package manager\n"
119 "to install the missing dependencies." % detectOS.result)
120
121 if not compiler:
122 print(
123 "Building the bitmsghash module failed.\n"
124 "You may be missing a C++ compiler and/or the OpenSSL headers.")
125
126 if prereqs:
127 mandatory = [x for x in prereqs if not PACKAGES[x].get("optional")]
128 optional = [x for x in prereqs if PACKAGES[x].get("optional")]
129 if mandatory:
130 print("Missing mandatory dependencies: %s" % " ".join(mandatory))
131 if optional:
132 print("Missing optional dependencies: %s" % " ".join(optional))
133 for package in optional:
134 print(PACKAGES[package].get('description'))
135
136 # Install the system dependencies of optional extras_require components
137 OPSYS = detectOS()
138 CMD = PACKAGE_MANAGER[OPSYS] if OPSYS in PACKAGE_MANAGER else 'UNKNOWN_INSTALLER'
139 for lhs, rhs in EXTRAS_REQUIRE.items():
140 if OPSYS is None:
141 break
142 if rhs and any([
143 EXTRAS_REQUIRE_DEPS[x][OPSYS]
144 for x in rhs
145 if x in EXTRAS_REQUIRE_DEPS
146 ]):
147 rhs_cmd = ''.join([
148 CMD,
149 ' ',
150 ' '.join([
151 ''. join([
152 xx for xx in EXTRAS_REQUIRE_DEPS[x][OPSYS]
153 ])
154 for x in rhs
155 if x in EXTRAS_REQUIRE_DEPS
156 ]),
157 ])
158 print(
159 "Optional dependency `pip install .[{}]` would require `{}`"
160 " to be run as root".format(lhs, rhs_cmd))
161
162 if (not compiler or prereqs) and OPSYS in PACKAGE_MANAGER:
163 print("You can install the missing dependencies by running, as root:")
164 if not compiler:
165 compilerToPackages()
166 prereqToPackages()
167 if mandatory:
168 sys.exit(1)
169 else:
170 print("All the dependencies satisfied, you can install PyBitmessage")
171
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/checkdeps.py b/checkdeps.py
--- a/checkdeps.py
+++ b/checkdeps.py
@@ -22,7 +22,7 @@
from setup import EXTRAS_REQUIRE
except ImportError:
HAVE_SETUPTOOLS = False
- EXTRAS_REQUIRE = []
+ EXTRAS_REQUIRE = {}
from importlib import import_module
| {"golden_diff": "diff --git a/checkdeps.py b/checkdeps.py\n--- a/checkdeps.py\n+++ b/checkdeps.py\n@@ -22,7 +22,7 @@\n from setup import EXTRAS_REQUIRE\n except ImportError:\n HAVE_SETUPTOOLS = False\n- EXTRAS_REQUIRE = []\n+ EXTRAS_REQUIRE = {}\n \n from importlib import import_module\n", "issue": "Better logging\nUsing the built-in python logging module I've made various log levels possible and made the creation of a log file a matter of changing the configuration in debug.py. The python logging module is thread-safe so we can safely replace all `print` calls with calls to `logger`. I only replaced some of them mainly to test the configuration (and there are a lot of `print` calls)\n\nThere are some commits in my merge that mention translation files, I'm working on that but didn't mean to include them in this merge. I deleted them but the commit history is already there.\n\n", "before_files": [{"content": "#!/usr/bin/env python2\n\"\"\"\nCheck dependendies and give recommendations about how to satisfy them\n\nLimitations:\n\n * Does not detect whether packages are already installed. Solving this requires writing more of a configuration\n management system. Or we could switch to an existing one.\n * Not fully PEP508 compliant. Not slightly. It makes bold assumptions about the simplicity of the contents of\n EXTRAS_REQUIRE. This is fine because most developers do, too.\n\"\"\"\n\nimport os\nimport sys\nfrom distutils.errors import CompileError\ntry:\n from setuptools.dist import Distribution\n from setuptools.extension import Extension\n from setuptools.command.build_ext import build_ext\n HAVE_SETUPTOOLS = True\n # another import from setuptools is in setup.py\n from setup import EXTRAS_REQUIRE\nexcept ImportError:\n HAVE_SETUPTOOLS = False\n EXTRAS_REQUIRE = []\n\nfrom importlib import import_module\n\nfrom src.depends import detectOS, PACKAGES, PACKAGE_MANAGER\n\n\nCOMPILING = {\n \"Debian\": \"build-essential libssl-dev\",\n \"Ubuntu\": \"build-essential libssl-dev\",\n \"Fedora\": \"gcc-c++ redhat-rpm-config python-devel openssl-devel\",\n \"openSUSE\": \"gcc-c++ libopenssl-devel python-devel\",\n \"optional\": False,\n}\n\n# OS-specific dependencies for optional components listed in EXTRAS_REQUIRE\nEXTRAS_REQUIRE_DEPS = {\n # The values from setup.EXTRAS_REQUIRE\n 'python_prctl': {\n # The packages needed for this requirement, by OS\n \"OpenBSD\": [\"\"],\n \"FreeBSD\": [\"\"],\n \"Debian\": [\"libcap-dev python-prctl\"],\n \"Ubuntu\": [\"libcap-dev python-prctl\"],\n \"Ubuntu 12\": [\"libcap-dev python-prctl\"],\n \"openSUSE\": [\"\"],\n \"Fedora\": [\"prctl\"],\n \"Guix\": [\"\"],\n \"Gentoo\": [\"dev-python/python-prctl\"],\n },\n}\n\n\ndef detectPrereqs(missing=True):\n available = []\n for module in PACKAGES:\n try:\n import_module(module)\n if not missing:\n available.append(module)\n except ImportError:\n if missing:\n available.append(module)\n return available\n\n\ndef prereqToPackages():\n if not detectPrereqs():\n return\n print(\"%s %s\" % (\n PACKAGE_MANAGER[detectOS()], \" \".join(\n PACKAGES[x][detectOS()] for x in detectPrereqs())))\n\n\ndef compilerToPackages():\n if not detectOS() in COMPILING:\n return\n print(\"%s %s\" % (\n PACKAGE_MANAGER[detectOS.result], COMPILING[detectOS.result]))\n\n\ndef testCompiler():\n if not HAVE_SETUPTOOLS:\n # silent, we can't test without setuptools\n return True\n\n bitmsghash = Extension(\n 'bitmsghash',\n sources=['src/bitmsghash/bitmsghash.cpp'],\n libraries=['pthread', 'crypto'],\n )\n\n dist = Distribution()\n 
dist.ext_modules = [bitmsghash]\n cmd = build_ext(dist)\n cmd.initialize_options()\n cmd.finalize_options()\n cmd.force = True\n try:\n cmd.run()\n except CompileError:\n return False\n else:\n fullPath = os.path.join(cmd.build_lib, cmd.get_ext_filename(\"bitmsghash\"))\n return os.path.isfile(fullPath)\n\n\nprereqs = detectPrereqs()\ncompiler = testCompiler()\n\nif (not compiler or prereqs) and detectOS() in PACKAGE_MANAGER:\n print(\n \"It looks like you're using %s. \"\n \"It is highly recommended to use the package manager\\n\"\n \"to install the missing dependencies.\" % detectOS.result)\n\nif not compiler:\n print(\n \"Building the bitmsghash module failed.\\n\"\n \"You may be missing a C++ compiler and/or the OpenSSL headers.\")\n\nif prereqs:\n mandatory = [x for x in prereqs if not PACKAGES[x].get(\"optional\")]\n optional = [x for x in prereqs if PACKAGES[x].get(\"optional\")]\n if mandatory:\n print(\"Missing mandatory dependencies: %s\" % \" \".join(mandatory))\n if optional:\n print(\"Missing optional dependencies: %s\" % \" \".join(optional))\n for package in optional:\n print(PACKAGES[package].get('description'))\n\n# Install the system dependencies of optional extras_require components\nOPSYS = detectOS()\nCMD = PACKAGE_MANAGER[OPSYS] if OPSYS in PACKAGE_MANAGER else 'UNKNOWN_INSTALLER'\nfor lhs, rhs in EXTRAS_REQUIRE.items():\n if OPSYS is None:\n break\n if rhs and any([\n EXTRAS_REQUIRE_DEPS[x][OPSYS]\n for x in rhs\n if x in EXTRAS_REQUIRE_DEPS\n ]):\n rhs_cmd = ''.join([\n CMD,\n ' ',\n ' '.join([\n ''. join([\n xx for xx in EXTRAS_REQUIRE_DEPS[x][OPSYS]\n ])\n for x in rhs\n if x in EXTRAS_REQUIRE_DEPS\n ]),\n ])\n print(\n \"Optional dependency `pip install .[{}]` would require `{}`\"\n \" to be run as root\".format(lhs, rhs_cmd))\n\nif (not compiler or prereqs) and OPSYS in PACKAGE_MANAGER:\n print(\"You can install the missing dependencies by running, as root:\")\n if not compiler:\n compilerToPackages()\n prereqToPackages()\n if mandatory:\n sys.exit(1)\nelse:\n print(\"All the dependencies satisfied, you can install PyBitmessage\")\n", "path": "checkdeps.py"}], "after_files": [{"content": "#!/usr/bin/env python2\n\"\"\"\nCheck dependendies and give recommendations about how to satisfy them\n\nLimitations:\n\n * Does not detect whether packages are already installed. Solving this requires writing more of a configuration\n management system. Or we could switch to an existing one.\n * Not fully PEP508 compliant. Not slightly. It makes bold assumptions about the simplicity of the contents of\n EXTRAS_REQUIRE. 
This is fine because most developers do, too.\n\"\"\"\n\nimport os\nimport sys\nfrom distutils.errors import CompileError\ntry:\n from setuptools.dist import Distribution\n from setuptools.extension import Extension\n from setuptools.command.build_ext import build_ext\n HAVE_SETUPTOOLS = True\n # another import from setuptools is in setup.py\n from setup import EXTRAS_REQUIRE\nexcept ImportError:\n HAVE_SETUPTOOLS = False\n EXTRAS_REQUIRE = {}\n\nfrom importlib import import_module\n\nfrom src.depends import detectOS, PACKAGES, PACKAGE_MANAGER\n\n\nCOMPILING = {\n \"Debian\": \"build-essential libssl-dev\",\n \"Ubuntu\": \"build-essential libssl-dev\",\n \"Fedora\": \"gcc-c++ redhat-rpm-config python-devel openssl-devel\",\n \"openSUSE\": \"gcc-c++ libopenssl-devel python-devel\",\n \"optional\": False,\n}\n\n# OS-specific dependencies for optional components listed in EXTRAS_REQUIRE\nEXTRAS_REQUIRE_DEPS = {\n # The values from setup.EXTRAS_REQUIRE\n 'python_prctl': {\n # The packages needed for this requirement, by OS\n \"OpenBSD\": [\"\"],\n \"FreeBSD\": [\"\"],\n \"Debian\": [\"libcap-dev python-prctl\"],\n \"Ubuntu\": [\"libcap-dev python-prctl\"],\n \"Ubuntu 12\": [\"libcap-dev python-prctl\"],\n \"openSUSE\": [\"\"],\n \"Fedora\": [\"prctl\"],\n \"Guix\": [\"\"],\n \"Gentoo\": [\"dev-python/python-prctl\"],\n },\n}\n\n\ndef detectPrereqs(missing=True):\n available = []\n for module in PACKAGES:\n try:\n import_module(module)\n if not missing:\n available.append(module)\n except ImportError:\n if missing:\n available.append(module)\n return available\n\n\ndef prereqToPackages():\n if not detectPrereqs():\n return\n print(\"%s %s\" % (\n PACKAGE_MANAGER[detectOS()], \" \".join(\n PACKAGES[x][detectOS()] for x in detectPrereqs())))\n\n\ndef compilerToPackages():\n if not detectOS() in COMPILING:\n return\n print(\"%s %s\" % (\n PACKAGE_MANAGER[detectOS.result], COMPILING[detectOS.result]))\n\n\ndef testCompiler():\n if not HAVE_SETUPTOOLS:\n # silent, we can't test without setuptools\n return True\n\n bitmsghash = Extension(\n 'bitmsghash',\n sources=['src/bitmsghash/bitmsghash.cpp'],\n libraries=['pthread', 'crypto'],\n )\n\n dist = Distribution()\n dist.ext_modules = [bitmsghash]\n cmd = build_ext(dist)\n cmd.initialize_options()\n cmd.finalize_options()\n cmd.force = True\n try:\n cmd.run()\n except CompileError:\n return False\n else:\n fullPath = os.path.join(cmd.build_lib, cmd.get_ext_filename(\"bitmsghash\"))\n return os.path.isfile(fullPath)\n\n\nprereqs = detectPrereqs()\ncompiler = testCompiler()\n\nif (not compiler or prereqs) and detectOS() in PACKAGE_MANAGER:\n print(\n \"It looks like you're using %s. 
\"\n \"It is highly recommended to use the package manager\\n\"\n \"to install the missing dependencies.\" % detectOS.result)\n\nif not compiler:\n print(\n \"Building the bitmsghash module failed.\\n\"\n \"You may be missing a C++ compiler and/or the OpenSSL headers.\")\n\nif prereqs:\n mandatory = [x for x in prereqs if not PACKAGES[x].get(\"optional\")]\n optional = [x for x in prereqs if PACKAGES[x].get(\"optional\")]\n if mandatory:\n print(\"Missing mandatory dependencies: %s\" % \" \".join(mandatory))\n if optional:\n print(\"Missing optional dependencies: %s\" % \" \".join(optional))\n for package in optional:\n print(PACKAGES[package].get('description'))\n\n# Install the system dependencies of optional extras_require components\nOPSYS = detectOS()\nCMD = PACKAGE_MANAGER[OPSYS] if OPSYS in PACKAGE_MANAGER else 'UNKNOWN_INSTALLER'\nfor lhs, rhs in EXTRAS_REQUIRE.items():\n if OPSYS is None:\n break\n if rhs and any([\n EXTRAS_REQUIRE_DEPS[x][OPSYS]\n for x in rhs\n if x in EXTRAS_REQUIRE_DEPS\n ]):\n rhs_cmd = ''.join([\n CMD,\n ' ',\n ' '.join([\n ''. join([\n xx for xx in EXTRAS_REQUIRE_DEPS[x][OPSYS]\n ])\n for x in rhs\n if x in EXTRAS_REQUIRE_DEPS\n ]),\n ])\n print(\n \"Optional dependency `pip install .[{}]` would require `{}`\"\n \" to be run as root\".format(lhs, rhs_cmd))\n\nif (not compiler or prereqs) and OPSYS in PACKAGE_MANAGER:\n print(\"You can install the missing dependencies by running, as root:\")\n if not compiler:\n compilerToPackages()\n prereqToPackages()\n if mandatory:\n sys.exit(1)\nelse:\n print(\"All the dependencies satisfied, you can install PyBitmessage\")\n", "path": "checkdeps.py"}]} | 2,017 | 82 |
gh_patches_debug_13498 | rasdani/github-patches | git_diff | iterative__dvc-1004 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`dvc status` print something even if project is just initialized
Empty outputs look like a bug
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dvc/command/status.py`
Content:
```
1 from dvc.command.data_sync import CmdDataBase
2
3
4 class CmdDataStatus(CmdDataBase):
5 STATUS_LEN = 10
6 STATUS_INDENT = '\t'
7
8 def _normalize(self, s):
9 s += ':'
10 assert len(s) < self.STATUS_LEN
11 return s + (self.STATUS_LEN - len(s))*' '
12
13 def _show(self, status, indent=0):
14 ind = indent * self.STATUS_INDENT
15
16 for key, value in status.items():
17 if isinstance(value, dict):
18 self.project.logger.info('{}{}'.format(ind, key))
19 self._show(value, indent+1)
20 else:
21 msg = '{}{}{}'.format(ind, self._normalize(value), key)
22 self.project.logger.info(msg)
23
24 def do_run(self, target=None):
25 indent = 1 if self.args.cloud else 0
26 try:
27 st = self.project.status(target=target,
28 jobs=self.args.jobs,
29 cloud=self.args.cloud,
30 show_checksums=self.args.show_checksums,
31 remote=self.args.remote)
32 self._show(st, indent)
33 except Exception as exc:
34 self.project.logger.error('Failed to obtain data status', exc)
35 return 1
36 return 0
37
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/dvc/command/status.py b/dvc/command/status.py
--- a/dvc/command/status.py
+++ b/dvc/command/status.py
@@ -29,7 +29,11 @@
cloud=self.args.cloud,
show_checksums=self.args.show_checksums,
remote=self.args.remote)
- self._show(st, indent)
+ if st:
+ self._show(st, indent)
+ else:
+ self.project.logger.info("Nothing to reproduce. "
+ "Pipeline is up to date.")
except Exception as exc:
self.project.logger.error('Failed to obtain data status', exc)
return 1
| {"golden_diff": "diff --git a/dvc/command/status.py b/dvc/command/status.py\n--- a/dvc/command/status.py\n+++ b/dvc/command/status.py\n@@ -29,7 +29,11 @@\n cloud=self.args.cloud,\n show_checksums=self.args.show_checksums,\n remote=self.args.remote)\n- self._show(st, indent)\n+ if st:\n+ self._show(st, indent)\n+ else:\n+ self.project.logger.info(\"Nothing to reproduce. \"\n+ \"Pipeline is up to date.\")\n except Exception as exc:\n self.project.logger.error('Failed to obtain data status', exc)\n return 1\n", "issue": "`dvc status` print something even if project is just initialized\nEmpty outputs look like a bug\n", "before_files": [{"content": "from dvc.command.data_sync import CmdDataBase\n\n\nclass CmdDataStatus(CmdDataBase):\n STATUS_LEN = 10\n STATUS_INDENT = '\\t'\n\n def _normalize(self, s):\n s += ':'\n assert len(s) < self.STATUS_LEN\n return s + (self.STATUS_LEN - len(s))*' '\n\n def _show(self, status, indent=0):\n ind = indent * self.STATUS_INDENT\n\n for key, value in status.items():\n if isinstance(value, dict):\n self.project.logger.info('{}{}'.format(ind, key))\n self._show(value, indent+1)\n else:\n msg = '{}{}{}'.format(ind, self._normalize(value), key)\n self.project.logger.info(msg)\n\n def do_run(self, target=None):\n indent = 1 if self.args.cloud else 0\n try:\n st = self.project.status(target=target,\n jobs=self.args.jobs,\n cloud=self.args.cloud,\n show_checksums=self.args.show_checksums,\n remote=self.args.remote)\n self._show(st, indent)\n except Exception as exc:\n self.project.logger.error('Failed to obtain data status', exc)\n return 1\n return 0\n", "path": "dvc/command/status.py"}], "after_files": [{"content": "from dvc.command.data_sync import CmdDataBase\n\n\nclass CmdDataStatus(CmdDataBase):\n STATUS_LEN = 10\n STATUS_INDENT = '\\t'\n\n def _normalize(self, s):\n s += ':'\n assert len(s) < self.STATUS_LEN\n return s + (self.STATUS_LEN - len(s))*' '\n\n def _show(self, status, indent=0):\n ind = indent * self.STATUS_INDENT\n\n for key, value in status.items():\n if isinstance(value, dict):\n self.project.logger.info('{}{}'.format(ind, key))\n self._show(value, indent+1)\n else:\n msg = '{}{}{}'.format(ind, self._normalize(value), key)\n self.project.logger.info(msg)\n\n def do_run(self, target=None):\n indent = 1 if self.args.cloud else 0\n try:\n st = self.project.status(target=target,\n jobs=self.args.jobs,\n cloud=self.args.cloud,\n show_checksums=self.args.show_checksums,\n remote=self.args.remote)\n if st:\n self._show(st, indent)\n else:\n self.project.logger.info(\"Nothing to reproduce. \"\n \"Pipeline is up to date.\")\n except Exception as exc:\n self.project.logger.error('Failed to obtain data status', exc)\n return 1\n return 0\n", "path": "dvc/command/status.py"}]} | 608 | 140 |
gh_patches_debug_10716 | rasdani/github-patches | git_diff | nextcloud__appstore-406 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
RSS/Atom Feed: Don't show Changes headline if Changelog is empty
Needs a check if item.changelog is empty https://github.com/nextcloud/appstore/blob/master/nextcloudappstore/core/feeds.py#L36
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nextcloudappstore/core/feeds.py`
Content:
```
1 from django.conf import settings
2 from django.contrib.syndication.views import Feed
3 from django.urls import reverse
4 from django.urls import reverse_lazy
5 from django.utils.feedgenerator import Atom1Feed
6 from django.utils.translation import ugettext_lazy as _ # type: ignore
7 from parler.models import TranslationDoesNotExist
8
9 from nextcloudappstore.core.models import AppRelease
10 from markdown import markdown
11 from bleach import clean
12
13
14 class AppReleaseRssFeed(Feed):
15 title = _('Newest app releases')
16 description = _('Get the newest app release updates')
17 link = reverse_lazy('home')
18
19 def __call__(self, request, *args, **kwargs):
20 self.request = request
21 return super().__call__(request, *args, **kwargs)
22
23 def items(self):
24 queryset = AppRelease.objects.order_by('-last_modified')
25 if 'nightly' not in self.request.GET:
26 queryset = queryset.filter(is_nightly=False)
27 if 'prerelease' not in self.request.GET:
28 queryset = queryset.exclude(version__contains='-')
29 return queryset[:10]
30
31 def item_title(self, item):
32 return '%s (%s)' % (item.app.name, item.version)
33
34 def item_description(self, item):
35 try:
36 content = ('%s\n\n# %s\n\n%s' % (
37 item.app.description, _('Changes'), item.changelog))
38 except TranslationDoesNotExist:
39 content = item.app.description
40 content += '\n\n [%s](%s)' % (_('Download'), item.download)
41 return clean(markdown(content),
42 attributes=settings.MARKDOWN_ALLOWED_ATTRIBUTES,
43 tags=settings.MARKDOWN_ALLOWED_TAGS)
44
45 def item_guid(self, obj):
46 if obj.is_nightly:
47 nightly = '-nightly'
48 else:
49 nightly = ''
50 return '%s-%s%s' % (obj.app.id, obj.version, nightly)
51
52 def item_link(self, item):
53 return reverse('app-detail', kwargs={'id': item.app.id})
54
55 def item_author_name(self, item):
56 return '%s %s' % (item.app.owner.first_name, item.app.owner.last_name)
57
58 def item_pubdate(self, item):
59 return item.last_modified
60
61 def item_updateddate(self, item):
62 return item.last_modified
63
64
65 class AppReleaseAtomFeed(AppReleaseRssFeed):
66 feed_type = Atom1Feed
67 subtitle = AppReleaseRssFeed.description
68
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nextcloudappstore/core/feeds.py b/nextcloudappstore/core/feeds.py
--- a/nextcloudappstore/core/feeds.py
+++ b/nextcloudappstore/core/feeds.py
@@ -33,8 +33,11 @@
def item_description(self, item):
try:
- content = ('%s\n\n# %s\n\n%s' % (
- item.app.description, _('Changes'), item.changelog))
+ if item.changelog:
+ changelog = '\n\n# %s\n\n%s' % (_('Changes'), item.changelog)
+ else:
+ changelog = ''
+ content = '%s%s' % (item.app.description, changelog)
except TranslationDoesNotExist:
content = item.app.description
content += '\n\n [%s](%s)' % (_('Download'), item.download)
| {"golden_diff": "diff --git a/nextcloudappstore/core/feeds.py b/nextcloudappstore/core/feeds.py\n--- a/nextcloudappstore/core/feeds.py\n+++ b/nextcloudappstore/core/feeds.py\n@@ -33,8 +33,11 @@\n \n def item_description(self, item):\n try:\n- content = ('%s\\n\\n# %s\\n\\n%s' % (\n- item.app.description, _('Changes'), item.changelog))\n+ if item.changelog:\n+ changelog = '\\n\\n# %s\\n\\n%s' % (_('Changes'), item.changelog)\n+ else:\n+ changelog = ''\n+ content = '%s%s' % (item.app.description, changelog)\n except TranslationDoesNotExist:\n content = item.app.description\n content += '\\n\\n [%s](%s)' % (_('Download'), item.download)\n", "issue": "RSS/Atom Feed: Don't show Changes headline if Changelog is empty\nNeeds a check if item.changelog is empty https://github.com/nextcloud/appstore/blob/master/nextcloudappstore/core/feeds.py#L36\n", "before_files": [{"content": "from django.conf import settings\nfrom django.contrib.syndication.views import Feed\nfrom django.urls import reverse\nfrom django.urls import reverse_lazy\nfrom django.utils.feedgenerator import Atom1Feed\nfrom django.utils.translation import ugettext_lazy as _ # type: ignore\nfrom parler.models import TranslationDoesNotExist\n\nfrom nextcloudappstore.core.models import AppRelease\nfrom markdown import markdown\nfrom bleach import clean\n\n\nclass AppReleaseRssFeed(Feed):\n title = _('Newest app releases')\n description = _('Get the newest app release updates')\n link = reverse_lazy('home')\n\n def __call__(self, request, *args, **kwargs):\n self.request = request\n return super().__call__(request, *args, **kwargs)\n\n def items(self):\n queryset = AppRelease.objects.order_by('-last_modified')\n if 'nightly' not in self.request.GET:\n queryset = queryset.filter(is_nightly=False)\n if 'prerelease' not in self.request.GET:\n queryset = queryset.exclude(version__contains='-')\n return queryset[:10]\n\n def item_title(self, item):\n return '%s (%s)' % (item.app.name, item.version)\n\n def item_description(self, item):\n try:\n content = ('%s\\n\\n# %s\\n\\n%s' % (\n item.app.description, _('Changes'), item.changelog))\n except TranslationDoesNotExist:\n content = item.app.description\n content += '\\n\\n [%s](%s)' % (_('Download'), item.download)\n return clean(markdown(content),\n attributes=settings.MARKDOWN_ALLOWED_ATTRIBUTES,\n tags=settings.MARKDOWN_ALLOWED_TAGS)\n\n def item_guid(self, obj):\n if obj.is_nightly:\n nightly = '-nightly'\n else:\n nightly = ''\n return '%s-%s%s' % (obj.app.id, obj.version, nightly)\n\n def item_link(self, item):\n return reverse('app-detail', kwargs={'id': item.app.id})\n\n def item_author_name(self, item):\n return '%s %s' % (item.app.owner.first_name, item.app.owner.last_name)\n\n def item_pubdate(self, item):\n return item.last_modified\n\n def item_updateddate(self, item):\n return item.last_modified\n\n\nclass AppReleaseAtomFeed(AppReleaseRssFeed):\n feed_type = Atom1Feed\n subtitle = AppReleaseRssFeed.description\n", "path": "nextcloudappstore/core/feeds.py"}], "after_files": [{"content": "from django.conf import settings\nfrom django.contrib.syndication.views import Feed\nfrom django.urls import reverse\nfrom django.urls import reverse_lazy\nfrom django.utils.feedgenerator import Atom1Feed\nfrom django.utils.translation import ugettext_lazy as _ # type: ignore\nfrom parler.models import TranslationDoesNotExist\n\nfrom nextcloudappstore.core.models import AppRelease\nfrom markdown import markdown\nfrom bleach import clean\n\n\nclass AppReleaseRssFeed(Feed):\n title = _('Newest app 
releases')\n description = _('Get the newest app release updates')\n link = reverse_lazy('home')\n\n def __call__(self, request, *args, **kwargs):\n self.request = request\n return super().__call__(request, *args, **kwargs)\n\n def items(self):\n queryset = AppRelease.objects.order_by('-last_modified')\n if 'nightly' not in self.request.GET:\n queryset = queryset.filter(is_nightly=False)\n if 'prerelease' not in self.request.GET:\n queryset = queryset.exclude(version__contains='-')\n return queryset[:10]\n\n def item_title(self, item):\n return '%s (%s)' % (item.app.name, item.version)\n\n def item_description(self, item):\n try:\n if item.changelog:\n changelog = '\\n\\n# %s\\n\\n%s' % (_('Changes'), item.changelog)\n else:\n changelog = ''\n content = '%s%s' % (item.app.description, changelog)\n except TranslationDoesNotExist:\n content = item.app.description\n content += '\\n\\n [%s](%s)' % (_('Download'), item.download)\n return clean(markdown(content),\n attributes=settings.MARKDOWN_ALLOWED_ATTRIBUTES,\n tags=settings.MARKDOWN_ALLOWED_TAGS)\n\n def item_guid(self, obj):\n if obj.is_nightly:\n nightly = '-nightly'\n else:\n nightly = ''\n return '%s-%s%s' % (obj.app.id, obj.version, nightly)\n\n def item_link(self, item):\n return reverse('app-detail', kwargs={'id': item.app.id})\n\n def item_author_name(self, item):\n return '%s %s' % (item.app.owner.first_name, item.app.owner.last_name)\n\n def item_pubdate(self, item):\n return item.last_modified\n\n def item_updateddate(self, item):\n return item.last_modified\n\n\nclass AppReleaseAtomFeed(AppReleaseRssFeed):\n feed_type = Atom1Feed\n subtitle = AppReleaseRssFeed.description\n", "path": "nextcloudappstore/core/feeds.py"}]} | 960 | 195 |
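The effect of the patch is easiest to see on a release that ships without release notes. The snippet below uses illustrative values, not data from the app store, to contrast the two behaviours:
```
description = "A great app."
changelog = ""                         # release without a changelog entry

# old: the "Changes" heading is emitted even when there is nothing under it
old = "%s\n\n# %s\n\n%s" % (description, "Changes", changelog)

# patched: the heading is only added when a changelog actually exists
new = description + ("\n\n# Changes\n\n%s" % changelog if changelog else "")

print(repr(old))   # ends with an empty '# Changes' section
print(repr(new))   # just 'A great app.'
```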
gh_patches_debug_41827 | rasdani/github-patches | git_diff | streamlink__streamlink-5771 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
plugins.bilibili: Playable streams can't be found on stream pages with custom layout
### Checklist
- [X] This is a [plugin issue](https://streamlink.github.io/plugins.html) and not [a different kind of issue](https://github.com/streamlink/streamlink/issues/new/choose)
- [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink)
- [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22)
- [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master)
### Streamlink version
streamlink 6.5.0
### Description
It appears that for Bilibili streams with custom pages, the plugin will simply say there's no playable stream found, even if a stream is actually live. An example of [a stream with a custom page at the time of posting](https://live.bilibili.com/27888667) is given in the debug log (screenshot shown here).

### Debug log
```text
PS C:\Users\Admin> streamlink --loglevel=debug https://live.bilibili.com/27888667 best
[cli][debug] OS: Windows 10
[cli][debug] Python: 3.11.7
[cli][debug] OpenSSL: OpenSSL 3.0.11 19 Sep 2023
[cli][debug] Streamlink: 6.5.0
[cli][debug] Dependencies:
[cli][debug] certifi: 2023.11.17
[cli][debug] isodate: 0.6.1
[cli][debug] lxml: 4.9.3
[cli][debug] pycountry: 23.12.11
[cli][debug] pycryptodome: 3.19.0
[cli][debug] PySocks: 1.7.1
[cli][debug] requests: 2.31.0
[cli][debug] trio: 0.23.2
[cli][debug] trio-websocket: 0.11.1
[cli][debug] typing-extensions: 4.9.0
[cli][debug] urllib3: 2.1.0
[cli][debug] websocket-client: 1.7.0
[cli][debug] Arguments:
[cli][debug] url=https://live.bilibili.com/27888667
[cli][debug] stream=['best']
[cli][debug] --loglevel=debug
[cli][debug] --ffmpeg-ffmpeg=C:\Program Files\Streamlink\ffmpeg\ffmpeg.exe
[cli][info] Found matching plugin bilibili for URL https://live.bilibili.com/27888667
error: No playable streams found on this URL: https://live.bilibili.com/27888667
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/streamlink/plugins/bilibili.py`
Content:
```
1 """
2 $description Chinese video sharing website based in Shanghai, themed around animation, comics, and games (ACG).
3 $url live.bilibili.com
4 $type live
5 """
6
7 import re
8
9 from streamlink.plugin import Plugin, pluginmatcher
10 from streamlink.plugin.api import validate
11 from streamlink.stream.hls import HLSStream
12
13
14 @pluginmatcher(re.compile(
15 r"https?://live\.bilibili\.com/(?P<channel>[^/]+)",
16 ))
17 class Bilibili(Plugin):
18 SHOW_STATUS_OFFLINE = 0
19 SHOW_STATUS_ONLINE = 1
20 SHOW_STATUS_ROUND = 2
21
22 def _get_streams(self):
23 schema_stream = validate.all(
24 [{
25 "protocol_name": str,
26 "format": validate.all(
27 [{
28 "format_name": str,
29 "codec": validate.all(
30 [{
31 "codec_name": str,
32 "base_url": str,
33 "url_info": [{
34 "host": validate.url(),
35 "extra": str,
36 }],
37 }],
38 validate.filter(lambda item: item["codec_name"] == "avc"),
39 ),
40 }],
41 validate.filter(lambda item: item["format_name"] == "fmp4"),
42 ),
43 }],
44 validate.filter(lambda item: item["protocol_name"] == "http_hls"),
45 )
46
47 data = self.session.http.get(
48 self.url,
49 schema=validate.Schema(
50 validate.parse_html(),
51 validate.xml_xpath_string(".//script[contains(text(),'window.__NEPTUNE_IS_MY_WAIFU__={')][1]/text()"),
52 validate.none_or_all(
53 validate.transform(str.replace, "window.__NEPTUNE_IS_MY_WAIFU__=", ""),
54 validate.parse_json(),
55 {
56 "roomInitRes": {
57 "data": {
58 "live_status": int,
59 "playurl_info": {
60 "playurl": {
61 "stream": schema_stream,
62 },
63 },
64 },
65 },
66 "roomInfoRes": {
67 "data": {
68 "room_info": {
69 "live_id": int,
70 "title": str,
71 "area_name": str,
72 },
73 "anchor_info": {
74 "base_info": {
75 "uname": str,
76 },
77 },
78 },
79 },
80 },
81 validate.union_get(
82 ("roomInfoRes", "data", "room_info", "live_id"),
83 ("roomInfoRes", "data", "anchor_info", "base_info", "uname"),
84 ("roomInfoRes", "data", "room_info", "area_name"),
85 ("roomInfoRes", "data", "room_info", "title"),
86 ("roomInitRes", "data", "live_status"),
87 ("roomInitRes", "data", "playurl_info", "playurl", "stream"),
88 ),
89 ),
90 ),
91 )
92 if not data:
93 return
94
95 self.id, self.author, self.category, self.title, live_status, streams = data
96 if live_status != self.SHOW_STATUS_ONLINE:
97 return
98
99 for stream in streams:
100 for stream_format in stream["format"]:
101 for codec in stream_format["codec"]:
102 for url_info in codec["url_info"]:
103 url = f"{url_info['host']}{codec['base_url']}{url_info['extra']}"
104 yield "live", HLSStream(self.session, url)
105
106
107 __plugin__ = Bilibili
108
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/streamlink/plugins/bilibili.py b/src/streamlink/plugins/bilibili.py
--- a/src/streamlink/plugins/bilibili.py
+++ b/src/streamlink/plugins/bilibili.py
@@ -4,23 +4,31 @@
$type live
"""
+import logging
import re
+from streamlink.exceptions import NoStreamsError
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
+log = logging.getLogger(__name__)
+
+
@pluginmatcher(re.compile(
r"https?://live\.bilibili\.com/(?P<channel>[^/]+)",
))
class Bilibili(Plugin):
+ _URL_API_PLAYINFO = "https://api.live.bilibili.com/xlive/web-room/v2/index/getRoomPlayInfo"
+
SHOW_STATUS_OFFLINE = 0
SHOW_STATUS_ONLINE = 1
SHOW_STATUS_ROUND = 2
- def _get_streams(self):
- schema_stream = validate.all(
+ @staticmethod
+ def _schema_streams():
+ return validate.all(
[{
"protocol_name": str,
"format": validate.all(
@@ -44,6 +52,38 @@
validate.filter(lambda item: item["protocol_name"] == "http_hls"),
)
+ def _get_api_playinfo(self, room_id):
+ return self.session.http.get(
+ self._URL_API_PLAYINFO,
+ params={
+ "room_id": room_id,
+ "no_playurl": 0,
+ "mask": 1,
+ "qn": 0,
+ "platform": "web",
+ "protocol": "0,1",
+ "format": "0,1,2",
+ "codec": "0,1,2",
+ "dolby": 5,
+ "panorama": 1,
+ },
+ schema=validate.Schema(
+ validate.parse_json(),
+ {
+ "code": 0,
+ "data": {
+ "playurl_info": {
+ "playurl": {
+ "stream": self._schema_streams(),
+ },
+ },
+ },
+ },
+ validate.get(("data", "playurl_info", "playurl", "stream")),
+ ),
+ )
+
+ def _get_page_playinfo(self):
data = self.session.http.get(
self.url,
schema=validate.Schema(
@@ -58,7 +98,7 @@
"live_status": int,
"playurl_info": {
"playurl": {
- "stream": schema_stream,
+ "stream": self._schema_streams(),
},
},
},
@@ -94,9 +134,18 @@
self.id, self.author, self.category, self.title, live_status, streams = data
if live_status != self.SHOW_STATUS_ONLINE:
- return
+ log.info("Channel is offline")
+ raise NoStreamsError
+
+ return streams
+
+ def _get_streams(self):
+ streams = self._get_page_playinfo()
+ if not streams:
+ log.debug("Falling back to _get_api_playinfo()")
+ streams = self._get_api_playinfo(self.match["channel"])
- for stream in streams:
+ for stream in streams or []:
for stream_format in stream["format"]:
for codec in stream_format["codec"]:
for url_info in codec["url_info"]:
| {"golden_diff": "diff --git a/src/streamlink/plugins/bilibili.py b/src/streamlink/plugins/bilibili.py\n--- a/src/streamlink/plugins/bilibili.py\n+++ b/src/streamlink/plugins/bilibili.py\n@@ -4,23 +4,31 @@\n $type live\n \"\"\"\n \n+import logging\n import re\n \n+from streamlink.exceptions import NoStreamsError\n from streamlink.plugin import Plugin, pluginmatcher\n from streamlink.plugin.api import validate\n from streamlink.stream.hls import HLSStream\n \n \n+log = logging.getLogger(__name__)\n+\n+\n @pluginmatcher(re.compile(\n r\"https?://live\\.bilibili\\.com/(?P<channel>[^/]+)\",\n ))\n class Bilibili(Plugin):\n+ _URL_API_PLAYINFO = \"https://api.live.bilibili.com/xlive/web-room/v2/index/getRoomPlayInfo\"\n+\n SHOW_STATUS_OFFLINE = 0\n SHOW_STATUS_ONLINE = 1\n SHOW_STATUS_ROUND = 2\n \n- def _get_streams(self):\n- schema_stream = validate.all(\n+ @staticmethod\n+ def _schema_streams():\n+ return validate.all(\n [{\n \"protocol_name\": str,\n \"format\": validate.all(\n@@ -44,6 +52,38 @@\n validate.filter(lambda item: item[\"protocol_name\"] == \"http_hls\"),\n )\n \n+ def _get_api_playinfo(self, room_id):\n+ return self.session.http.get(\n+ self._URL_API_PLAYINFO,\n+ params={\n+ \"room_id\": room_id,\n+ \"no_playurl\": 0,\n+ \"mask\": 1,\n+ \"qn\": 0,\n+ \"platform\": \"web\",\n+ \"protocol\": \"0,1\",\n+ \"format\": \"0,1,2\",\n+ \"codec\": \"0,1,2\",\n+ \"dolby\": 5,\n+ \"panorama\": 1,\n+ },\n+ schema=validate.Schema(\n+ validate.parse_json(),\n+ {\n+ \"code\": 0,\n+ \"data\": {\n+ \"playurl_info\": {\n+ \"playurl\": {\n+ \"stream\": self._schema_streams(),\n+ },\n+ },\n+ },\n+ },\n+ validate.get((\"data\", \"playurl_info\", \"playurl\", \"stream\")),\n+ ),\n+ )\n+\n+ def _get_page_playinfo(self):\n data = self.session.http.get(\n self.url,\n schema=validate.Schema(\n@@ -58,7 +98,7 @@\n \"live_status\": int,\n \"playurl_info\": {\n \"playurl\": {\n- \"stream\": schema_stream,\n+ \"stream\": self._schema_streams(),\n },\n },\n },\n@@ -94,9 +134,18 @@\n \n self.id, self.author, self.category, self.title, live_status, streams = data\n if live_status != self.SHOW_STATUS_ONLINE:\n- return\n+ log.info(\"Channel is offline\")\n+ raise NoStreamsError\n+\n+ return streams\n+\n+ def _get_streams(self):\n+ streams = self._get_page_playinfo()\n+ if not streams:\n+ log.debug(\"Falling back to _get_api_playinfo()\")\n+ streams = self._get_api_playinfo(self.match[\"channel\"])\n \n- for stream in streams:\n+ for stream in streams or []:\n for stream_format in stream[\"format\"]:\n for codec in stream_format[\"codec\"]:\n for url_info in codec[\"url_info\"]:\n", "issue": "plugins.bilibili: Playable streams can't be found on stream pages with custom layout\n### Checklist\r\n\r\n- [X] This is a [plugin issue](https://streamlink.github.io/plugins.html) and not [a different kind of issue](https://github.com/streamlink/streamlink/issues/new/choose)\r\n- [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink)\r\n- [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22)\r\n- [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master)\r\n\r\n### Streamlink version\r\n\r\nstreamlink 6.5.0\r\n\r\n### Description\r\n\r\nIt appears that for Bilibili streams with custom pages, the plugin will simply say there's no playable stream found, even if a stream is actually 
live. An example of [a stream with a custom page at the time of posting](https://live.bilibili.com/27888667) is given in the debug log (screenshot shown here).\r\n\r\n\r\n\r\n### Debug log\r\n\r\n```text\r\nPS C:\\Users\\Admin> streamlink --loglevel=debug https://live.bilibili.com/27888667 best\r\n[cli][debug] OS: Windows 10\r\n[cli][debug] Python: 3.11.7\r\n[cli][debug] OpenSSL: OpenSSL 3.0.11 19 Sep 2023\r\n[cli][debug] Streamlink: 6.5.0\r\n[cli][debug] Dependencies:\r\n[cli][debug] certifi: 2023.11.17\r\n[cli][debug] isodate: 0.6.1\r\n[cli][debug] lxml: 4.9.3\r\n[cli][debug] pycountry: 23.12.11\r\n[cli][debug] pycryptodome: 3.19.0\r\n[cli][debug] PySocks: 1.7.1\r\n[cli][debug] requests: 2.31.0\r\n[cli][debug] trio: 0.23.2\r\n[cli][debug] trio-websocket: 0.11.1\r\n[cli][debug] typing-extensions: 4.9.0\r\n[cli][debug] urllib3: 2.1.0\r\n[cli][debug] websocket-client: 1.7.0\r\n[cli][debug] Arguments:\r\n[cli][debug] url=https://live.bilibili.com/27888667\r\n[cli][debug] stream=['best']\r\n[cli][debug] --loglevel=debug\r\n[cli][debug] --ffmpeg-ffmpeg=C:\\Program Files\\Streamlink\\ffmpeg\\ffmpeg.exe\r\n[cli][info] Found matching plugin bilibili for URL https://live.bilibili.com/27888667\r\nerror: No playable streams found on this URL: https://live.bilibili.com/27888667\r\n```\r\n\n", "before_files": [{"content": "\"\"\"\n$description Chinese video sharing website based in Shanghai, themed around animation, comics, and games (ACG).\n$url live.bilibili.com\n$type live\n\"\"\"\n\nimport re\n\nfrom streamlink.plugin import Plugin, pluginmatcher\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream.hls import HLSStream\n\n\n@pluginmatcher(re.compile(\n r\"https?://live\\.bilibili\\.com/(?P<channel>[^/]+)\",\n))\nclass Bilibili(Plugin):\n SHOW_STATUS_OFFLINE = 0\n SHOW_STATUS_ONLINE = 1\n SHOW_STATUS_ROUND = 2\n\n def _get_streams(self):\n schema_stream = validate.all(\n [{\n \"protocol_name\": str,\n \"format\": validate.all(\n [{\n \"format_name\": str,\n \"codec\": validate.all(\n [{\n \"codec_name\": str,\n \"base_url\": str,\n \"url_info\": [{\n \"host\": validate.url(),\n \"extra\": str,\n }],\n }],\n validate.filter(lambda item: item[\"codec_name\"] == \"avc\"),\n ),\n }],\n validate.filter(lambda item: item[\"format_name\"] == \"fmp4\"),\n ),\n }],\n validate.filter(lambda item: item[\"protocol_name\"] == \"http_hls\"),\n )\n\n data = self.session.http.get(\n self.url,\n schema=validate.Schema(\n validate.parse_html(),\n validate.xml_xpath_string(\".//script[contains(text(),'window.__NEPTUNE_IS_MY_WAIFU__={')][1]/text()\"),\n validate.none_or_all(\n validate.transform(str.replace, \"window.__NEPTUNE_IS_MY_WAIFU__=\", \"\"),\n validate.parse_json(),\n {\n \"roomInitRes\": {\n \"data\": {\n \"live_status\": int,\n \"playurl_info\": {\n \"playurl\": {\n \"stream\": schema_stream,\n },\n },\n },\n },\n \"roomInfoRes\": {\n \"data\": {\n \"room_info\": {\n \"live_id\": int,\n \"title\": str,\n \"area_name\": str,\n },\n \"anchor_info\": {\n \"base_info\": {\n \"uname\": str,\n },\n },\n },\n },\n },\n validate.union_get(\n (\"roomInfoRes\", \"data\", \"room_info\", \"live_id\"),\n (\"roomInfoRes\", \"data\", \"anchor_info\", \"base_info\", \"uname\"),\n (\"roomInfoRes\", \"data\", \"room_info\", \"area_name\"),\n (\"roomInfoRes\", \"data\", \"room_info\", \"title\"),\n (\"roomInitRes\", \"data\", \"live_status\"),\n (\"roomInitRes\", \"data\", \"playurl_info\", \"playurl\", \"stream\"),\n ),\n ),\n ),\n )\n if not data:\n return\n\n self.id, self.author, self.category, self.title, 
live_status, streams = data\n if live_status != self.SHOW_STATUS_ONLINE:\n return\n\n for stream in streams:\n for stream_format in stream[\"format\"]:\n for codec in stream_format[\"codec\"]:\n for url_info in codec[\"url_info\"]:\n url = f\"{url_info['host']}{codec['base_url']}{url_info['extra']}\"\n yield \"live\", HLSStream(self.session, url)\n\n\n__plugin__ = Bilibili\n", "path": "src/streamlink/plugins/bilibili.py"}], "after_files": [{"content": "\"\"\"\n$description Chinese video sharing website based in Shanghai, themed around animation, comics, and games (ACG).\n$url live.bilibili.com\n$type live\n\"\"\"\n\nimport logging\nimport re\n\nfrom streamlink.exceptions import NoStreamsError\nfrom streamlink.plugin import Plugin, pluginmatcher\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream.hls import HLSStream\n\n\nlog = logging.getLogger(__name__)\n\n\n@pluginmatcher(re.compile(\n r\"https?://live\\.bilibili\\.com/(?P<channel>[^/]+)\",\n))\nclass Bilibili(Plugin):\n _URL_API_PLAYINFO = \"https://api.live.bilibili.com/xlive/web-room/v2/index/getRoomPlayInfo\"\n\n SHOW_STATUS_OFFLINE = 0\n SHOW_STATUS_ONLINE = 1\n SHOW_STATUS_ROUND = 2\n\n @staticmethod\n def _schema_streams():\n return validate.all(\n [{\n \"protocol_name\": str,\n \"format\": validate.all(\n [{\n \"format_name\": str,\n \"codec\": validate.all(\n [{\n \"codec_name\": str,\n \"base_url\": str,\n \"url_info\": [{\n \"host\": validate.url(),\n \"extra\": str,\n }],\n }],\n validate.filter(lambda item: item[\"codec_name\"] == \"avc\"),\n ),\n }],\n validate.filter(lambda item: item[\"format_name\"] == \"fmp4\"),\n ),\n }],\n validate.filter(lambda item: item[\"protocol_name\"] == \"http_hls\"),\n )\n\n def _get_api_playinfo(self, room_id):\n return self.session.http.get(\n self._URL_API_PLAYINFO,\n params={\n \"room_id\": room_id,\n \"no_playurl\": 0,\n \"mask\": 1,\n \"qn\": 0,\n \"platform\": \"web\",\n \"protocol\": \"0,1\",\n \"format\": \"0,1,2\",\n \"codec\": \"0,1,2\",\n \"dolby\": 5,\n \"panorama\": 1,\n },\n schema=validate.Schema(\n validate.parse_json(),\n {\n \"code\": 0,\n \"data\": {\n \"playurl_info\": {\n \"playurl\": {\n \"stream\": self._schema_streams(),\n },\n },\n },\n },\n validate.get((\"data\", \"playurl_info\", \"playurl\", \"stream\")),\n ),\n )\n\n def _get_page_playinfo(self):\n data = self.session.http.get(\n self.url,\n schema=validate.Schema(\n validate.parse_html(),\n validate.xml_xpath_string(\".//script[contains(text(),'window.__NEPTUNE_IS_MY_WAIFU__={')][1]/text()\"),\n validate.none_or_all(\n validate.transform(str.replace, \"window.__NEPTUNE_IS_MY_WAIFU__=\", \"\"),\n validate.parse_json(),\n {\n \"roomInitRes\": {\n \"data\": {\n \"live_status\": int,\n \"playurl_info\": {\n \"playurl\": {\n \"stream\": self._schema_streams(),\n },\n },\n },\n },\n \"roomInfoRes\": {\n \"data\": {\n \"room_info\": {\n \"live_id\": int,\n \"title\": str,\n \"area_name\": str,\n },\n \"anchor_info\": {\n \"base_info\": {\n \"uname\": str,\n },\n },\n },\n },\n },\n validate.union_get(\n (\"roomInfoRes\", \"data\", \"room_info\", \"live_id\"),\n (\"roomInfoRes\", \"data\", \"anchor_info\", \"base_info\", \"uname\"),\n (\"roomInfoRes\", \"data\", \"room_info\", \"area_name\"),\n (\"roomInfoRes\", \"data\", \"room_info\", \"title\"),\n (\"roomInitRes\", \"data\", \"live_status\"),\n (\"roomInitRes\", \"data\", \"playurl_info\", \"playurl\", \"stream\"),\n ),\n ),\n ),\n )\n if not data:\n return\n\n self.id, self.author, self.category, self.title, live_status, streams = data\n if 
live_status != self.SHOW_STATUS_ONLINE:\n log.info(\"Channel is offline\")\n raise NoStreamsError\n\n return streams\n\n def _get_streams(self):\n streams = self._get_page_playinfo()\n if not streams:\n log.debug(\"Falling back to _get_api_playinfo()\")\n streams = self._get_api_playinfo(self.match[\"channel\"])\n\n for stream in streams or []:\n for stream_format in stream[\"format\"]:\n for codec in stream_format[\"codec\"]:\n for url_info in codec[\"url_info\"]:\n url = f\"{url_info['host']}{codec['base_url']}{url_info['extra']}\"\n yield \"live\", HLSStream(self.session, url)\n\n\n__plugin__ = Bilibili\n", "path": "src/streamlink/plugins/bilibili.py"}]} | 1,984 | 775 |
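Stripped of the validation schemas, the patched plugin follows a simple "page first, API second" pattern. The outline below is a rough sketch with placeholder functions and a hard-coded room id, not the actual Streamlink code:
```
def get_page_playinfo():
    """Parse the inline window.__NEPTUNE_IS_MY_WAIFU__ payload; custom room layouts yield None."""
    return None                                    # pretend the page carried no payload

def get_api_playinfo(room_id):
    """Query the getRoomPlayInfo web API directly (the endpoint added as _URL_API_PLAYINFO)."""
    return [{"protocol_name": "http_hls"}]         # placeholder for the API response

streams = get_page_playinfo()
if not streams:
    # rooms with a custom layout miss the inline payload, so fall back to the HTTP API
    streams = get_api_playinfo(27888667)
print(streams)
```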
gh_patches_debug_18873 | rasdani/github-patches | git_diff | streamlink__streamlink-2911 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[bug] Tv1channel plugin no longer works
## Bug Report
<!-- Replace [ ] with [x] in order to check the box -->
- [x] This is a bug report and I have read the contribution guidelines.
### Reproduction steps / Explicit stream URLs to test
http://tv1channel.org/index.php/livetv fails to open. Stream seems to have moved away to a m3u8 link - either a quick fix or just remove the plugin as it is covered by one of the main plugins.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/streamlink/plugins/tv1channel.py`
Content:
```
1 import re
2
3 from streamlink.plugin import Plugin
4 from streamlink.plugin.api import useragents
5 from streamlink.plugin.api.utils import itertags
6 from streamlink.stream import HLSStream
7
8
9 class TV1Channel(Plugin):
10 _url_re = re.compile(r'https?://(?:www\.)?tv1channel\.org/(?!play/)(?:index\.php/livetv)?')
11
12 @classmethod
13 def can_handle_url(cls, url):
14 return cls._url_re.match(url) is not None
15
16 def _get_streams(self):
17 self.session.http.headers.update({'User-Agent': useragents.FIREFOX})
18 res = self.session.http.get(self.url)
19 for iframe in itertags(res.text, 'iframe'):
20 if 'cdn.netbadgers.com' not in iframe.attributes.get('src'):
21 continue
22
23 res = self.session.http.get(iframe.attributes.get('src'))
24 for source in itertags(res.text, 'source'):
25 if source.attributes.get('src') and source.attributes.get('src').endswith('.m3u8'):
26 return HLSStream.parse_variant_playlist(self.session,
27 source.attributes.get('src'))
28
29 break
30
31
32 __plugin__ = TV1Channel
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/streamlink/plugins/tv1channel.py b/src/streamlink/plugins/tv1channel.py
deleted file mode 100644
--- a/src/streamlink/plugins/tv1channel.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import re
-
-from streamlink.plugin import Plugin
-from streamlink.plugin.api import useragents
-from streamlink.plugin.api.utils import itertags
-from streamlink.stream import HLSStream
-
-
-class TV1Channel(Plugin):
- _url_re = re.compile(r'https?://(?:www\.)?tv1channel\.org/(?!play/)(?:index\.php/livetv)?')
-
- @classmethod
- def can_handle_url(cls, url):
- return cls._url_re.match(url) is not None
-
- def _get_streams(self):
- self.session.http.headers.update({'User-Agent': useragents.FIREFOX})
- res = self.session.http.get(self.url)
- for iframe in itertags(res.text, 'iframe'):
- if 'cdn.netbadgers.com' not in iframe.attributes.get('src'):
- continue
-
- res = self.session.http.get(iframe.attributes.get('src'))
- for source in itertags(res.text, 'source'):
- if source.attributes.get('src') and source.attributes.get('src').endswith('.m3u8'):
- return HLSStream.parse_variant_playlist(self.session,
- source.attributes.get('src'))
-
- break
-
-
-__plugin__ = TV1Channel
| {"golden_diff": "diff --git a/src/streamlink/plugins/tv1channel.py b/src/streamlink/plugins/tv1channel.py\ndeleted file mode 100644\n--- a/src/streamlink/plugins/tv1channel.py\n+++ /dev/null\n@@ -1,32 +0,0 @@\n-import re\n-\n-from streamlink.plugin import Plugin\n-from streamlink.plugin.api import useragents\n-from streamlink.plugin.api.utils import itertags\n-from streamlink.stream import HLSStream\n-\n-\n-class TV1Channel(Plugin):\n- _url_re = re.compile(r'https?://(?:www\\.)?tv1channel\\.org/(?!play/)(?:index\\.php/livetv)?')\n-\n- @classmethod\n- def can_handle_url(cls, url):\n- return cls._url_re.match(url) is not None\n-\n- def _get_streams(self):\n- self.session.http.headers.update({'User-Agent': useragents.FIREFOX})\n- res = self.session.http.get(self.url)\n- for iframe in itertags(res.text, 'iframe'):\n- if 'cdn.netbadgers.com' not in iframe.attributes.get('src'):\n- continue\n-\n- res = self.session.http.get(iframe.attributes.get('src'))\n- for source in itertags(res.text, 'source'):\n- if source.attributes.get('src') and source.attributes.get('src').endswith('.m3u8'):\n- return HLSStream.parse_variant_playlist(self.session,\n- source.attributes.get('src'))\n-\n- break\n-\n-\n-__plugin__ = TV1Channel\n", "issue": "[bug] Tv1channel plugin no longer works\n## Bug Report\r\n<!-- Replace [ ] with [x] in order to check the box -->\r\n- [x] This is a bug report and I have read the contribution guidelines.\r\n\r\n### Reproduction steps / Explicit stream URLs to test\r\n\r\nhttp://tv1channel.org/index.php/livetv fails to open. Stream seems to have moved away to a m3u8 link - either a quick fix or just remove the plugin as it is covered by one of the main plugins.\n", "before_files": [{"content": "import re\n\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import useragents\nfrom streamlink.plugin.api.utils import itertags\nfrom streamlink.stream import HLSStream\n\n\nclass TV1Channel(Plugin):\n _url_re = re.compile(r'https?://(?:www\\.)?tv1channel\\.org/(?!play/)(?:index\\.php/livetv)?')\n\n @classmethod\n def can_handle_url(cls, url):\n return cls._url_re.match(url) is not None\n\n def _get_streams(self):\n self.session.http.headers.update({'User-Agent': useragents.FIREFOX})\n res = self.session.http.get(self.url)\n for iframe in itertags(res.text, 'iframe'):\n if 'cdn.netbadgers.com' not in iframe.attributes.get('src'):\n continue\n\n res = self.session.http.get(iframe.attributes.get('src'))\n for source in itertags(res.text, 'source'):\n if source.attributes.get('src') and source.attributes.get('src').endswith('.m3u8'):\n return HLSStream.parse_variant_playlist(self.session,\n source.attributes.get('src'))\n\n break\n\n\n__plugin__ = TV1Channel\n", "path": "src/streamlink/plugins/tv1channel.py"}], "after_files": [{"content": null, "path": "src/streamlink/plugins/tv1channel.py"}]} | 686 | 339 |
gh_patches_debug_479 | rasdani/github-patches | git_diff | cloud-custodian__cloud-custodian-4910 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
c7n-mailer - azure sendgrid pinned version not compatible with requirements.txt
The requirements.txt is installing sendgrid==6.1.0, but we are pinned to sendgrid==6.0.5.
We should look if it's possible to remove the pin in our setup.py
## Error:
`c7n-mailer 0.5.4 has requirement sendgrid==6.0.5, but you'll have sendgrid 6.1.0 which is incompatible.`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tools/c7n_mailer/setup.py`
Content:
```
1 # Copyright 2015-2017 Capital One Services, LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 from __future__ import absolute_import, division, print_function, unicode_literals
15
16 from io import open
17 from os import path
18 from setuptools import setup, find_packages
19
20 # *Any* updates here should also go into c7n_mailer/deploy.py for lambda packaging.
21 requires = [
22 "Jinja2",
23 "boto3",
24 "jsonschema",
25 "simplejson", # datadog packaging dep
26 "python-dateutil>=2.8",
27 "ruamel.yaml>=0.15.93",
28 "datadog",
29 "sendgrid==6.0.5",
30 "ldap3",
31 "c7n",
32 "redis"]
33
34
35 try:
36 from concurrent import futures # noqa F401
37 except ImportError:
38 # The backport has SyntaxErrors under py36, so avoid installing it.
39 # https://github.com/agronholm/pythonfutures/issues/41
40 requires += ['futures']
41
42 # read the contents of your README file
43 this_directory = path.abspath(path.dirname(__file__))
44 readme = path.join(this_directory, 'README.md')
45 long_description = ''
46 if path.exists(readme):
47 with open(readme, encoding='utf-8') as f:
48 long_description = f.read()
49
50 setup(
51 name="c7n_mailer",
52 version='0.5.4',
53 description="Cloud Custodian - Reference Mailer",
54 long_description=long_description,
55 long_description_content_type='text/markdown',
56 classifiers=[
57 "Topic :: System :: Systems Administration",
58 "Topic :: System :: Distributed Computing"
59 ],
60 url="https://github.com/cloud-custodian/cloud-custodian",
61 license="Apache-2.0",
62 packages=find_packages(),
63 entry_points={
64 'console_scripts': [
65 'c7n-mailer = c7n_mailer.cli:main',
66 'c7n-mailer-replay = c7n_mailer.replay:main'
67 ]
68 },
69 install_requires=requires,
70 package_data={str(''): [str('msg-templates/*.j2')]},
71 )
72
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/tools/c7n_mailer/setup.py b/tools/c7n_mailer/setup.py
--- a/tools/c7n_mailer/setup.py
+++ b/tools/c7n_mailer/setup.py
@@ -26,7 +26,7 @@
"python-dateutil>=2.8",
"ruamel.yaml>=0.15.93",
"datadog",
- "sendgrid==6.0.5",
+ "sendgrid>=6.0.5,<7.0",
"ldap3",
"c7n",
"redis"]
| {"golden_diff": "diff --git a/tools/c7n_mailer/setup.py b/tools/c7n_mailer/setup.py\n--- a/tools/c7n_mailer/setup.py\n+++ b/tools/c7n_mailer/setup.py\n@@ -26,7 +26,7 @@\n \"python-dateutil>=2.8\",\n \"ruamel.yaml>=0.15.93\",\n \"datadog\",\n- \"sendgrid==6.0.5\",\n+ \"sendgrid>=6.0.5,<7.0\",\n \"ldap3\",\n \"c7n\",\n \"redis\"]\n", "issue": "c7n-mailer - azure sendgrid pinned version not compatible with requirements.txt\nThe requirements.txt is installing sendgrid==6.1.0, but we are pinned to sendgrid==6.0.5.\r\n\r\nWe should look if it's possible to remove the pin in our setup.py\r\n\r\n## Error:\r\n`c7n-mailer 0.5.4 has requirement sendgrid==6.0.5, but you'll have sendgrid 6.1.0 which is incompatible.`\n", "before_files": [{"content": "# Copyright 2015-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom io import open\nfrom os import path\nfrom setuptools import setup, find_packages\n\n# *Any* updates here should also go into c7n_mailer/deploy.py for lambda packaging.\nrequires = [\n \"Jinja2\",\n \"boto3\",\n \"jsonschema\",\n \"simplejson\", # datadog packaging dep\n \"python-dateutil>=2.8\",\n \"ruamel.yaml>=0.15.93\",\n \"datadog\",\n \"sendgrid==6.0.5\",\n \"ldap3\",\n \"c7n\",\n \"redis\"]\n\n\ntry:\n from concurrent import futures # noqa F401\nexcept ImportError:\n # The backport has SyntaxErrors under py36, so avoid installing it.\n # https://github.com/agronholm/pythonfutures/issues/41\n requires += ['futures']\n\n# read the contents of your README file\nthis_directory = path.abspath(path.dirname(__file__))\nreadme = path.join(this_directory, 'README.md')\nlong_description = ''\nif path.exists(readme):\n with open(readme, encoding='utf-8') as f:\n long_description = f.read()\n\nsetup(\n name=\"c7n_mailer\",\n version='0.5.4',\n description=\"Cloud Custodian - Reference Mailer\",\n long_description=long_description,\n long_description_content_type='text/markdown',\n classifiers=[\n \"Topic :: System :: Systems Administration\",\n \"Topic :: System :: Distributed Computing\"\n ],\n url=\"https://github.com/cloud-custodian/cloud-custodian\",\n license=\"Apache-2.0\",\n packages=find_packages(),\n entry_points={\n 'console_scripts': [\n 'c7n-mailer = c7n_mailer.cli:main',\n 'c7n-mailer-replay = c7n_mailer.replay:main'\n ]\n },\n install_requires=requires,\n package_data={str(''): [str('msg-templates/*.j2')]},\n)\n", "path": "tools/c7n_mailer/setup.py"}], "after_files": [{"content": "# Copyright 2015-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 
or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom io import open\nfrom os import path\nfrom setuptools import setup, find_packages\n\n# *Any* updates here should also go into c7n_mailer/deploy.py for lambda packaging.\nrequires = [\n \"Jinja2\",\n \"boto3\",\n \"jsonschema\",\n \"simplejson\", # datadog packaging dep\n \"python-dateutil>=2.8\",\n \"ruamel.yaml>=0.15.93\",\n \"datadog\",\n \"sendgrid>=6.0.5,<7.0\",\n \"ldap3\",\n \"c7n\",\n \"redis\"]\n\n\ntry:\n from concurrent import futures # noqa F401\nexcept ImportError:\n # The backport has SyntaxErrors under py36, so avoid installing it.\n # https://github.com/agronholm/pythonfutures/issues/41\n requires += ['futures']\n\n# read the contents of your README file\nthis_directory = path.abspath(path.dirname(__file__))\nreadme = path.join(this_directory, 'README.md')\nlong_description = ''\nif path.exists(readme):\n with open(readme, encoding='utf-8') as f:\n long_description = f.read()\n\nsetup(\n name=\"c7n_mailer\",\n version='0.5.4',\n description=\"Cloud Custodian - Reference Mailer\",\n long_description=long_description,\n long_description_content_type='text/markdown',\n classifiers=[\n \"Topic :: System :: Systems Administration\",\n \"Topic :: System :: Distributed Computing\"\n ],\n url=\"https://github.com/cloud-custodian/cloud-custodian\",\n license=\"Apache-2.0\",\n packages=find_packages(),\n entry_points={\n 'console_scripts': [\n 'c7n-mailer = c7n_mailer.cli:main',\n 'c7n-mailer-replay = c7n_mailer.replay:main'\n ]\n },\n install_requires=requires,\n package_data={str(''): [str('msg-templates/*.j2')]},\n)\n", "path": "tools/c7n_mailer/setup.py"}]} | 1,088 | 129 |
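The whole fix is the move from an exact pin to a bounded range. The comparison below uses the `packaging` library (assumed to be available) purely to illustrate which versions each specifier admits:
```
from packaging.specifiers import SpecifierSet

exact = SpecifierSet("==6.0.5")
ranged = SpecifierSet(">=6.0.5,<7.0")

for version in ("6.0.5", "6.1.0", "7.0.0"):
    print(version, version in exact, version in ranged)
# 6.0.5: True True   6.1.0: False True   7.0.0: False False
```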
gh_patches_debug_17888 | rasdani/github-patches | git_diff | ManimCommunity__manim-203 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Document and test the '-' file name command line argument
The `get_module` function in `extract_scene.py` allows the user to input the code for a Scene from stdin, but only when the argument passed as file name is `-`. This should be documented somewhere. At the very least, the `-h` flag should mention this possibility.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `manim/__main__.py`
Content:
```
1 import inspect
2 import os
3 import platform
4 import subprocess as sp
5 import sys
6 import re
7 import traceback
8 import importlib.util
9
10 from .config import file_writer_config
11 from .scene.scene import Scene
12 from .utils.sounds import play_error_sound
13 from .utils.sounds import play_finish_sound
14 from . import constants
15 from .logger import logger
16
17
18 def open_file_if_needed(file_writer):
19 if file_writer_config["quiet"]:
20 curr_stdout = sys.stdout
21 sys.stdout = open(os.devnull, "w")
22
23 open_file = any([
24 file_writer_config["preview"],
25 file_writer_config["show_file_in_finder"]
26 ])
27 if open_file:
28 current_os = platform.system()
29 file_paths = []
30
31 if file_writer_config["save_last_frame"]:
32 file_paths.append(file_writer.get_image_file_path())
33 if file_writer_config["write_to_movie"]:
34 file_paths.append(file_writer.get_movie_file_path())
35
36 for file_path in file_paths:
37 if current_os == "Windows":
38 os.startfile(file_path)
39 else:
40 commands = []
41 if current_os == "Linux":
42 commands.append("xdg-open")
43 elif current_os.startswith("CYGWIN"):
44 commands.append("cygstart")
45 else: # Assume macOS
46 commands.append("open")
47
48 if file_writer_config["show_file_in_finder"]:
49 commands.append("-R")
50
51 commands.append(file_path)
52
53 # commands.append("-g")
54 FNULL = open(os.devnull, 'w')
55 sp.call(commands, stdout=FNULL, stderr=sp.STDOUT)
56 FNULL.close()
57
58 if file_writer_config["quiet"]:
59 sys.stdout.close()
60 sys.stdout = curr_stdout
61
62
63 def is_child_scene(obj, module):
64 return (inspect.isclass(obj)
65 and issubclass(obj, Scene)
66 and obj != Scene
67 and obj.__module__.startswith(module.__name__))
68
69
70 def prompt_user_for_choice(scene_classes):
71 num_to_class = {}
72 for count, scene_class in enumerate(scene_classes):
73 count += 1 # start with 1 instead of 0
74 name = scene_class.__name__
75 print("%d: %s" % (count, name))
76 num_to_class[count] = scene_class
77 try:
78 user_input = input(constants.CHOOSE_NUMBER_MESSAGE)
79 return [num_to_class[int(num_str)]
80 for num_str in re.split(r"\s*,\s*", user_input.strip())]
81 except KeyError:
82 logger.error(constants.INVALID_NUMBER_MESSAGE)
83 sys.exit(2)
84 except EOFError:
85 sys.exit(1)
86
87
88 def get_scenes_to_render(scene_classes):
89 if not scene_classes:
90 logger.error(constants.NO_SCENE_MESSAGE)
91 return []
92 if file_writer_config["write_all"]:
93 return scene_classes
94 result = []
95 for scene_name in file_writer_config["scene_names"]:
96 found = False
97 for scene_class in scene_classes:
98 if scene_class.__name__ == scene_name:
99 result.append(scene_class)
100 found = True
101 break
102 if not found and (scene_name != ""):
103 logger.error(
104 constants.SCENE_NOT_FOUND_MESSAGE.format(
105 scene_name
106 )
107 )
108 if result:
109 return result
110 return [scene_classes[0]] if len(scene_classes) == 1 else prompt_user_for_choice(scene_classes)
111
112
113 def get_scene_classes_from_module(module):
114 return [
115 member[1]
116 for member in inspect.getmembers(
117 module,
118 lambda x: is_child_scene(x, module)
119 )
120 ]
121
122
123 def get_module(file_name):
124 if file_name == "-":
125 module = types.ModuleType("input_scenes")
126 code = sys.stdin.read()
127 try:
128 exec(code, module.__dict__)
129 return module
130 except Exception as e:
131 logger.error(f"Failed to render scene: {str(e)}")
132 sys.exit(2)
133 else:
134 if os.path.exists(file_name):
135 if file_name[-3:] != ".py":
136 raise Exception(f"{file_name} is not a valid Manim python script.")
137 module_name = file_name[:-3].replace(os.sep, '.').split('.')[-1]
138 spec = importlib.util.spec_from_file_location(module_name, file_name)
139 module = importlib.util.module_from_spec(spec)
140 spec.loader.exec_module(module)
141 return module
142 else:
143 raise FileNotFoundError(f'{file_name} not found')
144
145
146 def main():
147 module = get_module(file_writer_config["input_file"])
148 all_scene_classes = get_scene_classes_from_module(module)
149 scene_classes_to_render = get_scenes_to_render(all_scene_classes)
150 sound_on = file_writer_config["sound"]
151 for SceneClass in scene_classes_to_render:
152 try:
153 # By invoking, this renders the full scene
154 scene = SceneClass()
155 open_file_if_needed(scene.file_writer)
156 if sound_on:
157 play_finish_sound()
158 except Exception:
159 print("\n\n")
160 traceback.print_exc()
161 print("\n\n")
162 if sound_on:
163 play_error_sound()
164
165
166 if __name__ == "__main__":
167 main()
168
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/manim/__main__.py b/manim/__main__.py
--- a/manim/__main__.py
+++ b/manim/__main__.py
@@ -6,6 +6,7 @@
import re
import traceback
import importlib.util
+import types
from .config import file_writer_config
from .scene.scene import Scene
@@ -123,7 +124,12 @@
def get_module(file_name):
if file_name == "-":
module = types.ModuleType("input_scenes")
+ logger.info("Enter the animation's code & end with an EOF (CTRL+D on Linux/Unix, CTRL+Z on Windows):")
code = sys.stdin.read()
+ if not code.startswith("from manim import"):
+ logger.warn("Didn't find an import statement for Manim. Importing automatically...")
+ code="from manim import *\n"+code
+ logger.info("Rendering animation from typed code...")
try:
exec(code, module.__dict__)
return module
| {"golden_diff": "diff --git a/manim/__main__.py b/manim/__main__.py\n--- a/manim/__main__.py\n+++ b/manim/__main__.py\n@@ -6,6 +6,7 @@\n import re\n import traceback\n import importlib.util\n+import types\n \n from .config import file_writer_config\n from .scene.scene import Scene\n@@ -123,7 +124,12 @@\n def get_module(file_name):\n if file_name == \"-\":\n module = types.ModuleType(\"input_scenes\")\n+ logger.info(\"Enter the animation's code & end with an EOF (CTRL+D on Linux/Unix, CTRL+Z on Windows):\")\n code = sys.stdin.read()\n+ if not code.startswith(\"from manim import\"):\n+ logger.warn(\"Didn't find an import statement for Manim. Importing automatically...\")\n+ code=\"from manim import *\\n\"+code\n+ logger.info(\"Rendering animation from typed code...\")\n try:\n exec(code, module.__dict__)\n return module\n", "issue": "Document and test the '-' file name command line argument\nThe `get_module` function in `extract_scene.py` allows the user to input the code for a Scene from stdin, but only when the argument passed as file name is `-`. This should be documented somewhere. At the very least, the `-h` flag should mention this possibility.\n", "before_files": [{"content": "import inspect\nimport os\nimport platform\nimport subprocess as sp\nimport sys\nimport re\nimport traceback\nimport importlib.util\n\nfrom .config import file_writer_config\nfrom .scene.scene import Scene\nfrom .utils.sounds import play_error_sound\nfrom .utils.sounds import play_finish_sound\nfrom . import constants\nfrom .logger import logger\n\n\ndef open_file_if_needed(file_writer):\n if file_writer_config[\"quiet\"]:\n curr_stdout = sys.stdout\n sys.stdout = open(os.devnull, \"w\")\n\n open_file = any([\n file_writer_config[\"preview\"],\n file_writer_config[\"show_file_in_finder\"]\n ])\n if open_file:\n current_os = platform.system()\n file_paths = []\n\n if file_writer_config[\"save_last_frame\"]:\n file_paths.append(file_writer.get_image_file_path())\n if file_writer_config[\"write_to_movie\"]:\n file_paths.append(file_writer.get_movie_file_path())\n\n for file_path in file_paths:\n if current_os == \"Windows\":\n os.startfile(file_path)\n else:\n commands = []\n if current_os == \"Linux\":\n commands.append(\"xdg-open\")\n elif current_os.startswith(\"CYGWIN\"):\n commands.append(\"cygstart\")\n else: # Assume macOS\n commands.append(\"open\")\n\n if file_writer_config[\"show_file_in_finder\"]:\n commands.append(\"-R\")\n\n commands.append(file_path)\n\n # commands.append(\"-g\")\n FNULL = open(os.devnull, 'w')\n sp.call(commands, stdout=FNULL, stderr=sp.STDOUT)\n FNULL.close()\n\n if file_writer_config[\"quiet\"]:\n sys.stdout.close()\n sys.stdout = curr_stdout\n\n\ndef is_child_scene(obj, module):\n return (inspect.isclass(obj)\n and issubclass(obj, Scene)\n and obj != Scene\n and obj.__module__.startswith(module.__name__))\n\n\ndef prompt_user_for_choice(scene_classes):\n num_to_class = {}\n for count, scene_class in enumerate(scene_classes):\n count += 1 # start with 1 instead of 0\n name = scene_class.__name__\n print(\"%d: %s\" % (count, name))\n num_to_class[count] = scene_class\n try:\n user_input = input(constants.CHOOSE_NUMBER_MESSAGE)\n return [num_to_class[int(num_str)]\n for num_str in re.split(r\"\\s*,\\s*\", user_input.strip())]\n except KeyError:\n logger.error(constants.INVALID_NUMBER_MESSAGE)\n sys.exit(2)\n except EOFError:\n sys.exit(1)\n\n\ndef get_scenes_to_render(scene_classes):\n if not scene_classes:\n logger.error(constants.NO_SCENE_MESSAGE)\n return []\n if 
file_writer_config[\"write_all\"]:\n return scene_classes\n result = []\n for scene_name in file_writer_config[\"scene_names\"]:\n found = False\n for scene_class in scene_classes:\n if scene_class.__name__ == scene_name:\n result.append(scene_class)\n found = True\n break\n if not found and (scene_name != \"\"):\n logger.error(\n constants.SCENE_NOT_FOUND_MESSAGE.format(\n scene_name\n )\n )\n if result:\n return result\n return [scene_classes[0]] if len(scene_classes) == 1 else prompt_user_for_choice(scene_classes)\n\n\ndef get_scene_classes_from_module(module):\n return [\n member[1]\n for member in inspect.getmembers(\n module,\n lambda x: is_child_scene(x, module)\n )\n ]\n\n\ndef get_module(file_name):\n if file_name == \"-\":\n module = types.ModuleType(\"input_scenes\")\n code = sys.stdin.read()\n try:\n exec(code, module.__dict__)\n return module\n except Exception as e:\n logger.error(f\"Failed to render scene: {str(e)}\")\n sys.exit(2)\n else:\n if os.path.exists(file_name):\n if file_name[-3:] != \".py\":\n raise Exception(f\"{file_name} is not a valid Manim python script.\")\n module_name = file_name[:-3].replace(os.sep, '.').split('.')[-1]\n spec = importlib.util.spec_from_file_location(module_name, file_name)\n module = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(module)\n return module\n else:\n raise FileNotFoundError(f'{file_name} not found')\n\n\ndef main():\n module = get_module(file_writer_config[\"input_file\"])\n all_scene_classes = get_scene_classes_from_module(module)\n scene_classes_to_render = get_scenes_to_render(all_scene_classes)\n sound_on = file_writer_config[\"sound\"]\n for SceneClass in scene_classes_to_render:\n try:\n # By invoking, this renders the full scene\n scene = SceneClass()\n open_file_if_needed(scene.file_writer)\n if sound_on:\n play_finish_sound()\n except Exception:\n print(\"\\n\\n\")\n traceback.print_exc()\n print(\"\\n\\n\")\n if sound_on:\n play_error_sound()\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "manim/__main__.py"}], "after_files": [{"content": "import inspect\nimport os\nimport platform\nimport subprocess as sp\nimport sys\nimport re\nimport traceback\nimport importlib.util\nimport types\n\nfrom .config import file_writer_config\nfrom .scene.scene import Scene\nfrom .utils.sounds import play_error_sound\nfrom .utils.sounds import play_finish_sound\nfrom . 
import constants\nfrom .logger import logger\n\n\ndef open_file_if_needed(file_writer):\n if file_writer_config[\"quiet\"]:\n curr_stdout = sys.stdout\n sys.stdout = open(os.devnull, \"w\")\n\n open_file = any([\n file_writer_config[\"preview\"],\n file_writer_config[\"show_file_in_finder\"]\n ])\n if open_file:\n current_os = platform.system()\n file_paths = []\n\n if file_writer_config[\"save_last_frame\"]:\n file_paths.append(file_writer.get_image_file_path())\n if file_writer_config[\"write_to_movie\"]:\n file_paths.append(file_writer.get_movie_file_path())\n\n for file_path in file_paths:\n if current_os == \"Windows\":\n os.startfile(file_path)\n else:\n commands = []\n if current_os == \"Linux\":\n commands.append(\"xdg-open\")\n elif current_os.startswith(\"CYGWIN\"):\n commands.append(\"cygstart\")\n else: # Assume macOS\n commands.append(\"open\")\n\n if file_writer_config[\"show_file_in_finder\"]:\n commands.append(\"-R\")\n\n commands.append(file_path)\n\n # commands.append(\"-g\")\n FNULL = open(os.devnull, 'w')\n sp.call(commands, stdout=FNULL, stderr=sp.STDOUT)\n FNULL.close()\n\n if file_writer_config[\"quiet\"]:\n sys.stdout.close()\n sys.stdout = curr_stdout\n\n\ndef is_child_scene(obj, module):\n return (inspect.isclass(obj)\n and issubclass(obj, Scene)\n and obj != Scene\n and obj.__module__.startswith(module.__name__))\n\n\ndef prompt_user_for_choice(scene_classes):\n num_to_class = {}\n for count, scene_class in enumerate(scene_classes):\n count += 1 # start with 1 instead of 0\n name = scene_class.__name__\n print(\"%d: %s\" % (count, name))\n num_to_class[count] = scene_class\n try:\n user_input = input(constants.CHOOSE_NUMBER_MESSAGE)\n return [num_to_class[int(num_str)]\n for num_str in re.split(r\"\\s*,\\s*\", user_input.strip())]\n except KeyError:\n logger.error(constants.INVALID_NUMBER_MESSAGE)\n sys.exit(2)\n except EOFError:\n sys.exit(1)\n\n\ndef get_scenes_to_render(scene_classes):\n if not scene_classes:\n logger.error(constants.NO_SCENE_MESSAGE)\n return []\n if file_writer_config[\"write_all\"]:\n return scene_classes\n result = []\n for scene_name in file_writer_config[\"scene_names\"]:\n found = False\n for scene_class in scene_classes:\n if scene_class.__name__ == scene_name:\n result.append(scene_class)\n found = True\n break\n if not found and (scene_name != \"\"):\n logger.error(\n constants.SCENE_NOT_FOUND_MESSAGE.format(\n scene_name\n )\n )\n if result:\n return result\n return [scene_classes[0]] if len(scene_classes) == 1 else prompt_user_for_choice(scene_classes)\n\n\ndef get_scene_classes_from_module(module):\n return [\n member[1]\n for member in inspect.getmembers(\n module,\n lambda x: is_child_scene(x, module)\n )\n ]\n\n\ndef get_module(file_name):\n if file_name == \"-\":\n module = types.ModuleType(\"input_scenes\")\n logger.info(\"Enter the animation's code & end with an EOF (CTRL+D on Linux/Unix, CTRL+Z on Windows):\")\n code = sys.stdin.read()\n if not code.startswith(\"from manim import\"):\n logger.warn(\"Didn't find an import statement for Manim. 
Importing automatically...\")\n code=\"from manim import *\\n\"+code\n logger.info(\"Rendering animation from typed code...\")\n try:\n exec(code, module.__dict__)\n return module\n except Exception as e:\n logger.error(f\"Failed to render scene: {str(e)}\")\n sys.exit(2)\n else:\n if os.path.exists(file_name):\n if file_name[-3:] != \".py\":\n raise Exception(f\"{file_name} is not a valid Manim python script.\")\n module_name = file_name[:-3].replace(os.sep, '.').split('.')[-1]\n spec = importlib.util.spec_from_file_location(module_name, file_name)\n module = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(module)\n return module\n else:\n raise FileNotFoundError(f'{file_name} not found')\n\n\ndef main():\n module = get_module(file_writer_config[\"input_file\"])\n all_scene_classes = get_scene_classes_from_module(module)\n scene_classes_to_render = get_scenes_to_render(all_scene_classes)\n sound_on = file_writer_config[\"sound\"]\n for SceneClass in scene_classes_to_render:\n try:\n # By invoking, this renders the full scene\n scene = SceneClass()\n open_file_if_needed(scene.file_writer)\n if sound_on:\n play_finish_sound()\n except Exception:\n print(\"\\n\\n\")\n traceback.print_exc()\n print(\"\\n\\n\")\n if sound_on:\n play_error_sound()\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "manim/__main__.py"}]} | 1,816 | 223 |
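
The core of the fix in the record above is the `types.ModuleType` plus `exec` pattern: source text read from stdin is executed into a fresh module object, which the rest of the pipeline can then inspect for Scene subclasses. A small self-contained sketch of that pattern, independent of manim itself, is below; the fallback snippet and names are illustrative.

```python
import sys
import types


def module_from_source(code: str, name: str = "input_scenes") -> types.ModuleType:
    """Build a throwaway module object from source text, e.g. text piped on stdin."""
    module = types.ModuleType(name)
    # exec() with the module's __dict__ as globals populates the module namespace
    exec(code, module.__dict__)
    return module


if __name__ == "__main__":
    # `python sketch.py < scene.py` executes the piped file; otherwise use a tiny inline example
    source = sys.stdin.read() if not sys.stdin.isatty() else "def greet():\n    return 'hello'\n"
    mod = module_from_source(source)
    print([n for n in vars(mod) if not n.startswith("__")])
```
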
gh_patches_debug_40359 | rasdani/github-patches | git_diff | kartoza__prj.app-909 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Reshape html summary in atom/rss sponsors feed
Currently the sponsor items are looking like this:
```
<div>
<img
src="/media/images/projects/b9c08a00a74cba8e406e97f94a1f90595f5ac39d.png"
width="300px">
</div>
<p><span>Sponsorship level: Gold : 9000 EUR</span>
<br/>
<span>Sponsorship period: 21 June 2017 - 21 June 2018</span><br/>
<span>Amount sponsored: EUR 9000.00<span>
</p>
```
Is it (easy) possible in this html to change the following:
- make the img-element a full url (as I use the feed in qgis.org and it is easiest to just retrieve the images from changelog.qgis.org
- add some classes to the img-element and the p-element as then I can do some more styling.
An example:
```
<div>
<img src="http://changelog.qgis.org/media/images/projects/..5ac39d.png"
class="sponsor_img gold">
</div>
<p class="sponsor_body gold">
<span>Sponsorship level: Gold : 9000 EUR</span>
<br/>
<span>Sponsorship period: 21 June 2017 - 21 June 2018</span>
<br/>
<span>Amount sponsored: EUR 9000.00<span>
</p>
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `django_project/changes/feeds/sponsor.py`
Content:
```
1 # coding=utf-8
2
3 __author__ = 'Anita Hapsari <[email protected]>'
4 __date__ = '23/10/2017'
5
6 from django.conf import settings
7 from django.contrib.syndication.views import Feed
8 from django.utils.feedgenerator import Atom1Feed
9 from django.shortcuts import get_object_or_404
10 from base.models.project import Project
11 from changes.models.sponsorship_period import SponsorshipPeriod
12 from changes.feeds.json_rss_feed import JSONFeed
13
14
15 # noinspection PyMethodMayBeStatic
16 class RssSponsorFeed(Feed):
17 """RSS Feed class for sponsor."""
18
19 def get_object(self, request, *args, **kwargs):
20 """Return project object that matches the project_slug.
21
22 :param request: The incoming HTTP request object
23 :type request: HttpRequest
24
25 :param args: Positional arguments
26 :type args: tuple
27
28 :param kwargs: Keyword arguments
29 :type kwargs: dict
30
31 :returns: A project
32 :rtype: Project
33
34 :raises: Http404
35 """
36 project_slug = kwargs.get('project_slug', None)
37 return get_object_or_404(Project, slug=project_slug)
38
39 def title(self, obj):
40 """Return a title for the RSS.
41
42 :param obj: A project
43 :type obj: Project
44
45 :returns: Title of the RSS Feed.
46 :rtype: str
47 """
48 return 'RSS Sponsor of %s Project' % obj.name
49
50 def description(self, obj):
51 """Return a description for the RSS.
52
53 :param obj: A project
54 :type obj: Project
55
56 :returns: Description of the RSS Feed.
57 :rtype: str
58 """
59 return 'These are the latest sponsor of %s project.' % obj.name
60
61 def link(self, obj):
62 """Return the url of the latest sponsor.
63
64 :param obj: Latest sponsor of a project
65 :type obj: SponsorshipPeriod
66
67 :returns: Url of the latest sponsor.
68 :rtype: str
69 """
70 return obj.get_absolute_url()
71
72 def items(self, obj):
73 """Return latest sponsors of the project.
74
75 :param obj: A project
76 :type obj: Project
77
78 :returns: List of latest sponsor of a project
79 :rtype: list
80 """
81 return SponsorshipPeriod.objects.filter(
82 project=obj).order_by('-sponsorship_level__value', '-end_date')
83
84 def item_title(self, item):
85 """Return the title of the sponsor.
86
87 :param item: Sponsorship period object of a project
88 :type item: Sponsorship period
89
90 :returns: name of the sponsor
91 :rtype: str
92 """
93 return item.sponsor.name
94
95 def item_description(self, item):
96 """Return the description of the sponsor.
97
98 :param item: Sponsorship period object of a project
99 :type item: Sponsorship period
100
101 :returns: description of the sponsor
102 :rtype: str
103 """
104 data = {
105 'media_url': settings.MEDIA_URL,
106 'sponsor_logo': item.sponsor.logo,
107 'sponsor_level': item.sponsorship_level,
108 'start_date': item.start_date.strftime('%d %B %Y'),
109 'end_date': item.end_date.strftime('%d %B %Y'),
110 'currency': item.currency,
111 'amount_sponsored': item.amount_sponsored,
112 }
113
114 descriptions = \
115 '<div>' \
116 '<img src="{media_url}{sponsor_logo}" width="300px"></div>' \
117 '<p><span>Sponsorship level: {sponsor_level}</span><br/>' \
118 '<span>Sponsorship period: {start_date} - {end_date}</span><br/>' \
119 '<span>Amount sponsored: {currency} {amount_sponsored}<span></p>'\
120 .format(**data)
121 return descriptions
122
123 def item_extra_kwargs(self, item):
124 return {'image_url': item.sponsor.logo.url}
125
126
127
128 class AtomSponsorFeed(RssSponsorFeed):
129 """Atom Feed class for sponsor."""
130
131 feed_type = Atom1Feed
132 subtitle = RssSponsorFeed.description
133
134
135 class JSONSponsorFeed(RssSponsorFeed):
136 """JSON Feed class for sponsor."""
137
138 feed_type = JSONFeed
139
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/django_project/changes/feeds/sponsor.py b/django_project/changes/feeds/sponsor.py
--- a/django_project/changes/feeds/sponsor.py
+++ b/django_project/changes/feeds/sponsor.py
@@ -3,8 +3,9 @@
__author__ = 'Anita Hapsari <[email protected]>'
__date__ = '23/10/2017'
-from django.conf import settings
+import datetime
from django.contrib.syndication.views import Feed
+from django.core.urlresolvers import reverse
from django.utils.feedgenerator import Atom1Feed
from django.shortcuts import get_object_or_404
from base.models.project import Project
@@ -34,6 +35,7 @@
:raises: Http404
"""
project_slug = kwargs.get('project_slug', None)
+ self.domain_path_url = request.build_absolute_uri(reverse('home'))
return get_object_or_404(Project, slug=project_slug)
def title(self, obj):
@@ -78,8 +80,10 @@
:returns: List of latest sponsor of a project
:rtype: list
"""
+ today = datetime.datetime.now().date()
return SponsorshipPeriod.objects.filter(
- project=obj).order_by('-sponsorship_level__value', '-end_date')
+ project=obj, end_date__gte=today
+ ).order_by('-sponsorship_level__value', '-end_date')
def item_title(self, item):
"""Return the title of the sponsor.
@@ -101,20 +105,26 @@
:returns: description of the sponsor
:rtype: str
"""
+ level_class = str(item.sponsorship_level.name).decode('utf-8').lower()
+ head, sep, tail = self.domain_path_url.partition('/en/')
+
data = {
- 'media_url': settings.MEDIA_URL,
- 'sponsor_logo': item.sponsor.logo,
+ 'domain': head,
+ 'sponsor_logo': item.sponsor.logo.url,
'sponsor_level': item.sponsorship_level,
'start_date': item.start_date.strftime('%d %B %Y'),
'end_date': item.end_date.strftime('%d %B %Y'),
'currency': item.currency,
'amount_sponsored': item.amount_sponsored,
+ 'sponsor_class': level_class,
}
descriptions = \
'<div>' \
- '<img src="{media_url}{sponsor_logo}" width="300px"></div>' \
- '<p><span>Sponsorship level: {sponsor_level}</span><br/>' \
+ '<img class="sponsor_img {sponsor_class}" ' \
+ 'src="{domain}{sponsor_logo}" width="300px"></div>' \
+ '<p class="sponsor_body {sponsor_class}">' \
+ '<span>Sponsorship level: {sponsor_level}</span><br/>' \
'<span>Sponsorship period: {start_date} - {end_date}</span><br/>' \
'<span>Amount sponsored: {currency} {amount_sponsored}<span></p>'\
.format(**data)
| {"golden_diff": "diff --git a/django_project/changes/feeds/sponsor.py b/django_project/changes/feeds/sponsor.py\n--- a/django_project/changes/feeds/sponsor.py\n+++ b/django_project/changes/feeds/sponsor.py\n@@ -3,8 +3,9 @@\n __author__ = 'Anita Hapsari <[email protected]>'\n __date__ = '23/10/2017'\n \n-from django.conf import settings\n+import datetime\n from django.contrib.syndication.views import Feed\n+from django.core.urlresolvers import reverse\n from django.utils.feedgenerator import Atom1Feed\n from django.shortcuts import get_object_or_404\n from base.models.project import Project\n@@ -34,6 +35,7 @@\n :raises: Http404\n \"\"\"\n project_slug = kwargs.get('project_slug', None)\n+ self.domain_path_url = request.build_absolute_uri(reverse('home'))\n return get_object_or_404(Project, slug=project_slug)\n \n def title(self, obj):\n@@ -78,8 +80,10 @@\n :returns: List of latest sponsor of a project\n :rtype: list\n \"\"\"\n+ today = datetime.datetime.now().date()\n return SponsorshipPeriod.objects.filter(\n- project=obj).order_by('-sponsorship_level__value', '-end_date')\n+ project=obj, end_date__gte=today\n+ ).order_by('-sponsorship_level__value', '-end_date')\n \n def item_title(self, item):\n \"\"\"Return the title of the sponsor.\n@@ -101,20 +105,26 @@\n :returns: description of the sponsor\n :rtype: str\n \"\"\"\n+ level_class = str(item.sponsorship_level.name).decode('utf-8').lower()\n+ head, sep, tail = self.domain_path_url.partition('/en/')\n+\n data = {\n- 'media_url': settings.MEDIA_URL,\n- 'sponsor_logo': item.sponsor.logo,\n+ 'domain': head,\n+ 'sponsor_logo': item.sponsor.logo.url,\n 'sponsor_level': item.sponsorship_level,\n 'start_date': item.start_date.strftime('%d %B %Y'),\n 'end_date': item.end_date.strftime('%d %B %Y'),\n 'currency': item.currency,\n 'amount_sponsored': item.amount_sponsored,\n+ 'sponsor_class': level_class,\n }\n \n descriptions = \\\n '<div>' \\\n- '<img src=\"{media_url}{sponsor_logo}\" width=\"300px\"></div>' \\\n- '<p><span>Sponsorship level: {sponsor_level}</span><br/>' \\\n+ '<img class=\"sponsor_img {sponsor_class}\" ' \\\n+ 'src=\"{domain}{sponsor_logo}\" width=\"300px\"></div>' \\\n+ '<p class=\"sponsor_body {sponsor_class}\">' \\\n+ '<span>Sponsorship level: {sponsor_level}</span><br/>' \\\n '<span>Sponsorship period: {start_date} - {end_date}</span><br/>' \\\n '<span>Amount sponsored: {currency} {amount_sponsored}<span></p>'\\\n .format(**data)\n", "issue": "Reshape html summary in atom/rss sponsors feed\nCurrently the sponsor items are looking like this:\r\n\r\n```\r\n<div>\r\n<img\r\nsrc=\"/media/images/projects/b9c08a00a74cba8e406e97f94a1f90595f5ac39d.png\"\r\nwidth=\"300px\">\r\n</div>\r\n<p><span>Sponsorship level: Gold : 9000 EUR</span>\r\n<br/>\r\n<span>Sponsorship period: 21 June 2017 - 21 June 2018</span><br/>\r\n<span>Amount sponsored: EUR 9000.00<span>\r\n</p>\r\n```\r\n\r\nIs it (easy) possible in this html to change the following:\r\n- make the img-element a full url (as I use the feed in qgis.org and it is easiest to just retrieve the images from changelog.qgis.org\r\n- add some classes to the img-element and the p-element as then I can do some more styling.\r\n\r\nAn example:\r\n\r\n```\r\n<div>\r\n<img src=\"http://changelog.qgis.org/media/images/projects/..5ac39d.png\"\r\nclass=\"sponsor_img gold\">\r\n</div>\r\n<p class=\"sponsor_body gold\">\r\n<span>Sponsorship level: Gold : 9000 EUR</span>\r\n<br/>\r\n<span>Sponsorship period: 21 June 2017 - 21 June 2018</span>\r\n<br/>\r\n<span>Amount sponsored: EUR 
9000.00<span>\r\n</p>\r\n```\r\n\r\n\n", "before_files": [{"content": "# coding=utf-8\n\n__author__ = 'Anita Hapsari <[email protected]>'\n__date__ = '23/10/2017'\n\nfrom django.conf import settings\nfrom django.contrib.syndication.views import Feed\nfrom django.utils.feedgenerator import Atom1Feed\nfrom django.shortcuts import get_object_or_404\nfrom base.models.project import Project\nfrom changes.models.sponsorship_period import SponsorshipPeriod\nfrom changes.feeds.json_rss_feed import JSONFeed\n\n\n# noinspection PyMethodMayBeStatic\nclass RssSponsorFeed(Feed):\n \"\"\"RSS Feed class for sponsor.\"\"\"\n\n def get_object(self, request, *args, **kwargs):\n \"\"\"Return project object that matches the project_slug.\n\n :param request: The incoming HTTP request object\n :type request: HttpRequest\n\n :param args: Positional arguments\n :type args: tuple\n\n :param kwargs: Keyword arguments\n :type kwargs: dict\n\n :returns: A project\n :rtype: Project\n\n :raises: Http404\n \"\"\"\n project_slug = kwargs.get('project_slug', None)\n return get_object_or_404(Project, slug=project_slug)\n\n def title(self, obj):\n \"\"\"Return a title for the RSS.\n\n :param obj: A project\n :type obj: Project\n\n :returns: Title of the RSS Feed.\n :rtype: str\n \"\"\"\n return 'RSS Sponsor of %s Project' % obj.name\n\n def description(self, obj):\n \"\"\"Return a description for the RSS.\n\n :param obj: A project\n :type obj: Project\n\n :returns: Description of the RSS Feed.\n :rtype: str\n \"\"\"\n return 'These are the latest sponsor of %s project.' % obj.name\n\n def link(self, obj):\n \"\"\"Return the url of the latest sponsor.\n\n :param obj: Latest sponsor of a project\n :type obj: SponsorshipPeriod\n\n :returns: Url of the latest sponsor.\n :rtype: str\n \"\"\"\n return obj.get_absolute_url()\n\n def items(self, obj):\n \"\"\"Return latest sponsors of the project.\n\n :param obj: A project\n :type obj: Project\n\n :returns: List of latest sponsor of a project\n :rtype: list\n \"\"\"\n return SponsorshipPeriod.objects.filter(\n project=obj).order_by('-sponsorship_level__value', '-end_date')\n\n def item_title(self, item):\n \"\"\"Return the title of the sponsor.\n\n :param item: Sponsorship period object of a project\n :type item: Sponsorship period\n\n :returns: name of the sponsor\n :rtype: str\n \"\"\"\n return item.sponsor.name\n\n def item_description(self, item):\n \"\"\"Return the description of the sponsor.\n\n :param item: Sponsorship period object of a project\n :type item: Sponsorship period\n\n :returns: description of the sponsor\n :rtype: str\n \"\"\"\n data = {\n 'media_url': settings.MEDIA_URL,\n 'sponsor_logo': item.sponsor.logo,\n 'sponsor_level': item.sponsorship_level,\n 'start_date': item.start_date.strftime('%d %B %Y'),\n 'end_date': item.end_date.strftime('%d %B %Y'),\n 'currency': item.currency,\n 'amount_sponsored': item.amount_sponsored,\n }\n\n descriptions = \\\n '<div>' \\\n '<img src=\"{media_url}{sponsor_logo}\" width=\"300px\"></div>' \\\n '<p><span>Sponsorship level: {sponsor_level}</span><br/>' \\\n '<span>Sponsorship period: {start_date} - {end_date}</span><br/>' \\\n '<span>Amount sponsored: {currency} {amount_sponsored}<span></p>'\\\n .format(**data)\n return descriptions\n\n def item_extra_kwargs(self, item):\n return {'image_url': item.sponsor.logo.url}\n\n\n\nclass AtomSponsorFeed(RssSponsorFeed):\n \"\"\"Atom Feed class for sponsor.\"\"\"\n\n feed_type = Atom1Feed\n subtitle = RssSponsorFeed.description\n\n\nclass JSONSponsorFeed(RssSponsorFeed):\n \"\"\"JSON 
Feed class for sponsor.\"\"\"\n\n feed_type = JSONFeed\n", "path": "django_project/changes/feeds/sponsor.py"}], "after_files": [{"content": "# coding=utf-8\n\n__author__ = 'Anita Hapsari <[email protected]>'\n__date__ = '23/10/2017'\n\nimport datetime\nfrom django.contrib.syndication.views import Feed\nfrom django.core.urlresolvers import reverse\nfrom django.utils.feedgenerator import Atom1Feed\nfrom django.shortcuts import get_object_or_404\nfrom base.models.project import Project\nfrom changes.models.sponsorship_period import SponsorshipPeriod\nfrom changes.feeds.json_rss_feed import JSONFeed\n\n\n# noinspection PyMethodMayBeStatic\nclass RssSponsorFeed(Feed):\n \"\"\"RSS Feed class for sponsor.\"\"\"\n\n def get_object(self, request, *args, **kwargs):\n \"\"\"Return project object that matches the project_slug.\n\n :param request: The incoming HTTP request object\n :type request: HttpRequest\n\n :param args: Positional arguments\n :type args: tuple\n\n :param kwargs: Keyword arguments\n :type kwargs: dict\n\n :returns: A project\n :rtype: Project\n\n :raises: Http404\n \"\"\"\n project_slug = kwargs.get('project_slug', None)\n self.domain_path_url = request.build_absolute_uri(reverse('home'))\n return get_object_or_404(Project, slug=project_slug)\n\n def title(self, obj):\n \"\"\"Return a title for the RSS.\n\n :param obj: A project\n :type obj: Project\n\n :returns: Title of the RSS Feed.\n :rtype: str\n \"\"\"\n return 'RSS Sponsor of %s Project' % obj.name\n\n def description(self, obj):\n \"\"\"Return a description for the RSS.\n\n :param obj: A project\n :type obj: Project\n\n :returns: Description of the RSS Feed.\n :rtype: str\n \"\"\"\n return 'These are the latest sponsor of %s project.' % obj.name\n\n def link(self, obj):\n \"\"\"Return the url of the latest sponsor.\n\n :param obj: Latest sponsor of a project\n :type obj: SponsorshipPeriod\n\n :returns: Url of the latest sponsor.\n :rtype: str\n \"\"\"\n return obj.get_absolute_url()\n\n def items(self, obj):\n \"\"\"Return latest sponsors of the project.\n\n :param obj: A project\n :type obj: Project\n\n :returns: List of latest sponsor of a project\n :rtype: list\n \"\"\"\n today = datetime.datetime.now().date()\n return SponsorshipPeriod.objects.filter(\n project=obj, end_date__gte=today\n ).order_by('-sponsorship_level__value', '-end_date')\n\n def item_title(self, item):\n \"\"\"Return the title of the sponsor.\n\n :param item: Sponsorship period object of a project\n :type item: Sponsorship period\n\n :returns: name of the sponsor\n :rtype: str\n \"\"\"\n return item.sponsor.name\n\n def item_description(self, item):\n \"\"\"Return the description of the sponsor.\n\n :param item: Sponsorship period object of a project\n :type item: Sponsorship period\n\n :returns: description of the sponsor\n :rtype: str\n \"\"\"\n level_class = str(item.sponsorship_level.name).decode('utf-8').lower()\n head, sep, tail = self.domain_path_url.partition('/en/')\n\n data = {\n 'domain': head,\n 'sponsor_logo': item.sponsor.logo.url,\n 'sponsor_level': item.sponsorship_level,\n 'start_date': item.start_date.strftime('%d %B %Y'),\n 'end_date': item.end_date.strftime('%d %B %Y'),\n 'currency': item.currency,\n 'amount_sponsored': item.amount_sponsored,\n 'sponsor_class': level_class,\n }\n\n descriptions = \\\n '<div>' \\\n '<img class=\"sponsor_img {sponsor_class}\" ' \\\n 'src=\"{domain}{sponsor_logo}\" width=\"300px\"></div>' \\\n '<p class=\"sponsor_body {sponsor_class}\">' \\\n '<span>Sponsorship level: {sponsor_level}</span><br/>' \\\n 
'<span>Sponsorship period: {start_date} - {end_date}</span><br/>' \\\n '<span>Amount sponsored: {currency} {amount_sponsored}<span></p>'\\\n .format(**data)\n return descriptions\n\n def item_extra_kwargs(self, item):\n return {'image_url': item.sponsor.logo.url}\n\n\n\nclass AtomSponsorFeed(RssSponsorFeed):\n \"\"\"Atom Feed class for sponsor.\"\"\"\n\n feed_type = Atom1Feed\n subtitle = RssSponsorFeed.description\n\n\nclass JSONSponsorFeed(RssSponsorFeed):\n \"\"\"JSON Feed class for sponsor.\"\"\"\n\n feed_type = JSONFeed\n", "path": "django_project/changes/feeds/sponsor.py"}]} | 1,867 | 702 |
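
The description builder in the patch above is plain string formatting: it prefixes the logo path with the site's scheme and host (derived from `request.build_absolute_uri` in the patched `get_object`) and adds level-derived CSS classes. A standalone sketch of just that formatting step, decoupled from the Django models, is shown here; the argument names are illustrative.

```python
def render_sponsor_description(domain, logo_path, level, start, end, currency, amount):
    """Return the HTML snippet for one sponsor item, with an absolute image URL and CSS classes."""
    level_class = str(level).lower()
    return (
        '<div>'
        f'<img class="sponsor_img {level_class}" src="{domain}{logo_path}" width="300px">'
        '</div>'
        f'<p class="sponsor_body {level_class}">'
        f'<span>Sponsorship level: {level}</span><br/>'
        f'<span>Sponsorship period: {start} - {end}</span><br/>'
        f'<span>Amount sponsored: {currency} {amount}</span>'
        '</p>'
    )


print(render_sponsor_description(
    "https://changelog.example.org", "/media/images/projects/logo.png",
    "Gold", "21 June 2017", "21 June 2018", "EUR", "9000.00"))
```
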
gh_patches_debug_7720 | rasdani/github-patches | git_diff | yt-project__yt-3773 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pytest is now a hidden hard dependency
<!--To help us understand and resolve your issue, please fill out the form to
the best of your ability.-->
<!--You can feel free to delete the sections that do not apply.-->
### Bug report
We now import `pytest` directly in `yt/testing.py`, but `pytest` is not yet listed as a hard dependency. This will cause a `ModuleNotFoundError` to be raised after installing yt until you install pytest manually.
I'm opening an issue rather than a PR because I don't know whether people would prefer to put the `pytest` import in a try block or make it a dependency.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `yt/__init__.py`
Content:
```
1 """
2 yt is a toolkit for analyzing and visualizing volumetric data.
3
4 * Website: https://yt-project.org
5 * Documentation: https://yt-project.org/doc
6 * Data hub: https://girder.hub.yt
7 * Contribute: https://github.com/yt-project/yt
8
9 """
10 __version__ = "4.1.dev0"
11
12 import yt.units as units
13 import yt.utilities.physical_constants as physical_constants
14 from yt.data_objects.api import (
15 DatasetSeries,
16 ImageArray,
17 ParticleProfile,
18 Profile1D,
19 Profile2D,
20 Profile3D,
21 add_particle_filter,
22 create_profile,
23 particle_filter,
24 )
25 from yt.fields.api import (
26 DerivedField,
27 FieldDetector,
28 FieldInfoContainer,
29 ValidateDataField,
30 ValidateGridType,
31 ValidateParameter,
32 ValidateProperty,
33 ValidateSpatial,
34 add_field,
35 add_xray_emissivity_field,
36 derived_field,
37 field_plugins,
38 )
39 from yt.frontends.api import _frontend_container
40 from yt.funcs import (
41 enable_plugins,
42 get_memory_usage,
43 get_pbar,
44 get_version_stack,
45 get_yt_version,
46 insert_ipython,
47 is_root,
48 is_sequence,
49 memory_checker,
50 only_on_root,
51 parallel_profile,
52 print_tb,
53 rootonly,
54 toggle_interactivity,
55 )
56 from yt.units import (
57 YTArray,
58 YTQuantity,
59 display_ytarray,
60 loadtxt,
61 savetxt,
62 uconcatenate,
63 ucross,
64 udot,
65 uhstack,
66 uintersect1d,
67 unorm,
68 ustack,
69 uunion1d,
70 uvstack,
71 )
72 from yt.units.unit_object import define_unit # type: ignore
73 from yt.utilities.logger import set_log_level, ytLogger as mylog
74
75 frontends = _frontend_container()
76
77 import yt.visualization.volume_rendering.api as volume_rendering
78 from yt.frontends.stream.api import hexahedral_connectivity
79 from yt.frontends.ytdata.api import save_as_dataset
80 from yt.loaders import (
81 load,
82 load_amr_grids,
83 load_archive,
84 load_hexahedral_mesh,
85 load_octree,
86 load_particles,
87 load_sample,
88 load_simulation,
89 load_uniform_grid,
90 load_unstructured_mesh,
91 )
92 from yt.testing import run_nose
93 from yt.units.unit_systems import UnitSystem, unit_system_registry # type: ignore
94
95 # Import some helpful math utilities
96 from yt.utilities.math_utils import ortho_find, periodic_position, quartiles
97 from yt.utilities.parallel_tools.parallel_analysis_interface import (
98 communication_system,
99 enable_parallelism,
100 parallel_objects,
101 )
102
103 # Now individual component imports from the visualization API
104 from yt.visualization.api import (
105 AxisAlignedProjectionPlot,
106 AxisAlignedSlicePlot,
107 FITSImageData,
108 FITSOffAxisProjection,
109 FITSOffAxisSlice,
110 FITSParticleProjection,
111 FITSProjection,
112 FITSSlice,
113 FixedResolutionBuffer,
114 LineBuffer,
115 LinePlot,
116 OffAxisProjectionPlot,
117 OffAxisSlicePlot,
118 ParticleImageBuffer,
119 ParticlePhasePlot,
120 ParticlePlot,
121 ParticleProjectionPlot,
122 PhasePlot,
123 ProfilePlot,
124 ProjectionPlot,
125 SlicePlot,
126 add_colormap,
127 apply_colormap,
128 make_colormap,
129 plot_2d,
130 scale_image,
131 show_colormaps,
132 write_bitmap,
133 write_image,
134 write_projection,
135 )
136 from yt.visualization.volume_rendering.api import (
137 ColorTransferFunction,
138 TransferFunction,
139 create_scene,
140 off_axis_projection,
141 volume_render,
142 )
143
144 # TransferFunctionHelper, MultiVariateTransferFunction
145 # off_axis_projection
146
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/yt/__init__.py b/yt/__init__.py
--- a/yt/__init__.py
+++ b/yt/__init__.py
@@ -89,7 +89,17 @@
load_uniform_grid,
load_unstructured_mesh,
)
-from yt.testing import run_nose
+
+
+def run_nose(*args, **kwargs):
+ # we hide this function behind a closure so we
+ # don't make pytest a hard dependency for end users
+ # see https://github.com/yt-project/yt/issues/3771
+ from yt.testing import run_nose
+
+ return run_nose(*args, **kwargs)
+
+
from yt.units.unit_systems import UnitSystem, unit_system_registry # type: ignore
# Import some helpful math utilities
| {"golden_diff": "diff --git a/yt/__init__.py b/yt/__init__.py\n--- a/yt/__init__.py\n+++ b/yt/__init__.py\n@@ -89,7 +89,17 @@\n load_uniform_grid,\n load_unstructured_mesh,\n )\n-from yt.testing import run_nose\n+\n+\n+def run_nose(*args, **kwargs):\n+ # we hide this function behind a closure so we\n+ # don't make pytest a hard dependency for end users\n+ # see https://github.com/yt-project/yt/issues/3771\n+ from yt.testing import run_nose\n+\n+ return run_nose(*args, **kwargs)\n+\n+\n from yt.units.unit_systems import UnitSystem, unit_system_registry # type: ignore\n \n # Import some helpful math utilities\n", "issue": "pytest is now a hidden hard dependency\n<!--To help us understand and resolve your issue, please fill out the form to\r\nthe best of your ability.-->\r\n<!--You can feel free to delete the sections that do not apply.-->\r\n\r\n### Bug report\r\n\r\nWe now import `pytest` directly in `yt/testing.py`, but `pytest` is not yet listed as a hard dependency. This will cause a `ModuleNotFoundError` to be raised after installing yt until you install pytest manually.\r\n\r\nI'm opening an issue rather than a PR because I don't know whether people would prefer to put the `pytest` import in a try block or make it a dependency.\n", "before_files": [{"content": "\"\"\"\nyt is a toolkit for analyzing and visualizing volumetric data.\n\n* Website: https://yt-project.org\n* Documentation: https://yt-project.org/doc\n* Data hub: https://girder.hub.yt\n* Contribute: https://github.com/yt-project/yt\n\n\"\"\"\n__version__ = \"4.1.dev0\"\n\nimport yt.units as units\nimport yt.utilities.physical_constants as physical_constants\nfrom yt.data_objects.api import (\n DatasetSeries,\n ImageArray,\n ParticleProfile,\n Profile1D,\n Profile2D,\n Profile3D,\n add_particle_filter,\n create_profile,\n particle_filter,\n)\nfrom yt.fields.api import (\n DerivedField,\n FieldDetector,\n FieldInfoContainer,\n ValidateDataField,\n ValidateGridType,\n ValidateParameter,\n ValidateProperty,\n ValidateSpatial,\n add_field,\n add_xray_emissivity_field,\n derived_field,\n field_plugins,\n)\nfrom yt.frontends.api import _frontend_container\nfrom yt.funcs import (\n enable_plugins,\n get_memory_usage,\n get_pbar,\n get_version_stack,\n get_yt_version,\n insert_ipython,\n is_root,\n is_sequence,\n memory_checker,\n only_on_root,\n parallel_profile,\n print_tb,\n rootonly,\n toggle_interactivity,\n)\nfrom yt.units import (\n YTArray,\n YTQuantity,\n display_ytarray,\n loadtxt,\n savetxt,\n uconcatenate,\n ucross,\n udot,\n uhstack,\n uintersect1d,\n unorm,\n ustack,\n uunion1d,\n uvstack,\n)\nfrom yt.units.unit_object import define_unit # type: ignore\nfrom yt.utilities.logger import set_log_level, ytLogger as mylog\n\nfrontends = _frontend_container()\n\nimport yt.visualization.volume_rendering.api as volume_rendering\nfrom yt.frontends.stream.api import hexahedral_connectivity\nfrom yt.frontends.ytdata.api import save_as_dataset\nfrom yt.loaders import (\n load,\n load_amr_grids,\n load_archive,\n load_hexahedral_mesh,\n load_octree,\n load_particles,\n load_sample,\n load_simulation,\n load_uniform_grid,\n load_unstructured_mesh,\n)\nfrom yt.testing import run_nose\nfrom yt.units.unit_systems import UnitSystem, unit_system_registry # type: ignore\n\n# Import some helpful math utilities\nfrom yt.utilities.math_utils import ortho_find, periodic_position, quartiles\nfrom yt.utilities.parallel_tools.parallel_analysis_interface import (\n communication_system,\n enable_parallelism,\n parallel_objects,\n)\n\n# Now 
individual component imports from the visualization API\nfrom yt.visualization.api import (\n AxisAlignedProjectionPlot,\n AxisAlignedSlicePlot,\n FITSImageData,\n FITSOffAxisProjection,\n FITSOffAxisSlice,\n FITSParticleProjection,\n FITSProjection,\n FITSSlice,\n FixedResolutionBuffer,\n LineBuffer,\n LinePlot,\n OffAxisProjectionPlot,\n OffAxisSlicePlot,\n ParticleImageBuffer,\n ParticlePhasePlot,\n ParticlePlot,\n ParticleProjectionPlot,\n PhasePlot,\n ProfilePlot,\n ProjectionPlot,\n SlicePlot,\n add_colormap,\n apply_colormap,\n make_colormap,\n plot_2d,\n scale_image,\n show_colormaps,\n write_bitmap,\n write_image,\n write_projection,\n)\nfrom yt.visualization.volume_rendering.api import (\n ColorTransferFunction,\n TransferFunction,\n create_scene,\n off_axis_projection,\n volume_render,\n)\n\n# TransferFunctionHelper, MultiVariateTransferFunction\n# off_axis_projection\n", "path": "yt/__init__.py"}], "after_files": [{"content": "\"\"\"\nyt is a toolkit for analyzing and visualizing volumetric data.\n\n* Website: https://yt-project.org\n* Documentation: https://yt-project.org/doc\n* Data hub: https://girder.hub.yt\n* Contribute: https://github.com/yt-project/yt\n\n\"\"\"\n__version__ = \"4.1.dev0\"\n\nimport yt.units as units\nimport yt.utilities.physical_constants as physical_constants\nfrom yt.data_objects.api import (\n DatasetSeries,\n ImageArray,\n ParticleProfile,\n Profile1D,\n Profile2D,\n Profile3D,\n add_particle_filter,\n create_profile,\n particle_filter,\n)\nfrom yt.fields.api import (\n DerivedField,\n FieldDetector,\n FieldInfoContainer,\n ValidateDataField,\n ValidateGridType,\n ValidateParameter,\n ValidateProperty,\n ValidateSpatial,\n add_field,\n add_xray_emissivity_field,\n derived_field,\n field_plugins,\n)\nfrom yt.frontends.api import _frontend_container\nfrom yt.funcs import (\n enable_plugins,\n get_memory_usage,\n get_pbar,\n get_version_stack,\n get_yt_version,\n insert_ipython,\n is_root,\n is_sequence,\n memory_checker,\n only_on_root,\n parallel_profile,\n print_tb,\n rootonly,\n toggle_interactivity,\n)\nfrom yt.units import (\n YTArray,\n YTQuantity,\n display_ytarray,\n loadtxt,\n savetxt,\n uconcatenate,\n ucross,\n udot,\n uhstack,\n uintersect1d,\n unorm,\n ustack,\n uunion1d,\n uvstack,\n)\nfrom yt.units.unit_object import define_unit # type: ignore\nfrom yt.utilities.logger import set_log_level, ytLogger as mylog\n\nfrontends = _frontend_container()\n\nimport yt.visualization.volume_rendering.api as volume_rendering\nfrom yt.frontends.stream.api import hexahedral_connectivity\nfrom yt.frontends.ytdata.api import save_as_dataset\nfrom yt.loaders import (\n load,\n load_amr_grids,\n load_archive,\n load_hexahedral_mesh,\n load_octree,\n load_particles,\n load_sample,\n load_simulation,\n load_uniform_grid,\n load_unstructured_mesh,\n)\n\n\ndef run_nose(*args, **kwargs):\n # we hide this function behind a closure so we\n # don't make pytest a hard dependency for end users\n # see https://github.com/yt-project/yt/issues/3771\n from yt.testing import run_nose\n\n return run_nose(*args, **kwargs)\n\n\nfrom yt.units.unit_systems import UnitSystem, unit_system_registry # type: ignore\n\n# Import some helpful math utilities\nfrom yt.utilities.math_utils import ortho_find, periodic_position, quartiles\nfrom yt.utilities.parallel_tools.parallel_analysis_interface import (\n communication_system,\n enable_parallelism,\n parallel_objects,\n)\n\n# Now individual component imports from the visualization API\nfrom yt.visualization.api import (\n 
AxisAlignedProjectionPlot,\n AxisAlignedSlicePlot,\n FITSImageData,\n FITSOffAxisProjection,\n FITSOffAxisSlice,\n FITSParticleProjection,\n FITSProjection,\n FITSSlice,\n FixedResolutionBuffer,\n LineBuffer,\n LinePlot,\n OffAxisProjectionPlot,\n OffAxisSlicePlot,\n ParticleImageBuffer,\n ParticlePhasePlot,\n ParticlePlot,\n ParticleProjectionPlot,\n PhasePlot,\n ProfilePlot,\n ProjectionPlot,\n SlicePlot,\n add_colormap,\n apply_colormap,\n make_colormap,\n plot_2d,\n scale_image,\n show_colormaps,\n write_bitmap,\n write_image,\n write_projection,\n)\nfrom yt.visualization.volume_rendering.api import (\n ColorTransferFunction,\n TransferFunction,\n create_scene,\n off_axis_projection,\n volume_render,\n)\n\n# TransferFunctionHelper, MultiVariateTransferFunction\n# off_axis_projection\n", "path": "yt/__init__.py"}]} | 1,500 | 180 |
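
The wrapper introduced by the patch above is a general trick for optional dependencies: the optional import happens inside the function body, so merely importing the package never triggers it. A generic sketch of the same idea, using `pytest.main` directly rather than yt's `run_nose` helper, is given below.

```python
def run_tests(*args: str):
    """Run the test suite; the test framework is only required when this is called."""
    # Deferred import: `import mypackage` works even when pytest is not installed,
    # because this line only executes if someone actually calls run_tests().
    import pytest

    return pytest.main(list(args))


# Example (only now would pytest need to be installed):
# run_tests("-q", "tests/")
```
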
gh_patches_debug_19805 | rasdani/github-patches | git_diff | NVIDIA__NVFlare-260 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Plot the results in cifar10 example
Hi, I ran the experiments (run_2 to run_10 in the cifar10 example) following the steps 3.1-3.6 and would like to plot the results and regenerate the figures you show. However, running this command as advised in step 3.3 to view the results does not work:
cat ./workspaces/poc_workspace/server/run_2/cross_site_val/cross_site_val.json
Could you advise please on how to regenerate the results after completing all experiments?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/cifar10/figs/plot_tensorboard_events.py`
Content:
```
1 # Copyright (c) 2021, NVIDIA CORPORATION.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import glob
16 import json
17 import os
18
19 import matplotlib.pyplot as plt
20 import pandas as pd
21 import seaborn as sns
22 import tensorflow as tf
23
24 client_results_root = "./workspaces/secure_workspace/site-1"
25 server_results_root = "./workspaces/secure_workspace/localhost"
26
27 # 4.1 Central vs. FedAvg
28 experiments = {
29 "cifar10_central": {"run": "run_1", "tag": "val_acc_local_model"},
30 "cifar10_fedavg": {"run": "run_2", "tag": "val_acc_global_model"},
31 "cifar10_fedavg_he": {"run": "run_9", "tag": "val_acc_global_model"},
32 }
33
34 # # 4.2 Impact of client data heterogeneity
35 # experiments = {"cifar10_fedavg (alpha=1.0)": {"run": "run_2", "tag": "val_acc_global_model"},
36 # "cifar10_fedavg (alpha=0.5)": {"run": "run_3", "tag": "val_acc_global_model"},
37 # "cifar10_fedavg (alpha=0.3)": {"run": "run_4", "tag": "val_acc_global_model"},
38 # "cifar10_fedavg (alpha=0.1)": {"run": "run_5", "tag": "val_acc_global_model"}}
39 #
40 # # 4.3 FedProx vs. FedOpt vs. SCAFFOLD
41 # experiments = {"cifar10_fedavg": {"run": "run_5", "tag": "val_acc_global_model"},
42 # "cifar10_fedprox": {"run": "run_6", "tag": "val_acc_global_model"},
43 # "cifar10_fedopt": {"run": "run_7", "tag": "val_acc_global_model"},
44 # "cifar10_scaffold": {"run": "run_8", "tag": "val_acc_global_model"}}
45
46 add_cross_site_val = True
47
48
49 def read_eventfile(filepath, tags=["val_acc_global_model"]):
50 data = {}
51 for summary in tf.compat.v1.train.summary_iterator(filepath):
52 for v in summary.summary.value:
53 if v.tag in tags:
54 # print(v.tag, summary.step, v.simple_value)
55 if v.tag in data.keys():
56 data[v.tag].append([summary.step, v.simple_value])
57 else:
58 data[v.tag] = [[summary.step, v.simple_value]]
59 return data
60
61
62 def add_eventdata(data, config, filepath, tag="val_acc_global_model"):
63 event_data = read_eventfile(filepath, tags=[tag])
64
65 assert len(event_data[tag]) > 0, f"No data for key {tag}"
66 # print(event_data)
67 for e in event_data[tag]:
68 # print(e)
69 data["Config"].append(config)
70 data["Step"].append(e[0])
71 data["Accuracy"].append(e[1])
72 print(f"added {len(event_data[tag])} entries for {tag}")
73
74
75 def main():
76 data = {"Config": [], "Step": [], "Accuracy": []}
77
78 if add_cross_site_val:
79 xsite_keys = ["SRV_server", "SRV_server_best"]
80 xsite_data = {"Config": []}
81 for k in xsite_keys:
82 xsite_data.update({k: []})
83 else:
84 xsite_data = None
85 xsite_keys = None
86
87 # add event files
88 for config, exp in experiments.items():
89 eventfile = glob.glob(os.path.join(client_results_root, exp["run"] + "/**/events.*"), recursive=True)
90 assert len(eventfile) == 1, "No unique event file found!"
91 eventfile = eventfile[0]
92 print("adding", eventfile)
93 add_eventdata(data, config, eventfile, tag=exp["tag"])
94
95 if add_cross_site_val:
96 xsite_file = glob.glob(
97 os.path.join(server_results_root, exp["run"] + "/**/cross_site_val.json"), recursive=True
98 )
99 assert len(xsite_file) == 1, "No unique x-site file found!"
100 with open(xsite_file[0], "r") as f:
101 xsite_results = json.load(f)
102
103 xsite_data["Config"].append(config)
104 for k in xsite_keys:
105 xsite_data[k].append(xsite_results["site-1"][k]["val_accuracy"])
106
107 print("Training TB data:")
108 print(pd.DataFrame(data))
109
110 if xsite_data:
111 print("Cross-site val data:")
112 print(pd.DataFrame(xsite_data))
113
114 sns.lineplot(x="Step", y="Accuracy", hue="Config", data=data)
115 plt.show()
116
117
118 if __name__ == "__main__":
119 main()
120
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/cifar10/figs/plot_tensorboard_events.py b/examples/cifar10/figs/plot_tensorboard_events.py
--- a/examples/cifar10/figs/plot_tensorboard_events.py
+++ b/examples/cifar10/figs/plot_tensorboard_events.py
@@ -76,7 +76,7 @@
data = {"Config": [], "Step": [], "Accuracy": []}
if add_cross_site_val:
- xsite_keys = ["SRV_server", "SRV_server_best"]
+ xsite_keys = ["SRV_FL_global_model.pt", "SRV_best_FL_global_model.pt"]
xsite_data = {"Config": []}
for k in xsite_keys:
xsite_data.update({k: []})
@@ -94,7 +94,7 @@
if add_cross_site_val:
xsite_file = glob.glob(
- os.path.join(server_results_root, exp["run"] + "/**/cross_site_val.json"), recursive=True
+ os.path.join(server_results_root, exp["run"] + "/**/cross_val_results.json"), recursive=True
)
assert len(xsite_file) == 1, "No unique x-site file found!"
with open(xsite_file[0], "r") as f:
| {"golden_diff": "diff --git a/examples/cifar10/figs/plot_tensorboard_events.py b/examples/cifar10/figs/plot_tensorboard_events.py\n--- a/examples/cifar10/figs/plot_tensorboard_events.py\n+++ b/examples/cifar10/figs/plot_tensorboard_events.py\n@@ -76,7 +76,7 @@\n data = {\"Config\": [], \"Step\": [], \"Accuracy\": []}\n \n if add_cross_site_val:\n- xsite_keys = [\"SRV_server\", \"SRV_server_best\"]\n+ xsite_keys = [\"SRV_FL_global_model.pt\", \"SRV_best_FL_global_model.pt\"]\n xsite_data = {\"Config\": []}\n for k in xsite_keys:\n xsite_data.update({k: []})\n@@ -94,7 +94,7 @@\n \n if add_cross_site_val:\n xsite_file = glob.glob(\n- os.path.join(server_results_root, exp[\"run\"] + \"/**/cross_site_val.json\"), recursive=True\n+ os.path.join(server_results_root, exp[\"run\"] + \"/**/cross_val_results.json\"), recursive=True\n )\n assert len(xsite_file) == 1, \"No unique x-site file found!\"\n with open(xsite_file[0], \"r\") as f:\n", "issue": "Plot the results in cifar10 example\nHi, I ran the experiments (run_2 to run_10 in the cifar10 example) following the steps 3.1-3.6 and would like to plot the results and regenerate the figures you show. However, running this command as advised in step 3.3 to view the results does not work:\r\n cat ./workspaces/poc_workspace/server/run_2/cross_site_val/cross_site_val.json\r\nCould you advise please on how to regenerate the results after completing all experiments?\r\n\n", "before_files": [{"content": "# Copyright (c) 2021, NVIDIA CORPORATION.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport glob\nimport json\nimport os\n\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport seaborn as sns\nimport tensorflow as tf\n\nclient_results_root = \"./workspaces/secure_workspace/site-1\"\nserver_results_root = \"./workspaces/secure_workspace/localhost\"\n\n# 4.1 Central vs. FedAvg\nexperiments = {\n \"cifar10_central\": {\"run\": \"run_1\", \"tag\": \"val_acc_local_model\"},\n \"cifar10_fedavg\": {\"run\": \"run_2\", \"tag\": \"val_acc_global_model\"},\n \"cifar10_fedavg_he\": {\"run\": \"run_9\", \"tag\": \"val_acc_global_model\"},\n}\n\n# # 4.2 Impact of client data heterogeneity\n# experiments = {\"cifar10_fedavg (alpha=1.0)\": {\"run\": \"run_2\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_fedavg (alpha=0.5)\": {\"run\": \"run_3\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_fedavg (alpha=0.3)\": {\"run\": \"run_4\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_fedavg (alpha=0.1)\": {\"run\": \"run_5\", \"tag\": \"val_acc_global_model\"}}\n#\n# # 4.3 FedProx vs. FedOpt vs. 
SCAFFOLD\n# experiments = {\"cifar10_fedavg\": {\"run\": \"run_5\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_fedprox\": {\"run\": \"run_6\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_fedopt\": {\"run\": \"run_7\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_scaffold\": {\"run\": \"run_8\", \"tag\": \"val_acc_global_model\"}}\n\nadd_cross_site_val = True\n\n\ndef read_eventfile(filepath, tags=[\"val_acc_global_model\"]):\n data = {}\n for summary in tf.compat.v1.train.summary_iterator(filepath):\n for v in summary.summary.value:\n if v.tag in tags:\n # print(v.tag, summary.step, v.simple_value)\n if v.tag in data.keys():\n data[v.tag].append([summary.step, v.simple_value])\n else:\n data[v.tag] = [[summary.step, v.simple_value]]\n return data\n\n\ndef add_eventdata(data, config, filepath, tag=\"val_acc_global_model\"):\n event_data = read_eventfile(filepath, tags=[tag])\n\n assert len(event_data[tag]) > 0, f\"No data for key {tag}\"\n # print(event_data)\n for e in event_data[tag]:\n # print(e)\n data[\"Config\"].append(config)\n data[\"Step\"].append(e[0])\n data[\"Accuracy\"].append(e[1])\n print(f\"added {len(event_data[tag])} entries for {tag}\")\n\n\ndef main():\n data = {\"Config\": [], \"Step\": [], \"Accuracy\": []}\n\n if add_cross_site_val:\n xsite_keys = [\"SRV_server\", \"SRV_server_best\"]\n xsite_data = {\"Config\": []}\n for k in xsite_keys:\n xsite_data.update({k: []})\n else:\n xsite_data = None\n xsite_keys = None\n\n # add event files\n for config, exp in experiments.items():\n eventfile = glob.glob(os.path.join(client_results_root, exp[\"run\"] + \"/**/events.*\"), recursive=True)\n assert len(eventfile) == 1, \"No unique event file found!\"\n eventfile = eventfile[0]\n print(\"adding\", eventfile)\n add_eventdata(data, config, eventfile, tag=exp[\"tag\"])\n\n if add_cross_site_val:\n xsite_file = glob.glob(\n os.path.join(server_results_root, exp[\"run\"] + \"/**/cross_site_val.json\"), recursive=True\n )\n assert len(xsite_file) == 1, \"No unique x-site file found!\"\n with open(xsite_file[0], \"r\") as f:\n xsite_results = json.load(f)\n\n xsite_data[\"Config\"].append(config)\n for k in xsite_keys:\n xsite_data[k].append(xsite_results[\"site-1\"][k][\"val_accuracy\"])\n\n print(\"Training TB data:\")\n print(pd.DataFrame(data))\n\n if xsite_data:\n print(\"Cross-site val data:\")\n print(pd.DataFrame(xsite_data))\n\n sns.lineplot(x=\"Step\", y=\"Accuracy\", hue=\"Config\", data=data)\n plt.show()\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "examples/cifar10/figs/plot_tensorboard_events.py"}], "after_files": [{"content": "# Copyright (c) 2021, NVIDIA CORPORATION.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport glob\nimport json\nimport os\n\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport seaborn as sns\nimport tensorflow as tf\n\nclient_results_root = \"./workspaces/secure_workspace/site-1\"\nserver_results_root = \"./workspaces/secure_workspace/localhost\"\n\n# 4.1 Central vs. 
FedAvg\nexperiments = {\n \"cifar10_central\": {\"run\": \"run_1\", \"tag\": \"val_acc_local_model\"},\n \"cifar10_fedavg\": {\"run\": \"run_2\", \"tag\": \"val_acc_global_model\"},\n \"cifar10_fedavg_he\": {\"run\": \"run_9\", \"tag\": \"val_acc_global_model\"},\n}\n\n# # 4.2 Impact of client data heterogeneity\n# experiments = {\"cifar10_fedavg (alpha=1.0)\": {\"run\": \"run_2\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_fedavg (alpha=0.5)\": {\"run\": \"run_3\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_fedavg (alpha=0.3)\": {\"run\": \"run_4\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_fedavg (alpha=0.1)\": {\"run\": \"run_5\", \"tag\": \"val_acc_global_model\"}}\n#\n# # 4.3 FedProx vs. FedOpt vs. SCAFFOLD\n# experiments = {\"cifar10_fedavg\": {\"run\": \"run_5\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_fedprox\": {\"run\": \"run_6\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_fedopt\": {\"run\": \"run_7\", \"tag\": \"val_acc_global_model\"},\n# \"cifar10_scaffold\": {\"run\": \"run_8\", \"tag\": \"val_acc_global_model\"}}\n\nadd_cross_site_val = True\n\n\ndef read_eventfile(filepath, tags=[\"val_acc_global_model\"]):\n data = {}\n for summary in tf.compat.v1.train.summary_iterator(filepath):\n for v in summary.summary.value:\n if v.tag in tags:\n # print(v.tag, summary.step, v.simple_value)\n if v.tag in data.keys():\n data[v.tag].append([summary.step, v.simple_value])\n else:\n data[v.tag] = [[summary.step, v.simple_value]]\n return data\n\n\ndef add_eventdata(data, config, filepath, tag=\"val_acc_global_model\"):\n event_data = read_eventfile(filepath, tags=[tag])\n\n assert len(event_data[tag]) > 0, f\"No data for key {tag}\"\n # print(event_data)\n for e in event_data[tag]:\n # print(e)\n data[\"Config\"].append(config)\n data[\"Step\"].append(e[0])\n data[\"Accuracy\"].append(e[1])\n print(f\"added {len(event_data[tag])} entries for {tag}\")\n\n\ndef main():\n data = {\"Config\": [], \"Step\": [], \"Accuracy\": []}\n\n if add_cross_site_val:\n xsite_keys = [\"SRV_FL_global_model.pt\", \"SRV_best_FL_global_model.pt\"]\n xsite_data = {\"Config\": []}\n for k in xsite_keys:\n xsite_data.update({k: []})\n else:\n xsite_data = None\n xsite_keys = None\n\n # add event files\n for config, exp in experiments.items():\n eventfile = glob.glob(os.path.join(client_results_root, exp[\"run\"] + \"/**/events.*\"), recursive=True)\n assert len(eventfile) == 1, \"No unique event file found!\"\n eventfile = eventfile[0]\n print(\"adding\", eventfile)\n add_eventdata(data, config, eventfile, tag=exp[\"tag\"])\n\n if add_cross_site_val:\n xsite_file = glob.glob(\n os.path.join(server_results_root, exp[\"run\"] + \"/**/cross_val_results.json\"), recursive=True\n )\n assert len(xsite_file) == 1, \"No unique x-site file found!\"\n with open(xsite_file[0], \"r\") as f:\n xsite_results = json.load(f)\n\n xsite_data[\"Config\"].append(config)\n for k in xsite_keys:\n xsite_data[k].append(xsite_results[\"site-1\"][k][\"val_accuracy\"])\n\n print(\"Training TB data:\")\n print(pd.DataFrame(data))\n\n if xsite_data:\n print(\"Cross-site val data:\")\n print(pd.DataFrame(xsite_data))\n\n sns.lineplot(x=\"Step\", y=\"Accuracy\", hue=\"Config\", data=data)\n plt.show()\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "examples/cifar10/figs/plot_tensorboard_events.py"}]} | 1,823 | 286 |
gh_patches_debug_28217 | rasdani/github-patches | git_diff | Mailu__Mailu-1902 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Make smtp_tls_policy_maps easily configurable
Hi Team,
It would be great to have the following options configurable from mailu.env, as they are required in many cases
when SMTP RELAY from an ISP is in use:
smtp_tls_security_level
smtp_tls_wrappermode
(PR is following)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/postfix/start.py`
Content:
```
1 #!/usr/bin/python3
2
3 import os
4 import glob
5 import shutil
6 import multiprocessing
7 import logging as log
8 import sys
9
10 from podop import run_server
11 from pwd import getpwnam
12 from socrate import system, conf
13
14 log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING"))
15
16 def start_podop():
17 os.setuid(getpwnam('postfix').pw_uid)
18 os.mkdir('/dev/shm/postfix',mode=0o700)
19 url = "http://" + os.environ["ADMIN_ADDRESS"] + "/internal/postfix/"
20 # TODO: Remove verbosity setting from Podop?
21 run_server(0, "postfix", "/tmp/podop.socket", [
22 ("transport", "url", url + "transport/§"),
23 ("alias", "url", url + "alias/§"),
24 ("domain", "url", url + "domain/§"),
25 ("mailbox", "url", url + "mailbox/§"),
26 ("recipientmap", "url", url + "recipient/map/§"),
27 ("sendermap", "url", url + "sender/map/§"),
28 ("senderaccess", "url", url + "sender/access/§"),
29 ("senderlogin", "url", url + "sender/login/§"),
30 ("senderrate", "url", url + "sender/rate/§")
31 ])
32
33 def is_valid_postconf_line(line):
34 return not line.startswith("#") \
35 and not line == ''
36
37 # Actual startup script
38 os.environ["FRONT_ADDRESS"] = system.get_host_address_from_environment("FRONT", "front")
39 os.environ["ADMIN_ADDRESS"] = system.get_host_address_from_environment("ADMIN", "admin")
40 os.environ["ANTISPAM_MILTER_ADDRESS"] = system.get_host_address_from_environment("ANTISPAM_MILTER", "antispam:11332")
41 os.environ["LMTP_ADDRESS"] = system.get_host_address_from_environment("LMTP", "imap:2525")
42 os.environ["OUTCLEAN"] = os.environ["HOSTNAMES"].split(",")[0]
43 try:
44 _to_lookup = os.environ["OUTCLEAN"]
45 # Ensure we lookup a FQDN: @see #1884
46 if not _to_lookup.endswith('.'):
47 _to_lookup += '.'
48 os.environ["OUTCLEAN_ADDRESS"] = system.resolve_hostname(_to_lookup)
49 except:
50 os.environ["OUTCLEAN_ADDRESS"] = "10.10.10.10"
51
52 for postfix_file in glob.glob("/conf/*.cf"):
53 conf.jinja(postfix_file, os.environ, os.path.join("/etc/postfix", os.path.basename(postfix_file)))
54
55 if os.path.exists("/overrides/postfix.cf"):
56 for line in open("/overrides/postfix.cf").read().strip().split("\n"):
57 if is_valid_postconf_line(line):
58 os.system('postconf -e "{}"'.format(line))
59
60 if os.path.exists("/overrides/postfix.master"):
61 for line in open("/overrides/postfix.master").read().strip().split("\n"):
62 if is_valid_postconf_line(line):
63 os.system('postconf -Me "{}"'.format(line))
64
65 for map_file in glob.glob("/overrides/*.map"):
66 destination = os.path.join("/etc/postfix", os.path.basename(map_file))
67 shutil.copyfile(map_file, destination)
68 os.system("postmap {}".format(destination))
69 os.remove(destination)
70
71 if "RELAYUSER" in os.environ:
72 path = "/etc/postfix/sasl_passwd"
73 conf.jinja("/conf/sasl_passwd", os.environ, path)
74 os.system("postmap {}".format(path))
75
76 # Run Podop and Postfix
77 multiprocessing.Process(target=start_podop).start()
78 os.system("/usr/libexec/postfix/post-install meta_directory=/etc/postfix create-missing")
79 # Before starting postfix, we need to check permissions on /queue
80 # in the event that postfix,postdrop id have changed
81 os.system("postfix set-permissions")
82 os.system("postfix start-fg")
83
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/core/postfix/start.py b/core/postfix/start.py
--- a/core/postfix/start.py
+++ b/core/postfix/start.py
@@ -68,6 +68,12 @@
os.system("postmap {}".format(destination))
os.remove(destination)
+if not os.path.exists("/etc/postfix/tls_policy.map.db"):
+ with open("/etc/postfix/tls_policy.map", "w") as f:
+ for domain in ['gmail.com', 'yahoo.com', 'hotmail.com', 'aol.com', 'outlook.com', 'comcast.net', 'icloud.com', 'msn.com', 'hotmail.co.uk', 'live.com', 'yahoo.co.in', 'me.com', 'mail.ru', 'cox.net', 'yahoo.co.uk', 'verizon.net', 'ymail.com', 'hotmail.it', 'kw.com', 'yahoo.com.tw', 'mac.com', 'live.se', 'live.nl', 'yahoo.com.br', 'googlemail.com', 'libero.it', 'web.de', 'allstate.com', 'btinternet.com', 'online.no', 'yahoo.com.au', 'live.dk', 'earthlink.net', 'yahoo.fr', 'yahoo.it', 'gmx.de', 'hotmail.fr', 'shawinc.com', 'yahoo.de', 'moe.edu.sg', 'naver.com', 'bigpond.com', 'statefarm.com', 'remax.net', 'rocketmail.com', 'live.no', 'yahoo.ca', 'bigpond.net.au', 'hotmail.se', 'gmx.at', 'live.co.uk', 'mail.com', 'yahoo.in', 'yandex.ru', 'qq.com', 'charter.net', 'indeedemail.com', 'alice.it', 'hotmail.de', 'bluewin.ch', 'optonline.net', 'wp.pl', 'yahoo.es', 'hotmail.no', 'pindotmedia.com', 'orange.fr', 'live.it', 'yahoo.co.id', 'yahoo.no', 'hotmail.es', 'morganstanley.com', 'wellsfargo.com', 'wanadoo.fr', 'facebook.com', 'yahoo.se', 'fema.dhs.gov', 'rogers.com', 'yahoo.com.hk', 'live.com.au', 'nic.in', 'nab.com.au', 'ubs.com', 'shaw.ca', 'umich.edu', 'westpac.com.au', 'yahoo.com.mx', 'yahoo.com.sg', 'farmersagent.com', 'yahoo.dk', 'dhs.gov']:
+ f.write(f'{domain}\tsecure\n')
+ os.system("postmap /etc/postfix/tls_policy.map")
+
if "RELAYUSER" in os.environ:
path = "/etc/postfix/sasl_passwd"
conf.jinja("/conf/sasl_passwd", os.environ, path)
| {"golden_diff": "diff --git a/core/postfix/start.py b/core/postfix/start.py\n--- a/core/postfix/start.py\n+++ b/core/postfix/start.py\n@@ -68,6 +68,12 @@\n os.system(\"postmap {}\".format(destination))\n os.remove(destination)\n \n+if not os.path.exists(\"/etc/postfix/tls_policy.map.db\"):\n+ with open(\"/etc/postfix/tls_policy.map\", \"w\") as f:\n+ for domain in ['gmail.com', 'yahoo.com', 'hotmail.com', 'aol.com', 'outlook.com', 'comcast.net', 'icloud.com', 'msn.com', 'hotmail.co.uk', 'live.com', 'yahoo.co.in', 'me.com', 'mail.ru', 'cox.net', 'yahoo.co.uk', 'verizon.net', 'ymail.com', 'hotmail.it', 'kw.com', 'yahoo.com.tw', 'mac.com', 'live.se', 'live.nl', 'yahoo.com.br', 'googlemail.com', 'libero.it', 'web.de', 'allstate.com', 'btinternet.com', 'online.no', 'yahoo.com.au', 'live.dk', 'earthlink.net', 'yahoo.fr', 'yahoo.it', 'gmx.de', 'hotmail.fr', 'shawinc.com', 'yahoo.de', 'moe.edu.sg', 'naver.com', 'bigpond.com', 'statefarm.com', 'remax.net', 'rocketmail.com', 'live.no', 'yahoo.ca', 'bigpond.net.au', 'hotmail.se', 'gmx.at', 'live.co.uk', 'mail.com', 'yahoo.in', 'yandex.ru', 'qq.com', 'charter.net', 'indeedemail.com', 'alice.it', 'hotmail.de', 'bluewin.ch', 'optonline.net', 'wp.pl', 'yahoo.es', 'hotmail.no', 'pindotmedia.com', 'orange.fr', 'live.it', 'yahoo.co.id', 'yahoo.no', 'hotmail.es', 'morganstanley.com', 'wellsfargo.com', 'wanadoo.fr', 'facebook.com', 'yahoo.se', 'fema.dhs.gov', 'rogers.com', 'yahoo.com.hk', 'live.com.au', 'nic.in', 'nab.com.au', 'ubs.com', 'shaw.ca', 'umich.edu', 'westpac.com.au', 'yahoo.com.mx', 'yahoo.com.sg', 'farmersagent.com', 'yahoo.dk', 'dhs.gov']:\n+ f.write(f'{domain}\\tsecure\\n')\n+ os.system(\"postmap /etc/postfix/tls_policy.map\")\n+\n if \"RELAYUSER\" in os.environ:\n path = \"/etc/postfix/sasl_passwd\"\n conf.jinja(\"/conf/sasl_passwd\", os.environ, path)\n", "issue": "Make smtp_tls_policy_maps easily configurable\nHi Team,\r\n\r\nIt will be great to have following options configurable from mailu.env as they are required in many cases\r\nwhen SMTP RELAY from an ISP is in use:\r\nsmtp_tls_security_level\r\nsmtp_tls_wrappermode\r\n(PR is following)\n", "before_files": [{"content": "#!/usr/bin/python3\n\nimport os\nimport glob\nimport shutil\nimport multiprocessing\nimport logging as log\nimport sys\n\nfrom podop import run_server\nfrom pwd import getpwnam\nfrom socrate import system, conf\n\nlog.basicConfig(stream=sys.stderr, level=os.environ.get(\"LOG_LEVEL\", \"WARNING\"))\n\ndef start_podop():\n os.setuid(getpwnam('postfix').pw_uid)\n os.mkdir('/dev/shm/postfix',mode=0o700)\n url = \"http://\" + os.environ[\"ADMIN_ADDRESS\"] + \"/internal/postfix/\"\n # TODO: Remove verbosity setting from Podop?\n run_server(0, \"postfix\", \"/tmp/podop.socket\", [\n\t\t(\"transport\", \"url\", url + \"transport/\u00a7\"),\n\t\t(\"alias\", \"url\", url + \"alias/\u00a7\"),\n\t\t(\"domain\", \"url\", url + \"domain/\u00a7\"),\n (\"mailbox\", \"url\", url + \"mailbox/\u00a7\"),\n (\"recipientmap\", \"url\", url + \"recipient/map/\u00a7\"),\n (\"sendermap\", \"url\", url + \"sender/map/\u00a7\"),\n (\"senderaccess\", \"url\", url + \"sender/access/\u00a7\"),\n (\"senderlogin\", \"url\", url + \"sender/login/\u00a7\"),\n (\"senderrate\", \"url\", url + \"sender/rate/\u00a7\")\n ])\n\ndef is_valid_postconf_line(line):\n return not line.startswith(\"#\") \\\n and not line == ''\n\n# Actual startup script\nos.environ[\"FRONT_ADDRESS\"] = system.get_host_address_from_environment(\"FRONT\", \"front\")\nos.environ[\"ADMIN_ADDRESS\"] = 
system.get_host_address_from_environment(\"ADMIN\", \"admin\")\nos.environ[\"ANTISPAM_MILTER_ADDRESS\"] = system.get_host_address_from_environment(\"ANTISPAM_MILTER\", \"antispam:11332\")\nos.environ[\"LMTP_ADDRESS\"] = system.get_host_address_from_environment(\"LMTP\", \"imap:2525\")\nos.environ[\"OUTCLEAN\"] = os.environ[\"HOSTNAMES\"].split(\",\")[0]\ntry:\n _to_lookup = os.environ[\"OUTCLEAN\"]\n # Ensure we lookup a FQDN: @see #1884\n if not _to_lookup.endswith('.'):\n _to_lookup += '.'\n os.environ[\"OUTCLEAN_ADDRESS\"] = system.resolve_hostname(_to_lookup)\nexcept:\n os.environ[\"OUTCLEAN_ADDRESS\"] = \"10.10.10.10\"\n\nfor postfix_file in glob.glob(\"/conf/*.cf\"):\n conf.jinja(postfix_file, os.environ, os.path.join(\"/etc/postfix\", os.path.basename(postfix_file)))\n\nif os.path.exists(\"/overrides/postfix.cf\"):\n for line in open(\"/overrides/postfix.cf\").read().strip().split(\"\\n\"):\n if is_valid_postconf_line(line):\n os.system('postconf -e \"{}\"'.format(line))\n\nif os.path.exists(\"/overrides/postfix.master\"):\n for line in open(\"/overrides/postfix.master\").read().strip().split(\"\\n\"):\n if is_valid_postconf_line(line):\n os.system('postconf -Me \"{}\"'.format(line))\n\nfor map_file in glob.glob(\"/overrides/*.map\"):\n destination = os.path.join(\"/etc/postfix\", os.path.basename(map_file))\n shutil.copyfile(map_file, destination)\n os.system(\"postmap {}\".format(destination))\n os.remove(destination)\n\nif \"RELAYUSER\" in os.environ:\n path = \"/etc/postfix/sasl_passwd\"\n conf.jinja(\"/conf/sasl_passwd\", os.environ, path)\n os.system(\"postmap {}\".format(path))\n\n# Run Podop and Postfix\nmultiprocessing.Process(target=start_podop).start()\nos.system(\"/usr/libexec/postfix/post-install meta_directory=/etc/postfix create-missing\")\n# Before starting postfix, we need to check permissions on /queue\n# in the event that postfix,postdrop id have changed\nos.system(\"postfix set-permissions\")\nos.system(\"postfix start-fg\")\n", "path": "core/postfix/start.py"}], "after_files": [{"content": "#!/usr/bin/python3\n\nimport os\nimport glob\nimport shutil\nimport multiprocessing\nimport logging as log\nimport sys\n\nfrom podop import run_server\nfrom pwd import getpwnam\nfrom socrate import system, conf\n\nlog.basicConfig(stream=sys.stderr, level=os.environ.get(\"LOG_LEVEL\", \"WARNING\"))\n\ndef start_podop():\n os.setuid(getpwnam('postfix').pw_uid)\n os.mkdir('/dev/shm/postfix',mode=0o700)\n url = \"http://\" + os.environ[\"ADMIN_ADDRESS\"] + \"/internal/postfix/\"\n # TODO: Remove verbosity setting from Podop?\n run_server(0, \"postfix\", \"/tmp/podop.socket\", [\n\t\t(\"transport\", \"url\", url + \"transport/\u00a7\"),\n\t\t(\"alias\", \"url\", url + \"alias/\u00a7\"),\n\t\t(\"domain\", \"url\", url + \"domain/\u00a7\"),\n (\"mailbox\", \"url\", url + \"mailbox/\u00a7\"),\n (\"recipientmap\", \"url\", url + \"recipient/map/\u00a7\"),\n (\"sendermap\", \"url\", url + \"sender/map/\u00a7\"),\n (\"senderaccess\", \"url\", url + \"sender/access/\u00a7\"),\n (\"senderlogin\", \"url\", url + \"sender/login/\u00a7\"),\n (\"senderrate\", \"url\", url + \"sender/rate/\u00a7\")\n ])\n\ndef is_valid_postconf_line(line):\n return not line.startswith(\"#\") \\\n and not line == ''\n\n# Actual startup script\nos.environ[\"FRONT_ADDRESS\"] = system.get_host_address_from_environment(\"FRONT\", \"front\")\nos.environ[\"ADMIN_ADDRESS\"] = system.get_host_address_from_environment(\"ADMIN\", \"admin\")\nos.environ[\"ANTISPAM_MILTER_ADDRESS\"] = 
system.get_host_address_from_environment(\"ANTISPAM_MILTER\", \"antispam:11332\")\nos.environ[\"LMTP_ADDRESS\"] = system.get_host_address_from_environment(\"LMTP\", \"imap:2525\")\nos.environ[\"OUTCLEAN\"] = os.environ[\"HOSTNAMES\"].split(\",\")[0]\ntry:\n _to_lookup = os.environ[\"OUTCLEAN\"]\n # Ensure we lookup a FQDN: @see #1884\n if not _to_lookup.endswith('.'):\n _to_lookup += '.'\n os.environ[\"OUTCLEAN_ADDRESS\"] = system.resolve_hostname(_to_lookup)\nexcept:\n os.environ[\"OUTCLEAN_ADDRESS\"] = \"10.10.10.10\"\n\nfor postfix_file in glob.glob(\"/conf/*.cf\"):\n conf.jinja(postfix_file, os.environ, os.path.join(\"/etc/postfix\", os.path.basename(postfix_file)))\n\nif os.path.exists(\"/overrides/postfix.cf\"):\n for line in open(\"/overrides/postfix.cf\").read().strip().split(\"\\n\"):\n if is_valid_postconf_line(line):\n os.system('postconf -e \"{}\"'.format(line))\n\nif os.path.exists(\"/overrides/postfix.master\"):\n for line in open(\"/overrides/postfix.master\").read().strip().split(\"\\n\"):\n if is_valid_postconf_line(line):\n os.system('postconf -Me \"{}\"'.format(line))\n\nfor map_file in glob.glob(\"/overrides/*.map\"):\n destination = os.path.join(\"/etc/postfix\", os.path.basename(map_file))\n shutil.copyfile(map_file, destination)\n os.system(\"postmap {}\".format(destination))\n os.remove(destination)\n\nif not os.path.exists(\"/etc/postfix/tls_policy.map.db\"):\n with open(\"/etc/postfix/tls_policy.map\", \"w\") as f:\n for domain in ['gmail.com', 'yahoo.com', 'hotmail.com', 'aol.com', 'outlook.com', 'comcast.net', 'icloud.com', 'msn.com', 'hotmail.co.uk', 'live.com', 'yahoo.co.in', 'me.com', 'mail.ru', 'cox.net', 'yahoo.co.uk', 'verizon.net', 'ymail.com', 'hotmail.it', 'kw.com', 'yahoo.com.tw', 'mac.com', 'live.se', 'live.nl', 'yahoo.com.br', 'googlemail.com', 'libero.it', 'web.de', 'allstate.com', 'btinternet.com', 'online.no', 'yahoo.com.au', 'live.dk', 'earthlink.net', 'yahoo.fr', 'yahoo.it', 'gmx.de', 'hotmail.fr', 'shawinc.com', 'yahoo.de', 'moe.edu.sg', 'naver.com', 'bigpond.com', 'statefarm.com', 'remax.net', 'rocketmail.com', 'live.no', 'yahoo.ca', 'bigpond.net.au', 'hotmail.se', 'gmx.at', 'live.co.uk', 'mail.com', 'yahoo.in', 'yandex.ru', 'qq.com', 'charter.net', 'indeedemail.com', 'alice.it', 'hotmail.de', 'bluewin.ch', 'optonline.net', 'wp.pl', 'yahoo.es', 'hotmail.no', 'pindotmedia.com', 'orange.fr', 'live.it', 'yahoo.co.id', 'yahoo.no', 'hotmail.es', 'morganstanley.com', 'wellsfargo.com', 'wanadoo.fr', 'facebook.com', 'yahoo.se', 'fema.dhs.gov', 'rogers.com', 'yahoo.com.hk', 'live.com.au', 'nic.in', 'nab.com.au', 'ubs.com', 'shaw.ca', 'umich.edu', 'westpac.com.au', 'yahoo.com.mx', 'yahoo.com.sg', 'farmersagent.com', 'yahoo.dk', 'dhs.gov']:\n f.write(f'{domain}\\tsecure\\n')\n os.system(\"postmap /etc/postfix/tls_policy.map\")\n\nif \"RELAYUSER\" in os.environ:\n path = \"/etc/postfix/sasl_passwd\"\n conf.jinja(\"/conf/sasl_passwd\", os.environ, path)\n os.system(\"postmap {}\".format(path))\n\n# Run Podop and Postfix\nmultiprocessing.Process(target=start_podop).start()\nos.system(\"/usr/libexec/postfix/post-install meta_directory=/etc/postfix create-missing\")\n# Before starting postfix, we need to check permissions on /queue\n# in the event that postfix,postdrop id have changed\nos.system(\"postfix set-permissions\")\nos.system(\"postfix start-fg\")\n", "path": "core/postfix/start.py"}]} | 1,342 | 590 |
gh_patches_debug_7997 | rasdani/github-patches | git_diff | searx__searx-332 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bing news engine broken
There is no result when I'm using the bang "!bing_news".
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `searx/engines/bing_news.py`
Content:
```
1 """
2 Bing (News)
3
4 @website https://www.bing.com/news
5 @provide-api yes (http://datamarket.azure.com/dataset/bing/search),
6 max. 5000 query/month
7
8 @using-api no (because of query limit)
9 @results HTML (using search portal)
10 @stable no (HTML can change)
11 @parse url, title, content, publishedDate
12 """
13
14 from urllib import urlencode
15 from cgi import escape
16 from lxml import html
17 from datetime import datetime, timedelta
18 from dateutil import parser
19 import re
20 from searx.engines.xpath import extract_text
21
22 # engine dependent config
23 categories = ['news']
24 paging = True
25 language_support = True
26
27 # search-url
28 base_url = 'https://www.bing.com/'
29 search_string = 'news/search?{query}&first={offset}'
30
31
32 # do search-request
33 def request(query, params):
34 offset = (params['pageno'] - 1) * 10 + 1
35
36 if params['language'] == 'all':
37 language = 'en-US'
38 else:
39 language = params['language'].replace('_', '-')
40
41 search_path = search_string.format(
42 query=urlencode({'q': query, 'setmkt': language}),
43 offset=offset)
44
45 params['cookies']['_FP'] = "ui=en-US"
46
47 params['url'] = base_url + search_path
48
49 return params
50
51
52 # get response from search-request
53 def response(resp):
54 results = []
55
56 dom = html.fromstring(resp.content)
57
58 # parse results
59 for result in dom.xpath('//div[@class="sn_r"]'):
60 link = result.xpath('.//div[@class="newstitle"]/a')[0]
61 url = link.attrib.get('href')
62 title = extract_text(link)
63 contentXPath = result.xpath('.//div[@class="sn_txt"]/div//span[@class="sn_snip"]')
64 content = escape(extract_text(contentXPath))
65
66 # parse publishedDate
67 publishedDateXPath = result.xpath('.//div[@class="sn_txt"]/div'
68 '//span[contains(@class,"sn_ST")]'
69 '//span[contains(@class,"sn_tm")]')
70
71 publishedDate = escape(extract_text(publishedDateXPath))
72
73 if re.match("^[0-9]+ minute(s|) ago$", publishedDate):
74 timeNumbers = re.findall(r'\d+', publishedDate)
75 publishedDate = datetime.now() - timedelta(minutes=int(timeNumbers[0]))
76 elif re.match("^[0-9]+ hour(s|) ago$", publishedDate):
77 timeNumbers = re.findall(r'\d+', publishedDate)
78 publishedDate = datetime.now() - timedelta(hours=int(timeNumbers[0]))
79 elif re.match("^[0-9]+ hour(s|), [0-9]+ minute(s|) ago$", publishedDate):
80 timeNumbers = re.findall(r'\d+', publishedDate)
81 publishedDate = datetime.now()\
82 - timedelta(hours=int(timeNumbers[0]))\
83 - timedelta(minutes=int(timeNumbers[1]))
84 elif re.match("^[0-9]+ day(s|) ago$", publishedDate):
85 timeNumbers = re.findall(r'\d+', publishedDate)
86 publishedDate = datetime.now() - timedelta(days=int(timeNumbers[0]))
87 else:
88 try:
89 publishedDate = parser.parse(publishedDate, dayfirst=False)
90 except TypeError:
91 publishedDate = datetime.now()
92 except ValueError:
93 publishedDate = datetime.now()
94
95 # append result
96 results.append({'url': url,
97 'title': title,
98 'publishedDate': publishedDate,
99 'content': content})
100
101 # return results
102 return results
103
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/searx/engines/bing_news.py b/searx/engines/bing_news.py
--- a/searx/engines/bing_news.py
+++ b/searx/engines/bing_news.py
@@ -65,7 +65,7 @@
# parse publishedDate
publishedDateXPath = result.xpath('.//div[@class="sn_txt"]/div'
- '//span[contains(@class,"sn_ST")]'
+ '//div[contains(@class,"sn_ST")]'
'//span[contains(@class,"sn_tm")]')
publishedDate = escape(extract_text(publishedDateXPath))
| {"golden_diff": "diff --git a/searx/engines/bing_news.py b/searx/engines/bing_news.py\n--- a/searx/engines/bing_news.py\n+++ b/searx/engines/bing_news.py\n@@ -65,7 +65,7 @@\n \n # parse publishedDate\n publishedDateXPath = result.xpath('.//div[@class=\"sn_txt\"]/div'\n- '//span[contains(@class,\"sn_ST\")]'\n+ '//div[contains(@class,\"sn_ST\")]'\n '//span[contains(@class,\"sn_tm\")]')\n \n publishedDate = escape(extract_text(publishedDateXPath))\n", "issue": "Bing news engine broken\nThere is no result when I'm using the bang \"!bing_news\".\n\n", "before_files": [{"content": "\"\"\"\n Bing (News)\n\n @website https://www.bing.com/news\n @provide-api yes (http://datamarket.azure.com/dataset/bing/search),\n max. 5000 query/month\n\n @using-api no (because of query limit)\n @results HTML (using search portal)\n @stable no (HTML can change)\n @parse url, title, content, publishedDate\n\"\"\"\n\nfrom urllib import urlencode\nfrom cgi import escape\nfrom lxml import html\nfrom datetime import datetime, timedelta\nfrom dateutil import parser\nimport re\nfrom searx.engines.xpath import extract_text\n\n# engine dependent config\ncategories = ['news']\npaging = True\nlanguage_support = True\n\n# search-url\nbase_url = 'https://www.bing.com/'\nsearch_string = 'news/search?{query}&first={offset}'\n\n\n# do search-request\ndef request(query, params):\n offset = (params['pageno'] - 1) * 10 + 1\n\n if params['language'] == 'all':\n language = 'en-US'\n else:\n language = params['language'].replace('_', '-')\n\n search_path = search_string.format(\n query=urlencode({'q': query, 'setmkt': language}),\n offset=offset)\n\n params['cookies']['_FP'] = \"ui=en-US\"\n\n params['url'] = base_url + search_path\n\n return params\n\n\n# get response from search-request\ndef response(resp):\n results = []\n\n dom = html.fromstring(resp.content)\n\n # parse results\n for result in dom.xpath('//div[@class=\"sn_r\"]'):\n link = result.xpath('.//div[@class=\"newstitle\"]/a')[0]\n url = link.attrib.get('href')\n title = extract_text(link)\n contentXPath = result.xpath('.//div[@class=\"sn_txt\"]/div//span[@class=\"sn_snip\"]')\n content = escape(extract_text(contentXPath))\n\n # parse publishedDate\n publishedDateXPath = result.xpath('.//div[@class=\"sn_txt\"]/div'\n '//span[contains(@class,\"sn_ST\")]'\n '//span[contains(@class,\"sn_tm\")]')\n\n publishedDate = escape(extract_text(publishedDateXPath))\n\n if re.match(\"^[0-9]+ minute(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now() - timedelta(minutes=int(timeNumbers[0]))\n elif re.match(\"^[0-9]+ hour(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now() - timedelta(hours=int(timeNumbers[0]))\n elif re.match(\"^[0-9]+ hour(s|), [0-9]+ minute(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now()\\\n - timedelta(hours=int(timeNumbers[0]))\\\n - timedelta(minutes=int(timeNumbers[1]))\n elif re.match(\"^[0-9]+ day(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now() - timedelta(days=int(timeNumbers[0]))\n else:\n try:\n publishedDate = parser.parse(publishedDate, dayfirst=False)\n except TypeError:\n publishedDate = datetime.now()\n except ValueError:\n publishedDate = datetime.now()\n\n # append result\n results.append({'url': url,\n 'title': title,\n 'publishedDate': publishedDate,\n 'content': content})\n\n # return results\n return 
results\n", "path": "searx/engines/bing_news.py"}], "after_files": [{"content": "\"\"\"\n Bing (News)\n\n @website https://www.bing.com/news\n @provide-api yes (http://datamarket.azure.com/dataset/bing/search),\n max. 5000 query/month\n\n @using-api no (because of query limit)\n @results HTML (using search portal)\n @stable no (HTML can change)\n @parse url, title, content, publishedDate\n\"\"\"\n\nfrom urllib import urlencode\nfrom cgi import escape\nfrom lxml import html\nfrom datetime import datetime, timedelta\nfrom dateutil import parser\nimport re\nfrom searx.engines.xpath import extract_text\n\n# engine dependent config\ncategories = ['news']\npaging = True\nlanguage_support = True\n\n# search-url\nbase_url = 'https://www.bing.com/'\nsearch_string = 'news/search?{query}&first={offset}'\n\n\n# do search-request\ndef request(query, params):\n offset = (params['pageno'] - 1) * 10 + 1\n\n if params['language'] == 'all':\n language = 'en-US'\n else:\n language = params['language'].replace('_', '-')\n\n search_path = search_string.format(\n query=urlencode({'q': query, 'setmkt': language}),\n offset=offset)\n\n params['cookies']['_FP'] = \"ui=en-US\"\n\n params['url'] = base_url + search_path\n\n return params\n\n\n# get response from search-request\ndef response(resp):\n results = []\n\n dom = html.fromstring(resp.content)\n\n # parse results\n for result in dom.xpath('//div[@class=\"sn_r\"]'):\n link = result.xpath('.//div[@class=\"newstitle\"]/a')[0]\n url = link.attrib.get('href')\n title = extract_text(link)\n contentXPath = result.xpath('.//div[@class=\"sn_txt\"]/div//span[@class=\"sn_snip\"]')\n content = escape(extract_text(contentXPath))\n\n # parse publishedDate\n publishedDateXPath = result.xpath('.//div[@class=\"sn_txt\"]/div'\n '//div[contains(@class,\"sn_ST\")]'\n '//span[contains(@class,\"sn_tm\")]')\n\n publishedDate = escape(extract_text(publishedDateXPath))\n\n if re.match(\"^[0-9]+ minute(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now() - timedelta(minutes=int(timeNumbers[0]))\n elif re.match(\"^[0-9]+ hour(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now() - timedelta(hours=int(timeNumbers[0]))\n elif re.match(\"^[0-9]+ hour(s|), [0-9]+ minute(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now()\\\n - timedelta(hours=int(timeNumbers[0]))\\\n - timedelta(minutes=int(timeNumbers[1]))\n elif re.match(\"^[0-9]+ day(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now() - timedelta(days=int(timeNumbers[0]))\n else:\n try:\n publishedDate = parser.parse(publishedDate, dayfirst=False)\n except TypeError:\n publishedDate = datetime.now()\n except ValueError:\n publishedDate = datetime.now()\n\n # append result\n results.append({'url': url,\n 'title': title,\n 'publishedDate': publishedDate,\n 'content': content})\n\n # return results\n return results\n", "path": "searx/engines/bing_news.py"}]} | 1,284 | 138 |
gh_patches_debug_23842 | rasdani/github-patches | git_diff | freqtrade__freqtrade-3614 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Hyperopt confusing warning message
## Step 1: Describe your environment
* Operating system: Windows 10
* Python Version: 3.7.5 (`python -V`)
* Branch: freqtrade develop-1aa1fada
## Step 2: Describe the problem:
When hyperopting with the following command: `freqtrade hyperopt -c test_config.json --hyperopt strategy_hyperopt -e 750 --spaces buy sell stoploss trailing --min-trades=20 --timerange=20200111-20200510 -j 2 --no-color` 
I get the following warning messages:

I didn't include the 'normal' buy/sell trends in the hyperopt file, but since I am using the buy/sell trends generated by the hyperopt file, these messages can be confusing. It's taking the 'normal' buy/sell trends from the original strategy file, but it doesn't use it anyway.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `freqtrade/resolvers/hyperopt_resolver.py`
Content:
```
1 # pragma pylint: disable=attribute-defined-outside-init
2
3 """
4 This module load custom hyperopt
5 """
6 import logging
7 from pathlib import Path
8 from typing import Dict
9
10 from freqtrade.constants import DEFAULT_HYPEROPT_LOSS, USERPATH_HYPEROPTS
11 from freqtrade.exceptions import OperationalException
12 from freqtrade.optimize.hyperopt_interface import IHyperOpt
13 from freqtrade.optimize.hyperopt_loss_interface import IHyperOptLoss
14 from freqtrade.resolvers import IResolver
15
16 logger = logging.getLogger(__name__)
17
18
19 class HyperOptResolver(IResolver):
20 """
21 This class contains all the logic to load custom hyperopt class
22 """
23 object_type = IHyperOpt
24 object_type_str = "Hyperopt"
25 user_subdir = USERPATH_HYPEROPTS
26 initial_search_path = Path(__file__).parent.parent.joinpath('optimize').resolve()
27
28 @staticmethod
29 def load_hyperopt(config: Dict) -> IHyperOpt:
30 """
31 Load the custom hyperopt class from config parameter
32 :param config: configuration dictionary
33 """
34 if not config.get('hyperopt'):
35 raise OperationalException("No Hyperopt set. Please use `--hyperopt` to specify "
36 "the Hyperopt class to use.")
37
38 hyperopt_name = config['hyperopt']
39
40 hyperopt = HyperOptResolver.load_object(hyperopt_name, config,
41 kwargs={'config': config},
42 extra_dir=config.get('hyperopt_path'))
43
44 if not hasattr(hyperopt, 'populate_indicators'):
45 logger.warning("Hyperopt class does not provide populate_indicators() method. "
46 "Using populate_indicators from the strategy.")
47 if not hasattr(hyperopt, 'populate_buy_trend'):
48 logger.warning("Hyperopt class does not provide populate_buy_trend() method. "
49 "Using populate_buy_trend from the strategy.")
50 if not hasattr(hyperopt, 'populate_sell_trend'):
51 logger.warning("Hyperopt class does not provide populate_sell_trend() method. "
52 "Using populate_sell_trend from the strategy.")
53 return hyperopt
54
55
56 class HyperOptLossResolver(IResolver):
57 """
58 This class contains all the logic to load custom hyperopt loss class
59 """
60 object_type = IHyperOptLoss
61 object_type_str = "HyperoptLoss"
62 user_subdir = USERPATH_HYPEROPTS
63 initial_search_path = Path(__file__).parent.parent.joinpath('optimize').resolve()
64
65 @staticmethod
66 def load_hyperoptloss(config: Dict) -> IHyperOptLoss:
67 """
68 Load the custom class from config parameter
69 :param config: configuration dictionary
70 """
71
72 # Verify the hyperopt_loss is in the configuration, otherwise fallback to the
73 # default hyperopt loss
74 hyperoptloss_name = config.get('hyperopt_loss') or DEFAULT_HYPEROPT_LOSS
75
76 hyperoptloss = HyperOptLossResolver.load_object(hyperoptloss_name,
77 config, kwargs={},
78 extra_dir=config.get('hyperopt_path'))
79
80 # Assign timeframe to be used in hyperopt
81 hyperoptloss.__class__.ticker_interval = str(config['timeframe'])
82 hyperoptloss.__class__.timeframe = str(config['timeframe'])
83
84 if not hasattr(hyperoptloss, 'hyperopt_loss_function'):
85 raise OperationalException(
86 f"Found HyperoptLoss class {hyperoptloss_name} does not "
87 "implement `hyperopt_loss_function`.")
88 return hyperoptloss
89
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/freqtrade/resolvers/hyperopt_resolver.py b/freqtrade/resolvers/hyperopt_resolver.py
--- a/freqtrade/resolvers/hyperopt_resolver.py
+++ b/freqtrade/resolvers/hyperopt_resolver.py
@@ -42,14 +42,14 @@
extra_dir=config.get('hyperopt_path'))
if not hasattr(hyperopt, 'populate_indicators'):
- logger.warning("Hyperopt class does not provide populate_indicators() method. "
- "Using populate_indicators from the strategy.")
+ logger.info("Hyperopt class does not provide populate_indicators() method. "
+ "Using populate_indicators from the strategy.")
if not hasattr(hyperopt, 'populate_buy_trend'):
- logger.warning("Hyperopt class does not provide populate_buy_trend() method. "
- "Using populate_buy_trend from the strategy.")
+ logger.info("Hyperopt class does not provide populate_buy_trend() method. "
+ "Using populate_buy_trend from the strategy.")
if not hasattr(hyperopt, 'populate_sell_trend'):
- logger.warning("Hyperopt class does not provide populate_sell_trend() method. "
- "Using populate_sell_trend from the strategy.")
+ logger.info("Hyperopt class does not provide populate_sell_trend() method. "
+ "Using populate_sell_trend from the strategy.")
return hyperopt
| {"golden_diff": "diff --git a/freqtrade/resolvers/hyperopt_resolver.py b/freqtrade/resolvers/hyperopt_resolver.py\n--- a/freqtrade/resolvers/hyperopt_resolver.py\n+++ b/freqtrade/resolvers/hyperopt_resolver.py\n@@ -42,14 +42,14 @@\n extra_dir=config.get('hyperopt_path'))\n \n if not hasattr(hyperopt, 'populate_indicators'):\n- logger.warning(\"Hyperopt class does not provide populate_indicators() method. \"\n- \"Using populate_indicators from the strategy.\")\n+ logger.info(\"Hyperopt class does not provide populate_indicators() method. \"\n+ \"Using populate_indicators from the strategy.\")\n if not hasattr(hyperopt, 'populate_buy_trend'):\n- logger.warning(\"Hyperopt class does not provide populate_buy_trend() method. \"\n- \"Using populate_buy_trend from the strategy.\")\n+ logger.info(\"Hyperopt class does not provide populate_buy_trend() method. \"\n+ \"Using populate_buy_trend from the strategy.\")\n if not hasattr(hyperopt, 'populate_sell_trend'):\n- logger.warning(\"Hyperopt class does not provide populate_sell_trend() method. \"\n- \"Using populate_sell_trend from the strategy.\")\n+ logger.info(\"Hyperopt class does not provide populate_sell_trend() method. \"\n+ \"Using populate_sell_trend from the strategy.\")\n return hyperopt\n", "issue": "Hyperopt confusing warning message\n## Step 1: Describe your environment\r\n\r\n * Operating system: Windows 10\r\n * Python Version: 3.7.5 (`python -V`)\r\n * Branch: freqtrade develop-1aa1fada\r\n \r\n## Step 2: Describe the problem:\r\n\r\nWhen hyperopting with the following command: `freqtrade hyperopt -c test_config.json --hyperopt strategy_hyperopt -e 750--spaces buy sell stoploss trailing -\u2013min-trades=20 \u2013-timerange=20200111-20200510 \u2013j 2 --no-color` \r\n\r\nI get the following warning messages:\r\n\r\n\r\nI didn't include the 'normal' buy/sell trends in the hyperopt file, but since I am using the buy/sell trends generated by the hyperopt file, these messages can be confusing. It's taking the 'normal' buy/sell trends from the original strategy file, but it doesn't use it anyway.\r\n\n", "before_files": [{"content": "# pragma pylint: disable=attribute-defined-outside-init\n\n\"\"\"\nThis module load custom hyperopt\n\"\"\"\nimport logging\nfrom pathlib import Path\nfrom typing import Dict\n\nfrom freqtrade.constants import DEFAULT_HYPEROPT_LOSS, USERPATH_HYPEROPTS\nfrom freqtrade.exceptions import OperationalException\nfrom freqtrade.optimize.hyperopt_interface import IHyperOpt\nfrom freqtrade.optimize.hyperopt_loss_interface import IHyperOptLoss\nfrom freqtrade.resolvers import IResolver\n\nlogger = logging.getLogger(__name__)\n\n\nclass HyperOptResolver(IResolver):\n \"\"\"\n This class contains all the logic to load custom hyperopt class\n \"\"\"\n object_type = IHyperOpt\n object_type_str = \"Hyperopt\"\n user_subdir = USERPATH_HYPEROPTS\n initial_search_path = Path(__file__).parent.parent.joinpath('optimize').resolve()\n\n @staticmethod\n def load_hyperopt(config: Dict) -> IHyperOpt:\n \"\"\"\n Load the custom hyperopt class from config parameter\n :param config: configuration dictionary\n \"\"\"\n if not config.get('hyperopt'):\n raise OperationalException(\"No Hyperopt set. 
Please use `--hyperopt` to specify \"\n \"the Hyperopt class to use.\")\n\n hyperopt_name = config['hyperopt']\n\n hyperopt = HyperOptResolver.load_object(hyperopt_name, config,\n kwargs={'config': config},\n extra_dir=config.get('hyperopt_path'))\n\n if not hasattr(hyperopt, 'populate_indicators'):\n logger.warning(\"Hyperopt class does not provide populate_indicators() method. \"\n \"Using populate_indicators from the strategy.\")\n if not hasattr(hyperopt, 'populate_buy_trend'):\n logger.warning(\"Hyperopt class does not provide populate_buy_trend() method. \"\n \"Using populate_buy_trend from the strategy.\")\n if not hasattr(hyperopt, 'populate_sell_trend'):\n logger.warning(\"Hyperopt class does not provide populate_sell_trend() method. \"\n \"Using populate_sell_trend from the strategy.\")\n return hyperopt\n\n\nclass HyperOptLossResolver(IResolver):\n \"\"\"\n This class contains all the logic to load custom hyperopt loss class\n \"\"\"\n object_type = IHyperOptLoss\n object_type_str = \"HyperoptLoss\"\n user_subdir = USERPATH_HYPEROPTS\n initial_search_path = Path(__file__).parent.parent.joinpath('optimize').resolve()\n\n @staticmethod\n def load_hyperoptloss(config: Dict) -> IHyperOptLoss:\n \"\"\"\n Load the custom class from config parameter\n :param config: configuration dictionary\n \"\"\"\n\n # Verify the hyperopt_loss is in the configuration, otherwise fallback to the\n # default hyperopt loss\n hyperoptloss_name = config.get('hyperopt_loss') or DEFAULT_HYPEROPT_LOSS\n\n hyperoptloss = HyperOptLossResolver.load_object(hyperoptloss_name,\n config, kwargs={},\n extra_dir=config.get('hyperopt_path'))\n\n # Assign timeframe to be used in hyperopt\n hyperoptloss.__class__.ticker_interval = str(config['timeframe'])\n hyperoptloss.__class__.timeframe = str(config['timeframe'])\n\n if not hasattr(hyperoptloss, 'hyperopt_loss_function'):\n raise OperationalException(\n f\"Found HyperoptLoss class {hyperoptloss_name} does not \"\n \"implement `hyperopt_loss_function`.\")\n return hyperoptloss\n", "path": "freqtrade/resolvers/hyperopt_resolver.py"}], "after_files": [{"content": "# pragma pylint: disable=attribute-defined-outside-init\n\n\"\"\"\nThis module load custom hyperopt\n\"\"\"\nimport logging\nfrom pathlib import Path\nfrom typing import Dict\n\nfrom freqtrade.constants import DEFAULT_HYPEROPT_LOSS, USERPATH_HYPEROPTS\nfrom freqtrade.exceptions import OperationalException\nfrom freqtrade.optimize.hyperopt_interface import IHyperOpt\nfrom freqtrade.optimize.hyperopt_loss_interface import IHyperOptLoss\nfrom freqtrade.resolvers import IResolver\n\nlogger = logging.getLogger(__name__)\n\n\nclass HyperOptResolver(IResolver):\n \"\"\"\n This class contains all the logic to load custom hyperopt class\n \"\"\"\n object_type = IHyperOpt\n object_type_str = \"Hyperopt\"\n user_subdir = USERPATH_HYPEROPTS\n initial_search_path = Path(__file__).parent.parent.joinpath('optimize').resolve()\n\n @staticmethod\n def load_hyperopt(config: Dict) -> IHyperOpt:\n \"\"\"\n Load the custom hyperopt class from config parameter\n :param config: configuration dictionary\n \"\"\"\n if not config.get('hyperopt'):\n raise OperationalException(\"No Hyperopt set. 
Please use `--hyperopt` to specify \"\n \"the Hyperopt class to use.\")\n\n hyperopt_name = config['hyperopt']\n\n hyperopt = HyperOptResolver.load_object(hyperopt_name, config,\n kwargs={'config': config},\n extra_dir=config.get('hyperopt_path'))\n\n if not hasattr(hyperopt, 'populate_indicators'):\n logger.info(\"Hyperopt class does not provide populate_indicators() method. \"\n \"Using populate_indicators from the strategy.\")\n if not hasattr(hyperopt, 'populate_buy_trend'):\n logger.info(\"Hyperopt class does not provide populate_buy_trend() method. \"\n \"Using populate_buy_trend from the strategy.\")\n if not hasattr(hyperopt, 'populate_sell_trend'):\n logger.info(\"Hyperopt class does not provide populate_sell_trend() method. \"\n \"Using populate_sell_trend from the strategy.\")\n return hyperopt\n\n\nclass HyperOptLossResolver(IResolver):\n \"\"\"\n This class contains all the logic to load custom hyperopt loss class\n \"\"\"\n object_type = IHyperOptLoss\n object_type_str = \"HyperoptLoss\"\n user_subdir = USERPATH_HYPEROPTS\n initial_search_path = Path(__file__).parent.parent.joinpath('optimize').resolve()\n\n @staticmethod\n def load_hyperoptloss(config: Dict) -> IHyperOptLoss:\n \"\"\"\n Load the custom class from config parameter\n :param config: configuration dictionary\n \"\"\"\n\n # Verify the hyperopt_loss is in the configuration, otherwise fallback to the\n # default hyperopt loss\n hyperoptloss_name = config.get('hyperopt_loss') or DEFAULT_HYPEROPT_LOSS\n\n hyperoptloss = HyperOptLossResolver.load_object(hyperoptloss_name,\n config, kwargs={},\n extra_dir=config.get('hyperopt_path'))\n\n # Assign timeframe to be used in hyperopt\n hyperoptloss.__class__.ticker_interval = str(config['timeframe'])\n hyperoptloss.__class__.timeframe = str(config['timeframe'])\n\n if not hasattr(hyperoptloss, 'hyperopt_loss_function'):\n raise OperationalException(\n f\"Found HyperoptLoss class {hyperoptloss_name} does not \"\n \"implement `hyperopt_loss_function`.\")\n return hyperoptloss\n", "path": "freqtrade/resolvers/hyperopt_resolver.py"}]} | 1,452 | 305 |
gh_patches_debug_33323 | rasdani/github-patches | git_diff | ytdl-org__youtube-dl-4629 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support for tfou.fr
youtube-dl --verbose 'http://www.tfou.fr/les-mysterieuses-cites-d-or-saison-2/videos/l-expedition-les-mysterieuses-cites-d-or-8302866-739.html'
[debug] System config: []
[debug] User config: []
[debug] Command-line args: ['--verbose', 'http://www.tfou.fr/les-mysterieuses-cites-d-or-saison-2/videos/l-expedition-les-mysterieuses-cites-d-or-8302866-739.html']
[debug] Encodings: locale ANSI_X3.4-1968, fs ANSI_X3.4-1968, out ANSI_X3.4-1968, pref ANSI_X3.4-1968
[debug] youtube-dl version 2015.01.01
[debug] Python version 2.7.6 - Linux-3.13.0-34-generic-x86_64-with-Ubuntu-14.04-trusty
[debug] exe versions: avconv 9.16-6, avprobe 9.16-6, ffmpeg 2.4.2, ffprobe 2.4.2, rtmpdump 2.4
[debug] Proxy map: {}
[generic] l-expedition-les-mysterieuses-cites-d-or-8302866-739: Requesting header
WARNING: Falling back on generic information extractor.
[generic] l-expedition-les-mysterieuses-cites-d-or-8302866-739: Downloading webpage
[generic] l-expedition-les-mysterieuses-cites-d-or-8302866-739: Extracting information
ERROR: Unsupported URL: http://www.tfou.fr/les-mysterieuses-cites-d-or-saison-2/videos/l-expedition-les-mysterieuses-cites-d-or-8302866-739.html
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `youtube_dl/extractor/tf1.py`
Content:
```
1 # coding: utf-8
2 from __future__ import unicode_literals
3
4 import re
5
6 from .common import InfoExtractor
7
8
9 class TF1IE(InfoExtractor):
10 """TF1 uses the wat.tv player."""
11 _VALID_URL = r'http://videos\.tf1\.fr/.*-(?P<id>.*?)\.html'
12 _TEST = {
13 'url': 'http://videos.tf1.fr/auto-moto/citroen-grand-c4-picasso-2013-presentation-officielle-8062060.html',
14 'info_dict': {
15 'id': '10635995',
16 'ext': 'mp4',
17 'title': 'Citroën Grand C4 Picasso 2013 : présentation officielle',
18 'description': 'Vidéo officielle du nouveau Citroën Grand C4 Picasso, lancé à l\'automne 2013.',
19 },
20 'params': {
21 # Sometimes wat serves the whole file with the --test option
22 'skip_download': True,
23 },
24 }
25
26 def _real_extract(self, url):
27 mobj = re.match(self._VALID_URL, url)
28 video_id = mobj.group('id')
29 webpage = self._download_webpage(url, video_id)
30 embed_url = self._html_search_regex(
31 r'"(https://www.wat.tv/embedframe/.*?)"', webpage, 'embed url')
32 embed_page = self._download_webpage(embed_url, video_id,
33 'Downloading embed player page')
34 wat_id = self._search_regex(r'UVID=(.*?)&', embed_page, 'wat id')
35 wat_info = self._download_json(
36 'http://www.wat.tv/interface/contentv3/%s' % wat_id, video_id)
37 return self.url_result(wat_info['media']['url'], 'Wat')
38
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/youtube_dl/extractor/tf1.py b/youtube_dl/extractor/tf1.py
--- a/youtube_dl/extractor/tf1.py
+++ b/youtube_dl/extractor/tf1.py
@@ -8,8 +8,8 @@
class TF1IE(InfoExtractor):
"""TF1 uses the wat.tv player."""
- _VALID_URL = r'http://videos\.tf1\.fr/.*-(?P<id>.*?)\.html'
- _TEST = {
+ _VALID_URL = r'http://(?:videos\.tf1|www\.tfou)\.fr/.*?-(?P<id>\d+)(?:-\d+)?\.html'
+ _TESTS = {
'url': 'http://videos.tf1.fr/auto-moto/citroen-grand-c4-picasso-2013-presentation-officielle-8062060.html',
'info_dict': {
'id': '10635995',
@@ -21,14 +21,26 @@
# Sometimes wat serves the whole file with the --test option
'skip_download': True,
},
+ }, {
+ 'url': 'http://www.tfou.fr/chuggington/videos/le-grand-mysterioso-chuggington-7085291-739.html',
+ 'info_dict': {
+ 'id': '12043945',
+ 'ext': 'mp4',
+ 'title': 'Le grand Mystérioso - Chuggington',
+ 'description': 'Le grand Mystérioso - Emery rêve qu\'un article lui soit consacré dans le journal.',
+ 'upload_date': '20150103',
+ },
+ 'params': {
+ # Sometimes wat serves the whole file with the --test option
+ 'skip_download': True,
+ },
}
def _real_extract(self, url):
- mobj = re.match(self._VALID_URL, url)
- video_id = mobj.group('id')
+ video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
embed_url = self._html_search_regex(
- r'"(https://www.wat.tv/embedframe/.*?)"', webpage, 'embed url')
+ r'["\'](https?://www.wat.tv/embedframe/.*?)["\']', webpage, 'embed url')
embed_page = self._download_webpage(embed_url, video_id,
'Downloading embed player page')
wat_id = self._search_regex(r'UVID=(.*?)&', embed_page, 'wat id')
| {"golden_diff": "diff --git a/youtube_dl/extractor/tf1.py b/youtube_dl/extractor/tf1.py\n--- a/youtube_dl/extractor/tf1.py\n+++ b/youtube_dl/extractor/tf1.py\n@@ -8,8 +8,8 @@\n \n class TF1IE(InfoExtractor):\n \"\"\"TF1 uses the wat.tv player.\"\"\"\n- _VALID_URL = r'http://videos\\.tf1\\.fr/.*-(?P<id>.*?)\\.html'\n- _TEST = {\n+ _VALID_URL = r'http://(?:videos\\.tf1|www\\.tfou)\\.fr/.*?-(?P<id>\\d+)(?:-\\d+)?\\.html'\n+ _TESTS = {\n 'url': 'http://videos.tf1.fr/auto-moto/citroen-grand-c4-picasso-2013-presentation-officielle-8062060.html',\n 'info_dict': {\n 'id': '10635995',\n@@ -21,14 +21,26 @@\n # Sometimes wat serves the whole file with the --test option\n 'skip_download': True,\n },\n+ }, {\n+ 'url': 'http://www.tfou.fr/chuggington/videos/le-grand-mysterioso-chuggington-7085291-739.html',\n+ 'info_dict': {\n+ 'id': '12043945',\n+ 'ext': 'mp4',\n+ 'title': 'Le grand Myst\u00e9rioso - Chuggington',\n+ 'description': 'Le grand Myst\u00e9rioso - Emery r\u00eave qu\\'un article lui soit consacr\u00e9 dans le journal.',\n+ 'upload_date': '20150103',\n+ },\n+ 'params': {\n+ # Sometimes wat serves the whole file with the --test option\n+ 'skip_download': True,\n+ },\n }\n \n def _real_extract(self, url):\n- mobj = re.match(self._VALID_URL, url)\n- video_id = mobj.group('id')\n+ video_id = self._match_id(url)\n webpage = self._download_webpage(url, video_id)\n embed_url = self._html_search_regex(\n- r'\"(https://www.wat.tv/embedframe/.*?)\"', webpage, 'embed url')\n+ r'[\"\\'](https?://www.wat.tv/embedframe/.*?)[\"\\']', webpage, 'embed url')\n embed_page = self._download_webpage(embed_url, video_id,\n 'Downloading embed player page')\n wat_id = self._search_regex(r'UVID=(.*?)&', embed_page, 'wat id')\n", "issue": "Support for tfou.fr\n youtube-dl --verbose 'http://www.tfou.fr/les-mysterieuses-cites-d-or-saison-2/videos/l-expedition-les-mysterieuses-cites-d-or-8302866-739.html'\n[debug] System config: []\n[debug] User config: []\n[debug] Command-line args: ['--verbose', 'http://www.tfou.fr/les-mysterieuses-cites-d-or-saison-2/videos/l-expedition-les-mysterieuses-cites-d-or-8302866-739.html']\n[debug] Encodings: locale ANSI_X3.4-1968, fs ANSI_X3.4-1968, out ANSI_X3.4-1968, pref ANSI_X3.4-1968\n[debug] youtube-dl version 2015.01.01\n[debug] Python version 2.7.6 - Linux-3.13.0-34-generic-x86_64-with-Ubuntu-14.04-trusty\n[debug] exe versions: avconv 9.16-6, avprobe 9.16-6, ffmpeg 2.4.2, ffprobe 2.4.2, rtmpdump 2.4\n[debug] Proxy map: {}\n[generic] l-expedition-les-mysterieuses-cites-d-or-8302866-739: Requesting header\nWARNING: Falling back on generic information extractor.\n[generic] l-expedition-les-mysterieuses-cites-d-or-8302866-739: Downloading webpage\n[generic] l-expedition-les-mysterieuses-cites-d-or-8302866-739: Extracting information\nERROR: Unsupported URL: http://www.tfou.fr/les-mysterieuses-cites-d-or-saison-2/videos/l-expedition-les-mysterieuses-cites-d-or-8302866-739.html\n\n", "before_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nimport re\n\nfrom .common import InfoExtractor\n\n\nclass TF1IE(InfoExtractor):\n \"\"\"TF1 uses the wat.tv player.\"\"\"\n _VALID_URL = r'http://videos\\.tf1\\.fr/.*-(?P<id>.*?)\\.html'\n _TEST = {\n 'url': 'http://videos.tf1.fr/auto-moto/citroen-grand-c4-picasso-2013-presentation-officielle-8062060.html',\n 'info_dict': {\n 'id': '10635995',\n 'ext': 'mp4',\n 'title': 'Citro\u00ebn Grand C4 Picasso 2013 : pr\u00e9sentation officielle',\n 'description': 'Vid\u00e9o officielle du nouveau Citro\u00ebn Grand C4 Picasso, 
lanc\u00e9 \u00e0 l\\'automne 2013.',\n },\n 'params': {\n # Sometimes wat serves the whole file with the --test option\n 'skip_download': True,\n },\n }\n\n def _real_extract(self, url):\n mobj = re.match(self._VALID_URL, url)\n video_id = mobj.group('id')\n webpage = self._download_webpage(url, video_id)\n embed_url = self._html_search_regex(\n r'\"(https://www.wat.tv/embedframe/.*?)\"', webpage, 'embed url')\n embed_page = self._download_webpage(embed_url, video_id,\n 'Downloading embed player page')\n wat_id = self._search_regex(r'UVID=(.*?)&', embed_page, 'wat id')\n wat_info = self._download_json(\n 'http://www.wat.tv/interface/contentv3/%s' % wat_id, video_id)\n return self.url_result(wat_info['media']['url'], 'Wat')\n", "path": "youtube_dl/extractor/tf1.py"}], "after_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nimport re\n\nfrom .common import InfoExtractor\n\n\nclass TF1IE(InfoExtractor):\n \"\"\"TF1 uses the wat.tv player.\"\"\"\n _VALID_URL = r'http://(?:videos\\.tf1|www\\.tfou)\\.fr/.*?-(?P<id>\\d+)(?:-\\d+)?\\.html'\n _TESTS = {\n 'url': 'http://videos.tf1.fr/auto-moto/citroen-grand-c4-picasso-2013-presentation-officielle-8062060.html',\n 'info_dict': {\n 'id': '10635995',\n 'ext': 'mp4',\n 'title': 'Citro\u00ebn Grand C4 Picasso 2013 : pr\u00e9sentation officielle',\n 'description': 'Vid\u00e9o officielle du nouveau Citro\u00ebn Grand C4 Picasso, lanc\u00e9 \u00e0 l\\'automne 2013.',\n },\n 'params': {\n # Sometimes wat serves the whole file with the --test option\n 'skip_download': True,\n },\n }, {\n 'url': 'http://www.tfou.fr/chuggington/videos/le-grand-mysterioso-chuggington-7085291-739.html',\n 'info_dict': {\n 'id': '12043945',\n 'ext': 'mp4',\n 'title': 'Le grand Myst\u00e9rioso - Chuggington',\n 'description': 'Le grand Myst\u00e9rioso - Emery r\u00eave qu\\'un article lui soit consacr\u00e9 dans le journal.',\n 'upload_date': '20150103',\n },\n 'params': {\n # Sometimes wat serves the whole file with the --test option\n 'skip_download': True,\n },\n }\n\n def _real_extract(self, url):\n video_id = self._match_id(url)\n webpage = self._download_webpage(url, video_id)\n embed_url = self._html_search_regex(\n r'[\"\\'](https?://www.wat.tv/embedframe/.*?)[\"\\']', webpage, 'embed url')\n embed_page = self._download_webpage(embed_url, video_id,\n 'Downloading embed player page')\n wat_id = self._search_regex(r'UVID=(.*?)&', embed_page, 'wat id')\n wat_info = self._download_json(\n 'http://www.wat.tv/interface/contentv3/%s' % wat_id, video_id)\n return self.url_result(wat_info['media']['url'], 'Wat')\n", "path": "youtube_dl/extractor/tf1.py"}]} | 1,209 | 600 |
gh_patches_debug_23014 | rasdani/github-patches | git_diff | chainer__chainer-529 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
The behavior of Function#parameters= is inconsistent with FunctionSet#parameters=
At aaf2aae14ad683734964590e6ceee9d775d9d7c5, an assertion was introduced into `Function#parameters=` that requires the argument `values` to be a list-like object (i.e., one implementing the `__len__` method).
However, this assertion seems to be inconsistent with the behavior of `FunctionSet#parameters=`.
`FunctionSet#parameters=` passes an iterator over its argument `params` to its child functions through `Function#parameters=`, which causes a `TypeError`.
``` python
from chainer import FunctionSet
import chainer.functions as F
func_set = FunctionSet(f=F.Linear(10,10))
params = func_set.parameters
func_set.parameters = params # TypeError: object of type 'tuple_iterator' has no len()
```
--- END ISSUE ---
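
One way to reconcile the two setters is to stop sharing a single iterator and instead slice the incoming sequence by each child function's parameter count, which also lets the length assertion hold; this is essentially what the accepted patch further below does. A minimal standalone sketch (the `assign_parameters`/`funcs` names are illustrative, not chainer API):

```python
def assign_parameters(funcs, params):
    # `params` must be an indexable sequence whose total length equals the
    # number of parameters registered across all child functions.
    assert sum(len(f.parameters) for f in funcs) == len(params)
    idx = 0
    for f in funcs:
        size = len(f.parameters)
        f.parameters = params[idx:idx + size]  # each child receives a sized slice
        idx += size
```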
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `chainer/function_set.py`
Content:
```
1 import numpy
2 import six
3 import warnings
4
5 from chainer import cuda
6 from chainer import function
7
8
9 class FunctionSet(object):
10
11 """Set of objects with ``parameters`` and ``gradients`` properties.
12
13 :class:`FunctionSet` is useful to collect parameters and gradients of
14 multiple parameterized :class:`Function` objects. :class:`FunctionSet`
15 itself also implements :attr:`~FunctionSet.parameters` and
16 :attr:`~FunctionSet.gradients`, so it can be nested in another
17 :class:`FunctionSet` object.
18
19 Function registration is done by just adding an attribute to
20 :class:`FunctionSet` object.
21
22 """
23
24 def __init__(self, **functions):
25 """Initializes the function set by given functions.
26
27 Args:
28 **functions: ``dict`` of ``str`` key and :class:`Function` values.
29 The key-value pairs are just set to the :class:`FunctionSet`
30 object as attributes.
31
32 """
33 for name, func in six.iteritems(functions):
34 setattr(self, name, func)
35
36 def collect_parameters(self):
37 """Returns a tuple of parameters and gradients.
38
39 Returns:
40 Tuple (pair) of two tuples. The first element is a tuple of
41 parameter arrays, and the second is a tuple of gradient arrays.
42
43 """
44
45 msg = ("'collect_parameters' is deprecated. "
46 "You can pass FunctionSet itself to 'optimizer.setup'")
47 warnings.warn(msg, FutureWarning)
48 return self
49
50 def __getitem__(self, key):
51 """Returns the :class:`Function` objects by name.
52
53 Args:
54 key (str): Name of the function.
55
56 Returns:
57 ~chainer.Function: Function object.
58
59 .. admonition:: Example
60
61 >>> model = FunctionSet(l1=F.Linear(10, 10), l2=F.Linear(10, 10))
62 >>> l1 = model['l1']
63 """
64
65 return getattr(self, key)
66
67 def to_gpu(self, device=None):
68 """Migrates all parameters and gradients onto GPU.
69
70 This method calls ``to_gpu`` method of each registered object.
71
72 Args:
73 device (int or :class:`cupy.cuda.Device` or ``None``): Device
74 ID of GPU. If ``None`` is given, it uses the current device.
75
76 Returns:
77 self
78
79 """
80 for func in six.itervalues(self.__dict__):
81 if isinstance(func, (function.Function, FunctionSet)):
82 func.to_gpu(device=device)
83 return self
84
85 def to_cpu(self):
86 """Migrates all parameters and gradients onto CPU.
87
88 This method calls ``to_cpu`` method of each registered object.
89
90 Returns:
91 self
92
93 """
94 for func in six.itervalues(self.__dict__):
95 if isinstance(func, (function.Function, FunctionSet)):
96 func.to_cpu()
97 return self
98
99 def copy_parameters_from(self, params):
100 """Copies parameters from another source without reallocation.
101
102 Args:
103 params (Iterable): Iterable of parameter arrays.
104
105 """
106 for dst, src in zip(self.parameters, params):
107 if isinstance(dst, numpy.ndarray):
108 if isinstance(src, numpy.ndarray):
109 numpy.copyto(dst, src)
110 else:
111 dst[:] = src.get()
112 elif isinstance(src, numpy.ndarray):
113 dst.set(src)
114 else:
115 cuda.copy(src, out=dst)
116
117 @property
118 def parameters(self):
119 """Tuple of parameter arrays of all registered functions.
120
121 The order of parameters is consistent with :meth:`gradients` property.
122
123 """
124 return sum((func.parameters for _, func in self._get_sorted_funcs()),
125 ())
126
127 @parameters.setter
128 def parameters(self, params):
129 param_iter = iter(params)
130 for _, func in self._get_sorted_funcs():
131 func.parameters = param_iter
132
133 @property
134 def gradients(self):
135 """Tuple of gradient arrays of all registered functions.
136
137 The order of gradients is consistent with :meth:`parameters` property.
138
139 """
140 return sum((func.gradients for _, func in self._get_sorted_funcs()),
141 ())
142
143 @gradients.setter
144 def gradients(self, grads):
145 grad_iter = iter(grads)
146 for _, func in self._get_sorted_funcs():
147 func.gradients = grad_iter
148
149 def _get_sorted_funcs(self):
150 return sorted(
151 [func_tuple for func_tuple in six.iteritems(self.__dict__)
152 if isinstance(func_tuple[1], (function.Function, FunctionSet))])
153
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/chainer/function_set.py b/chainer/function_set.py
--- a/chainer/function_set.py
+++ b/chainer/function_set.py
@@ -126,9 +126,16 @@
@parameters.setter
def parameters(self, params):
- param_iter = iter(params)
- for _, func in self._get_sorted_funcs():
- func.parameters = param_iter
+ funcs = self._get_sorted_funcs()
+
+ expected_params_size = sum(len(func.parameters) for _, func in funcs)
+ assert expected_params_size == len(params)
+
+ idx = 0
+ for _, func in funcs:
+ param_size = len(func.parameters)
+ func.parameters = params[idx: idx + param_size]
+ idx += param_size
@property
def gradients(self):
@@ -142,9 +149,16 @@
@gradients.setter
def gradients(self, grads):
- grad_iter = iter(grads)
- for _, func in self._get_sorted_funcs():
- func.gradients = grad_iter
+ funcs = self._get_sorted_funcs()
+
+ expected_grads_size = sum(len(func.gradients) for _, func in funcs)
+ assert expected_grads_size == len(grads)
+
+ idx = 0
+ for _, func in funcs:
+ grad_size = len(func.gradients)
+ func.gradients = grads[idx: idx + grad_size]
+ idx += grad_size
def _get_sorted_funcs(self):
return sorted(
| {"golden_diff": "diff --git a/chainer/function_set.py b/chainer/function_set.py\n--- a/chainer/function_set.py\n+++ b/chainer/function_set.py\n@@ -126,9 +126,16 @@\n \n @parameters.setter\n def parameters(self, params):\n- param_iter = iter(params)\n- for _, func in self._get_sorted_funcs():\n- func.parameters = param_iter\n+ funcs = self._get_sorted_funcs()\n+\n+ expected_params_size = sum(len(func.parameters) for _, func in funcs)\n+ assert expected_params_size == len(params)\n+\n+ idx = 0\n+ for _, func in funcs:\n+ param_size = len(func.parameters)\n+ func.parameters = params[idx: idx + param_size]\n+ idx += param_size\n \n @property\n def gradients(self):\n@@ -142,9 +149,16 @@\n \n @gradients.setter\n def gradients(self, grads):\n- grad_iter = iter(grads)\n- for _, func in self._get_sorted_funcs():\n- func.gradients = grad_iter\n+ funcs = self._get_sorted_funcs()\n+\n+ expected_grads_size = sum(len(func.gradients) for _, func in funcs)\n+ assert expected_grads_size == len(grads)\n+\n+ idx = 0\n+ for _, func in funcs:\n+ grad_size = len(func.gradients)\n+ func.gradients = grads[idx: idx + grad_size]\n+ idx += grad_size\n \n def _get_sorted_funcs(self):\n return sorted(\n", "issue": "The behavior of Function#parameters= is inconsistent with FunctionSet#parameters=\nAt aaf2aae14ad683734964590e6ceee9d775d9d7c5, an assertion was introduced into `Function#parameters=`, which requires the argument `values` to be a list-like object (implementing `__len__` method).\nHowever, this assertion seems to be inconsistent with the behavior of `FunctionSet#parameters=`.\n`FunctionSet#parameters=` passes the iterator of its argument `params` to its child functions through `Function#parameters=`, and it causes a TypeError exception.\n\n``` python\nfrom chainer import FunctionSet\nimport chainer.functions as F\nfunc_set = FunctionSet(f=F.Linear(10,10))\nparams = func_set.parameters\nfunc_set.parameters = params # TypeError: object of type 'tuple_iterator' has no len()\n```\n\n", "before_files": [{"content": "import numpy\nimport six\nimport warnings\n\nfrom chainer import cuda\nfrom chainer import function\n\n\nclass FunctionSet(object):\n\n \"\"\"Set of objects with ``parameters`` and ``gradients`` properties.\n\n :class:`FunctionSet` is useful to collect parameters and gradients of\n multiple parameterized :class:`Function` objects. :class:`FunctionSet`\n itself also implements :attr:`~FunctionSet.parameters` and\n :attr:`~FunctionSet.gradients`, so it can be nested in another\n :class:`FunctionSet` object.\n\n Function registration is done by just adding an attribute to\n :class:`FunctionSet` object.\n\n \"\"\"\n\n def __init__(self, **functions):\n \"\"\"Initializes the function set by given functions.\n\n Args:\n **functions: ``dict`` of ``str`` key and :class:`Function` values.\n The key-value pairs are just set to the :class:`FunctionSet`\n object as attributes.\n\n \"\"\"\n for name, func in six.iteritems(functions):\n setattr(self, name, func)\n\n def collect_parameters(self):\n \"\"\"Returns a tuple of parameters and gradients.\n\n Returns:\n Tuple (pair) of two tuples. The first element is a tuple of\n parameter arrays, and the second is a tuple of gradient arrays.\n\n \"\"\"\n\n msg = (\"'collect_parameters' is deprecated. 
\"\n \"You can pass FunctionSet itself to 'optimizer.setup'\")\n warnings.warn(msg, FutureWarning)\n return self\n\n def __getitem__(self, key):\n \"\"\"Returns the :class:`Function` objects by name.\n\n Args:\n key (str): Name of the function.\n\n Returns:\n ~chainer.Function: Function object.\n\n .. admonition:: Example\n\n >>> model = FunctionSet(l1=F.Linear(10, 10), l2=F.Linear(10, 10))\n >>> l1 = model['l1']\n \"\"\"\n\n return getattr(self, key)\n\n def to_gpu(self, device=None):\n \"\"\"Migrates all parameters and gradients onto GPU.\n\n This method calls ``to_gpu`` method of each registered object.\n\n Args:\n device (int or :class:`cupy.cuda.Device` or ``None``): Device\n ID of GPU. If ``None`` is given, it uses the current device.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n if isinstance(func, (function.Function, FunctionSet)):\n func.to_gpu(device=device)\n return self\n\n def to_cpu(self):\n \"\"\"Migrates all parameters and gradients onto CPU.\n\n This method calls ``to_cpu`` method of each registered object.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n if isinstance(func, (function.Function, FunctionSet)):\n func.to_cpu()\n return self\n\n def copy_parameters_from(self, params):\n \"\"\"Copies parameters from another source without reallocation.\n\n Args:\n params (Iterable): Iterable of parameter arrays.\n\n \"\"\"\n for dst, src in zip(self.parameters, params):\n if isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n numpy.copyto(dst, src)\n else:\n dst[:] = src.get()\n elif isinstance(src, numpy.ndarray):\n dst.set(src)\n else:\n cuda.copy(src, out=dst)\n\n @property\n def parameters(self):\n \"\"\"Tuple of parameter arrays of all registered functions.\n\n The order of parameters is consistent with :meth:`gradients` property.\n\n \"\"\"\n return sum((func.parameters for _, func in self._get_sorted_funcs()),\n ())\n\n @parameters.setter\n def parameters(self, params):\n param_iter = iter(params)\n for _, func in self._get_sorted_funcs():\n func.parameters = param_iter\n\n @property\n def gradients(self):\n \"\"\"Tuple of gradient arrays of all registered functions.\n\n The order of gradients is consistent with :meth:`parameters` property.\n\n \"\"\"\n return sum((func.gradients for _, func in self._get_sorted_funcs()),\n ())\n\n @gradients.setter\n def gradients(self, grads):\n grad_iter = iter(grads)\n for _, func in self._get_sorted_funcs():\n func.gradients = grad_iter\n\n def _get_sorted_funcs(self):\n return sorted(\n [func_tuple for func_tuple in six.iteritems(self.__dict__)\n if isinstance(func_tuple[1], (function.Function, FunctionSet))])\n", "path": "chainer/function_set.py"}], "after_files": [{"content": "import numpy\nimport six\nimport warnings\n\nfrom chainer import cuda\nfrom chainer import function\n\n\nclass FunctionSet(object):\n\n \"\"\"Set of objects with ``parameters`` and ``gradients`` properties.\n\n :class:`FunctionSet` is useful to collect parameters and gradients of\n multiple parameterized :class:`Function` objects. 
:class:`FunctionSet`\n itself also implements :attr:`~FunctionSet.parameters` and\n :attr:`~FunctionSet.gradients`, so it can be nested in another\n :class:`FunctionSet` object.\n\n Function registration is done by just adding an attribute to\n :class:`FunctionSet` object.\n\n \"\"\"\n\n def __init__(self, **functions):\n \"\"\"Initializes the function set by given functions.\n\n Args:\n **functions: ``dict`` of ``str`` key and :class:`Function` values.\n The key-value pairs are just set to the :class:`FunctionSet`\n object as attributes.\n\n \"\"\"\n for name, func in six.iteritems(functions):\n setattr(self, name, func)\n\n def collect_parameters(self):\n \"\"\"Returns a tuple of parameters and gradients.\n\n Returns:\n Tuple (pair) of two tuples. The first element is a tuple of\n parameter arrays, and the second is a tuple of gradient arrays.\n\n \"\"\"\n\n msg = (\"'collect_parameters' is deprecated. \"\n \"You can pass FunctionSet itself to 'optimizer.setup'\")\n warnings.warn(msg, FutureWarning)\n return self\n\n def __getitem__(self, key):\n \"\"\"Returns the :class:`Function` objects by name.\n\n Args:\n key (str): Name of the function.\n\n Returns:\n ~chainer.Function: Function object.\n\n .. admonition:: Example\n\n >>> model = FunctionSet(l1=F.Linear(10, 10), l2=F.Linear(10, 10))\n >>> l1 = model['l1']\n \"\"\"\n\n return getattr(self, key)\n\n def to_gpu(self, device=None):\n \"\"\"Migrates all parameters and gradients onto GPU.\n\n This method calls ``to_gpu`` method of each registered object.\n\n Args:\n device (int or :class:`cupy.cuda.Device` or ``None``): Device\n ID of GPU. If ``None`` is given, it uses the current device.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n if isinstance(func, (function.Function, FunctionSet)):\n func.to_gpu(device=device)\n return self\n\n def to_cpu(self):\n \"\"\"Migrates all parameters and gradients onto CPU.\n\n This method calls ``to_cpu`` method of each registered object.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n if isinstance(func, (function.Function, FunctionSet)):\n func.to_cpu()\n return self\n\n def copy_parameters_from(self, params):\n \"\"\"Copies parameters from another source without reallocation.\n\n Args:\n params (Iterable): Iterable of parameter arrays.\n\n \"\"\"\n for dst, src in zip(self.parameters, params):\n if isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n numpy.copyto(dst, src)\n else:\n dst[:] = src.get()\n elif isinstance(src, numpy.ndarray):\n dst.set(src)\n else:\n cuda.copy(src, out=dst)\n\n @property\n def parameters(self):\n \"\"\"Tuple of parameter arrays of all registered functions.\n\n The order of parameters is consistent with :meth:`gradients` property.\n\n \"\"\"\n return sum((func.parameters for _, func in self._get_sorted_funcs()),\n ())\n\n @parameters.setter\n def parameters(self, params):\n funcs = self._get_sorted_funcs()\n\n expected_params_size = sum(len(func.parameters) for _, func in funcs)\n assert expected_params_size == len(params)\n\n idx = 0\n for _, func in funcs:\n param_size = len(func.parameters)\n func.parameters = params[idx: idx + param_size]\n idx += param_size\n\n @property\n def gradients(self):\n \"\"\"Tuple of gradient arrays of all registered functions.\n\n The order of gradients is consistent with :meth:`parameters` property.\n\n \"\"\"\n return sum((func.gradients for _, func in self._get_sorted_funcs()),\n ())\n\n @gradients.setter\n def gradients(self, grads):\n funcs = 
self._get_sorted_funcs()\n\n expected_grads_size = sum(len(func.gradients) for _, func in funcs)\n assert expected_grads_size == len(grads)\n\n idx = 0\n for _, func in funcs:\n grad_size = len(func.gradients)\n func.gradients = grads[idx: idx + grad_size]\n idx += grad_size\n\n def _get_sorted_funcs(self):\n return sorted(\n [func_tuple for func_tuple in six.iteritems(self.__dict__)\n if isinstance(func_tuple[1], (function.Function, FunctionSet))])\n", "path": "chainer/function_set.py"}]} | 1,795 | 340 |
gh_patches_debug_26306 | rasdani/github-patches | git_diff | sql-machine-learning__elasticdl-47 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Use stable hashing in ps client
--- END ISSUE ---
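
Background on why the built-in `hash()` is a problem for partitioning: CPython salts string hashing per process (controlled by `PYTHONHASHSEED`), so `hash(name) % ps_size` can route the same parameter name to different parameter servers across worker processes or restarts. A stable checksum over the encoded name avoids this; a minimal sketch (the function name is illustrative):

```python
from binascii import crc32

def stable_partition_index(name, ps_size):
    # crc32 over the UTF-8 bytes gives the same value in every process and run,
    # unlike the built-in hash(), whose str hashing is salted per interpreter
    # process (see PYTHONHASHSEED).
    return crc32(name.encode()) % ps_size
```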
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `python/tflib/ps/ps_client.py`
Content:
```
1 import future
2
3
4 # No partition, all in the first ps
5 def no_partition(v, ps_size):
6 return [v]
7
8
9 # Partition v according to the name hashing.
10 def hash_partition(v, ps_size):
11 if v is None:
12 return [None for i in range(ps_size)]
13 if len(v) == 0:
14 raise ValueError('Empty input used in HashPartition.')
15 if ps_size == 1:
16 return [v]
17 if isinstance(v, dict):
18 # (name, data) dict from push
19 results = [{} for i in range(ps_size)]
20 for name, data in v.items():
21 index = hash(name) % ps_size
22 results[index][name] = data
23 elif isinstance(v, list):
24 # name list from pull
25 results = [[] for i in range(ps_size)]
26 for name in v:
27 index = hash(name) % ps_size
28 results[index].append(name)
29 else:
30 raise TypeError('Illegal v type %s, only dict or '
31 'str list is supported.' % str(type(v)))
32 return results
33
34
35 # Multi-thread implementation of PSClientComm
36 class MultiThreadPSClientComm(object):
37 def __init__(self, ps):
38 self._ps = ps
39
40 def push(self, base_step, sub_step, grads):
41 if len(grads) > 0:
42 self._ps.push(base_step, sub_step, grads)
43
44 def pull(self, names=None, min_step=0):
45 return self._ps.pull(names=names, min_step=min_step)
46
47
48 # ParameterSererClient uses PSClientComm for ps data transfer.
49 class ParameterServerClient(object):
50 def __init__(self,
51 ps_configs=None,
52 comm_class=MultiThreadPSClientComm,
53 partition_func=no_partition):
54 self._ps_size = 1 if partition_func == no_partition else len(ps_configs)
55 self._partition_func = partition_func
56 self._clients = [comm_class(ps_configs[i])
57 for i in range(self._ps_size)]
58 self._base_step = [0 for _ in range(self._ps_size)]
59
60 def push(self, sub_step=0, grads=None):
61 partition_result = self._partition_func(grads, self._ps_size)
62 # TODO: multithread optimization, one thread per ps communication.
63 for index, g in enumerate(partition_result):
64 self._clients[index].push(self._base_step[index],
65 sub_step, g)
66
67 def pull(self, min_step=0, names=None):
68 pull_result = {}
69 partition_result = self._partition_func(names, self._ps_size)
70 # TODO: multithread optimization, one thread per ps communication.
71 for index, n in enumerate(partition_result):
72 ps_step, ps_vars = self._clients[index].pull(
73 names=n, min_step=min_step)
74 self._base_step[index] = ps_step
75 pull_result.update(ps_vars)
76 return self.get_min_base_step(), pull_result
77
78 def get_min_base_step(self):
79 return min(self._base_step)
80
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/python/tflib/ps/ps_client.py b/python/tflib/ps/ps_client.py
--- a/python/tflib/ps/ps_client.py
+++ b/python/tflib/ps/ps_client.py
@@ -1,5 +1,4 @@
-import future
-
+from binascii import crc32
# No partition, all in the first ps
def no_partition(v, ps_size):
@@ -8,6 +7,10 @@
# Partition v according to the name hashing.
def hash_partition(v, ps_size):
+ # a simple stable string hash
+ def _hash(s):
+ return crc32(s.encode())
+
if v is None:
return [None for i in range(ps_size)]
if len(v) == 0:
@@ -18,13 +21,13 @@
# (name, data) dict from push
results = [{} for i in range(ps_size)]
for name, data in v.items():
- index = hash(name) % ps_size
+ index = _hash(name) % ps_size
results[index][name] = data
elif isinstance(v, list):
# name list from pull
results = [[] for i in range(ps_size)]
for name in v:
- index = hash(name) % ps_size
+ index = _hash(name) % ps_size
results[index].append(name)
else:
raise TypeError('Illegal v type %s, only dict or '
| {"golden_diff": "diff --git a/python/tflib/ps/ps_client.py b/python/tflib/ps/ps_client.py\n--- a/python/tflib/ps/ps_client.py\n+++ b/python/tflib/ps/ps_client.py\n@@ -1,5 +1,4 @@\n-import future\n-\n+from binascii import crc32\n \n # No partition, all in the first ps\n def no_partition(v, ps_size):\n@@ -8,6 +7,10 @@\n \n # Partition v according to the name hashing.\n def hash_partition(v, ps_size):\n+ # a simple stable string hash\n+ def _hash(s):\n+ return crc32(s.encode())\n+\n if v is None:\n return [None for i in range(ps_size)]\n if len(v) == 0:\n@@ -18,13 +21,13 @@\n # (name, data) dict from push\n results = [{} for i in range(ps_size)]\n for name, data in v.items():\n- index = hash(name) % ps_size\n+ index = _hash(name) % ps_size\n results[index][name] = data\n elif isinstance(v, list):\n # name list from pull\n results = [[] for i in range(ps_size)]\n for name in v:\n- index = hash(name) % ps_size\n+ index = _hash(name) % ps_size\n results[index].append(name)\n else:\n raise TypeError('Illegal v type %s, only dict or '\n", "issue": "Use stable hashing in ps client\n\n", "before_files": [{"content": "import future\n\n\n# No partition, all in the first ps\ndef no_partition(v, ps_size):\n return [v]\n\n\n# Partition v according to the name hashing.\ndef hash_partition(v, ps_size):\n if v is None:\n return [None for i in range(ps_size)]\n if len(v) == 0:\n raise ValueError('Empty input used in HashPartition.')\n if ps_size == 1:\n return [v]\n if isinstance(v, dict):\n # (name, data) dict from push\n results = [{} for i in range(ps_size)]\n for name, data in v.items():\n index = hash(name) % ps_size\n results[index][name] = data\n elif isinstance(v, list):\n # name list from pull\n results = [[] for i in range(ps_size)]\n for name in v:\n index = hash(name) % ps_size\n results[index].append(name)\n else:\n raise TypeError('Illegal v type %s, only dict or '\n 'str list is supported.' 
% str(type(v)))\n return results\n\n\n# Multi-thread implementation of PSClientComm\nclass MultiThreadPSClientComm(object):\n def __init__(self, ps):\n self._ps = ps\n\n def push(self, base_step, sub_step, grads):\n if len(grads) > 0:\n self._ps.push(base_step, sub_step, grads)\n\n def pull(self, names=None, min_step=0):\n return self._ps.pull(names=names, min_step=min_step)\n\n\n# ParameterSererClient uses PSClientComm for ps data transfer.\nclass ParameterServerClient(object):\n def __init__(self,\n ps_configs=None,\n comm_class=MultiThreadPSClientComm,\n partition_func=no_partition):\n self._ps_size = 1 if partition_func == no_partition else len(ps_configs)\n self._partition_func = partition_func\n self._clients = [comm_class(ps_configs[i])\n for i in range(self._ps_size)]\n self._base_step = [0 for _ in range(self._ps_size)]\n\n def push(self, sub_step=0, grads=None):\n partition_result = self._partition_func(grads, self._ps_size)\n # TODO: multithread optimization, one thread per ps communication.\n for index, g in enumerate(partition_result):\n self._clients[index].push(self._base_step[index],\n sub_step, g)\n\n def pull(self, min_step=0, names=None):\n pull_result = {}\n partition_result = self._partition_func(names, self._ps_size)\n # TODO: multithread optimization, one thread per ps communication.\n for index, n in enumerate(partition_result):\n ps_step, ps_vars = self._clients[index].pull(\n names=n, min_step=min_step)\n self._base_step[index] = ps_step\n pull_result.update(ps_vars)\n return self.get_min_base_step(), pull_result\n\n def get_min_base_step(self):\n return min(self._base_step)\n", "path": "python/tflib/ps/ps_client.py"}], "after_files": [{"content": "from binascii import crc32\n\n# No partition, all in the first ps\ndef no_partition(v, ps_size):\n return [v]\n\n\n# Partition v according to the name hashing.\ndef hash_partition(v, ps_size):\n # a simple stable string hash\n def _hash(s):\n return crc32(s.encode())\n\n if v is None:\n return [None for i in range(ps_size)]\n if len(v) == 0:\n raise ValueError('Empty input used in HashPartition.')\n if ps_size == 1:\n return [v]\n if isinstance(v, dict):\n # (name, data) dict from push\n results = [{} for i in range(ps_size)]\n for name, data in v.items():\n index = _hash(name) % ps_size\n results[index][name] = data\n elif isinstance(v, list):\n # name list from pull\n results = [[] for i in range(ps_size)]\n for name in v:\n index = _hash(name) % ps_size\n results[index].append(name)\n else:\n raise TypeError('Illegal v type %s, only dict or '\n 'str list is supported.' 
% str(type(v)))\n return results\n\n\n# Multi-thread implementation of PSClientComm\nclass MultiThreadPSClientComm(object):\n def __init__(self, ps):\n self._ps = ps\n\n def push(self, base_step, sub_step, grads):\n if len(grads) > 0:\n self._ps.push(base_step, sub_step, grads)\n\n def pull(self, names=None, min_step=0):\n return self._ps.pull(names=names, min_step=min_step)\n\n\n# ParameterSererClient uses PSClientComm for ps data transfer.\nclass ParameterServerClient(object):\n def __init__(self,\n ps_configs=None,\n comm_class=MultiThreadPSClientComm,\n partition_func=no_partition):\n self._ps_size = 1 if partition_func == no_partition else len(ps_configs)\n self._partition_func = partition_func\n self._clients = [comm_class(ps_configs[i])\n for i in range(self._ps_size)]\n self._base_step = [0 for _ in range(self._ps_size)]\n\n def push(self, sub_step=0, grads=None):\n partition_result = self._partition_func(grads, self._ps_size)\n # TODO: multithread optimization, one thread per ps communication.\n for index, g in enumerate(partition_result):\n self._clients[index].push(self._base_step[index],\n sub_step, g)\n\n def pull(self, min_step=0, names=None):\n pull_result = {}\n partition_result = self._partition_func(names, self._ps_size)\n # TODO: multithread optimization, one thread per ps communication.\n for index, n in enumerate(partition_result):\n ps_step, ps_vars = self._clients[index].pull(\n names=n, min_step=min_step)\n self._base_step[index] = ps_step\n pull_result.update(ps_vars)\n return self.get_min_base_step(), pull_result\n\n def get_min_base_step(self):\n return min(self._base_step)\n", "path": "python/tflib/ps/ps_client.py"}]} | 1,080 | 330 |
gh_patches_debug_39372 | rasdani/github-patches | git_diff | Cloud-CV__EvalAI-1550 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add a feature to notify users about new challenges on EvalAI.
--- END ISSUE ---
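
For readers unfamiliar with Django's mail API, the broadcast this asks for amounts to rendering an HTML template and sending one multipart message per recipient. A minimal sketch, assuming an email backend is configured in settings and a `notification_email.html` template exists (names are illustrative):

```python
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string

def notify_all(subject, body, recipient_emails):
    # Render the HTML body once; send one message per recipient so addresses
    # are not exposed to each other.
    html = render_to_string("notification_email.html", {"body": body})
    for address in recipient_emails:
        msg = EmailMultiAlternatives(subject, html, settings.EMAIL_HOST_USER, [address])
        msg.attach_alternative(html, "text/html")
        msg.send()
```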
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `apps/web/views.py`
Content:
```
1 from django.contrib.auth.models import User
2 from django.shortcuts import render
3
4 from .models import Team
5
6 from rest_framework import permissions, status
7 from rest_framework.decorators import (api_view,
8 permission_classes,
9 throttle_classes,)
10 from rest_framework.response import Response
11 from rest_framework.throttling import AnonRateThrottle
12
13 from .serializers import ContactSerializer, TeamSerializer
14
15
16 def home(request, template_name="index.html"):
17 """
18 Home Page View
19 """
20 return render(request, template_name)
21
22
23 def page_not_found(request):
24 response = render(request, 'error404.html',
25 )
26 response.status_code = 404
27 return response
28
29
30 def internal_server_error(request):
31 response = render(request, 'error500.html',
32 )
33 response.status_code = 500
34 return response
35
36
37 @throttle_classes([AnonRateThrottle, ])
38 @api_view(['GET', 'POST'])
39 @permission_classes((permissions.AllowAny,))
40 def contact_us(request):
41 user_does_not_exist = False
42 try:
43 user = User.objects.get(username=request.user)
44 name = user.username
45 email = user.email
46 request_data = {'name': name, 'email': email}
47 except:
48 request_data = request.data
49 user_does_not_exist = True
50
51 if request.method == 'POST' or user_does_not_exist:
52 if request.POST.get('message'):
53 request_data['message'] = request.POST.get('message')
54 serializer = ContactSerializer(data=request_data)
55 if serializer.is_valid():
56 serializer.save()
57 response_data = {'message': 'We have received your request and will contact you shortly.'}
58 return Response(response_data, status=status.HTTP_201_CREATED)
59 return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
60
61 elif request.method == 'GET':
62 response_data = {"name": name, "email": email}
63 return Response(response_data, status=status.HTTP_200_OK)
64
65
66 @throttle_classes([AnonRateThrottle])
67 @api_view(['GET', 'POST'])
68 @permission_classes((permissions.AllowAny,))
69 def our_team(request):
70 if request.method == 'GET':
71 teams = Team.objects.all()
72 serializer = TeamSerializer(teams, many=True, context={'request': request})
73 response_data = serializer.data
74 return Response(response_data, status=status.HTTP_200_OK)
75 elif request.method == 'POST':
76 # team_type is set to Team.CONTRIBUTOR by default and can be overridden by the requester
77 request.data['team_type'] = request.data.get('team_type', Team.CONTRIBUTOR)
78 serializer = TeamSerializer(data=request.data)
79 if serializer.is_valid():
80 serializer.save()
81 response_data = {'message', 'Successfully added the contributor.'}
82 return Response(response_data, status=status.HTTP_201_CREATED)
83 return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
84
```
Path: `apps/web/urls.py`
Content:
```
1 from django.conf.urls import url
2
3 from . import views
4
5 urlpatterns = [
6 url(r'^contact/$', views.contact_us, name='contact_us'),
7 url(r'^team/$', views.our_team, name='our_team'),
8 ]
9
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/apps/web/urls.py b/apps/web/urls.py
--- a/apps/web/urls.py
+++ b/apps/web/urls.py
@@ -5,4 +5,5 @@
urlpatterns = [
url(r'^contact/$', views.contact_us, name='contact_us'),
url(r'^team/$', views.our_team, name='our_team'),
+ url(r'^notify_users/$', views.notify_users_about_challenge, name='notify_users_about_challenge'),
]
diff --git a/apps/web/views.py b/apps/web/views.py
--- a/apps/web/views.py
+++ b/apps/web/views.py
@@ -1,7 +1,14 @@
from django.contrib.auth.models import User
+from django.conf import settings
+from django.core.mail import EmailMultiAlternatives
from django.shortcuts import render
+from django.template.loader import get_template
+from django.template import Context
+
+from email.MIMEImage import MIMEImage
from .models import Team
+from .serializers import ContactSerializer, TeamSerializer
from rest_framework import permissions, status
from rest_framework.decorators import (api_view,
@@ -10,8 +17,6 @@
from rest_framework.response import Response
from rest_framework.throttling import AnonRateThrottle
-from .serializers import ContactSerializer, TeamSerializer
-
def home(request, template_name="index.html"):
"""
@@ -34,6 +39,57 @@
return response
+def notify_users_about_challenge(request):
+ """
+ Email New Challenge Details to EvalAI Users
+ """
+ if request.user.is_authenticated() and request.user.is_superuser:
+ if request.method == 'GET':
+ template_name = 'notification_email_data.html'
+ return render(request, template_name)
+
+ elif request.method == 'POST':
+ template_name = 'notification_email.html'
+ emails = User.objects.all().exclude(email__isnull=True, email__exact='').values_list('email', flat=True)
+ htmly = get_template('notification_email.html')
+
+ subject = request.POST.get('subject')
+ body = request.POST.get('body')
+
+ try:
+ challenge_image = request.FILES['challenge_image']
+ except:
+ challenge_image = None
+
+ if challenge_image:
+ image = MIMEImage(challenge_image.read())
+ image.add_header('Content-ID', '<{}>'.format(challenge_image))
+
+ context = Context({'body': body,
+ 'image': challenge_image})
+
+ for email in emails:
+ from_email = settings.EMAIL_HOST_USER
+ to = [email]
+ html_content = htmly.render(context)
+
+ msg = EmailMultiAlternatives(subject, html_content, from_email, to)
+ msg.attach_alternative(html_content, "text/html")
+ msg.mixed_subtype = 'related'
+
+ if challenge_image:
+ msg.attach(image)
+
+ msg.send()
+ return render(request,
+ 'notification_email_conformation.html',
+ {'message': 'All the emails are sent successfully!'})
+ else:
+ return render(request, 'error404.html')
+ else:
+ return render(request, 'error404.html')
+
+
@throttle_classes([AnonRateThrottle, ])
@api_view(['GET', 'POST'])
@permission_classes((permissions.AllowAny,))
| {"golden_diff": "diff --git a/apps/web/urls.py b/apps/web/urls.py\n--- a/apps/web/urls.py\n+++ b/apps/web/urls.py\n@@ -5,4 +5,5 @@\n urlpatterns = [\n url(r'^contact/$', views.contact_us, name='contact_us'),\n url(r'^team/$', views.our_team, name='our_team'),\n+ url(r'^notify_users/$', views.notify_users_about_challenge, name='notify_users_about_challenge'),\n ]\ndiff --git a/apps/web/views.py b/apps/web/views.py\n--- a/apps/web/views.py\n+++ b/apps/web/views.py\n@@ -1,7 +1,14 @@\n from django.contrib.auth.models import User\n+from django.conf import settings\n+from django.core.mail import EmailMultiAlternatives\n from django.shortcuts import render\n+from django.template.loader import get_template\n+from django.template import Context\n+\n+from email.MIMEImage import MIMEImage\n \n from .models import Team\n+from .serializers import ContactSerializer, TeamSerializer\n \n from rest_framework import permissions, status\n from rest_framework.decorators import (api_view,\n@@ -10,8 +17,6 @@\n from rest_framework.response import Response\n from rest_framework.throttling import AnonRateThrottle\n \n-from .serializers import ContactSerializer, TeamSerializer\n-\n \n def home(request, template_name=\"index.html\"):\n \"\"\"\n@@ -34,6 +39,57 @@\n return response\n \n \n+def notify_users_about_challenge(request):\n+ \"\"\"\n+ Email New Challenge Details to EvalAI Users\n+ \"\"\"\n+ if request.user.is_authenticated() and request.user.is_superuser:\n+ if request.method == 'GET':\n+ template_name = 'notification_email_data.html'\n+ return render(request, template_name)\n+\n+ elif request.method == 'POST':\n+ template_name = 'notification_email.html'\n+ emails = User.objects.all().exclude(email__isnull=True, email__exact='').values_list('email', flat=True)\n+ htmly = get_template('notification_email.html')\n+\n+ subject = request.POST.get('subject')\n+ body = request.POST.get('body')\n+\n+ try:\n+ challenge_image = request.FILES['challenge_image']\n+ except:\n+ challenge_image = None\n+\n+ if challenge_image:\n+ image = MIMEImage(challenge_image.read())\n+ image.add_header('Content-ID', '<{}>'.format(challenge_image))\n+\n+ context = Context({'body': body,\n+ 'image': challenge_image})\n+\n+ for email in emails:\n+ from_email = settings.EMAIL_HOST_USER\n+ to = [email]\n+ html_content = htmly.render(context)\n+\n+ msg = EmailMultiAlternatives(subject, html_content, from_email, to)\n+ msg.attach_alternative(html_content, \"text/html\")\n+ msg.mixed_subtype = 'related'\n+\n+ if challenge_image:\n+ msg.attach(image)\n+\n+ msg.send()\n+ return render(request,\n+ 'notification_email_conformation.html',\n+ {'message': 'All the emails are sent successfully!'})\n+ else:\n+ return render(request, 'error404.html')\n+ else:\n+ return render(request, 'error404.html')\n+\n+\n @throttle_classes([AnonRateThrottle, ])\n @api_view(['GET', 'POST'])\n @permission_classes((permissions.AllowAny,))\n", "issue": "Add Feature to notify users about new challenge on EvalAI.\n\n", "before_files": [{"content": "from django.contrib.auth.models import User\nfrom django.shortcuts import render\n\nfrom .models import Team\n\nfrom rest_framework import permissions, status\nfrom rest_framework.decorators import (api_view,\n permission_classes,\n throttle_classes,)\nfrom rest_framework.response import Response\nfrom rest_framework.throttling import AnonRateThrottle\n\nfrom .serializers import ContactSerializer, TeamSerializer\n\n\ndef home(request, template_name=\"index.html\"):\n \"\"\"\n Home Page View\n \"\"\"\n return 
render(request, template_name)\n\n\ndef page_not_found(request):\n response = render(request, 'error404.html',\n )\n response.status_code = 404\n return response\n\n\ndef internal_server_error(request):\n response = render(request, 'error500.html',\n )\n response.status_code = 500\n return response\n\n\n@throttle_classes([AnonRateThrottle, ])\n@api_view(['GET', 'POST'])\n@permission_classes((permissions.AllowAny,))\ndef contact_us(request):\n user_does_not_exist = False\n try:\n user = User.objects.get(username=request.user)\n name = user.username\n email = user.email\n request_data = {'name': name, 'email': email}\n except:\n request_data = request.data\n user_does_not_exist = True\n\n if request.method == 'POST' or user_does_not_exist:\n if request.POST.get('message'):\n request_data['message'] = request.POST.get('message')\n serializer = ContactSerializer(data=request_data)\n if serializer.is_valid():\n serializer.save()\n response_data = {'message': 'We have received your request and will contact you shortly.'}\n return Response(response_data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n elif request.method == 'GET':\n response_data = {\"name\": name, \"email\": email}\n return Response(response_data, status=status.HTTP_200_OK)\n\n\n@throttle_classes([AnonRateThrottle])\n@api_view(['GET', 'POST'])\n@permission_classes((permissions.AllowAny,))\ndef our_team(request):\n if request.method == 'GET':\n teams = Team.objects.all()\n serializer = TeamSerializer(teams, many=True, context={'request': request})\n response_data = serializer.data\n return Response(response_data, status=status.HTTP_200_OK)\n elif request.method == 'POST':\n # team_type is set to Team.CONTRIBUTOR by default and can be overridden by the requester\n request.data['team_type'] = request.data.get('team_type', Team.CONTRIBUTOR)\n serializer = TeamSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n response_data = {'message', 'Successfully added the contributor.'}\n return Response(response_data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n", "path": "apps/web/views.py"}, {"content": "from django.conf.urls import url\n\nfrom . 
import views\n\nurlpatterns = [\n url(r'^contact/$', views.contact_us, name='contact_us'),\n url(r'^team/$', views.our_team, name='our_team'),\n]\n", "path": "apps/web/urls.py"}], "after_files": [{"content": "from django.contrib.auth.models import User\nfrom django.conf import settings\nfrom django.core.mail import EmailMultiAlternatives\nfrom django.shortcuts import render\nfrom django.template.loader import get_template\nfrom django.template import Context\n\nfrom email.MIMEImage import MIMEImage\n\nfrom .models import Team\nfrom .serializers import ContactSerializer, TeamSerializer\n\nfrom rest_framework import permissions, status\nfrom rest_framework.decorators import (api_view,\n permission_classes,\n throttle_classes,)\nfrom rest_framework.response import Response\nfrom rest_framework.throttling import AnonRateThrottle\n\n\ndef home(request, template_name=\"index.html\"):\n \"\"\"\n Home Page View\n \"\"\"\n return render(request, template_name)\n\n\ndef page_not_found(request):\n response = render(request, 'error404.html',\n )\n response.status_code = 404\n return response\n\n\ndef internal_server_error(request):\n response = render(request, 'error500.html',\n )\n response.status_code = 500\n return response\n\n\ndef notify_users_about_challenge(request):\n \"\"\"\n Email New Challenge Details to EvalAI Users\n \"\"\"\n if request.user.is_authenticated() and request.user.is_superuser:\n if request.method == 'GET':\n template_name = 'notification_email_data.html'\n return render(request, template_name)\n\n elif request.method == 'POST':\n template_name = 'notification_email.html'\n emails = User.objects.all().exclude(email__isnull=True, email__exact='').values_list('email', flat=True)\n htmly = get_template('notification_email.html')\n\n subject = request.POST.get('subject')\n body = request.POST.get('body')\n\n try:\n challenge_image = request.FILES['challenge_image']\n except:\n challenge_image = None\n\n if challenge_image:\n image = MIMEImage(challenge_image.read())\n image.add_header('Content-ID', '<{}>'.format(challenge_image))\n\n context = Context({'body': body,\n 'image': challenge_image})\n\n for email in emails:\n from_email = settings.EMAIL_HOST_USER\n to = [email]\n html_content = htmly.render(context)\n\n msg = EmailMultiAlternatives(subject, html_content, from_email, to)\n msg.attach_alternative(html_content, \"text/html\")\n msg.mixed_subtype = 'related'\n\n if challenge_image:\n msg.attach(image)\n\n msg.send()\n return render(request,\n 'notification_email_conformation.html',\n {'message': 'All the emails are sent successfully!'})\n else:\n return render(request, 'error404.html')\n else:\n return render(request, 'error404.html')\n\n\n@throttle_classes([AnonRateThrottle, ])\n@api_view(['GET', 'POST'])\n@permission_classes((permissions.AllowAny,))\ndef contact_us(request):\n user_does_not_exist = False\n try:\n user = User.objects.get(username=request.user)\n name = user.username\n email = user.email\n request_data = {'name': name, 'email': email}\n except:\n request_data = request.data\n user_does_not_exist = True\n\n if request.method == 'POST' or user_does_not_exist:\n if request.POST.get('message'):\n request_data['message'] = request.POST.get('message')\n serializer = ContactSerializer(data=request_data)\n if serializer.is_valid():\n serializer.save()\n response_data = {'message': 'We have received your request and will contact you shortly.'}\n return Response(response_data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, 
status=status.HTTP_400_BAD_REQUEST)\n\n elif request.method == 'GET':\n response_data = {\"name\": name, \"email\": email}\n return Response(response_data, status=status.HTTP_200_OK)\n\n\n@throttle_classes([AnonRateThrottle])\n@api_view(['GET', 'POST'])\n@permission_classes((permissions.AllowAny,))\ndef our_team(request):\n if request.method == 'GET':\n teams = Team.objects.all()\n serializer = TeamSerializer(teams, many=True, context={'request': request})\n response_data = serializer.data\n return Response(response_data, status=status.HTTP_200_OK)\n elif request.method == 'POST':\n # team_type is set to Team.CONTRIBUTOR by default and can be overridden by the requester\n request.data['team_type'] = request.data.get('team_type', Team.CONTRIBUTOR)\n serializer = TeamSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n response_data = {'message', 'Successfully added the contributor.'}\n return Response(response_data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n", "path": "apps/web/views.py"}, {"content": "from django.conf.urls import url\n\nfrom . import views\n\nurlpatterns = [\n url(r'^contact/$', views.contact_us, name='contact_us'),\n url(r'^team/$', views.our_team, name='our_team'),\n url(r'^notify_users/$', views.notify_users_about_challenge, name='notify_users_about_challenge'),\n]\n", "path": "apps/web/urls.py"}]} | 1,134 | 728 |
gh_patches_debug_11414 | rasdani/github-patches | git_diff | bridgecrewio__checkov-686 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
No module named 'packaging'
**Describe the bug**
Since the latest Docker image (v1.0.640), we have been getting an error when running checkov in our CI.
**To Reproduce**
```
$ checkov --quiet -d directory
Traceback (most recent call last):
File "/usr/local/bin/checkov", line 2, in <module>
from checkov.main import run
File "/usr/local/lib/python3.8/site-packages/checkov/main.py", line 23, in <module>
from checkov.terraform.runner import Runner as tf_runner
File "/usr/local/lib/python3.8/site-packages/checkov/terraform/runner.py", line 20, in <module>
from checkov.terraform.parser import Parser
File "/usr/local/lib/python3.8/site-packages/checkov/terraform/parser.py", line 16, in <module>
from checkov.terraform.module_loading.registry import ModuleLoaderRegistry
File "/usr/local/lib/python3.8/site-packages/checkov/terraform/module_loading/__init__.py", line 1, in <module>
from checkov.terraform.module_loading.loaders import *
File "/usr/local/lib/python3.8/site-packages/checkov/terraform/module_loading/loaders/versions_parser.py", line 2, in <module>
from packaging import version
ModuleNotFoundError: No module named 'packaging'
```
**Additional context**
Using Docker Image v1.0.640
--- END ISSUE ---
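
The traceback points at the root cause: `versions_parser.py` does `from packaging import version` at import time, but in the `setup.py` shown below the `packaging` pin appears only under the `dev` extra, so a plain install (as presumably used to build the Docker image) never pulls it in. The fix is to declare it as a core dependency; a rough fragment (pins omitted for brevity):

```python
from setuptools import setup

setup(
    name="checkov",
    install_requires=[
        # existing runtime dependencies trimmed for brevity; the key addition:
        "packaging",  # imported at runtime by the Terraform module-loading code
    ],
)
```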
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 import logging
3 import os
4 from importlib import util
5 from os import path
6
7 import setuptools
8 from setuptools import setup
9
10 # read the contents of your README file
11 this_directory = path.abspath(path.dirname(__file__))
12 with open(path.join(this_directory, "README.md"), encoding="utf-8") as f:
13 long_description = f.read()
14
15 logger = logging.getLogger(__name__)
16 spec = util.spec_from_file_location(
17 "checkov.version", os.path.join("checkov", "version.py")
18 )
19 # noinspection PyUnresolvedReferences
20 mod = util.module_from_spec(spec)
21 spec.loader.exec_module(mod) # type: ignore
22 version = mod.version # type: ignore
23
24 setup(
25 extras_require={
26 "dev": [
27 "alabaster==0.7.12",
28 "attrs==19.3.0",
29 "babel==2.7.0",
30 "certifi==2019.11.28",
31 "chardet==3.0.4",
32 "coverage==4.5.4",
33 "coverage-badge==1.0.1",
34 "docopt==0.6.2",
35 "docutils==0.15.2",
36 "idna==2.8",
37 "imagesize==1.1.0",
38 "importlib-metadata==1.1.0; python_version < '3.8'",
39 "jinja2==2.10.3",
40 "lark-parser==0.7.8",
41 "markupsafe==1.1.1",
42 "more-itertools==8.0.0",
43 "packaging==19.2",
44 "pluggy==0.13.1",
45 "py==1.8.0",
46 "pygments==2.5.2",
47 "pyparsing==2.4.5",
48 "pytest==5.3.1",
49 "bc-python-hcl2>=0.3.10",
50 "pytz==2019.3",
51 "pyyaml==5.3.1",
52 "requests==2.22.0",
53 "six==1.15.0",
54 "snowballstemmer==2.0.0",
55 "sphinx==2.2.1",
56 "sphinxcontrib-applehelp==1.0.1",
57 "sphinxcontrib-devhelp==1.0.1",
58 "sphinxcontrib-htmlhelp==1.0.2",
59 "sphinxcontrib-jsmath==1.0.1",
60 "sphinxcontrib-qthelp==1.0.2",
61 "sphinxcontrib-serializinghtml==1.1.3",
62 "urllib3==1.25.10",
63 "wcwidth==0.1.7",
64 "zipp==0.6.0",
65 "GitPython==3.1.7",
66 "gitdb==4.0.5"
67 ]
68 },
69 install_requires=[
70 "update-checker==0.18.0",
71 "tqdm==4.49.0",
72 "boto3==1.12.43",
73 "chardet==3.0.4",
74 "colorama==0.4.3",
75 "deep_merge==0.0.4",
76 "docopt==0.6.2",
77 "idna==2.8",
78 "jmespath==0.10.0",
79 "junit-xml==1.8",
80 "lark-parser==0.7.8",
81 "bc-python-hcl2>=0.3.11",
82 "pyyaml==5.3.1",
83 "requests==2.22.0",
84 "six==1.15.0",
85 "semantic_version==2.8.5",
86 "tabulate==0.8.6",
87 "termcolor==1.1.0",
88 "urllib3==1.25.10",
89 "dpath==1.5.0",
90 "GitPython==3.1.7",
91 "gitdb==4.0.5"
92 ],
93 license="Apache License 2.0",
94 name="checkov",
95 version=version,
96 description="Infrastructure as code static analysis",
97 author="bridgecrew",
98 author_email="[email protected]",
99 url="https://github.com/bridgecrewio/checkov",
100 packages=setuptools.find_packages(exclude=["tests*","integration_tests*"]),
101 scripts=["bin/checkov","bin/checkov.cmd"],
102 long_description=long_description,
103 long_description_content_type="text/markdown",
104 classifiers=[
105 'Environment :: Console',
106 'Intended Audience :: Developers',
107 'Intended Audience :: System Administrators',
108 'Programming Language :: Python :: 3.7',
109 'Topic :: Security',
110 'Topic :: Software Development :: Build Tools'
111 ]
112 )
113
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -63,7 +63,8 @@
"wcwidth==0.1.7",
"zipp==0.6.0",
"GitPython==3.1.7",
- "gitdb==4.0.5"
+ "gitdb==4.0.5",
+ "packaging==20.4"
]
},
install_requires=[
@@ -88,7 +89,8 @@
"urllib3==1.25.10",
"dpath==1.5.0",
"GitPython==3.1.7",
- "gitdb==4.0.5"
+ "gitdb==4.0.5",
+ "packaging==20.4"
],
license="Apache License 2.0",
name="checkov",
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -63,7 +63,8 @@\n \"wcwidth==0.1.7\",\n \"zipp==0.6.0\",\n \"GitPython==3.1.7\",\n- \"gitdb==4.0.5\"\n+ \"gitdb==4.0.5\",\n+ \"packaging==20.4\"\n ]\n },\n install_requires=[\n@@ -88,7 +89,8 @@\n \"urllib3==1.25.10\",\n \"dpath==1.5.0\",\n \"GitPython==3.1.7\",\n- \"gitdb==4.0.5\"\n+ \"gitdb==4.0.5\",\n+ \"packaging==20.4\"\n ],\n license=\"Apache License 2.0\",\n name=\"checkov\",\n", "issue": "No module named 'packaging'\n**Describe the bug**\r\nSince latest docker image v1.0.640. We got an issue when running checkov on our CI.\r\n\r\n**To Reproduce**\r\n```\r\n$ checkov --quiet -d directory\r\nTraceback (most recent call last):\r\n File \"/usr/local/bin/checkov\", line 2, in <module>\r\n from checkov.main import run\r\n File \"/usr/local/lib/python3.8/site-packages/checkov/main.py\", line 23, in <module>\r\n from checkov.terraform.runner import Runner as tf_runner\r\n File \"/usr/local/lib/python3.8/site-packages/checkov/terraform/runner.py\", line 20, in <module>\r\n from checkov.terraform.parser import Parser\r\n File \"/usr/local/lib/python3.8/site-packages/checkov/terraform/parser.py\", line 16, in <module>\r\n from checkov.terraform.module_loading.registry import ModuleLoaderRegistry\r\n File \"/usr/local/lib/python3.8/site-packages/checkov/terraform/module_loading/__init__.py\", line 1, in <module>\r\n from checkov.terraform.module_loading.loaders import *\r\n File \"/usr/local/lib/python3.8/site-packages/checkov/terraform/module_loading/loaders/versions_parser.py\", line 2, in <module>\r\n from packaging import version\r\nModuleNotFoundError: No module named 'packaging'\r\n```\r\n\r\n**Additional context**\r\nUsing Docker Image v1.0.640\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\nimport logging\nimport os\nfrom importlib import util\nfrom os import path\n\nimport setuptools\nfrom setuptools import setup\n\n# read the contents of your README file\nthis_directory = path.abspath(path.dirname(__file__))\nwith open(path.join(this_directory, \"README.md\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nlogger = logging.getLogger(__name__)\nspec = util.spec_from_file_location(\n \"checkov.version\", os.path.join(\"checkov\", \"version.py\")\n)\n# noinspection PyUnresolvedReferences\nmod = util.module_from_spec(spec)\nspec.loader.exec_module(mod) # type: ignore\nversion = mod.version # type: ignore\n\nsetup(\n extras_require={\n \"dev\": [\n \"alabaster==0.7.12\",\n \"attrs==19.3.0\",\n \"babel==2.7.0\",\n \"certifi==2019.11.28\",\n \"chardet==3.0.4\",\n \"coverage==4.5.4\",\n \"coverage-badge==1.0.1\",\n \"docopt==0.6.2\",\n \"docutils==0.15.2\",\n \"idna==2.8\",\n \"imagesize==1.1.0\",\n \"importlib-metadata==1.1.0; python_version < '3.8'\",\n \"jinja2==2.10.3\",\n \"lark-parser==0.7.8\",\n \"markupsafe==1.1.1\",\n \"more-itertools==8.0.0\",\n \"packaging==19.2\",\n \"pluggy==0.13.1\",\n \"py==1.8.0\",\n \"pygments==2.5.2\",\n \"pyparsing==2.4.5\",\n \"pytest==5.3.1\",\n \"bc-python-hcl2>=0.3.10\",\n \"pytz==2019.3\",\n \"pyyaml==5.3.1\",\n \"requests==2.22.0\",\n \"six==1.15.0\",\n \"snowballstemmer==2.0.0\",\n \"sphinx==2.2.1\",\n \"sphinxcontrib-applehelp==1.0.1\",\n \"sphinxcontrib-devhelp==1.0.1\",\n \"sphinxcontrib-htmlhelp==1.0.2\",\n \"sphinxcontrib-jsmath==1.0.1\",\n \"sphinxcontrib-qthelp==1.0.2\",\n \"sphinxcontrib-serializinghtml==1.1.3\",\n \"urllib3==1.25.10\",\n \"wcwidth==0.1.7\",\n \"zipp==0.6.0\",\n \"GitPython==3.1.7\",\n \"gitdb==4.0.5\"\n ]\n },\n 
install_requires=[\n \"update-checker==0.18.0\",\n \"tqdm==4.49.0\",\n \"boto3==1.12.43\",\n \"chardet==3.0.4\",\n \"colorama==0.4.3\",\n \"deep_merge==0.0.4\",\n \"docopt==0.6.2\",\n \"idna==2.8\",\n \"jmespath==0.10.0\",\n \"junit-xml==1.8\",\n \"lark-parser==0.7.8\",\n \"bc-python-hcl2>=0.3.11\",\n \"pyyaml==5.3.1\",\n \"requests==2.22.0\",\n \"six==1.15.0\",\n \"semantic_version==2.8.5\",\n \"tabulate==0.8.6\",\n \"termcolor==1.1.0\",\n \"urllib3==1.25.10\",\n \"dpath==1.5.0\",\n \"GitPython==3.1.7\",\n \"gitdb==4.0.5\"\n ],\n license=\"Apache License 2.0\",\n name=\"checkov\",\n version=version,\n description=\"Infrastructure as code static analysis\",\n author=\"bridgecrew\",\n author_email=\"[email protected]\",\n url=\"https://github.com/bridgecrewio/checkov\",\n packages=setuptools.find_packages(exclude=[\"tests*\",\"integration_tests*\"]),\n scripts=[\"bin/checkov\",\"bin/checkov.cmd\"],\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n classifiers=[\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Security',\n 'Topic :: Software Development :: Build Tools'\n ]\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport logging\nimport os\nfrom importlib import util\nfrom os import path\n\nimport setuptools\nfrom setuptools import setup\n\n# read the contents of your README file\nthis_directory = path.abspath(path.dirname(__file__))\nwith open(path.join(this_directory, \"README.md\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nlogger = logging.getLogger(__name__)\nspec = util.spec_from_file_location(\n \"checkov.version\", os.path.join(\"checkov\", \"version.py\")\n)\n# noinspection PyUnresolvedReferences\nmod = util.module_from_spec(spec)\nspec.loader.exec_module(mod) # type: ignore\nversion = mod.version # type: ignore\n\nsetup(\n extras_require={\n \"dev\": [\n \"alabaster==0.7.12\",\n \"attrs==19.3.0\",\n \"babel==2.7.0\",\n \"certifi==2019.11.28\",\n \"chardet==3.0.4\",\n \"coverage==4.5.4\",\n \"coverage-badge==1.0.1\",\n \"docopt==0.6.2\",\n \"docutils==0.15.2\",\n \"idna==2.8\",\n \"imagesize==1.1.0\",\n \"importlib-metadata==1.1.0; python_version < '3.8'\",\n \"jinja2==2.10.3\",\n \"lark-parser==0.7.8\",\n \"markupsafe==1.1.1\",\n \"more-itertools==8.0.0\",\n \"packaging==19.2\",\n \"pluggy==0.13.1\",\n \"py==1.8.0\",\n \"pygments==2.5.2\",\n \"pyparsing==2.4.5\",\n \"pytest==5.3.1\",\n \"bc-python-hcl2>=0.3.10\",\n \"pytz==2019.3\",\n \"pyyaml==5.3.1\",\n \"requests==2.22.0\",\n \"six==1.15.0\",\n \"snowballstemmer==2.0.0\",\n \"sphinx==2.2.1\",\n \"sphinxcontrib-applehelp==1.0.1\",\n \"sphinxcontrib-devhelp==1.0.1\",\n \"sphinxcontrib-htmlhelp==1.0.2\",\n \"sphinxcontrib-jsmath==1.0.1\",\n \"sphinxcontrib-qthelp==1.0.2\",\n \"sphinxcontrib-serializinghtml==1.1.3\",\n \"urllib3==1.25.10\",\n \"wcwidth==0.1.7\",\n \"zipp==0.6.0\",\n \"GitPython==3.1.7\",\n \"gitdb==4.0.5\",\n \"packaging==20.4\"\n ]\n },\n install_requires=[\n \"update-checker==0.18.0\",\n \"tqdm==4.49.0\",\n \"boto3==1.12.43\",\n \"chardet==3.0.4\",\n \"colorama==0.4.3\",\n \"deep_merge==0.0.4\",\n \"docopt==0.6.2\",\n \"idna==2.8\",\n \"jmespath==0.10.0\",\n \"junit-xml==1.8\",\n \"lark-parser==0.7.8\",\n \"bc-python-hcl2>=0.3.11\",\n \"pyyaml==5.3.1\",\n \"requests==2.22.0\",\n \"six==1.15.0\",\n \"semantic_version==2.8.5\",\n \"tabulate==0.8.6\",\n \"termcolor==1.1.0\",\n \"urllib3==1.25.10\",\n 
\"dpath==1.5.0\",\n \"GitPython==3.1.7\",\n \"gitdb==4.0.5\",\n \"packaging==20.4\"\n ],\n license=\"Apache License 2.0\",\n name=\"checkov\",\n version=version,\n description=\"Infrastructure as code static analysis\",\n author=\"bridgecrew\",\n author_email=\"[email protected]\",\n url=\"https://github.com/bridgecrewio/checkov\",\n packages=setuptools.find_packages(exclude=[\"tests*\",\"integration_tests*\"]),\n scripts=[\"bin/checkov\",\"bin/checkov.cmd\"],\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n classifiers=[\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Security',\n 'Topic :: Software Development :: Build Tools'\n ]\n)\n", "path": "setup.py"}]} | 1,889 | 209 |
gh_patches_debug_3311 | rasdani/github-patches | git_diff | beeware__toga-1785 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Text within MultilineTextInput remains black in macOS Dark Mode
### Describe the bug
When using macOS in dark mode, the text within MultilineTextInput widgets remains black, making it difficult to read.
### Steps to reproduce
1. Add a MultilineTextInput widget to an app
2. Ensure macOS dark mode is enabled
3. View the app in dev mode or run the app
4. Type text into the MultilineTextInput widget
5. See error
### Expected behavior
The color of the text within the MultilineTextInput widget should be white when in dark mode and black in light mode. This works as expected with TextInput widget.
### Screenshots
<img width="270" alt="Screenshot 2023-02-16 at 1 44 35 PM" src="https://user-images.githubusercontent.com/42925864/219470705-d344d219-f0c1-46ed-8fd9-085fccfef03f.png">
### Environment
- Operating System: macOS 13.1
- Python version: 11
- Software versions:
- Briefcase: 0.3.12
- Toga: 0.3.0
### Logs
```
```
### Additional context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cocoa/src/toga_cocoa/widgets/multilinetextinput.py`
Content:
```
1 from travertino.size import at_least
2
3 from toga_cocoa.colors import native_color
4 from toga_cocoa.libs import (
5 NSBezelBorder,
6 NSScrollView,
7 NSTextView,
8 NSViewWidthSizable,
9 objc_method,
10 )
11
12 from .base import Widget
13
14
15 class TogaTextView(NSTextView):
16 @objc_method
17 def touchBar(self):
18 # Disable the touchbar.
19 return None
20
21
22 class MultilineTextInput(Widget):
23 def create(self):
24 # Create a multiline view, and put it in a scroll view.
25 # The scroll view is the native, because it's the outer container.
26 self.native = NSScrollView.alloc().init()
27 self.native.hasVerticalScroller = True
28 self.native.hasHorizontalScroller = False
29 self.native.autohidesScrollers = False
30 self.native.borderType = NSBezelBorder
31
32 # Disable all autolayout functionality on the outer widget
33 self.native.translatesAutoresizingMaskIntoConstraints = False
34
35 # Create the actual text widget
36 self.text = TogaTextView.alloc().init()
37 self.text.editable = True
38 self.text.selectable = True
39 self.text.verticallyResizable = True
40 self.text.horizontallyResizable = False
41
42 self.text.autoresizingMask = NSViewWidthSizable
43
44 # Put the text view in the scroll window.
45 self.native.documentView = self.text
46
47 # Add the layout constraints
48 self.add_constraints()
49
50 def set_placeholder(self, value):
51 self.text.placeholderString = self.interface.placeholder
52
53 def set_readonly(self, value):
54 self.text.editable = not self.interface.readonly
55
56 def get_value(self):
57 return self.text.string
58
59 def set_value(self, value):
60 self.text.string = value
61
62 def set_color(self, value):
63 self.text.textColor = native_color(value)
64
65 def set_font(self, font):
66 if font:
67 self.text.font = font._impl.native
68
69 def rehint(self):
70 self.interface.intrinsic.width = at_least(self.interface.MIN_WIDTH)
71 self.interface.intrinsic.height = at_least(self.interface.MIN_HEIGHT)
72
73 def set_on_change(self, handler):
74 self.interface.factory.not_implemented("MultilineTextInput.set_on_change()")
75
76 def scroll_to_bottom(self):
77 self.text.scrollToEndOfDocument(None)
78
79 def scroll_to_top(self):
80 self.text.scrollToBeginningOfDocument(None)
81
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/cocoa/src/toga_cocoa/widgets/multilinetextinput.py b/cocoa/src/toga_cocoa/widgets/multilinetextinput.py
--- a/cocoa/src/toga_cocoa/widgets/multilinetextinput.py
+++ b/cocoa/src/toga_cocoa/widgets/multilinetextinput.py
@@ -38,6 +38,7 @@
self.text.selectable = True
self.text.verticallyResizable = True
self.text.horizontallyResizable = False
+ self.text.usesAdaptiveColorMappingForDarkAppearance = True
self.text.autoresizingMask = NSViewWidthSizable
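The single added line opts the backing `NSTextView` into AppKit's adaptive colour mapping, so the default black text is remapped when the system appearance switches to dark. A small harness for checking the behaviour from the public Toga API; the app name and bundle id are placeholders, and the difference is only visible on macOS:

```python
# Minimal Toga app for eyeballing MultilineTextInput under light and dark mode.
import toga
from toga.style import Pack


def build(app):
    box = toga.Box(style=Pack(direction="column", padding=10))
    box.add(toga.MultilineTextInput(style=Pack(flex=1)))
    return box


def main():
    return toga.App("Dark Mode Check", "org.example.darkmode", startup=build)


if __name__ == "__main__":
    main().main_loop()
```

Typing into the widget after toggling dark mode in System Settings should now show readable text instead of black on the dark background.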
| {"golden_diff": "diff --git a/cocoa/src/toga_cocoa/widgets/multilinetextinput.py b/cocoa/src/toga_cocoa/widgets/multilinetextinput.py\n--- a/cocoa/src/toga_cocoa/widgets/multilinetextinput.py\n+++ b/cocoa/src/toga_cocoa/widgets/multilinetextinput.py\n@@ -38,6 +38,7 @@\n self.text.selectable = True\n self.text.verticallyResizable = True\n self.text.horizontallyResizable = False\n+ self.text.usesAdaptiveColorMappingForDarkAppearance = True\n \n self.text.autoresizingMask = NSViewWidthSizable\n", "issue": "Text within MultilineTextInput remains black in macOS Dark Mode\n### Describe the bug\n\nWhen using macOS in dark mode, the text within MultilineTextInput widgets remains black, making it difficult to read.\n\n### Steps to reproduce\n\n1. Add a MultilineTextInput widget to an app\r\n2. Ensure macOS dark mode is enabled\r\n3. View the app in dev mode or run the app \r\n4. Type text into the MultilineTextInput widget\r\n5. See error\r\n\n\n### Expected behavior\n\nThe color of the text within the MultilineTextInput widget should be white when in dark mode and black in light mode. This works as expected with TextInput widget.\n\n### Screenshots\n\n<img width=\"270\" alt=\"Screenshot 2023-02-16 at 1 44 35 PM\" src=\"https://user-images.githubusercontent.com/42925864/219470705-d344d219-f0c1-46ed-8fd9-085fccfef03f.png\">\r\n\n\n### Environment\n\n- Operating System: macOS 13.1\r\n- Python version: 11\r\n- Software versions:\r\n - Briefcase: 0.3.12\r\n - Toga: 0.3.0\r\n\n\n### Logs\n\n```\r\n\r\n```\r\n\n\n### Additional context\n\n_No response_\n", "before_files": [{"content": "from travertino.size import at_least\n\nfrom toga_cocoa.colors import native_color\nfrom toga_cocoa.libs import (\n NSBezelBorder,\n NSScrollView,\n NSTextView,\n NSViewWidthSizable,\n objc_method,\n)\n\nfrom .base import Widget\n\n\nclass TogaTextView(NSTextView):\n @objc_method\n def touchBar(self):\n # Disable the touchbar.\n return None\n\n\nclass MultilineTextInput(Widget):\n def create(self):\n # Create a multiline view, and put it in a scroll view.\n # The scroll view is the native, because it's the outer container.\n self.native = NSScrollView.alloc().init()\n self.native.hasVerticalScroller = True\n self.native.hasHorizontalScroller = False\n self.native.autohidesScrollers = False\n self.native.borderType = NSBezelBorder\n\n # Disable all autolayout functionality on the outer widget\n self.native.translatesAutoresizingMaskIntoConstraints = False\n\n # Create the actual text widget\n self.text = TogaTextView.alloc().init()\n self.text.editable = True\n self.text.selectable = True\n self.text.verticallyResizable = True\n self.text.horizontallyResizable = False\n\n self.text.autoresizingMask = NSViewWidthSizable\n\n # Put the text view in the scroll window.\n self.native.documentView = self.text\n\n # Add the layout constraints\n self.add_constraints()\n\n def set_placeholder(self, value):\n self.text.placeholderString = self.interface.placeholder\n\n def set_readonly(self, value):\n self.text.editable = not self.interface.readonly\n\n def get_value(self):\n return self.text.string\n\n def set_value(self, value):\n self.text.string = value\n\n def set_color(self, value):\n self.text.textColor = native_color(value)\n\n def set_font(self, font):\n if font:\n self.text.font = font._impl.native\n\n def rehint(self):\n self.interface.intrinsic.width = at_least(self.interface.MIN_WIDTH)\n self.interface.intrinsic.height = at_least(self.interface.MIN_HEIGHT)\n\n def set_on_change(self, handler):\n 
self.interface.factory.not_implemented(\"MultilineTextInput.set_on_change()\")\n\n def scroll_to_bottom(self):\n self.text.scrollToEndOfDocument(None)\n\n def scroll_to_top(self):\n self.text.scrollToBeginningOfDocument(None)\n", "path": "cocoa/src/toga_cocoa/widgets/multilinetextinput.py"}], "after_files": [{"content": "from travertino.size import at_least\n\nfrom toga_cocoa.colors import native_color\nfrom toga_cocoa.libs import (\n NSBezelBorder,\n NSScrollView,\n NSTextView,\n NSViewWidthSizable,\n objc_method,\n)\n\nfrom .base import Widget\n\n\nclass TogaTextView(NSTextView):\n @objc_method\n def touchBar(self):\n # Disable the touchbar.\n return None\n\n\nclass MultilineTextInput(Widget):\n def create(self):\n # Create a multiline view, and put it in a scroll view.\n # The scroll view is the native, because it's the outer container.\n self.native = NSScrollView.alloc().init()\n self.native.hasVerticalScroller = True\n self.native.hasHorizontalScroller = False\n self.native.autohidesScrollers = False\n self.native.borderType = NSBezelBorder\n\n # Disable all autolayout functionality on the outer widget\n self.native.translatesAutoresizingMaskIntoConstraints = False\n\n # Create the actual text widget\n self.text = TogaTextView.alloc().init()\n self.text.editable = True\n self.text.selectable = True\n self.text.verticallyResizable = True\n self.text.horizontallyResizable = False\n self.text.usesAdaptiveColorMappingForDarkAppearance = True\n\n self.text.autoresizingMask = NSViewWidthSizable\n\n # Put the text view in the scroll window.\n self.native.documentView = self.text\n\n # Add the layout constraints\n self.add_constraints()\n\n def set_placeholder(self, value):\n self.text.placeholderString = self.interface.placeholder\n\n def set_readonly(self, value):\n self.text.editable = not self.interface.readonly\n\n def get_value(self):\n return self.text.string\n\n def set_value(self, value):\n self.text.string = value\n\n def set_color(self, value):\n self.text.textColor = native_color(value)\n\n def set_font(self, font):\n if font:\n self.text.font = font._impl.native\n\n def rehint(self):\n self.interface.intrinsic.width = at_least(self.interface.MIN_WIDTH)\n self.interface.intrinsic.height = at_least(self.interface.MIN_HEIGHT)\n\n def set_on_change(self, handler):\n self.interface.factory.not_implemented(\"MultilineTextInput.set_on_change()\")\n\n def scroll_to_bottom(self):\n self.text.scrollToEndOfDocument(None)\n\n def scroll_to_top(self):\n self.text.scrollToBeginningOfDocument(None)\n", "path": "cocoa/src/toga_cocoa/widgets/multilinetextinput.py"}]} | 1,235 | 140 |
gh_patches_debug_37 | rasdani/github-patches | git_diff | nextcloud__appstore-67 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
After clicking confirm button I got a 404
- click the confirm link in the email
- click the button on that page
- you get redirected to https://.../accounts/login/, which is not available, instead of https://.../login/
cc @BernhardPosselt @adsworth
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nextcloudappstore/settings/base.py`
Content:
```
1 """
2 Django settings for nextcloudappstore project.
3
4 Generated by 'django-admin startproject' using Django 1.9.6.
5
6 For more information on this file, see
7 https://docs.djangoproject.com/en/1.9/topics/settings/
8
9 For the full list of settings and their values, see
10 https://docs.djangoproject.com/en/1.9/ref/settings/
11 """
12
13 from os.path import dirname, abspath, join, pardir, realpath
14
15 # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
16 from django.conf.global_settings import LANGUAGES
17
18 BASE_DIR = realpath(join(dirname(dirname(abspath(__file__))), pardir))
19
20 # Quick-start development settings - unsuitable for production
21 # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
22
23 # Application definition
24
25 INSTALLED_APPS = [
26 'nextcloudappstore.core.apps.CoreConfig',
27 'parler',
28 'captcha',
29 'rest_framework',
30 'corsheaders',
31 'allauth',
32 'allauth.account',
33 'allauth.socialaccount',
34 'allauth.socialaccount.providers.github',
35 'allauth.socialaccount.providers.bitbucket',
36 'django.contrib.admin',
37 'django.contrib.auth',
38 'django.contrib.contenttypes',
39 'django.contrib.sessions',
40 'django.contrib.messages',
41 'django.contrib.sites',
42 'django.contrib.staticfiles',
43 ]
44
45 MIDDLEWARE_CLASSES = [
46 'django.middleware.security.SecurityMiddleware',
47 'django.contrib.sessions.middleware.SessionMiddleware',
48 'corsheaders.middleware.CorsMiddleware',
49 'django.middleware.common.CommonMiddleware',
50 'django.middleware.csrf.CsrfViewMiddleware',
51 'django.contrib.auth.middleware.AuthenticationMiddleware',
52 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
53 'django.contrib.messages.middleware.MessageMiddleware',
54 'django.middleware.clickjacking.XFrameOptionsMiddleware',
55 ]
56
57 ROOT_URLCONF = 'nextcloudappstore.urls'
58
59 TEMPLATES = [
60 {
61 'BACKEND': 'django.template.backends.django.DjangoTemplates',
62 'DIRS': [],
63 'APP_DIRS': True,
64 'OPTIONS': {
65 'context_processors': [
66 'django.template.context_processors.debug',
67 'django.template.context_processors.request',
68 'django.contrib.auth.context_processors.auth',
69 'django.contrib.messages.context_processors.messages',
70 ],
71 },
72 },
73 ]
74
75 WSGI_APPLICATION = 'nextcloudappstore.wsgi.application'
76
77 # Database
78 # https://docs.djangoproject.com/en/1.9/ref/settings/#databases
79
80 DATABASES = {
81 'default': {
82 'ENGINE': 'django.db.backends.sqlite3',
83 'NAME': join(BASE_DIR, 'db.sqlite3'),
84 'TEST': {
85 'NAME': join(BASE_DIR, 'test.sqlite3'),
86 }
87 }
88 }
89
90 AUTHENTICATION_BACKENDS = (
91 # Needed to login by username in Django admin, regardless of `allauth`
92 'django.contrib.auth.backends.ModelBackend',
93
94 # `allauth` specific authentication methods, such as login by e-mail
95 'allauth.account.auth_backends.AuthenticationBackend',
96 )
97
98 # Password validation
99 # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
100
101 AUTH_PASSWORD_VALIDATORS = [
102 {
103 'NAME': 'django.contrib.auth.password_validation'
104 '.UserAttributeSimilarityValidator',
105 },
106 {
107 'NAME': 'django.contrib.auth.password_validation'
108 '.MinimumLengthValidator',
109 },
110 {
111 'NAME': 'django.contrib.auth.password_validation'
112 '.CommonPasswordValidator',
113 },
114 {
115 'NAME': 'django.contrib.auth.password_validation'
116 '.NumericPasswordValidator',
117 },
118 ]
119
120 REST_FRAMEWORK = {
121 'DEFAULT_RENDERER_CLASSES': (
122 'djangorestframework_camel_case.render.CamelCaseJSONRenderer',
123 ),
124 'DEFAULT_PARSER_CLASSES': (
125 'djangorestframework_camel_case.parser.CamelCaseJSONParser',
126 ),
127 'DEFAULT_THROTTLE_RATES': {
128 'app_upload': '100/day'
129 }
130 }
131
132 SITE_ID = 1
133
134 # Allauth configuration
135 # http://django-allauth.readthedocs.io/en/latest/configuration.html
136 ACCOUNT_EMAIL_REQUIRED = True
137 ACCOUNT_EMAIL_VERIFICATION = "mandatory"
138 ACCOUNT_LOGOUT_ON_GET = True
139 ACCOUNT_LOGOUT_REDIRECT_URL = 'home'
140 ACCOUNT_SESSION_REMEMBER = True
141 ACCOUNT_SIGNUP_FORM_CLASS = \
142 'nextcloudappstore.core.user.forms.SignupFormRecaptcha'
143
144 # Internationalization
145 # https://docs.djangoproject.com/en/1.9/topics/i18n/
146 LANGUAGE_CODE = 'en-us'
147 TIME_ZONE = 'UTC'
148 USE_I18N = True
149 USE_L10N = True
150 USE_TZ = True
151
152 PARLER_LANGUAGES = {
153 1: [{'code': code} for code, trans in LANGUAGES],
154 'default': {
155 'fallbacks': ['en'],
156 'hide_untranslated': False,
157 }
158 }
159
160 # Static files (CSS, JavaScript, Images)
161 # https://docs.djangoproject.com/en/1.9/howto/static-files/
162 MEDIA_ROOT = join(BASE_DIR, 'media')
163 RELEASE_DOWNLOAD_ROOT = None
164 STATIC_URL = '/static/'
165 MEDIA_URL = '/media/'
166
167 # Default security settings
168 SECURE_BROWSER_XSS_FILTER = True
169 SECURE_CONTENT_TYPE_NOSNIFF = True
170 CORS_ORIGIN_ALLOW_ALL = True
171 CORS_URLS_REGEX = r'^/api/.*$'
172 CORS_ALLOW_HEADERS = (
173 'x-requested-with',
174 'content-type',
175 'accept',
176 'origin',
177 'authorization',
178 'x-csrftoken',
179 'if-none-match',
180 )
181 CORS_EXPOSE_HEADERS = (
182 'etag',
183 'x-content-type-options',
184 'content-type',
185 )
186
187 # use modern no Captcha reCaptcha
188 NOCAPTCHA = True
189
190 LOGIN_REDIRECT_URL = 'home'
191
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nextcloudappstore/settings/base.py b/nextcloudappstore/settings/base.py
--- a/nextcloudappstore/settings/base.py
+++ b/nextcloudappstore/settings/base.py
@@ -188,3 +188,4 @@
NOCAPTCHA = True
LOGIN_REDIRECT_URL = 'home'
+LOGIN_URL = 'account_login'
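Django's `LOGIN_URL` defaults to `/accounts/login/`, and `login_required` redirects (including the one hit right after e-mail confirmation) go through `resolve_url(settings.LOGIN_URL)`, which also accepts URL names. Pointing the setting at allauth's `account_login` name therefore sends users to the routed login page instead of a path that 404s. A hedged regression check, assuming allauth's default URL names are installed:

```python
# Sketch of a test guarding the login redirect target.
from django.conf import settings
from django.test import TestCase
from django.urls import reverse


class LoginUrlSettingTest(TestCase):
    def test_login_url_points_at_allauth_login(self):
        # LOGIN_URL may stay symbolic because resolve_url() accepts URL names.
        self.assertEqual(settings.LOGIN_URL, "account_login")
        # The name must resolve to a real, routed path.
        self.assertTrue(reverse("account_login").endswith("/login/"))
```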
| {"golden_diff": "diff --git a/nextcloudappstore/settings/base.py b/nextcloudappstore/settings/base.py\n--- a/nextcloudappstore/settings/base.py\n+++ b/nextcloudappstore/settings/base.py\n@@ -188,3 +188,4 @@\n NOCAPTCHA = True\n \n LOGIN_REDIRECT_URL = 'home'\n+LOGIN_URL = 'account_login'\n", "issue": "After clicking confirm button I got a 404\n- click the confirm link in the email\n- click the button on that page\n- getting redirected to https://.../accounts/login/ instead of https://.../login/ which is not available\n\ncc @BernhardPosselt @adsworth \n\n", "before_files": [{"content": "\"\"\"\nDjango settings for nextcloudappstore project.\n\nGenerated by 'django-admin startproject' using Django 1.9.6.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.9/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/1.9/ref/settings/\n\"\"\"\n\nfrom os.path import dirname, abspath, join, pardir, realpath\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nfrom django.conf.global_settings import LANGUAGES\n\nBASE_DIR = realpath(join(dirname(dirname(abspath(__file__))), pardir))\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/\n\n# Application definition\n\nINSTALLED_APPS = [\n 'nextcloudappstore.core.apps.CoreConfig',\n 'parler',\n 'captcha',\n 'rest_framework',\n 'corsheaders',\n 'allauth',\n 'allauth.account',\n 'allauth.socialaccount',\n 'allauth.socialaccount.providers.github',\n 'allauth.socialaccount.providers.bitbucket',\n 'django.contrib.admin',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.sites',\n 'django.contrib.staticfiles',\n]\n\nMIDDLEWARE_CLASSES = [\n 'django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'corsheaders.middleware.CorsMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n]\n\nROOT_URLCONF = 'nextcloudappstore.urls'\n\nTEMPLATES = [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [],\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n ],\n },\n },\n]\n\nWSGI_APPLICATION = 'nextcloudappstore.wsgi.application'\n\n# Database\n# https://docs.djangoproject.com/en/1.9/ref/settings/#databases\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.sqlite3',\n 'NAME': join(BASE_DIR, 'db.sqlite3'),\n 'TEST': {\n 'NAME': join(BASE_DIR, 'test.sqlite3'),\n }\n }\n}\n\nAUTHENTICATION_BACKENDS = (\n # Needed to login by username in Django admin, regardless of `allauth`\n 'django.contrib.auth.backends.ModelBackend',\n\n # `allauth` specific authentication methods, such as login by e-mail\n 'allauth.account.auth_backends.AuthenticationBackend',\n)\n\n# Password validation\n# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 
'django.contrib.auth.password_validation'\n '.UserAttributeSimilarityValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation'\n '.MinimumLengthValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation'\n '.CommonPasswordValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation'\n '.NumericPasswordValidator',\n },\n]\n\nREST_FRAMEWORK = {\n 'DEFAULT_RENDERER_CLASSES': (\n 'djangorestframework_camel_case.render.CamelCaseJSONRenderer',\n ),\n 'DEFAULT_PARSER_CLASSES': (\n 'djangorestframework_camel_case.parser.CamelCaseJSONParser',\n ),\n 'DEFAULT_THROTTLE_RATES': {\n 'app_upload': '100/day'\n }\n}\n\nSITE_ID = 1\n\n# Allauth configuration\n# http://django-allauth.readthedocs.io/en/latest/configuration.html\nACCOUNT_EMAIL_REQUIRED = True\nACCOUNT_EMAIL_VERIFICATION = \"mandatory\"\nACCOUNT_LOGOUT_ON_GET = True\nACCOUNT_LOGOUT_REDIRECT_URL = 'home'\nACCOUNT_SESSION_REMEMBER = True\nACCOUNT_SIGNUP_FORM_CLASS = \\\n 'nextcloudappstore.core.user.forms.SignupFormRecaptcha'\n\n# Internationalization\n# https://docs.djangoproject.com/en/1.9/topics/i18n/\nLANGUAGE_CODE = 'en-us'\nTIME_ZONE = 'UTC'\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\n\nPARLER_LANGUAGES = {\n 1: [{'code': code} for code, trans in LANGUAGES],\n 'default': {\n 'fallbacks': ['en'],\n 'hide_untranslated': False,\n }\n}\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/1.9/howto/static-files/\nMEDIA_ROOT = join(BASE_DIR, 'media')\nRELEASE_DOWNLOAD_ROOT = None\nSTATIC_URL = '/static/'\nMEDIA_URL = '/media/'\n\n# Default security settings\nSECURE_BROWSER_XSS_FILTER = True\nSECURE_CONTENT_TYPE_NOSNIFF = True\nCORS_ORIGIN_ALLOW_ALL = True\nCORS_URLS_REGEX = r'^/api/.*$'\nCORS_ALLOW_HEADERS = (\n 'x-requested-with',\n 'content-type',\n 'accept',\n 'origin',\n 'authorization',\n 'x-csrftoken',\n 'if-none-match',\n)\nCORS_EXPOSE_HEADERS = (\n 'etag',\n 'x-content-type-options',\n 'content-type',\n)\n\n# use modern no Captcha reCaptcha\nNOCAPTCHA = True\n\nLOGIN_REDIRECT_URL = 'home'\n", "path": "nextcloudappstore/settings/base.py"}], "after_files": [{"content": "\"\"\"\nDjango settings for nextcloudappstore project.\n\nGenerated by 'django-admin startproject' using Django 1.9.6.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.9/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/1.9/ref/settings/\n\"\"\"\n\nfrom os.path import dirname, abspath, join, pardir, realpath\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nfrom django.conf.global_settings import LANGUAGES\n\nBASE_DIR = realpath(join(dirname(dirname(abspath(__file__))), pardir))\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/\n\n# Application definition\n\nINSTALLED_APPS = [\n 'nextcloudappstore.core.apps.CoreConfig',\n 'parler',\n 'captcha',\n 'rest_framework',\n 'corsheaders',\n 'allauth',\n 'allauth.account',\n 'allauth.socialaccount',\n 'allauth.socialaccount.providers.github',\n 'allauth.socialaccount.providers.bitbucket',\n 'django.contrib.admin',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.sites',\n 'django.contrib.staticfiles',\n]\n\nMIDDLEWARE_CLASSES = [\n 'django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 
'corsheaders.middleware.CorsMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n]\n\nROOT_URLCONF = 'nextcloudappstore.urls'\n\nTEMPLATES = [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [],\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n ],\n },\n },\n]\n\nWSGI_APPLICATION = 'nextcloudappstore.wsgi.application'\n\n# Database\n# https://docs.djangoproject.com/en/1.9/ref/settings/#databases\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.sqlite3',\n 'NAME': join(BASE_DIR, 'db.sqlite3'),\n 'TEST': {\n 'NAME': join(BASE_DIR, 'test.sqlite3'),\n }\n }\n}\n\nAUTHENTICATION_BACKENDS = (\n # Needed to login by username in Django admin, regardless of `allauth`\n 'django.contrib.auth.backends.ModelBackend',\n\n # `allauth` specific authentication methods, such as login by e-mail\n 'allauth.account.auth_backends.AuthenticationBackend',\n)\n\n# Password validation\n# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation'\n '.UserAttributeSimilarityValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation'\n '.MinimumLengthValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation'\n '.CommonPasswordValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation'\n '.NumericPasswordValidator',\n },\n]\n\nREST_FRAMEWORK = {\n 'DEFAULT_RENDERER_CLASSES': (\n 'djangorestframework_camel_case.render.CamelCaseJSONRenderer',\n ),\n 'DEFAULT_PARSER_CLASSES': (\n 'djangorestframework_camel_case.parser.CamelCaseJSONParser',\n ),\n 'DEFAULT_THROTTLE_RATES': {\n 'app_upload': '100/day'\n }\n}\n\nSITE_ID = 1\n\n# Allauth configuration\n# http://django-allauth.readthedocs.io/en/latest/configuration.html\nACCOUNT_EMAIL_REQUIRED = True\nACCOUNT_EMAIL_VERIFICATION = \"mandatory\"\nACCOUNT_LOGOUT_ON_GET = True\nACCOUNT_LOGOUT_REDIRECT_URL = 'home'\nACCOUNT_SESSION_REMEMBER = True\nACCOUNT_SIGNUP_FORM_CLASS = \\\n 'nextcloudappstore.core.user.forms.SignupFormRecaptcha'\n\n# Internationalization\n# https://docs.djangoproject.com/en/1.9/topics/i18n/\nLANGUAGE_CODE = 'en-us'\nTIME_ZONE = 'UTC'\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\n\nPARLER_LANGUAGES = {\n 1: [{'code': code} for code, trans in LANGUAGES],\n 'default': {\n 'fallbacks': ['en'],\n 'hide_untranslated': False,\n }\n}\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/1.9/howto/static-files/\nMEDIA_ROOT = join(BASE_DIR, 'media')\nRELEASE_DOWNLOAD_ROOT = None\nSTATIC_URL = '/static/'\nMEDIA_URL = '/media/'\n\n# Default security settings\nSECURE_BROWSER_XSS_FILTER = True\nSECURE_CONTENT_TYPE_NOSNIFF = True\nCORS_ORIGIN_ALLOW_ALL = True\nCORS_URLS_REGEX = r'^/api/.*$'\nCORS_ALLOW_HEADERS = (\n 'x-requested-with',\n 'content-type',\n 'accept',\n 'origin',\n 'authorization',\n 'x-csrftoken',\n 'if-none-match',\n)\nCORS_EXPOSE_HEADERS = (\n 'etag',\n 'x-content-type-options',\n 'content-type',\n)\n\n# use modern no Captcha 
reCaptcha\nNOCAPTCHA = True\n\nLOGIN_REDIRECT_URL = 'home'\nLOGIN_URL = 'account_login'\n", "path": "nextcloudappstore/settings/base.py"}]} | 2,020 | 79 |
gh_patches_debug_5843 | rasdani/github-patches | git_diff | svthalia__concrexit-2277 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
True personal Agenda ICal Feed
### Problem
I believe it is annoying that when exporting the Personal iCal Feed to the agenda, you get the events you're registered for + the open events + the events with optional registration. In practice this is very annoying, as you don't want all these open events you might not go to in your agenda.
### Solution
That is why I suggest:
- Creating a 3rd button "iCal feed (personal)", which exports an iCal feed only containing the events you actually registered for.
- Renaming the current "iCal feed personal" to "iCal feed personal + open events"
### Motivation
A better user experience
### Describe alternatives you've considered
If this is not possible, I would consider adding an "add to agenda" button to the event pages so you can add events to your agenda individually.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/events/feeds.py`
Content:
```
1 """The feeds defined by the events package."""
2 from django.conf import settings
3 from django.db.models.query_utils import Q
4 from django.urls import reverse
5 from django.utils.translation import activate
6 from django.utils.translation import gettext as _
7 from django_ical.views import ICalFeed
8
9 from events.models import Event, FeedToken
10
11
12 class EventFeed(ICalFeed):
13 """Output an iCal feed containing all published events."""
14
15 def __init__(self, lang="en"):
16 super().__init__()
17 self.lang = lang
18 self.user = None
19
20 def __call__(self, request, *args, **kwargs):
21 if "u" in request.GET:
22 self.user = FeedToken.get_member(request.GET["u"])
23 else:
24 self.user = None
25
26 return super().__call__(request, args, kwargs)
27
28 def product_id(self):
29 return f"-//{settings.SITE_DOMAIN}//EventCalendar//{self.lang.upper()}"
30
31 def file_name(self):
32 return f"thalia_{self.lang}.ics"
33
34 def title(self):
35 activate(self.lang)
36 return _("Study Association Thalia event calendar")
37
38 def items(self):
39 query = Q(published=True)
40
41 if self.user:
42 query &= Q(registration_start__isnull=True) | (
43 Q(eventregistration__member=self.user)
44 & Q(eventregistration__date_cancelled=None)
45 )
46
47 return Event.objects.filter(query).order_by("-start")
48
49 def item_title(self, item):
50 return item.title
51
52 def item_description(self, item):
53 return f'{item.description} <a href="' f'{self.item_link(item)}">Website</a>'
54
55 def item_start_datetime(self, item):
56 return item.start
57
58 def item_end_datetime(self, item):
59 return item.end
60
61 def item_link(self, item):
62 return settings.BASE_URL + reverse("events:event", kwargs={"pk": item.id})
63
64 def item_location(self, item):
65 return f"{item.location} - {item.map_location}"
66
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/website/events/feeds.py b/website/events/feeds.py
--- a/website/events/feeds.py
+++ b/website/events/feeds.py
@@ -39,9 +39,8 @@
query = Q(published=True)
if self.user:
- query &= Q(registration_start__isnull=True) | (
- Q(eventregistration__member=self.user)
- & Q(eventregistration__date_cancelled=None)
+ query &= Q(eventregistration__member=self.user) & Q(
+ eventregistration__date_cancelled=None
)
return Event.objects.filter(query).order_by("-start")
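With the narrowed filter, an authenticated feed now only lists events for which the member holds an uncancelled registration; open events the member never registered for no longer appear. Read on its own, the personal branch of `items()` boils down to the following standalone queryset (the `member` object is assumed to be a `FeedToken` lookup result, as in the view):

```python
# Equivalent standalone query for the personal iCal feed after the change:
# published events with an uncancelled registration for this member.
from django.db.models import Q

from events.models import Event


def personal_feed_events(member):
    return Event.objects.filter(
        Q(published=True)
        & Q(eventregistration__member=member)
        & Q(eventregistration__date_cancelled=None)
    ).order_by("-start")
```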
| {"golden_diff": "diff --git a/website/events/feeds.py b/website/events/feeds.py\n--- a/website/events/feeds.py\n+++ b/website/events/feeds.py\n@@ -39,9 +39,8 @@\n query = Q(published=True)\n \n if self.user:\n- query &= Q(registration_start__isnull=True) | (\n- Q(eventregistration__member=self.user)\n- & Q(eventregistration__date_cancelled=None)\n+ query &= Q(eventregistration__member=self.user) & Q(\n+ eventregistration__date_cancelled=None\n )\n \n return Event.objects.filter(query).order_by(\"-start\")\n", "issue": "True personal Agenda ICal Feed\n### Problem\r\nI believe it is annoying that when exporting the Personal iCal Feed to the agenda, you get the events your registered for + the open events + the events with optional registration. In practice this is very annoying as you don't want all these open events you might not go to in your agenda.\r\n\r\n### Solution\r\nThat is why I suggest:\r\n- Creating a 3rd button \"iCal feed (personal)\", which exports an iCal feed only containing the events you actually registered for.\r\n- Renaming the current \"iCal feed personal\" to \"iCal feed personal + open events\"\r\n\r\n### Motivation\r\nA better user experience \r\n\r\n### Describe alternatives you've considered\r\nIf this is not possible, I would consider adding an \"add to agenda\" button to the event pages so you can add events to your agenda individually.\r\n\n", "before_files": [{"content": "\"\"\"The feeds defined by the events package.\"\"\"\nfrom django.conf import settings\nfrom django.db.models.query_utils import Q\nfrom django.urls import reverse\nfrom django.utils.translation import activate\nfrom django.utils.translation import gettext as _\nfrom django_ical.views import ICalFeed\n\nfrom events.models import Event, FeedToken\n\n\nclass EventFeed(ICalFeed):\n \"\"\"Output an iCal feed containing all published events.\"\"\"\n\n def __init__(self, lang=\"en\"):\n super().__init__()\n self.lang = lang\n self.user = None\n\n def __call__(self, request, *args, **kwargs):\n if \"u\" in request.GET:\n self.user = FeedToken.get_member(request.GET[\"u\"])\n else:\n self.user = None\n\n return super().__call__(request, args, kwargs)\n\n def product_id(self):\n return f\"-//{settings.SITE_DOMAIN}//EventCalendar//{self.lang.upper()}\"\n\n def file_name(self):\n return f\"thalia_{self.lang}.ics\"\n\n def title(self):\n activate(self.lang)\n return _(\"Study Association Thalia event calendar\")\n\n def items(self):\n query = Q(published=True)\n\n if self.user:\n query &= Q(registration_start__isnull=True) | (\n Q(eventregistration__member=self.user)\n & Q(eventregistration__date_cancelled=None)\n )\n\n return Event.objects.filter(query).order_by(\"-start\")\n\n def item_title(self, item):\n return item.title\n\n def item_description(self, item):\n return f'{item.description} <a href=\"' f'{self.item_link(item)}\">Website</a>'\n\n def item_start_datetime(self, item):\n return item.start\n\n def item_end_datetime(self, item):\n return item.end\n\n def item_link(self, item):\n return settings.BASE_URL + reverse(\"events:event\", kwargs={\"pk\": item.id})\n\n def item_location(self, item):\n return f\"{item.location} - {item.map_location}\"\n", "path": "website/events/feeds.py"}], "after_files": [{"content": "\"\"\"The feeds defined by the events package.\"\"\"\nfrom django.conf import settings\nfrom django.db.models.query_utils import Q\nfrom django.urls import reverse\nfrom django.utils.translation import activate\nfrom django.utils.translation import gettext as _\nfrom django_ical.views 
import ICalFeed\n\nfrom events.models import Event, FeedToken\n\n\nclass EventFeed(ICalFeed):\n \"\"\"Output an iCal feed containing all published events.\"\"\"\n\n def __init__(self, lang=\"en\"):\n super().__init__()\n self.lang = lang\n self.user = None\n\n def __call__(self, request, *args, **kwargs):\n if \"u\" in request.GET:\n self.user = FeedToken.get_member(request.GET[\"u\"])\n else:\n self.user = None\n\n return super().__call__(request, args, kwargs)\n\n def product_id(self):\n return f\"-//{settings.SITE_DOMAIN}//EventCalendar//{self.lang.upper()}\"\n\n def file_name(self):\n return f\"thalia_{self.lang}.ics\"\n\n def title(self):\n activate(self.lang)\n return _(\"Study Association Thalia event calendar\")\n\n def items(self):\n query = Q(published=True)\n\n if self.user:\n query &= Q(eventregistration__member=self.user) & Q(\n eventregistration__date_cancelled=None\n )\n\n return Event.objects.filter(query).order_by(\"-start\")\n\n def item_title(self, item):\n return item.title\n\n def item_description(self, item):\n return f'{item.description} <a href=\"' f'{self.item_link(item)}\">Website</a>'\n\n def item_start_datetime(self, item):\n return item.start\n\n def item_end_datetime(self, item):\n return item.end\n\n def item_link(self, item):\n return settings.BASE_URL + reverse(\"events:event\", kwargs={\"pk\": item.id})\n\n def item_location(self, item):\n return f\"{item.location} - {item.map_location}\"\n", "path": "website/events/feeds.py"}]} | 996 | 139 |
gh_patches_debug_1549 | rasdani/github-patches | git_diff | scoutapp__scout_apm_python-679 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Disconnect receiver from celery task_failure signal
Celery instrumentation's `uninstall` should call `task_failure.disconnect(task_failure_callback)`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/scout_apm/celery.py`
Content:
```
1 # coding=utf-8
2 from __future__ import absolute_import, division, print_function, unicode_literals
3
4 import datetime as dt
5 import logging
6
7 from celery.signals import before_task_publish, task_failure, task_postrun, task_prerun
8
9 try:
10 import django
11
12 if django.VERSION < (3, 1):
13 from django.views.debug import get_safe_settings
14 else:
15 from django.views.debug import SafeExceptionReporterFilter
16
17 def get_safe_settings():
18 return SafeExceptionReporterFilter().get_safe_settings()
19
20
21 except ImportError:
22 # Django not installed
23 get_safe_settings = None
24
25 import scout_apm.core
26 from scout_apm.compat import datetime_to_timestamp
27 from scout_apm.core.config import scout_config
28 from scout_apm.core.error import ErrorMonitor
29 from scout_apm.core.tracked_request import TrackedRequest
30
31 logger = logging.getLogger(__name__)
32
33
34 def before_task_publish_callback(headers=None, properties=None, **kwargs):
35 if "scout_task_start" not in headers:
36 headers["scout_task_start"] = datetime_to_timestamp(dt.datetime.utcnow())
37
38
39 def task_prerun_callback(task=None, **kwargs):
40 tracked_request = TrackedRequest.instance()
41 tracked_request.is_real_request = True
42
43 start = getattr(task.request, "scout_task_start", None)
44 if start is not None:
45 now = datetime_to_timestamp(dt.datetime.utcnow())
46 try:
47 queue_time = now - start
48 except TypeError:
49 pass
50 else:
51 tracked_request.tag("queue_time", queue_time)
52
53 task_id = getattr(task.request, "id", None)
54 if task_id:
55 tracked_request.tag("task_id", task_id)
56 parent_task_id = getattr(task.request, "parent_id", None)
57 if parent_task_id:
58 tracked_request.tag("parent_task_id", parent_task_id)
59
60 delivery_info = task.request.delivery_info
61 tracked_request.tag("is_eager", delivery_info.get("is_eager", False))
62 tracked_request.tag("exchange", delivery_info.get("exchange", "unknown"))
63 tracked_request.tag("priority", delivery_info.get("priority", "unknown"))
64 tracked_request.tag("routing_key", delivery_info.get("routing_key", "unknown"))
65 tracked_request.tag("queue", delivery_info.get("queue", "unknown"))
66
67 tracked_request.start_span(operation=("Job/" + task.name))
68
69
70 def task_postrun_callback(task=None, **kwargs):
71 tracked_request = TrackedRequest.instance()
72 tracked_request.stop_span()
73
74
75 def task_failure_callback(
76 sender,
77 task_id=None,
78 exception=None,
79 args=None,
80 kwargs=None,
81 traceback=None,
82 **remaining
83 ):
84 tracked_request = TrackedRequest.instance()
85 tracked_request.tag("error", "true")
86
87 custom_controller = sender.name
88 custom_params = {
89 "celery": {
90 "task_id": task_id,
91 "args": args,
92 "kwargs": kwargs,
93 }
94 }
95
96 # Look up the django settings if populated.
97 environment = None
98 if get_safe_settings:
99 try:
100 environment = get_safe_settings()
101 except django.core.exceptions.ImproperlyConfigured as exc:
102 # Django not setup correctly
103 logger.debug(
104 "Celery integration does not have django configured properly: %r", exc
105 )
106 pass
107 except Exception as exc:
108 logger.debug(
109 "Celery task_failure callback exception: %r", exc, exc_info=exc
110 )
111 pass
112
113 exc_info = (exception.__class__, exception, traceback)
114 ErrorMonitor.send(
115 exc_info,
116 environment=environment,
117 custom_params=custom_params,
118 custom_controller=custom_controller,
119 )
120
121
122 def install(app=None):
123 if app is not None:
124 copy_configuration(app)
125
126 installed = scout_apm.core.install()
127 if not installed:
128 return
129
130 before_task_publish.connect(before_task_publish_callback)
131 task_prerun.connect(task_prerun_callback)
132 task_failure.connect(task_failure_callback)
133 task_postrun.connect(task_postrun_callback)
134
135
136 def copy_configuration(app):
137 prefix = "scout_"
138 prefix_len = len(prefix)
139
140 to_set = {}
141 for key, value in app.conf.items():
142 key_lower = key.lower()
143 if key_lower.startswith(prefix) and len(key_lower) > prefix_len:
144 scout_key = key_lower[prefix_len:]
145 to_set[scout_key] = value
146
147 scout_config.set(**to_set)
148
149
150 def uninstall():
151 before_task_publish.disconnect(before_task_publish_callback)
152 task_prerun.disconnect(task_prerun_callback)
153 task_postrun.disconnect(task_postrun_callback)
154
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/scout_apm/celery.py b/src/scout_apm/celery.py
--- a/src/scout_apm/celery.py
+++ b/src/scout_apm/celery.py
@@ -151,3 +151,4 @@
before_task_publish.disconnect(before_task_publish_callback)
task_prerun.disconnect(task_prerun_callback)
task_postrun.disconnect(task_postrun_callback)
+ task_failure.disconnect(task_failure_callback)
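The extra line restores the symmetry between `install()` and `uninstall()`: every signal connected on install is disconnected again, so repeated install/uninstall cycles (for example in a test suite) no longer leave a stale `task_failure` receiver behind. A rough smoke test; it peeks at the signals' internal `receivers` list, which is an implementation detail and may also contain receivers registered by other libraries:

```python
# Sketch: assert that uninstall() leaves none of the four signals connected.
from celery.signals import (
    before_task_publish,
    task_failure,
    task_postrun,
    task_prerun,
)

import scout_apm.celery as scout_celery


def test_uninstall_disconnects_all_signals():
    scout_celery.install()
    scout_celery.uninstall()
    for signal in (before_task_publish, task_prerun, task_postrun, task_failure):
        # Treat this as a smoke test: .receivers is not a public contract.
        assert not signal.receivers
```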
| {"golden_diff": "diff --git a/src/scout_apm/celery.py b/src/scout_apm/celery.py\n--- a/src/scout_apm/celery.py\n+++ b/src/scout_apm/celery.py\n@@ -151,3 +151,4 @@\n before_task_publish.disconnect(before_task_publish_callback)\n task_prerun.disconnect(task_prerun_callback)\n task_postrun.disconnect(task_postrun_callback)\n+ task_failure.disconnect(task_failure_callback)\n", "issue": "Disconnect receiver from celery task_failure signal\nCelery instrumentation's `uninstall` should call `task_failure.disconnect(task_failure_callback)`.\n", "before_files": [{"content": "# coding=utf-8\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport datetime as dt\nimport logging\n\nfrom celery.signals import before_task_publish, task_failure, task_postrun, task_prerun\n\ntry:\n import django\n\n if django.VERSION < (3, 1):\n from django.views.debug import get_safe_settings\n else:\n from django.views.debug import SafeExceptionReporterFilter\n\n def get_safe_settings():\n return SafeExceptionReporterFilter().get_safe_settings()\n\n\nexcept ImportError:\n # Django not installed\n get_safe_settings = None\n\nimport scout_apm.core\nfrom scout_apm.compat import datetime_to_timestamp\nfrom scout_apm.core.config import scout_config\nfrom scout_apm.core.error import ErrorMonitor\nfrom scout_apm.core.tracked_request import TrackedRequest\n\nlogger = logging.getLogger(__name__)\n\n\ndef before_task_publish_callback(headers=None, properties=None, **kwargs):\n if \"scout_task_start\" not in headers:\n headers[\"scout_task_start\"] = datetime_to_timestamp(dt.datetime.utcnow())\n\n\ndef task_prerun_callback(task=None, **kwargs):\n tracked_request = TrackedRequest.instance()\n tracked_request.is_real_request = True\n\n start = getattr(task.request, \"scout_task_start\", None)\n if start is not None:\n now = datetime_to_timestamp(dt.datetime.utcnow())\n try:\n queue_time = now - start\n except TypeError:\n pass\n else:\n tracked_request.tag(\"queue_time\", queue_time)\n\n task_id = getattr(task.request, \"id\", None)\n if task_id:\n tracked_request.tag(\"task_id\", task_id)\n parent_task_id = getattr(task.request, \"parent_id\", None)\n if parent_task_id:\n tracked_request.tag(\"parent_task_id\", parent_task_id)\n\n delivery_info = task.request.delivery_info\n tracked_request.tag(\"is_eager\", delivery_info.get(\"is_eager\", False))\n tracked_request.tag(\"exchange\", delivery_info.get(\"exchange\", \"unknown\"))\n tracked_request.tag(\"priority\", delivery_info.get(\"priority\", \"unknown\"))\n tracked_request.tag(\"routing_key\", delivery_info.get(\"routing_key\", \"unknown\"))\n tracked_request.tag(\"queue\", delivery_info.get(\"queue\", \"unknown\"))\n\n tracked_request.start_span(operation=(\"Job/\" + task.name))\n\n\ndef task_postrun_callback(task=None, **kwargs):\n tracked_request = TrackedRequest.instance()\n tracked_request.stop_span()\n\n\ndef task_failure_callback(\n sender,\n task_id=None,\n exception=None,\n args=None,\n kwargs=None,\n traceback=None,\n **remaining\n):\n tracked_request = TrackedRequest.instance()\n tracked_request.tag(\"error\", \"true\")\n\n custom_controller = sender.name\n custom_params = {\n \"celery\": {\n \"task_id\": task_id,\n \"args\": args,\n \"kwargs\": kwargs,\n }\n }\n\n # Look up the django settings if populated.\n environment = None\n if get_safe_settings:\n try:\n environment = get_safe_settings()\n except django.core.exceptions.ImproperlyConfigured as exc:\n # Django not setup correctly\n logger.debug(\n \"Celery integration does 
not have django configured properly: %r\", exc\n )\n pass\n except Exception as exc:\n logger.debug(\n \"Celery task_failure callback exception: %r\", exc, exc_info=exc\n )\n pass\n\n exc_info = (exception.__class__, exception, traceback)\n ErrorMonitor.send(\n exc_info,\n environment=environment,\n custom_params=custom_params,\n custom_controller=custom_controller,\n )\n\n\ndef install(app=None):\n if app is not None:\n copy_configuration(app)\n\n installed = scout_apm.core.install()\n if not installed:\n return\n\n before_task_publish.connect(before_task_publish_callback)\n task_prerun.connect(task_prerun_callback)\n task_failure.connect(task_failure_callback)\n task_postrun.connect(task_postrun_callback)\n\n\ndef copy_configuration(app):\n prefix = \"scout_\"\n prefix_len = len(prefix)\n\n to_set = {}\n for key, value in app.conf.items():\n key_lower = key.lower()\n if key_lower.startswith(prefix) and len(key_lower) > prefix_len:\n scout_key = key_lower[prefix_len:]\n to_set[scout_key] = value\n\n scout_config.set(**to_set)\n\n\ndef uninstall():\n before_task_publish.disconnect(before_task_publish_callback)\n task_prerun.disconnect(task_prerun_callback)\n task_postrun.disconnect(task_postrun_callback)\n", "path": "src/scout_apm/celery.py"}], "after_files": [{"content": "# coding=utf-8\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport datetime as dt\nimport logging\n\nfrom celery.signals import before_task_publish, task_failure, task_postrun, task_prerun\n\ntry:\n import django\n\n if django.VERSION < (3, 1):\n from django.views.debug import get_safe_settings\n else:\n from django.views.debug import SafeExceptionReporterFilter\n\n def get_safe_settings():\n return SafeExceptionReporterFilter().get_safe_settings()\n\n\nexcept ImportError:\n # Django not installed\n get_safe_settings = None\n\nimport scout_apm.core\nfrom scout_apm.compat import datetime_to_timestamp\nfrom scout_apm.core.config import scout_config\nfrom scout_apm.core.error import ErrorMonitor\nfrom scout_apm.core.tracked_request import TrackedRequest\n\nlogger = logging.getLogger(__name__)\n\n\ndef before_task_publish_callback(headers=None, properties=None, **kwargs):\n if \"scout_task_start\" not in headers:\n headers[\"scout_task_start\"] = datetime_to_timestamp(dt.datetime.utcnow())\n\n\ndef task_prerun_callback(task=None, **kwargs):\n tracked_request = TrackedRequest.instance()\n tracked_request.is_real_request = True\n\n start = getattr(task.request, \"scout_task_start\", None)\n if start is not None:\n now = datetime_to_timestamp(dt.datetime.utcnow())\n try:\n queue_time = now - start\n except TypeError:\n pass\n else:\n tracked_request.tag(\"queue_time\", queue_time)\n\n task_id = getattr(task.request, \"id\", None)\n if task_id:\n tracked_request.tag(\"task_id\", task_id)\n parent_task_id = getattr(task.request, \"parent_id\", None)\n if parent_task_id:\n tracked_request.tag(\"parent_task_id\", parent_task_id)\n\n delivery_info = task.request.delivery_info\n tracked_request.tag(\"is_eager\", delivery_info.get(\"is_eager\", False))\n tracked_request.tag(\"exchange\", delivery_info.get(\"exchange\", \"unknown\"))\n tracked_request.tag(\"priority\", delivery_info.get(\"priority\", \"unknown\"))\n tracked_request.tag(\"routing_key\", delivery_info.get(\"routing_key\", \"unknown\"))\n tracked_request.tag(\"queue\", delivery_info.get(\"queue\", \"unknown\"))\n\n tracked_request.start_span(operation=(\"Job/\" + task.name))\n\n\ndef task_postrun_callback(task=None, **kwargs):\n 
tracked_request = TrackedRequest.instance()\n tracked_request.stop_span()\n\n\ndef task_failure_callback(\n sender,\n task_id=None,\n exception=None,\n args=None,\n kwargs=None,\n traceback=None,\n **remaining\n):\n tracked_request = TrackedRequest.instance()\n tracked_request.tag(\"error\", \"true\")\n\n custom_controller = sender.name\n custom_params = {\n \"celery\": {\n \"task_id\": task_id,\n \"args\": args,\n \"kwargs\": kwargs,\n }\n }\n\n # Look up the django settings if populated.\n environment = None\n if get_safe_settings:\n try:\n environment = get_safe_settings()\n except django.core.exceptions.ImproperlyConfigured as exc:\n # Django not setup correctly\n logger.debug(\n \"Celery integration does not have django configured properly: %r\", exc\n )\n pass\n except Exception as exc:\n logger.debug(\n \"Celery task_failure callback exception: %r\", exc, exc_info=exc\n )\n pass\n\n exc_info = (exception.__class__, exception, traceback)\n ErrorMonitor.send(\n exc_info,\n environment=environment,\n custom_params=custom_params,\n custom_controller=custom_controller,\n )\n\n\ndef install(app=None):\n if app is not None:\n copy_configuration(app)\n\n installed = scout_apm.core.install()\n if not installed:\n return\n\n before_task_publish.connect(before_task_publish_callback)\n task_prerun.connect(task_prerun_callback)\n task_failure.connect(task_failure_callback)\n task_postrun.connect(task_postrun_callback)\n\n\ndef copy_configuration(app):\n prefix = \"scout_\"\n prefix_len = len(prefix)\n\n to_set = {}\n for key, value in app.conf.items():\n key_lower = key.lower()\n if key_lower.startswith(prefix) and len(key_lower) > prefix_len:\n scout_key = key_lower[prefix_len:]\n to_set[scout_key] = value\n\n scout_config.set(**to_set)\n\n\ndef uninstall():\n before_task_publish.disconnect(before_task_publish_callback)\n task_prerun.disconnect(task_prerun_callback)\n task_postrun.disconnect(task_postrun_callback)\n task_failure.disconnect(task_failure_callback)\n", "path": "src/scout_apm/celery.py"}]} | 1,641 | 105 |
gh_patches_debug_35086 | rasdani/github-patches | git_diff | comic__grand-challenge.org-1932 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Reduce database calls for `update_challenge_results_cache()`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app/grandchallenge/challenges/tasks.py`
Content:
```
1 from celery import shared_task
2 from django.core.mail import mail_managers
3 from requests import exceptions, get
4
5 from grandchallenge.challenges.models import Challenge, ExternalChallenge
6 from grandchallenge.evaluation.models import Evaluation
7 from grandchallenge.subdomains.utils import reverse
8
9
10 @shared_task
11 def update_challenge_results_cache():
12 for c in Challenge.objects.all():
13 kwargs = {
14 "cached_num_participants": c.participants_group.user_set.all().count()
15 }
16
17 challenge_results = Evaluation.objects.filter(
18 submission__phase__challenge=c, published=True
19 ).order_by("-created")
20
21 try:
22 kwargs.update(
23 {
24 "cached_num_results": challenge_results.count(),
25 "cached_latest_result": challenge_results.first().created,
26 }
27 )
28 except AttributeError:
29 # No results for this challenge
30 kwargs.update(
31 {"cached_num_results": 0, "cached_latest_result": None}
32 )
33
34 Challenge.objects.filter(pk=c.pk).update(**kwargs)
35
36
37 @shared_task
38 def check_external_challenge_urls():
39 """
40 Checks that all external challenge urls are reachable.
41
42 Emails the managers if any of the challenges are not.
43 """
44 challenges = ExternalChallenge.objects.filter(hidden=False)
45 errors = []
46
47 for challenge in challenges:
48 try:
49 url = challenge.homepage
50 if not url.startswith("http"):
51 url = "http://" + url
52 r = get(url, timeout=60)
53 # raise an exception when we receive a http error (e.g., 404)
54 r.raise_for_status()
55 except exceptions.RequestException as err:
56 update_url = reverse(
57 "challenges:external-update",
58 kwargs={"short_name": challenge.short_name},
59 )
60 errors.append(
61 f"Error when trying to access '{challenge}': {err}. You can "
62 f"update it here: {update_url}"
63 )
64
65 if errors:
66 mail_managers(
67 subject=f"Unreachable external challenges ({len(errors)})",
68 message="\n\n".join(errors),
69 )
70
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/app/grandchallenge/challenges/tasks.py b/app/grandchallenge/challenges/tasks.py
--- a/app/grandchallenge/challenges/tasks.py
+++ b/app/grandchallenge/challenges/tasks.py
@@ -1,5 +1,7 @@
from celery import shared_task
+from django.contrib.auth import get_user_model
from django.core.mail import mail_managers
+from django.db.models import Count, Max
from requests import exceptions, get
from grandchallenge.challenges.models import Challenge, ExternalChallenge
@@ -9,29 +11,47 @@
@shared_task
def update_challenge_results_cache():
- for c in Challenge.objects.all():
- kwargs = {
- "cached_num_participants": c.participants_group.user_set.all().count()
- }
-
- challenge_results = Evaluation.objects.filter(
- submission__phase__challenge=c, published=True
- ).order_by("-created")
+ challenges = Challenge.objects.all()
+ evaluation_info = (
+ Evaluation.objects.filter(published=True)
+ .values("submission__phase__challenge_id")
+ .annotate(
+ cached_num_results=Count("submission__phase__challenge_id"),
+ cached_latest_result=Max("created"),
+ )
+ )
+ evaluation_info_by_challenge = {
+ str(v["submission__phase__challenge_id"]): v for v in evaluation_info
+ }
+ participant_counts = (
+ get_user_model()
+ .objects.values("groups__participants_of_challenge")
+ .annotate(cached_num_participants=Count("pk"))
+ )
+ participant_counts_by_challenge = {
+ str(v["groups__participants_of_challenge"]): v
+ for v in participant_counts
+ }
- try:
- kwargs.update(
- {
- "cached_num_results": challenge_results.count(),
- "cached_latest_result": challenge_results.first().created,
- }
- )
- except AttributeError:
- # No results for this challenge
- kwargs.update(
- {"cached_num_results": 0, "cached_latest_result": None}
- )
+ for c in challenges:
+ c.cached_num_results = evaluation_info_by_challenge.get(
+ str(c.pk), {}
+ ).get("cached_num_results", 0)
+ c.cached_latest_result = evaluation_info_by_challenge.get(
+ str(c.pk), {}
+ ).get("cached_latest_result", None)
+ c.cached_num_participants = participant_counts_by_challenge.get(
+ str(c.pk), {}
+ ).get("cached_num_participants", 0)
- Challenge.objects.filter(pk=c.pk).update(**kwargs)
+ Challenge.objects.bulk_update(
+ challenges,
+ [
+ "cached_num_results",
+ "cached_num_participants",
+ "cached_latest_result",
+ ],
+ )
@shared_task
| {"golden_diff": "diff --git a/app/grandchallenge/challenges/tasks.py b/app/grandchallenge/challenges/tasks.py\n--- a/app/grandchallenge/challenges/tasks.py\n+++ b/app/grandchallenge/challenges/tasks.py\n@@ -1,5 +1,7 @@\n from celery import shared_task\n+from django.contrib.auth import get_user_model\n from django.core.mail import mail_managers\n+from django.db.models import Count, Max\n from requests import exceptions, get\n \n from grandchallenge.challenges.models import Challenge, ExternalChallenge\n@@ -9,29 +11,47 @@\n \n @shared_task\n def update_challenge_results_cache():\n- for c in Challenge.objects.all():\n- kwargs = {\n- \"cached_num_participants\": c.participants_group.user_set.all().count()\n- }\n-\n- challenge_results = Evaluation.objects.filter(\n- submission__phase__challenge=c, published=True\n- ).order_by(\"-created\")\n+ challenges = Challenge.objects.all()\n+ evaluation_info = (\n+ Evaluation.objects.filter(published=True)\n+ .values(\"submission__phase__challenge_id\")\n+ .annotate(\n+ cached_num_results=Count(\"submission__phase__challenge_id\"),\n+ cached_latest_result=Max(\"created\"),\n+ )\n+ )\n+ evaluation_info_by_challenge = {\n+ str(v[\"submission__phase__challenge_id\"]): v for v in evaluation_info\n+ }\n+ participant_counts = (\n+ get_user_model()\n+ .objects.values(\"groups__participants_of_challenge\")\n+ .annotate(cached_num_participants=Count(\"pk\"))\n+ )\n+ participant_counts_by_challenge = {\n+ str(v[\"groups__participants_of_challenge\"]): v\n+ for v in participant_counts\n+ }\n \n- try:\n- kwargs.update(\n- {\n- \"cached_num_results\": challenge_results.count(),\n- \"cached_latest_result\": challenge_results.first().created,\n- }\n- )\n- except AttributeError:\n- # No results for this challenge\n- kwargs.update(\n- {\"cached_num_results\": 0, \"cached_latest_result\": None}\n- )\n+ for c in challenges:\n+ c.cached_num_results = evaluation_info_by_challenge.get(\n+ str(c.pk), {}\n+ ).get(\"cached_num_results\", 0)\n+ c.cached_latest_result = evaluation_info_by_challenge.get(\n+ str(c.pk), {}\n+ ).get(\"cached_latest_result\", None)\n+ c.cached_num_participants = participant_counts_by_challenge.get(\n+ str(c.pk), {}\n+ ).get(\"cached_num_participants\", 0)\n \n- Challenge.objects.filter(pk=c.pk).update(**kwargs)\n+ Challenge.objects.bulk_update(\n+ challenges,\n+ [\n+ \"cached_num_results\",\n+ \"cached_num_participants\",\n+ \"cached_latest_result\",\n+ ],\n+ )\n \n \n @shared_task\n", "issue": "Reduce database calls for `update_challenge_results_cache()`\n\n", "before_files": [{"content": "from celery import shared_task\nfrom django.core.mail import mail_managers\nfrom requests import exceptions, get\n\nfrom grandchallenge.challenges.models import Challenge, ExternalChallenge\nfrom grandchallenge.evaluation.models import Evaluation\nfrom grandchallenge.subdomains.utils import reverse\n\n\n@shared_task\ndef update_challenge_results_cache():\n for c in Challenge.objects.all():\n kwargs = {\n \"cached_num_participants\": c.participants_group.user_set.all().count()\n }\n\n challenge_results = Evaluation.objects.filter(\n submission__phase__challenge=c, published=True\n ).order_by(\"-created\")\n\n try:\n kwargs.update(\n {\n \"cached_num_results\": challenge_results.count(),\n \"cached_latest_result\": challenge_results.first().created,\n }\n )\n except AttributeError:\n # No results for this challenge\n kwargs.update(\n {\"cached_num_results\": 0, \"cached_latest_result\": None}\n )\n\n 
Challenge.objects.filter(pk=c.pk).update(**kwargs)\n\n\n@shared_task\ndef check_external_challenge_urls():\n \"\"\"\n Checks that all external challenge urls are reachable.\n\n Emails the managers if any of the challenges are not.\n \"\"\"\n challenges = ExternalChallenge.objects.filter(hidden=False)\n errors = []\n\n for challenge in challenges:\n try:\n url = challenge.homepage\n if not url.startswith(\"http\"):\n url = \"http://\" + url\n r = get(url, timeout=60)\n # raise an exception when we receive a http error (e.g., 404)\n r.raise_for_status()\n except exceptions.RequestException as err:\n update_url = reverse(\n \"challenges:external-update\",\n kwargs={\"short_name\": challenge.short_name},\n )\n errors.append(\n f\"Error when trying to access '{challenge}': {err}. You can \"\n f\"update it here: {update_url}\"\n )\n\n if errors:\n mail_managers(\n subject=f\"Unreachable external challenges ({len(errors)})\",\n message=\"\\n\\n\".join(errors),\n )\n", "path": "app/grandchallenge/challenges/tasks.py"}], "after_files": [{"content": "from celery import shared_task\nfrom django.contrib.auth import get_user_model\nfrom django.core.mail import mail_managers\nfrom django.db.models import Count, Max\nfrom requests import exceptions, get\n\nfrom grandchallenge.challenges.models import Challenge, ExternalChallenge\nfrom grandchallenge.evaluation.models import Evaluation\nfrom grandchallenge.subdomains.utils import reverse\n\n\n@shared_task\ndef update_challenge_results_cache():\n challenges = Challenge.objects.all()\n evaluation_info = (\n Evaluation.objects.filter(published=True)\n .values(\"submission__phase__challenge_id\")\n .annotate(\n cached_num_results=Count(\"submission__phase__challenge_id\"),\n cached_latest_result=Max(\"created\"),\n )\n )\n evaluation_info_by_challenge = {\n str(v[\"submission__phase__challenge_id\"]): v for v in evaluation_info\n }\n participant_counts = (\n get_user_model()\n .objects.values(\"groups__participants_of_challenge\")\n .annotate(cached_num_participants=Count(\"pk\"))\n )\n participant_counts_by_challenge = {\n str(v[\"groups__participants_of_challenge\"]): v\n for v in participant_counts\n }\n\n for c in challenges:\n c.cached_num_results = evaluation_info_by_challenge.get(\n str(c.pk), {}\n ).get(\"cached_num_results\", 0)\n c.cached_latest_result = evaluation_info_by_challenge.get(\n str(c.pk), {}\n ).get(\"cached_latest_result\", None)\n c.cached_num_participants = participant_counts_by_challenge.get(\n str(c.pk), {}\n ).get(\"cached_num_participants\", 0)\n\n Challenge.objects.bulk_update(\n challenges,\n [\n \"cached_num_results\",\n \"cached_num_participants\",\n \"cached_latest_result\",\n ],\n )\n\n\n@shared_task\ndef check_external_challenge_urls():\n \"\"\"\n Checks that all external challenge urls are reachable.\n\n Emails the managers if any of the challenges are not.\n \"\"\"\n challenges = ExternalChallenge.objects.filter(hidden=False)\n errors = []\n\n for challenge in challenges:\n try:\n url = challenge.homepage\n if not url.startswith(\"http\"):\n url = \"http://\" + url\n r = get(url, timeout=60)\n # raise an exception when we receive a http error (e.g., 404)\n r.raise_for_status()\n except exceptions.RequestException as err:\n update_url = reverse(\n \"challenges:external-update\",\n kwargs={\"short_name\": challenge.short_name},\n )\n errors.append(\n f\"Error when trying to access '{challenge}': {err}. 
You can \"\n f\"update it here: {update_url}\"\n )\n\n if errors:\n mail_managers(\n subject=f\"Unreachable external challenges ({len(errors)})\",\n message=\"\\n\\n\".join(errors),\n )\n", "path": "app/grandchallenge/challenges/tasks.py"}]} | 837 | 623 |
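The merged patch above replaces the per-challenge query loop with two grouped queries plus a single `bulk_update`. Below is a minimal sketch of that pattern, assuming a configured Django project; the `Challenge`/`Evaluation` names mirror the patch, but the import path and surrounding setup are illustrative rather than the project's actual layout.

```python
from django.db.models import Count, Max

# Hypothetical import path -- in the real project these models live in
# grandchallenge.challenges.models and grandchallenge.evaluation.models.
from myapp.models import Challenge, Evaluation


def refresh_result_caches():
    """Recompute cached result statistics with a fixed number of queries."""
    challenges = list(Challenge.objects.all())  # one SELECT

    # One grouped SELECT: number of published results and latest timestamp
    # per challenge, keyed by the challenge primary key.
    stats = {
        row["submission__phase__challenge_id"]: row
        for row in (
            Evaluation.objects.filter(published=True)
            .values("submission__phase__challenge_id")
            .annotate(num_results=Count("pk"), latest=Max("created"))
        )
    }

    for challenge in challenges:
        row = stats.get(challenge.pk, {})
        challenge.cached_num_results = row.get("num_results", 0)
        challenge.cached_latest_result = row.get("latest")

    # One UPDATE round-trip instead of one UPDATE per challenge.
    Challenge.objects.bulk_update(
        challenges, ["cached_num_results", "cached_latest_result"]
    )
```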
gh_patches_debug_32059 | rasdani/github-patches | git_diff | engnadeau__pybotics-36 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add Package to PyPi
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import setup
2
3 setup(
4 name='pybotics',
5 version='0.1.3',
6 packages=['examples', 'pybotics', 'tests'],
7 url='https://github.com/nnadeau/pybotics',
8 license='MIT',
9 author='Nicholas Nadeau',
10 author_email='',
11 description='Python Toolbox for Robotics',
12 setup_requires=['pytest-runner'],
13 tests_require=['pytest']
14 )
15
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -1,14 +1,59 @@
+import pypandoc
from setuptools import setup
+import os
+import git
+
+# tag version
+repo = git.Repo(os.getcwd())
+tag = repo.tags[-1]
+version = tag.name
+
+# requirements
+with open('requirements.txt') as f:
+ requirements = f.read().splitlines()
+
+# long description
+# TODO: pypandoc.convert() function returns wrong RST format, but saving/loading a file works
+file_path = os.path.abspath(os.path.dirname(__file__))
+pypandoc.convert_file('README.md', 'rst', outputfile=os.path.join(file_path, 'README.rst'))
+with open(os.path.join(file_path, 'README.rst'), encoding='utf-8') as f:
+ description = f.read()
setup(
name='pybotics',
- version='0.1.3',
- packages=['examples', 'pybotics', 'tests'],
+ version=version,
+ packages=['pybotics'],
url='https://github.com/nnadeau/pybotics',
license='MIT',
author='Nicholas Nadeau',
- author_email='',
+ author_email='[email protected]',
description='Python Toolbox for Robotics',
+ long_description=description,
+ install_requires=requirements,
setup_requires=['pytest-runner'],
- tests_require=['pytest']
+ tests_require=['pytest'],
+ classifiers=[
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: Education',
+ 'Intended Audience :: End Users/Desktop',
+ 'Intended Audience :: Manufacturing',
+ 'Intended Audience :: Science/Research',
+ 'Topic :: Education',
+ 'Topic :: Scientific/Engineering',
+ 'Topic :: Scientific/Engineering :: Artificial Intelligence',
+ 'Topic :: Scientific/Engineering :: Human Machine Interfaces',
+ 'Topic :: Scientific/Engineering :: Mathematics',
+ 'Topic :: Scientific/Engineering :: Physics',
+ 'Topic :: Utilities',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 3 :: Only',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ ],
+ keywords='python robot robotics research automation kinematics geometry',
)
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -1,14 +1,59 @@\n+import pypandoc\n from setuptools import setup\n+import os\n+import git\n+\n+# tag version\n+repo = git.Repo(os.getcwd())\n+tag = repo.tags[-1]\n+version = tag.name\n+\n+# requirements\n+with open('requirements.txt') as f:\n+ requirements = f.read().splitlines()\n+\n+# long description\n+# TODO: pypandoc.convert() function returns wrong RST format, but saving/loading a file works\n+file_path = os.path.abspath(os.path.dirname(__file__))\n+pypandoc.convert_file('README.md', 'rst', outputfile=os.path.join(file_path, 'README.rst'))\n+with open(os.path.join(file_path, 'README.rst'), encoding='utf-8') as f:\n+ description = f.read()\n \n setup(\n name='pybotics',\n- version='0.1.3',\n- packages=['examples', 'pybotics', 'tests'],\n+ version=version,\n+ packages=['pybotics'],\n url='https://github.com/nnadeau/pybotics',\n license='MIT',\n author='Nicholas Nadeau',\n- author_email='',\n+ author_email='[email protected]',\n description='Python Toolbox for Robotics',\n+ long_description=description,\n+ install_requires=requirements,\n setup_requires=['pytest-runner'],\n- tests_require=['pytest']\n+ tests_require=['pytest'],\n+ classifiers=[\n+ 'Development Status :: 4 - Beta',\n+ 'Intended Audience :: Developers',\n+ 'Intended Audience :: Education',\n+ 'Intended Audience :: End Users/Desktop',\n+ 'Intended Audience :: Manufacturing',\n+ 'Intended Audience :: Science/Research',\n+ 'Topic :: Education',\n+ 'Topic :: Scientific/Engineering',\n+ 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n+ 'Topic :: Scientific/Engineering :: Human Machine Interfaces',\n+ 'Topic :: Scientific/Engineering :: Mathematics',\n+ 'Topic :: Scientific/Engineering :: Physics',\n+ 'Topic :: Utilities',\n+ 'License :: OSI Approved :: MIT License',\n+ 'Programming Language :: Python :: 3 :: Only',\n+ 'Programming Language :: Python :: 3',\n+ 'Programming Language :: Python :: 3.2',\n+ 'Programming Language :: Python :: 3.3',\n+ 'Programming Language :: Python :: 3.4',\n+ 'Programming Language :: Python :: 3.5',\n+ 'Programming Language :: Python :: 3.6',\n+ ],\n+ keywords='python robot robotics research automation kinematics geometry',\n )\n", "issue": "Add Package to PyPi\n\n", "before_files": [{"content": "from setuptools import setup\n\nsetup(\n name='pybotics',\n version='0.1.3',\n packages=['examples', 'pybotics', 'tests'],\n url='https://github.com/nnadeau/pybotics',\n license='MIT',\n author='Nicholas Nadeau',\n author_email='',\n description='Python Toolbox for Robotics',\n setup_requires=['pytest-runner'],\n tests_require=['pytest']\n)\n", "path": "setup.py"}], "after_files": [{"content": "import pypandoc\nfrom setuptools import setup\nimport os\nimport git\n\n# tag version\nrepo = git.Repo(os.getcwd())\ntag = repo.tags[-1]\nversion = tag.name\n\n# requirements\nwith open('requirements.txt') as f:\n requirements = f.read().splitlines()\n\n# long description\n# TODO: pypandoc.convert() function returns wrong RST format, but saving/loading a file works\nfile_path = os.path.abspath(os.path.dirname(__file__))\npypandoc.convert_file('README.md', 'rst', outputfile=os.path.join(file_path, 'README.rst'))\nwith open(os.path.join(file_path, 'README.rst'), encoding='utf-8') as f:\n description = f.read()\n\nsetup(\n name='pybotics',\n version=version,\n packages=['pybotics'],\n url='https://github.com/nnadeau/pybotics',\n license='MIT',\n author='Nicholas Nadeau',\n author_email='[email protected]',\n 
description='Python Toolbox for Robotics',\n long_description=description,\n install_requires=requirements,\n setup_requires=['pytest-runner'],\n tests_require=['pytest'],\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: End Users/Desktop',\n 'Intended Audience :: Manufacturing',\n 'Intended Audience :: Science/Research',\n 'Topic :: Education',\n 'Topic :: Scientific/Engineering',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n 'Topic :: Scientific/Engineering :: Human Machine Interfaces',\n 'Topic :: Scientific/Engineering :: Mathematics',\n 'Topic :: Scientific/Engineering :: Physics',\n 'Topic :: Utilities',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3 :: Only',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.2',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n ],\n keywords='python robot robotics research automation kinematics geometry',\n)\n", "path": "setup.py"}]} | 375 | 591 |
gh_patches_debug_399 | rasdani/github-patches | git_diff | bokeh__bokeh-1948 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`publishing` example from the `embed` directory fails
With an error message that is not particularly helpful. Using bokeh 0.8.
```
➜ embed python publishing.py
Using saved session configuration for http://localhost:5006/
To override, pass 'load_from_config=False' to Session
INFO:requests.packages.urllib3.connectionpool:Starting new HTTP connection (1): localhost
Traceback (most recent call last):
File "publishing.py", line 11, in <module>
Session().register('testuser', 'testpassword')
File "/Users/nicolas/anaconda/lib/python2.7/site-packages/bokeh/session.py", line 208, in register
raise RuntimeError("Unknown Error")
RuntimeError: Unknown Error
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/embed/publishing.py`
Content:
```
1 # The plot server must be running
2 # Go to http://localhost:5006/bokeh to view this plot
3
4 import time
5
6 import numpy as np
7
8 from bokeh.plotting import *
9 from bokeh.session import Session
10 from bokeh import embed
11 Session().register('testuser', 'testpassword')
12 N = 80
13 x = np.linspace(0, 4*np.pi, N)
14 y = np.sin(x)
15 output_server("line_animate")
16 TOOLS = "pan,wheel_zoom,box_zoom,reset,save,box_select"
17 p = figure(tools=TOOLS)
18 p.circle(x, y, color="#3333ee", name="sin")
19 push()
20 renderer = p.select(dict(name="sin"))
21 ds = renderer[0].data_source
22 cursession().publish()
23 tag = embed.autoload_server(p, cursession(), public=True)
24 html = """
25 <html>
26 <head></head>
27 <body>
28 %s
29 </body>
30 </html>
31 """
32 html = html % (tag)
33 with open("publishing.html", "w+") as f:
34 f.write(html)
35 while True:
36 for i in np.hstack((np.linspace(1, -1, 100), np.linspace(-1, 1, 100))):
37 ds.data["y"] = y * i
38 cursession().store_objects(ds)
39 time.sleep(1.0)
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/embed/publishing.py b/examples/embed/publishing.py
--- a/examples/embed/publishing.py
+++ b/examples/embed/publishing.py
@@ -1,4 +1,5 @@
# The plot server must be running
+# The server must run with --multi-user for this example to work
# Go to http://localhost:5006/bokeh to view this plot
import time
| {"golden_diff": "diff --git a/examples/embed/publishing.py b/examples/embed/publishing.py\n--- a/examples/embed/publishing.py\n+++ b/examples/embed/publishing.py\n@@ -1,4 +1,5 @@\n # The plot server must be running\n+# The server must run with --multi-user for this example to work\n # Go to http://localhost:5006/bokeh to view this plot\n \n import time\n", "issue": "`publishing` example from the `embed` directory fails\nWith an error message that is not particularly helpfull. Using bokeh 0.8.\n\n```\n\u279c embed python publishing.py\nUsing saved session configuration for http://localhost:5006/\nTo override, pass 'load_from_config=False' to Session\nINFO:requests.packages.urllib3.connectionpool:Starting new HTTP connection (1): localhost\nTraceback (most recent call last):\n File \"publishing.py\", line 11, in <module>\n Session().register('testuser', 'testpassword')\n File \"/Users/nicolas/anaconda/lib/python2.7/site-packages/bokeh/session.py\", line 208, in register\n raise RuntimeError(\"Unknown Error\")\nRuntimeError: Unknown Error\n```\n\n", "before_files": [{"content": "# The plot server must be running\n# Go to http://localhost:5006/bokeh to view this plot\n\nimport time\n\nimport numpy as np\n\nfrom bokeh.plotting import *\nfrom bokeh.session import Session\nfrom bokeh import embed\nSession().register('testuser', 'testpassword')\nN = 80\nx = np.linspace(0, 4*np.pi, N)\ny = np.sin(x)\noutput_server(\"line_animate\")\nTOOLS = \"pan,wheel_zoom,box_zoom,reset,save,box_select\"\np = figure(tools=TOOLS)\np.circle(x, y, color=\"#3333ee\", name=\"sin\")\npush()\nrenderer = p.select(dict(name=\"sin\"))\nds = renderer[0].data_source\ncursession().publish()\ntag = embed.autoload_server(p, cursession(), public=True)\nhtml = \"\"\"\n<html>\n<head></head>\n<body>\n%s\n</body>\n</html>\n\"\"\"\nhtml = html % (tag)\nwith open(\"publishing.html\", \"w+\") as f:\n f.write(html)\nwhile True:\n for i in np.hstack((np.linspace(1, -1, 100), np.linspace(-1, 1, 100))):\n ds.data[\"y\"] = y * i\n cursession().store_objects(ds)\n time.sleep(1.0)\n", "path": "examples/embed/publishing.py"}], "after_files": [{"content": "# The plot server must be running\n# The server must run with --multi-user for this example to work\n# Go to http://localhost:5006/bokeh to view this plot\n\nimport time\n\nimport numpy as np\n\nfrom bokeh.plotting import *\nfrom bokeh.session import Session\nfrom bokeh import embed\nSession().register('testuser', 'testpassword')\nN = 80\nx = np.linspace(0, 4*np.pi, N)\ny = np.sin(x)\noutput_server(\"line_animate\")\nTOOLS = \"pan,wheel_zoom,box_zoom,reset,save,box_select\"\np = figure(tools=TOOLS)\np.circle(x, y, color=\"#3333ee\", name=\"sin\")\npush()\nrenderer = p.select(dict(name=\"sin\"))\nds = renderer[0].data_source\ncursession().publish()\ntag = embed.autoload_server(p, cursession(), public=True)\nhtml = \"\"\"\n<html>\n<head></head>\n<body>\n%s\n</body>\n</html>\n\"\"\"\nhtml = html % (tag)\nwith open(\"publishing.html\", \"w+\") as f:\n f.write(html)\nwhile True:\n for i in np.hstack((np.linspace(1, -1, 100), np.linspace(-1, 1, 100))):\n ds.data[\"y\"] = y * i\n cursession().store_objects(ds)\n time.sleep(1.0)\n", "path": "examples/embed/publishing.py"}]} | 801 | 91 |
gh_patches_debug_12434 | rasdani/github-patches | git_diff | joke2k__faker-297 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Sex-specific names and Sex attribute don't match
While the person provider distinguishes between name_male() and name_female(), and profile generates a random ['sex'] attribute, these do not correlate at present. So in 50% of cases (ignoring neutral names like Chris) this results in F with male names and M with female names.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `faker/providers/profile/__init__.py`
Content:
```
1 # coding=utf-8
2
3 from .. import BaseProvider
4 import itertools
5
6
7 class Provider(BaseProvider):
8 """
9 This provider is a collection of functions to generate personal profiles and identities.
10
11 """
12
13 def simple_profile(self):
14 """
15 Generates a basic profile with personal informations
16 """
17
18 return {
19 "username": self.generator.user_name(),
20 "name": self.generator.name(),
21 "sex": self.random_element(["M", "F"]),
22 "address": self.generator.address(),
23 "mail": self.generator.free_email(),
24
25 #"password":self.generator.password()
26 "birthdate": self.generator.date(),
27
28 }
29
30 def profile(self, fields=None):
31 """
32 Generates a complete profile.
33 If "fields" is not empty, only the fields in the list will be returned
34 """
35 if fields is None:
36 fields = []
37
38 d = {
39 "job": self.generator.job(),
40 "company": self.generator.company(),
41 "ssn": self.generator.ssn(),
42 "residence": self.generator.address(),
43 "current_location": (self.generator.latitude(), self.generator.longitude()),
44 "blood_group": "".join(self.random_element(list(itertools.product(["A", "B", "AB", "0"], ["+", "-"])))),
45 "website": [self.generator.url() for i in range(1, self.random_int(2, 5))]
46 }
47
48 d = dict(d, **self.generator.simple_profile())
49 #field selection
50 if len(fields) > 0:
51 d = dict((k, v) for (k, v) in d.items() if k in fields)
52
53 return d
54
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/faker/providers/profile/__init__.py b/faker/providers/profile/__init__.py
--- a/faker/providers/profile/__init__.py
+++ b/faker/providers/profile/__init__.py
@@ -15,10 +15,15 @@
Generates a basic profile with personal informations
"""
+ sex = self.random_element(["F", "M"])
+ if sex == 'F':
+ name = self.generator.name_female()
+ elif sex == 'M':
+ name = self.generator.name_male()
return {
"username": self.generator.user_name(),
- "name": self.generator.name(),
- "sex": self.random_element(["M", "F"]),
+ "name": name,
+ "sex": sex,
"address": self.generator.address(),
"mail": self.generator.free_email(),
| {"golden_diff": "diff --git a/faker/providers/profile/__init__.py b/faker/providers/profile/__init__.py\n--- a/faker/providers/profile/__init__.py\n+++ b/faker/providers/profile/__init__.py\n@@ -15,10 +15,15 @@\n Generates a basic profile with personal informations\n \"\"\"\n \n+ sex = self.random_element([\"F\", \"M\"])\n+ if sex == 'F':\n+ name = self.generator.name_female()\n+ elif sex == 'M':\n+ name = self.generator.name_male()\n return {\n \"username\": self.generator.user_name(),\n- \"name\": self.generator.name(),\n- \"sex\": self.random_element([\"M\", \"F\"]),\n+ \"name\": name,\n+ \"sex\": sex,\n \"address\": self.generator.address(),\n \"mail\": self.generator.free_email(),\n", "issue": "Sex-specific names and Sex attribute don't match\nWhile person distinguisheds between name_male() and name_female(), and profile generates a random ['sex'] attribute, these do not correlate at present. So in 50% of cases (ignoring neutral names like Chris) this results in F with male names and M with female names.\n\n", "before_files": [{"content": "# coding=utf-8\n\nfrom .. import BaseProvider\nimport itertools\n\n\nclass Provider(BaseProvider):\n \"\"\"\n This provider is a collection of functions to generate personal profiles and identities.\n\n \"\"\"\n\n def simple_profile(self):\n \"\"\"\n Generates a basic profile with personal informations\n \"\"\"\n\n return {\n \"username\": self.generator.user_name(),\n \"name\": self.generator.name(),\n \"sex\": self.random_element([\"M\", \"F\"]),\n \"address\": self.generator.address(),\n \"mail\": self.generator.free_email(),\n\n #\"password\":self.generator.password()\n \"birthdate\": self.generator.date(),\n\n }\n\n def profile(self, fields=None):\n \"\"\"\n Generates a complete profile.\n If \"fields\" is not empty, only the fields in the list will be returned\n \"\"\"\n if fields is None:\n fields = []\n\n d = {\n \"job\": self.generator.job(),\n \"company\": self.generator.company(),\n \"ssn\": self.generator.ssn(),\n \"residence\": self.generator.address(),\n \"current_location\": (self.generator.latitude(), self.generator.longitude()),\n \"blood_group\": \"\".join(self.random_element(list(itertools.product([\"A\", \"B\", \"AB\", \"0\"], [\"+\", \"-\"])))),\n \"website\": [self.generator.url() for i in range(1, self.random_int(2, 5))]\n }\n\n d = dict(d, **self.generator.simple_profile())\n #field selection\n if len(fields) > 0:\n d = dict((k, v) for (k, v) in d.items() if k in fields)\n\n return d\n", "path": "faker/providers/profile/__init__.py"}], "after_files": [{"content": "# coding=utf-8\n\nfrom .. 
import BaseProvider\nimport itertools\n\n\nclass Provider(BaseProvider):\n \"\"\"\n This provider is a collection of functions to generate personal profiles and identities.\n\n \"\"\"\n\n def simple_profile(self):\n \"\"\"\n Generates a basic profile with personal informations\n \"\"\"\n\n sex = self.random_element([\"F\", \"M\"])\n if sex == 'F':\n name = self.generator.name_female()\n elif sex == 'M':\n name = self.generator.name_male()\n return {\n \"username\": self.generator.user_name(),\n \"name\": name,\n \"sex\": sex,\n \"address\": self.generator.address(),\n \"mail\": self.generator.free_email(),\n\n #\"password\":self.generator.password()\n \"birthdate\": self.generator.date(),\n\n }\n\n def profile(self, fields=None):\n \"\"\"\n Generates a complete profile.\n If \"fields\" is not empty, only the fields in the list will be returned\n \"\"\"\n if fields is None:\n fields = []\n\n d = {\n \"job\": self.generator.job(),\n \"company\": self.generator.company(),\n \"ssn\": self.generator.ssn(),\n \"residence\": self.generator.address(),\n \"current_location\": (self.generator.latitude(), self.generator.longitude()),\n \"blood_group\": \"\".join(self.random_element(list(itertools.product([\"A\", \"B\", \"AB\", \"0\"], [\"+\", \"-\"])))),\n \"website\": [self.generator.url() for i in range(1, self.random_int(2, 5))]\n }\n\n d = dict(d, **self.generator.simple_profile())\n #field selection\n if len(fields) > 0:\n d = dict((k, v) for (k, v) in d.items() if k in fields)\n\n return d\n", "path": "faker/providers/profile/__init__.py"}]} | 778 | 183 |
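A quick way to see the effect of the patch above: generate a few profiles and check that `name` now comes from the generator matching `sex`. This assumes the patched provider is installed; `Faker()` is the library's normal entry point.

```python
from faker import Faker

fake = Faker()
for _ in range(5):
    profile = fake.simple_profile()
    # With the fix, "F" profiles draw from name_female() and "M" profiles
    # from name_male(), so the two fields agree by construction.
    print(profile["sex"], profile["name"])
```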
gh_patches_debug_14604 | rasdani/github-patches | git_diff | bookwyrm-social__bookwyrm-1778 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Receive '403: forbidden' when registering new account
**Describe the bug**
When attempting to register a new account I receive a `403` status from the `/register` endpoint.
*Full message:*
>
> CSRF verification failed. Request aborted.
>
> You are seeing this message because this site requires a CSRF cookie when submitting forms. This cookie is required for security reasons, to ensure that your browser is not being hijacked by third parties.
>
> If you have configured your browser to disable cookies, please re-enable them, at least for this site, or for “same-origin” requests.
>
A friend reported this to me and I was able to reproduce. Cookies are not disabled on my browser.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to https://bookwyrm.social/
2. Fill in new user form and click "Sign Up"
3. Observe failed request and 403 response.
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
<img width="798" alt="Screen Shot 2022-01-06 at 2 35 53 PM" src="https://user-images.githubusercontent.com/32184074/148462597-dead2839-f237-4523-b821-bb0f3055214e.png">
<img width="819" alt="Screen Shot 2022-01-06 at 2 34 21 PM" src="https://user-images.githubusercontent.com/32184074/148462625-136f0f46-4fd9-48a9-a02c-df628225c87e.png">
**Instance**
bookwyrm.social
---
**Desktop (please complete the following information):**
- OS: macOS 12.1
- Chrome Version 96.0.4664.110
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bookwyrm/views/landing/landing.py`
Content:
```
1 """ non-interactive pages """
2 from django.template.response import TemplateResponse
3 from django.views import View
4 from django.utils.decorators import method_decorator
5 from django.views.decorators.cache import cache_page
6
7 from bookwyrm import forms
8 from bookwyrm.views import helpers
9 from bookwyrm.views.feed import Feed
10
11
12 # pylint: disable= no-self-use
13 class About(View):
14 """create invites"""
15
16 def get(self, request):
17 """more information about the instance"""
18 return TemplateResponse(request, "landing/about.html")
19
20
21 class Home(View):
22 """landing page or home feed depending on auth"""
23
24 def get(self, request):
25 """this is the same as the feed on the home tab"""
26 if request.user.is_authenticated:
27 feed_view = Feed.as_view()
28 return feed_view(request, "home")
29 landing_view = Landing.as_view()
30 return landing_view(request)
31
32
33 class Landing(View):
34 """preview of recently reviewed books"""
35
36 @method_decorator(cache_page(60 * 60), name="dispatch")
37 def get(self, request):
38 """tiled book activity page"""
39 data = {
40 "register_form": forms.RegisterForm(),
41 "request_form": forms.InviteRequestForm(),
42 "books": helpers.get_landing_books(),
43 }
44 return TemplateResponse(request, "landing/landing.html", data)
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bookwyrm/views/landing/landing.py b/bookwyrm/views/landing/landing.py
--- a/bookwyrm/views/landing/landing.py
+++ b/bookwyrm/views/landing/landing.py
@@ -1,8 +1,6 @@
""" non-interactive pages """
from django.template.response import TemplateResponse
from django.views import View
-from django.utils.decorators import method_decorator
-from django.views.decorators.cache import cache_page
from bookwyrm import forms
from bookwyrm.views import helpers
@@ -33,7 +31,6 @@
class Landing(View):
"""preview of recently reviewed books"""
- @method_decorator(cache_page(60 * 60), name="dispatch")
def get(self, request):
"""tiled book activity page"""
data = {
| {"golden_diff": "diff --git a/bookwyrm/views/landing/landing.py b/bookwyrm/views/landing/landing.py\n--- a/bookwyrm/views/landing/landing.py\n+++ b/bookwyrm/views/landing/landing.py\n@@ -1,8 +1,6 @@\n \"\"\" non-interactive pages \"\"\"\n from django.template.response import TemplateResponse\n from django.views import View\n-from django.utils.decorators import method_decorator\n-from django.views.decorators.cache import cache_page\n \n from bookwyrm import forms\n from bookwyrm.views import helpers\n@@ -33,7 +31,6 @@\n class Landing(View):\n \"\"\"preview of recently reviewed books\"\"\"\n \n- @method_decorator(cache_page(60 * 60), name=\"dispatch\")\n def get(self, request):\n \"\"\"tiled book activity page\"\"\"\n data = {\n", "issue": "Receive '403: forbidden' when registering new account\n**Describe the bug**\r\nWhen attempting to register a new account I receive a `403` status from the `/register` endpoint. \r\n\r\n*Full message:*\r\n> \r\n> CSRF verification failed. Request aborted.\r\n> \r\n> You are seeing this message because this site requires a CSRF cookie when submitting forms. This cookie is required for security reasons, to ensure that your browser is not being hijacked by third parties.\r\n> \r\n> If you have configured your browser to disable cookies, please re-enable them, at least for this site, or for \u201csame-origin\u201d requests.\r\n> \r\n\r\nA friend reported this to me and I was able to reproduce. Cookies are not disabled on my browser. \r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Go to https://bookwyrm.social/\r\n2. Fill in new user form and click \"Sign Up\"\r\n3. Observe failed request and 403 response. \r\n\r\n**Expected behavior**\r\nA clear and concise description of what you expected to happen.\r\n\r\n**Screenshots**\r\n<img width=\"798\" alt=\"Screen Shot 2022-01-06 at 2 35 53 PM\" src=\"https://user-images.githubusercontent.com/32184074/148462597-dead2839-f237-4523-b821-bb0f3055214e.png\">\r\n<img width=\"819\" alt=\"Screen Shot 2022-01-06 at 2 34 21 PM\" src=\"https://user-images.githubusercontent.com/32184074/148462625-136f0f46-4fd9-48a9-a02c-df628225c87e.png\">\r\n\r\n\r\n**Instance**\r\nbookwyrm.social\r\n\r\n---\r\n\r\n**Desktop (please complete the following information):**\r\n - OS: macOS 12.1\r\n - Chrome Version 96.0.4664.110\r\n\n", "before_files": [{"content": "\"\"\" non-interactive pages \"\"\"\nfrom django.template.response import TemplateResponse\nfrom django.views import View\nfrom django.utils.decorators import method_decorator\nfrom django.views.decorators.cache import cache_page\n\nfrom bookwyrm import forms\nfrom bookwyrm.views import helpers\nfrom bookwyrm.views.feed import Feed\n\n\n# pylint: disable= no-self-use\nclass About(View):\n \"\"\"create invites\"\"\"\n\n def get(self, request):\n \"\"\"more information about the instance\"\"\"\n return TemplateResponse(request, \"landing/about.html\")\n\n\nclass Home(View):\n \"\"\"landing page or home feed depending on auth\"\"\"\n\n def get(self, request):\n \"\"\"this is the same as the feed on the home tab\"\"\"\n if request.user.is_authenticated:\n feed_view = Feed.as_view()\n return feed_view(request, \"home\")\n landing_view = Landing.as_view()\n return landing_view(request)\n\n\nclass Landing(View):\n \"\"\"preview of recently reviewed books\"\"\"\n\n @method_decorator(cache_page(60 * 60), name=\"dispatch\")\n def get(self, request):\n \"\"\"tiled book activity page\"\"\"\n data = {\n \"register_form\": forms.RegisterForm(),\n \"request_form\": 
forms.InviteRequestForm(),\n \"books\": helpers.get_landing_books(),\n }\n return TemplateResponse(request, \"landing/landing.html\", data)\n", "path": "bookwyrm/views/landing/landing.py"}], "after_files": [{"content": "\"\"\" non-interactive pages \"\"\"\nfrom django.template.response import TemplateResponse\nfrom django.views import View\n\nfrom bookwyrm import forms\nfrom bookwyrm.views import helpers\nfrom bookwyrm.views.feed import Feed\n\n\n# pylint: disable= no-self-use\nclass About(View):\n \"\"\"create invites\"\"\"\n\n def get(self, request):\n \"\"\"more information about the instance\"\"\"\n return TemplateResponse(request, \"landing/about.html\")\n\n\nclass Home(View):\n \"\"\"landing page or home feed depending on auth\"\"\"\n\n def get(self, request):\n \"\"\"this is the same as the feed on the home tab\"\"\"\n if request.user.is_authenticated:\n feed_view = Feed.as_view()\n return feed_view(request, \"home\")\n landing_view = Landing.as_view()\n return landing_view(request)\n\n\nclass Landing(View):\n \"\"\"preview of recently reviewed books\"\"\"\n\n def get(self, request):\n \"\"\"tiled book activity page\"\"\"\n data = {\n \"register_form\": forms.RegisterForm(),\n \"request_form\": forms.InviteRequestForm(),\n \"books\": helpers.get_landing_books(),\n }\n return TemplateResponse(request, \"landing/landing.html\", data)\n", "path": "bookwyrm/views/landing/landing.py"}]} | 1,092 | 173 |
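The fix above removes the hour-long `cache_page` from the landing view: a cached response also freezes the rendered CSRF token, so later visitors submit a token that does not match their own cookie and hit the 403. If some caching were still wanted, one option (not what the project merged; the cache key and timeout here are illustrative) is to cache only the expensive book lookup rather than the whole response:

```python
from django.core.cache import cache

from bookwyrm.views import helpers


def get_landing_books_cached():
    """Cache the tiled-book query without caching the rendered page."""
    books = cache.get("landing_books")
    if books is None:
        # Evaluate the queryset before caching so a plain list is stored.
        books = list(helpers.get_landing_books())
        cache.set("landing_books", books, 60 * 60)  # one hour, illustrative
    return books
```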
gh_patches_debug_5106 | rasdani/github-patches | git_diff | Kinto__kinto-316 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
kinto init fails if config folder does not exist
```
kinto init
Which backend to use? (1 - postgresql, 2 - redis, default - memory)
Traceback (most recent call last):
File "/var/www/kinto.leplat.re/venv/bin/kinto", line 9, in <module>
load_entry_point('kinto==1.9.0', 'console_scripts', 'kinto')()
File "/var/www/kinto.leplat.re/venv/local/lib/python2.7/site-packages/kinto/__main__.py", line 44, in main
init(config_file)
File "/var/www/kinto.leplat.re/venv/local/lib/python2.7/site-packages/kinto/config/__init__.py", line 61, in init
permission_url=values['permission_url'])
File "/var/www/kinto.leplat.re/venv/local/lib/python2.7/site-packages/kinto/config/__init__.py", line 15, in render_template
with codecs.open(destination, 'w+', encoding='utf-8') as output:
File "/var/www/kinto.leplat.re/venv/lib/python2.7/codecs.py", line 881, in open
file = __builtin__.open(filename, mode, buffering)
IOError: [Errno 2] No such file or directory: 'config/kinto.ini'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kinto/config/__init__.py`
Content:
```
1 import os
2 import binascii
3 import codecs
4
5 HERE = os.path.abspath(os.path.dirname(__file__))
6
7
8 def render_template(template, destination, **kwargs):
9 template = os.path.join(HERE, template)
10
11 with codecs.open(template, 'r', encoding='utf-8') as f:
12 raw_template = f.read()
13 rendered = raw_template.format(**kwargs)
14 with codecs.open(destination, 'w+', encoding='utf-8') as output:
15 output.write(rendered)
16
17
18 def init(config_file, backend):
19 values = {}
20 values['secret'] = binascii.b2a_hex(os.urandom(32))
21
22 values['storage_backend'] = "cliquet.storage.%s" % backend
23 values['cache_backend'] = "cliquet.cache.%s" % backend
24 values['permission_backend'] = "cliquet.permission.%s" % backend
25
26 if backend == 'postgresql':
27 postgresql_url = "postgres://postgres:postgres@localhost/postgres"
28 values['storage_url'] = postgresql_url
29 values['cache_url'] = postgresql_url
30 values['permission_url'] = postgresql_url
31
32 elif backend == 'redis':
33 redis_url = "redis://localhost:6379"
34 values['storage_url'] = redis_url + "/1"
35 values['cache_url'] = redis_url + "/2"
36 values['permission_url'] = redis_url + "/3"
37
38 else:
39 values['storage_url'] = ''
40 values['cache_url'] = ''
41 values['permission_url'] = ''
42
43 render_template("kinto.tpl", config_file, **values)
44
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kinto/config/__init__.py b/kinto/config/__init__.py
--- a/kinto/config/__init__.py
+++ b/kinto/config/__init__.py
@@ -8,6 +8,9 @@
def render_template(template, destination, **kwargs):
template = os.path.join(HERE, template)
+ folder = os.path.dirname(destination)
+ os.makedirs(folder)
+
with codecs.open(template, 'r', encoding='utf-8') as f:
raw_template = f.read()
rendered = raw_template.format(**kwargs)
| {"golden_diff": "diff --git a/kinto/config/__init__.py b/kinto/config/__init__.py\n--- a/kinto/config/__init__.py\n+++ b/kinto/config/__init__.py\n@@ -8,6 +8,9 @@\n def render_template(template, destination, **kwargs):\n template = os.path.join(HERE, template)\n \n+ folder = os.path.dirname(destination)\n+ os.makedirs(folder)\n+\n with codecs.open(template, 'r', encoding='utf-8') as f:\n raw_template = f.read()\n rendered = raw_template.format(**kwargs)\n", "issue": "kinto init fails if config folder does not exist\n```\nkinto init \nWhich backend to use? (1 - postgresql, 2 - redis, default - memory) \nTraceback (most recent call last):\n File \"/var/www/kinto.leplat.re/venv/bin/kinto\", line 9, in <module>\n load_entry_point('kinto==1.9.0', 'console_scripts', 'kinto')()\n File \"/var/www/kinto.leplat.re/venv/local/lib/python2.7/site-packages/kinto/__main__.py\", line 44, in main\n init(config_file)\n File \"/var/www/kinto.leplat.re/venv/local/lib/python2.7/site-packages/kinto/config/__init__.py\", line 61, in init\n permission_url=values['permission_url'])\n File \"/var/www/kinto.leplat.re/venv/local/lib/python2.7/site-packages/kinto/config/__init__.py\", line 15, in render_template\n with codecs.open(destination, 'w+', encoding='utf-8') as output:\n File \"/var/www/kinto.leplat.re/venv/lib/python2.7/codecs.py\", line 881, in open\n file = __builtin__.open(filename, mode, buffering)\nIOError: [Errno 2] No such file or directory: 'config/kinto.ini'\n```\n\n", "before_files": [{"content": "import os\nimport binascii\nimport codecs\n\nHERE = os.path.abspath(os.path.dirname(__file__))\n\n\ndef render_template(template, destination, **kwargs):\n template = os.path.join(HERE, template)\n\n with codecs.open(template, 'r', encoding='utf-8') as f:\n raw_template = f.read()\n rendered = raw_template.format(**kwargs)\n with codecs.open(destination, 'w+', encoding='utf-8') as output:\n output.write(rendered)\n\n\ndef init(config_file, backend):\n values = {}\n values['secret'] = binascii.b2a_hex(os.urandom(32))\n\n values['storage_backend'] = \"cliquet.storage.%s\" % backend\n values['cache_backend'] = \"cliquet.cache.%s\" % backend\n values['permission_backend'] = \"cliquet.permission.%s\" % backend\n\n if backend == 'postgresql':\n postgresql_url = \"postgres://postgres:postgres@localhost/postgres\"\n values['storage_url'] = postgresql_url\n values['cache_url'] = postgresql_url\n values['permission_url'] = postgresql_url\n\n elif backend == 'redis':\n redis_url = \"redis://localhost:6379\"\n values['storage_url'] = redis_url + \"/1\"\n values['cache_url'] = redis_url + \"/2\"\n values['permission_url'] = redis_url + \"/3\"\n\n else:\n values['storage_url'] = ''\n values['cache_url'] = ''\n values['permission_url'] = ''\n\n render_template(\"kinto.tpl\", config_file, **values)\n", "path": "kinto/config/__init__.py"}], "after_files": [{"content": "import os\nimport binascii\nimport codecs\n\nHERE = os.path.abspath(os.path.dirname(__file__))\n\n\ndef render_template(template, destination, **kwargs):\n template = os.path.join(HERE, template)\n\n folder = os.path.dirname(destination)\n os.makedirs(folder)\n\n with codecs.open(template, 'r', encoding='utf-8') as f:\n raw_template = f.read()\n rendered = raw_template.format(**kwargs)\n with codecs.open(destination, 'w+', encoding='utf-8') as output:\n output.write(rendered)\n\n\ndef init(config_file, backend):\n values = {}\n values['secret'] = binascii.b2a_hex(os.urandom(32))\n\n values['storage_backend'] = \"cliquet.storage.%s\" % backend\n 
values['cache_backend'] = \"cliquet.cache.%s\" % backend\n values['permission_backend'] = \"cliquet.permission.%s\" % backend\n\n if backend == 'postgresql':\n postgresql_url = \"postgres://postgres:postgres@localhost/postgres\"\n values['storage_url'] = postgresql_url\n values['cache_url'] = postgresql_url\n values['permission_url'] = postgresql_url\n\n elif backend == 'redis':\n redis_url = \"redis://localhost:6379\"\n values['storage_url'] = redis_url + \"/1\"\n values['cache_url'] = redis_url + \"/2\"\n values['permission_url'] = redis_url + \"/3\"\n\n else:\n values['storage_url'] = ''\n values['cache_url'] = ''\n values['permission_url'] = ''\n\n render_template(\"kinto.tpl\", config_file, **values)\n", "path": "kinto/config/__init__.py"}]} | 996 | 122 |
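The kinto patch above creates the parent directory before `codecs.open(..., 'w+')`, which is exactly what the traceback was missing. A slightly more defensive variant (an addition beyond the merged patch) also tolerates an already-existing folder and a bare filename, whose `os.path.dirname()` is an empty string:

```python
import os


def ensure_parent_dir(destination):
    """Create the directory that will hold `destination`, if needed."""
    folder = os.path.dirname(destination)
    if folder and not os.path.isdir(folder):
        os.makedirs(folder)
```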
gh_patches_debug_57199 | rasdani/github-patches | git_diff | hylang__hy-917 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug in reader: can't terminate string literals containing \ at end of line.
Common Lisp had docstrings before Python existed. Hy should totally support this.
``` Python
>>> def foo():
"""I'm a docstring!"""
return 42
>>> help(foo)
Help on function foo in module __main__:
foo()
I'm a docstring!
>>> foo.__doc__
"I'm a docstring!"
```
Let's try it in Hy.
```
=> (defn foo ()
... """I'm a docstring"""
... 42)
=> (help foo)
Help on function foo:
foo()
=> foo.__doc__
''
```
?!
Where's my docstring?
(some time later...)
Let's try hy2py.
``` Python
def foo():
''
"I'm a docstring!"
''
return 42
```
I see what you did there, Hy. Yeah, there should probably be a warning about that somewhere. Triple quotes don't work. Why does Python use them anyway? So we can include newlines mostly. Common Lisp strings do that already. Does Hy?
```
=> "one
... two"
'one\ntwo
```
Yup. Looks good.
Escapes also appear to work. But what if I actually wanted backslashes? Do raw strings work?
```
=> r"\foo"
'\\foo
```
Nice. Now **raw** triple quoted strings? This works in Python:
``` Python
>>> r"""\
foo"""
'\\\nfoo'
```
Let's use what we've learned and try it in Hy.
```
=> r"\
... foo"
... "
... ""
... """
... what?
... \
... \
... \\
... \\\
... \\\\
... \"
... \\"
... \\\"
... \\\\"
... C-c C-c
KeyboardInterrupt
```
Something is broken...
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `hy/lex/lexer.py`
Content:
```
1 # Copyright (c) 2013 Nicolas Dandrimont <[email protected]>
2 #
3 # Permission is hereby granted, free of charge, to any person obtaining a
4 # copy of this software and associated documentation files (the "Software"),
5 # to deal in the Software without restriction, including without limitation
6 # the rights to use, copy, modify, merge, publish, distribute, sublicense,
7 # and/or sell copies of the Software, and to permit persons to whom the
8 # Software is furnished to do so, subject to the following conditions:
9 #
10 # The above copyright notice and this permission notice shall be included in
11 # all copies or substantial portions of the Software.
12 #
13 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
16 # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
18 # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
19 # DEALINGS IN THE SOFTWARE.
20
21 from rply import LexerGenerator
22
23
24 lg = LexerGenerator()
25
26
27 # A regexp for something that should end a quoting/unquoting operator
28 # i.e. a space or a closing brace/paren/curly
29 end_quote = r'(?![\s\)\]\}])'
30
31
32 lg.add('LPAREN', r'\(')
33 lg.add('RPAREN', r'\)')
34 lg.add('LBRACKET', r'\[')
35 lg.add('RBRACKET', r'\]')
36 lg.add('LCURLY', r'\{')
37 lg.add('RCURLY', r'\}')
38 lg.add('HLCURLY', r'#\{')
39 lg.add('QUOTE', r'\'%s' % end_quote)
40 lg.add('QUASIQUOTE', r'`%s' % end_quote)
41 lg.add('UNQUOTESPLICE', r'~@%s' % end_quote)
42 lg.add('UNQUOTE', r'~%s' % end_quote)
43 lg.add('HASHBANG', r'#!.*[^\r\n]')
44 lg.add('HASHREADER', r'#[^{]')
45
46 # A regexp which matches incomplete strings, used to support
47 # multi-line strings in the interpreter
48 partial_string = r'''(?x)
49 (?:u|r|ur|ru)? # prefix
50 " # start string
51 (?:
52 | [^"\\] # non-quote or backslash
53 | \\. # or escaped single character
54 | \\x[0-9a-fA-F]{2} # or escaped raw character
55 | \\u[0-9a-fA-F]{4} # or unicode escape
56 | \\U[0-9a-fA-F]{8} # or long unicode escape
57 )* # one or more times
58 '''
59
60 lg.add('STRING', r'%s"' % partial_string)
61 lg.add('PARTIAL_STRING', partial_string)
62
63 lg.add('IDENTIFIER', r'[^()\[\]{}\'"\s;]+')
64
65
66 lg.ignore(r';.*(?=\r|\n|$)')
67 lg.ignore(r'\s+')
68
69
70 lexer = lg.build()
71
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/hy/lex/lexer.py b/hy/lex/lexer.py
--- a/hy/lex/lexer.py
+++ b/hy/lex/lexer.py
@@ -50,7 +50,7 @@
" # start string
(?:
| [^"\\] # non-quote or backslash
- | \\. # or escaped single character
+ | \\(.|\n) # or escaped single character or newline
| \\x[0-9a-fA-F]{2} # or escaped raw character
| \\u[0-9a-fA-F]{4} # or unicode escape
| \\U[0-9a-fA-F]{8} # or long unicode escape
| {"golden_diff": "diff --git a/hy/lex/lexer.py b/hy/lex/lexer.py\n--- a/hy/lex/lexer.py\n+++ b/hy/lex/lexer.py\n@@ -50,7 +50,7 @@\n \" # start string\n (?:\n | [^\"\\\\] # non-quote or backslash\n- | \\\\. # or escaped single character\n+ | \\\\(.|\\n) # or escaped single character or newline\n | \\\\x[0-9a-fA-F]{2} # or escaped raw character\n | \\\\u[0-9a-fA-F]{4} # or unicode escape\n | \\\\U[0-9a-fA-F]{8} # or long unicode escape\n", "issue": "Bug in reader: can't terminate string literals containing \\ at end of line.\nCommon Lisp had docstrings before Python existed. Hy should totally support this.\n\n``` Python\n>>> def foo():\n \"\"\"I'm a docstring!\"\"\"\n return 42\n\n>>> help(foo)\nHelp on function foo in module __main__:\n\nfoo()\n I'm a docstring!\n\n>>> foo.__doc__\n\"I'm a docstring!\"\n```\n\nLet's try it in Hy.\n\n```\n=> (defn foo ()\n... \"\"\"I'm a docstring\"\"\"\n... 42)\n=> (help foo)\nHelp on function foo:\n\nfoo()\n\n=> foo.__doc__\n''\n```\n\n?!\nWhere's my docstring?\n(some time later...)\nLet's try hy2py.\n\n``` Python\ndef foo():\n ''\n \"I'm a docstring!\"\n ''\n return 42\n```\n\nI see what you did there, Hy. Yeah, there should probably be a warning about that somewhere. Triple quotes don't work. Why does Python use them anyway? So we can include newlines mostly. Common Lisp strings do that already. Does Hy?\n\n```\n=> \"one\n... two\"\n'one\\ntwo\n```\n\nYup. Looks good.\nEscapes also appear to work. But what if I actually wanted backslashes? Do raw strings work?\n\n```\n=> r\"\\foo\"\n'\\\\foo\n```\n\nNice. Now **raw** triple quoted strings? This works in Python:\n\n``` Python\n>>> r\"\"\"\\\nfoo\"\"\"\n'\\\\\\nfoo'\n```\n\nLet's use what we've learned and try it in Hy.\n\n```\n=> r\"\\\n... foo\"\n... \"\n... \"\"\n... \"\"\"\n... what?\n... \\\n... \\\n... \\\\\n... \\\\\\\n... \\\\\\\\\n... \\\"\n... \\\\\"\n... \\\\\\\"\n... \\\\\\\\\"\n... C-c C-c\n\nKeyboardInterrupt\n```\n\nSomething is broken...\n\n", "before_files": [{"content": "# Copyright (c) 2013 Nicolas Dandrimont <[email protected]>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a\n# copy of this software and associated documentation files (the \"Software\"),\n# to deal in the Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish, distribute, sublicense,\n# and/or sell copies of the Software, and to permit persons to whom the\n# Software is furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL\n# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n# DEALINGS IN THE SOFTWARE.\n\nfrom rply import LexerGenerator\n\n\nlg = LexerGenerator()\n\n\n# A regexp for something that should end a quoting/unquoting operator\n# i.e. 
a space or a closing brace/paren/curly\nend_quote = r'(?![\\s\\)\\]\\}])'\n\n\nlg.add('LPAREN', r'\\(')\nlg.add('RPAREN', r'\\)')\nlg.add('LBRACKET', r'\\[')\nlg.add('RBRACKET', r'\\]')\nlg.add('LCURLY', r'\\{')\nlg.add('RCURLY', r'\\}')\nlg.add('HLCURLY', r'#\\{')\nlg.add('QUOTE', r'\\'%s' % end_quote)\nlg.add('QUASIQUOTE', r'`%s' % end_quote)\nlg.add('UNQUOTESPLICE', r'~@%s' % end_quote)\nlg.add('UNQUOTE', r'~%s' % end_quote)\nlg.add('HASHBANG', r'#!.*[^\\r\\n]')\nlg.add('HASHREADER', r'#[^{]')\n\n# A regexp which matches incomplete strings, used to support\n# multi-line strings in the interpreter\npartial_string = r'''(?x)\n (?:u|r|ur|ru)? # prefix\n \" # start string\n (?:\n | [^\"\\\\] # non-quote or backslash\n | \\\\. # or escaped single character\n | \\\\x[0-9a-fA-F]{2} # or escaped raw character\n | \\\\u[0-9a-fA-F]{4} # or unicode escape\n | \\\\U[0-9a-fA-F]{8} # or long unicode escape\n )* # one or more times\n'''\n\nlg.add('STRING', r'%s\"' % partial_string)\nlg.add('PARTIAL_STRING', partial_string)\n\nlg.add('IDENTIFIER', r'[^()\\[\\]{}\\'\"\\s;]+')\n\n\nlg.ignore(r';.*(?=\\r|\\n|$)')\nlg.ignore(r'\\s+')\n\n\nlexer = lg.build()\n", "path": "hy/lex/lexer.py"}], "after_files": [{"content": "# Copyright (c) 2013 Nicolas Dandrimont <[email protected]>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a\n# copy of this software and associated documentation files (the \"Software\"),\n# to deal in the Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish, distribute, sublicense,\n# and/or sell copies of the Software, and to permit persons to whom the\n# Software is furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL\n# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n# DEALINGS IN THE SOFTWARE.\n\nfrom rply import LexerGenerator\n\n\nlg = LexerGenerator()\n\n\n# A regexp for something that should end a quoting/unquoting operator\n# i.e. a space or a closing brace/paren/curly\nend_quote = r'(?![\\s\\)\\]\\}])'\n\n\nlg.add('LPAREN', r'\\(')\nlg.add('RPAREN', r'\\)')\nlg.add('LBRACKET', r'\\[')\nlg.add('RBRACKET', r'\\]')\nlg.add('LCURLY', r'\\{')\nlg.add('RCURLY', r'\\}')\nlg.add('HLCURLY', r'#\\{')\nlg.add('QUOTE', r'\\'%s' % end_quote)\nlg.add('QUASIQUOTE', r'`%s' % end_quote)\nlg.add('UNQUOTESPLICE', r'~@%s' % end_quote)\nlg.add('UNQUOTE', r'~%s' % end_quote)\nlg.add('HASHBANG', r'#!.*[^\\r\\n]')\nlg.add('HASHREADER', r'#[^{]')\n\n# A regexp which matches incomplete strings, used to support\n# multi-line strings in the interpreter\npartial_string = r'''(?x)\n (?:u|r|ur|ru)? 
# prefix\n \" # start string\n (?:\n | [^\"\\\\] # non-quote or backslash\n | \\\\(.|\\n) # or escaped single character or newline\n | \\\\x[0-9a-fA-F]{2} # or escaped raw character\n | \\\\u[0-9a-fA-F]{4} # or unicode escape\n | \\\\U[0-9a-fA-F]{8} # or long unicode escape\n )* # one or more times\n'''\n\nlg.add('STRING', r'%s\"' % partial_string)\nlg.add('PARTIAL_STRING', partial_string)\n\nlg.add('IDENTIFIER', r'[^()\\[\\]{}\\'\"\\s;]+')\n\n\nlg.ignore(r';.*(?=\\r|\\n|$)')\nlg.ignore(r'\\s+')\n\n\nlexer = lg.build()\n", "path": "hy/lex/lexer.py"}]} | 1,497 | 171 |
gh_patches_debug_5989 | rasdani/github-patches | git_diff | cal-itp__benefits-999 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove agency index pass-through
With the new Courtesy Card designs, we have a better context-setting Agency Index (homepage). Let's remove the auto-pass-through that currently sends the user straight to Eligibility Start.
## Acceptance Criteria
<!-- Remember to consider edge cases -->
- [ ] A user lands on the Agency Index page when viewing the Benefits app
- [ ] The Agency Index continue button takes the user to the Eligibility Index page
## Additional context
Work on the new homepage is happening in #937
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `benefits/core/views.py`
Content:
```
1 """
2 The core application: view definition for the root of the webapp.
3 """
4 from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotFound, HttpResponseServerError
5 from django.shortcuts import redirect
6 from django.template import loader
7 from django.template.response import TemplateResponse
8 from django.urls import reverse
9 from django.utils.translation import gettext as _
10
11 from . import models, session, viewmodels
12 from .middleware import pageview_decorator
13
14 ROUTE_INDEX = "core:index"
15 ROUTE_ELIGIBILITY = "eligibility:index"
16 ROUTE_HELP = "core:help"
17
18 TEMPLATE_PAGE = "core/page.html"
19 TEMPLATE_AGENCY = "core/agency_index.html"
20 TEMPLATE_HELP = "core/help.html"
21
22
23 @pageview_decorator
24 def index(request):
25 """View handler for the main entry page."""
26 session.reset(request)
27
28 agencies = models.TransitAgency.all_active()
29
30 if len(agencies) == 1:
31 agency = agencies[0]
32 return redirect(agency.index_url)
33
34 # generate a button to the landing page for each active agency
35 buttons = [viewmodels.Button.outline_primary(text=a.short_name, url=a.index_url) for a in agencies]
36 buttons[0].classes.append("mt-3")
37 buttons[0].label = _("core.pages.index.chooseprovider")
38
39 page = viewmodels.Page(
40 title=_("core.pages.index.title"),
41 headline=_("core.pages.index.headline"),
42 buttons=buttons,
43 classes="home",
44 )
45
46 return TemplateResponse(request, TEMPLATE_PAGE, page.context_dict())
47
48
49 @pageview_decorator
50 def agency_index(request, agency):
51 """View handler for an agency entry page."""
52 session.reset(request)
53 session.update(request, agency=agency, origin=agency.index_url)
54
55 if len(agency.eligibility_verifiers.all()) == 1:
56 return redirect(reverse(ROUTE_ELIGIBILITY))
57
58 button = viewmodels.Button.primary(text=_("core.pages.index.continue"), url=reverse(ROUTE_ELIGIBILITY))
59
60 page = viewmodels.Page(
61 title=_("core.pages.agency_index.title"),
62 headline=_("core.pages.agency_index.mst_cc.headline"),
63 button=button,
64 classes="home",
65 )
66
67 return TemplateResponse(request, TEMPLATE_AGENCY, page.context_dict())
68
69
70 @pageview_decorator
71 def agency_public_key(request, agency):
72 """View handler returns an agency's public key as plain text."""
73 return HttpResponse(agency.public_key_data, content_type="text/plain")
74
75
76 @pageview_decorator
77 def help(request):
78 """View handler for the help page."""
79 if session.active_agency(request):
80 agency = session.agency(request)
81 buttons = viewmodels.Button.agency_contact_links(agency)
82 else:
83 buttons = [btn for a in models.TransitAgency.all_active() for btn in viewmodels.Button.agency_contact_links(a)]
84
85 buttons.append(viewmodels.Button.home(request, _("core.buttons.back")))
86
87 page = viewmodels.Page(
88 title=_("core.buttons.help"),
89 headline=_("core.buttons.help"),
90 buttons=buttons,
91 )
92
93 return TemplateResponse(request, TEMPLATE_HELP, page.context_dict())
94
95
96 @pageview_decorator
97 def bad_request(request, exception, template_name="400.html"):
98 """View handler for HTTP 400 Bad Request responses."""
99 if session.active_agency(request):
100 session.update(request, origin=session.agency(request).index_url)
101 else:
102 session.update(request, origin=reverse(ROUTE_INDEX))
103
104 home = viewmodels.Button.home(request)
105 page = viewmodels.ErrorPage.server_error(button=home)
106 t = loader.get_template(template_name)
107
108 return HttpResponseBadRequest(t.render(page.context_dict()))
109
110
111 @pageview_decorator
112 def csrf_failure(request, reason):
113 """
114 View handler for CSRF_FAILURE_VIEW with custom data.
115 """
116 if session.active_agency(request):
117 session.update(request, origin=session.agency(request).index_url)
118 else:
119 session.update(request, origin=reverse(ROUTE_INDEX))
120
121 home = viewmodels.Button.home(request)
122 page = viewmodels.ErrorPage.not_found(button=home, path=request.path)
123 t = loader.get_template("400.html")
124
125 return HttpResponseNotFound(t.render(page.context_dict()))
126
127
128 @pageview_decorator
129 def page_not_found(request, exception, template_name="404.html"):
130 """View handler for HTTP 404 Not Found responses."""
131 if session.active_agency(request):
132 session.update(request, origin=session.agency(request).index_url)
133 else:
134 session.update(request, origin=reverse(ROUTE_INDEX))
135
136 home = viewmodels.Button.home(request)
137 # show a more user-friendly message instead of not_found
138 page = viewmodels.ErrorPage.user_error(button=home, path=request.path)
139 t = loader.get_template(template_name)
140
141 return HttpResponseNotFound(t.render(page.context_dict()))
142
143
144 @pageview_decorator
145 def server_error(request, template_name="500.html"):
146 """View handler for HTTP 500 Server Error responses."""
147 if session.active_agency(request):
148 session.update(request, origin=session.agency(request).index_url)
149 else:
150 session.update(request, origin=reverse(ROUTE_INDEX))
151
152 home = viewmodels.Button.home(request)
153 page = viewmodels.ErrorPage.server_error(button=home)
154 t = loader.get_template(template_name)
155
156 return HttpResponseServerError(t.render(page.context_dict()))
157
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/benefits/core/views.py b/benefits/core/views.py
--- a/benefits/core/views.py
+++ b/benefits/core/views.py
@@ -52,9 +52,6 @@
session.reset(request)
session.update(request, agency=agency, origin=agency.index_url)
- if len(agency.eligibility_verifiers.all()) == 1:
- return redirect(reverse(ROUTE_ELIGIBILITY))
-
button = viewmodels.Button.primary(text=_("core.pages.index.continue"), url=reverse(ROUTE_ELIGIBILITY))
page = viewmodels.Page(
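A regression test for the new flow could look roughly like this; it assumes pytest-django's `client` fixture, and `first_agency` is a hypothetical fixture standing in for however the suite builds a `TransitAgency` with a single eligibility verifier — the exact case that used to be skipped past.

```python
import pytest


@pytest.mark.django_db
def test_agency_index_renders_instead_of_redirecting(client, first_agency):
    # first_agency: hypothetical fixture for a TransitAgency with a single
    # eligibility verifier — the case that previously triggered the redirect.
    response = client.get(first_agency.index_url)

    assert response.status_code == 200  # was a 302 straight to eligibility:index
    assert "core/agency_index.html" in [t.name for t in response.templates]
```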
| {"golden_diff": "diff --git a/benefits/core/views.py b/benefits/core/views.py\n--- a/benefits/core/views.py\n+++ b/benefits/core/views.py\n@@ -52,9 +52,6 @@\n session.reset(request)\n session.update(request, agency=agency, origin=agency.index_url)\n \n- if len(agency.eligibility_verifiers.all()) == 1:\n- return redirect(reverse(ROUTE_ELIGIBILITY))\n-\n button = viewmodels.Button.primary(text=_(\"core.pages.index.continue\"), url=reverse(ROUTE_ELIGIBILITY))\n \n page = viewmodels.Page(\n", "issue": "Remove agency index pass-through\nWith the new Courtesy Card designs, we have a better context-setting Agency Index (homepage). Let's remove the auto-pass-through that currently sends the user straight to Eligibility Start.\r\n\r\n## Acceptance Criteria\r\n\r\n<!-- Remember to consider edge cases -->\r\n\r\n- [ ] A user lands on the Agency Index page when viewing the Benefits app\r\n- [ ] The Agency Index continue button takes the user to the Eligibility Index page\r\n\r\n## Additional context\r\n\r\nWork on the new homepage is happening in #937\n", "before_files": [{"content": "\"\"\"\nThe core application: view definition for the root of the webapp.\n\"\"\"\nfrom django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotFound, HttpResponseServerError\nfrom django.shortcuts import redirect\nfrom django.template import loader\nfrom django.template.response import TemplateResponse\nfrom django.urls import reverse\nfrom django.utils.translation import gettext as _\n\nfrom . import models, session, viewmodels\nfrom .middleware import pageview_decorator\n\nROUTE_INDEX = \"core:index\"\nROUTE_ELIGIBILITY = \"eligibility:index\"\nROUTE_HELP = \"core:help\"\n\nTEMPLATE_PAGE = \"core/page.html\"\nTEMPLATE_AGENCY = \"core/agency_index.html\"\nTEMPLATE_HELP = \"core/help.html\"\n\n\n@pageview_decorator\ndef index(request):\n \"\"\"View handler for the main entry page.\"\"\"\n session.reset(request)\n\n agencies = models.TransitAgency.all_active()\n\n if len(agencies) == 1:\n agency = agencies[0]\n return redirect(agency.index_url)\n\n # generate a button to the landing page for each active agency\n buttons = [viewmodels.Button.outline_primary(text=a.short_name, url=a.index_url) for a in agencies]\n buttons[0].classes.append(\"mt-3\")\n buttons[0].label = _(\"core.pages.index.chooseprovider\")\n\n page = viewmodels.Page(\n title=_(\"core.pages.index.title\"),\n headline=_(\"core.pages.index.headline\"),\n buttons=buttons,\n classes=\"home\",\n )\n\n return TemplateResponse(request, TEMPLATE_PAGE, page.context_dict())\n\n\n@pageview_decorator\ndef agency_index(request, agency):\n \"\"\"View handler for an agency entry page.\"\"\"\n session.reset(request)\n session.update(request, agency=agency, origin=agency.index_url)\n\n if len(agency.eligibility_verifiers.all()) == 1:\n return redirect(reverse(ROUTE_ELIGIBILITY))\n\n button = viewmodels.Button.primary(text=_(\"core.pages.index.continue\"), url=reverse(ROUTE_ELIGIBILITY))\n\n page = viewmodels.Page(\n title=_(\"core.pages.agency_index.title\"),\n headline=_(\"core.pages.agency_index.mst_cc.headline\"),\n button=button,\n classes=\"home\",\n )\n\n return TemplateResponse(request, TEMPLATE_AGENCY, page.context_dict())\n\n\n@pageview_decorator\ndef agency_public_key(request, agency):\n \"\"\"View handler returns an agency's public key as plain text.\"\"\"\n return HttpResponse(agency.public_key_data, content_type=\"text/plain\")\n\n\n@pageview_decorator\ndef help(request):\n \"\"\"View handler for the help page.\"\"\"\n if 
session.active_agency(request):\n agency = session.agency(request)\n buttons = viewmodels.Button.agency_contact_links(agency)\n else:\n buttons = [btn for a in models.TransitAgency.all_active() for btn in viewmodels.Button.agency_contact_links(a)]\n\n buttons.append(viewmodels.Button.home(request, _(\"core.buttons.back\")))\n\n page = viewmodels.Page(\n title=_(\"core.buttons.help\"),\n headline=_(\"core.buttons.help\"),\n buttons=buttons,\n )\n\n return TemplateResponse(request, TEMPLATE_HELP, page.context_dict())\n\n\n@pageview_decorator\ndef bad_request(request, exception, template_name=\"400.html\"):\n \"\"\"View handler for HTTP 400 Bad Request responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=reverse(ROUTE_INDEX))\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.server_error(button=home)\n t = loader.get_template(template_name)\n\n return HttpResponseBadRequest(t.render(page.context_dict()))\n\n\n@pageview_decorator\ndef csrf_failure(request, reason):\n \"\"\"\n View handler for CSRF_FAILURE_VIEW with custom data.\n \"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=reverse(ROUTE_INDEX))\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.not_found(button=home, path=request.path)\n t = loader.get_template(\"400.html\")\n\n return HttpResponseNotFound(t.render(page.context_dict()))\n\n\n@pageview_decorator\ndef page_not_found(request, exception, template_name=\"404.html\"):\n \"\"\"View handler for HTTP 404 Not Found responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=reverse(ROUTE_INDEX))\n\n home = viewmodels.Button.home(request)\n # show a more user-friendly message instead of not_found\n page = viewmodels.ErrorPage.user_error(button=home, path=request.path)\n t = loader.get_template(template_name)\n\n return HttpResponseNotFound(t.render(page.context_dict()))\n\n\n@pageview_decorator\ndef server_error(request, template_name=\"500.html\"):\n \"\"\"View handler for HTTP 500 Server Error responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=reverse(ROUTE_INDEX))\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.server_error(button=home)\n t = loader.get_template(template_name)\n\n return HttpResponseServerError(t.render(page.context_dict()))\n", "path": "benefits/core/views.py"}], "after_files": [{"content": "\"\"\"\nThe core application: view definition for the root of the webapp.\n\"\"\"\nfrom django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotFound, HttpResponseServerError\nfrom django.shortcuts import redirect\nfrom django.template import loader\nfrom django.template.response import TemplateResponse\nfrom django.urls import reverse\nfrom django.utils.translation import gettext as _\n\nfrom . 
import models, session, viewmodels\nfrom .middleware import pageview_decorator\n\nROUTE_INDEX = \"core:index\"\nROUTE_ELIGIBILITY = \"eligibility:index\"\nROUTE_HELP = \"core:help\"\n\nTEMPLATE_PAGE = \"core/page.html\"\nTEMPLATE_AGENCY = \"core/agency_index.html\"\nTEMPLATE_HELP = \"core/help.html\"\n\n\n@pageview_decorator\ndef index(request):\n \"\"\"View handler for the main entry page.\"\"\"\n session.reset(request)\n\n agencies = models.TransitAgency.all_active()\n\n if len(agencies) == 1:\n agency = agencies[0]\n return redirect(agency.index_url)\n\n # generate a button to the landing page for each active agency\n buttons = [viewmodels.Button.outline_primary(text=a.short_name, url=a.index_url) for a in agencies]\n buttons[0].classes.append(\"mt-3\")\n buttons[0].label = _(\"core.pages.index.chooseprovider\")\n\n page = viewmodels.Page(\n title=_(\"core.pages.index.title\"),\n headline=_(\"core.pages.index.headline\"),\n buttons=buttons,\n classes=\"home\",\n )\n\n return TemplateResponse(request, TEMPLATE_PAGE, page.context_dict())\n\n\n@pageview_decorator\ndef agency_index(request, agency):\n \"\"\"View handler for an agency entry page.\"\"\"\n session.reset(request)\n session.update(request, agency=agency, origin=agency.index_url)\n\n button = viewmodels.Button.primary(text=_(\"core.pages.index.continue\"), url=reverse(ROUTE_ELIGIBILITY))\n\n page = viewmodels.Page(\n title=_(\"core.pages.agency_index.title\"),\n headline=_(\"core.pages.agency_index.mst_cc.headline\"),\n button=button,\n classes=\"home\",\n )\n\n return TemplateResponse(request, TEMPLATE_AGENCY, page.context_dict())\n\n\n@pageview_decorator\ndef agency_public_key(request, agency):\n \"\"\"View handler returns an agency's public key as plain text.\"\"\"\n return HttpResponse(agency.public_key_data, content_type=\"text/plain\")\n\n\n@pageview_decorator\ndef help(request):\n \"\"\"View handler for the help page.\"\"\"\n if session.active_agency(request):\n agency = session.agency(request)\n buttons = viewmodels.Button.agency_contact_links(agency)\n else:\n buttons = [btn for a in models.TransitAgency.all_active() for btn in viewmodels.Button.agency_contact_links(a)]\n\n buttons.append(viewmodels.Button.home(request, _(\"core.buttons.back\")))\n\n page = viewmodels.Page(\n title=_(\"core.buttons.help\"),\n headline=_(\"core.buttons.help\"),\n buttons=buttons,\n )\n\n return TemplateResponse(request, TEMPLATE_HELP, page.context_dict())\n\n\n@pageview_decorator\ndef bad_request(request, exception, template_name=\"400.html\"):\n \"\"\"View handler for HTTP 400 Bad Request responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=reverse(ROUTE_INDEX))\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.server_error(button=home)\n t = loader.get_template(template_name)\n\n return HttpResponseBadRequest(t.render(page.context_dict()))\n\n\n@pageview_decorator\ndef csrf_failure(request, reason):\n \"\"\"\n View handler for CSRF_FAILURE_VIEW with custom data.\n \"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=reverse(ROUTE_INDEX))\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.not_found(button=home, path=request.path)\n t = loader.get_template(\"400.html\")\n\n return HttpResponseNotFound(t.render(page.context_dict()))\n\n\n@pageview_decorator\ndef page_not_found(request, 
exception, template_name=\"404.html\"):\n \"\"\"View handler for HTTP 404 Not Found responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=reverse(ROUTE_INDEX))\n\n home = viewmodels.Button.home(request)\n # show a more user-friendly message instead of not_found\n page = viewmodels.ErrorPage.user_error(button=home, path=request.path)\n t = loader.get_template(template_name)\n\n return HttpResponseNotFound(t.render(page.context_dict()))\n\n\n@pageview_decorator\ndef server_error(request, template_name=\"500.html\"):\n \"\"\"View handler for HTTP 500 Server Error responses.\"\"\"\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=reverse(ROUTE_INDEX))\n\n home = viewmodels.Button.home(request)\n page = viewmodels.ErrorPage.server_error(button=home)\n t = loader.get_template(template_name)\n\n return HttpResponseServerError(t.render(page.context_dict()))\n", "path": "benefits/core/views.py"}]} | 1,865 | 130 |
gh_patches_debug_14429 | rasdani/github-patches | git_diff | python-pillow__Pillow-1649 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ImageSequence.Iterator name is misleading
Hi Pillow developers,
I found this error message to be rather hilarious:
```
TypeError: 'Iterator' object is not an iterator
```
which was produced by this code:
``` python
from PIL import Image, ImageSequence
with Image.open('multipage.tiff', mode='r') as tif:
i = ImageSequence.Iterator(tif)
next(i)
```
It seems that `ImageSequence.Iterator` does not define an `__iter__()` method, which means that it is a sequence type but not an iterator type. In my opinion, this is misleading and I suggest that a proper `__iter__()` method should be added to the class definition.
Thanks.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `PIL/ImageSequence.py`
Content:
```
1 #
2 # The Python Imaging Library.
3 # $Id$
4 #
5 # sequence support classes
6 #
7 # history:
8 # 1997-02-20 fl Created
9 #
10 # Copyright (c) 1997 by Secret Labs AB.
11 # Copyright (c) 1997 by Fredrik Lundh.
12 #
13 # See the README file for information on usage and redistribution.
14 #
15
16 ##
17
18
19 class Iterator(object):
20 """
21 This class implements an iterator object that can be used to loop
22 over an image sequence.
23
24 You can use the ``[]`` operator to access elements by index. This operator
25 will raise an :py:exc:`IndexError` if you try to access a nonexistent
26 frame.
27
28 :param im: An image object.
29 """
30
31 def __init__(self, im):
32 if not hasattr(im, "seek"):
33 raise AttributeError("im must have seek method")
34 self.im = im
35
36 def __getitem__(self, ix):
37 try:
38 if ix:
39 self.im.seek(ix)
40 return self.im
41 except EOFError:
42 raise IndexError # end of sequence
43
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/PIL/ImageSequence.py b/PIL/ImageSequence.py
--- a/PIL/ImageSequence.py
+++ b/PIL/ImageSequence.py
@@ -32,6 +32,7 @@
if not hasattr(im, "seek"):
raise AttributeError("im must have seek method")
self.im = im
+ self.position = 0
def __getitem__(self, ix):
try:
@@ -40,3 +41,17 @@
return self.im
except EOFError:
raise IndexError # end of sequence
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ try:
+ self.im.seek(self.position)
+ self.position += 1
+ return self.im
+ except EOFError:
+ raise StopIteration
+
+ def next(self):
+ return self.__next__()
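With `__iter__` and `__next__` in place, the snippet from the report behaves as expected; this assumes a local multi-frame `multipage.tiff` (any multi-frame image works the same way).

```python
from PIL import Image, ImageSequence

# Assumes a local multi-frame TIFF; any multi-frame image behaves the same.
with Image.open("multipage.tiff") as im:
    frames = ImageSequence.Iterator(im)

    first = next(frames)          # no longer raises TypeError
    print(first.size)

    for frame in frames:          # __iter__ returns self, so this continues
        print(frame.tell(), frame.size)   # from wherever next() left off
```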
| {"golden_diff": "diff --git a/PIL/ImageSequence.py b/PIL/ImageSequence.py\n--- a/PIL/ImageSequence.py\n+++ b/PIL/ImageSequence.py\n@@ -32,6 +32,7 @@\n if not hasattr(im, \"seek\"):\n raise AttributeError(\"im must have seek method\")\n self.im = im\n+ self.position = 0\n \n def __getitem__(self, ix):\n try:\n@@ -40,3 +41,17 @@\n return self.im\n except EOFError:\n raise IndexError # end of sequence\n+\n+ def __iter__(self):\n+ return self\n+\n+ def __next__(self):\n+ try:\n+ self.im.seek(self.position)\n+ self.position += 1\n+ return self.im\n+ except EOFError:\n+ raise StopIteration\n+\n+ def next(self):\n+ return self.__next__()\n", "issue": "ImageSequence.Iterator name is misleading\nHi Pillow developers,\n\nI found this error message to be rather hilarious:\n\n```\nTypeError: 'Iterator' object is not an iterator\n```\n\nwhich was produced by this code:\n\n``` python\nfrom PIL import Image, ImageSequence\n\nwith Image.open('multipage.tiff', mode='r') as tif:\n i = ImageSequence.Iterator(tif)\n next(i)\n```\n\nIt seems that `ImageSequence.Iterator` does not define an `__iter__()` method, which means that it is a sequence type but not an iterator type. In my opinion, this is misleading and I suggest that a proper `__iter__()` method should be added to the class definition.\n\nThanks.\n\n", "before_files": [{"content": "#\n# The Python Imaging Library.\n# $Id$\n#\n# sequence support classes\n#\n# history:\n# 1997-02-20 fl Created\n#\n# Copyright (c) 1997 by Secret Labs AB.\n# Copyright (c) 1997 by Fredrik Lundh.\n#\n# See the README file for information on usage and redistribution.\n#\n\n##\n\n\nclass Iterator(object):\n \"\"\"\n This class implements an iterator object that can be used to loop\n over an image sequence.\n\n You can use the ``[]`` operator to access elements by index. This operator\n will raise an :py:exc:`IndexError` if you try to access a nonexistent\n frame.\n\n :param im: An image object.\n \"\"\"\n\n def __init__(self, im):\n if not hasattr(im, \"seek\"):\n raise AttributeError(\"im must have seek method\")\n self.im = im\n\n def __getitem__(self, ix):\n try:\n if ix:\n self.im.seek(ix)\n return self.im\n except EOFError:\n raise IndexError # end of sequence\n", "path": "PIL/ImageSequence.py"}], "after_files": [{"content": "#\n# The Python Imaging Library.\n# $Id$\n#\n# sequence support classes\n#\n# history:\n# 1997-02-20 fl Created\n#\n# Copyright (c) 1997 by Secret Labs AB.\n# Copyright (c) 1997 by Fredrik Lundh.\n#\n# See the README file for information on usage and redistribution.\n#\n\n##\n\n\nclass Iterator(object):\n \"\"\"\n This class implements an iterator object that can be used to loop\n over an image sequence.\n\n You can use the ``[]`` operator to access elements by index. This operator\n will raise an :py:exc:`IndexError` if you try to access a nonexistent\n frame.\n\n :param im: An image object.\n \"\"\"\n\n def __init__(self, im):\n if not hasattr(im, \"seek\"):\n raise AttributeError(\"im must have seek method\")\n self.im = im\n self.position = 0\n\n def __getitem__(self, ix):\n try:\n if ix:\n self.im.seek(ix)\n return self.im\n except EOFError:\n raise IndexError # end of sequence\n\n def __iter__(self):\n return self\n\n def __next__(self):\n try:\n self.im.seek(self.position)\n self.position += 1\n return self.im\n except EOFError:\n raise StopIteration\n\n def next(self):\n return self.__next__()\n", "path": "PIL/ImageSequence.py"}]} | 728 | 196 |
gh_patches_debug_2054 | rasdani/github-patches | git_diff | carpentries__amy-770 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Names show up multiple times in assignment pulldown
1. Go to an event.
2. Try to assign to assign to someone other than yourself.
3. Selection dialog with pulldown appears so that you can choose person.
4. Some names (currently Greg Wilson and Tracy Teal, possibly others) show up multiple times in that list.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `workshops/lookups.py`
Content:
```
1 from functools import reduce
2 import operator
3 import re
4
5 from django.contrib.auth.models import Group
6 from django.db.models import Q
7
8 from selectable.base import ModelLookup
9 from selectable.registry import registry
10 from selectable.decorators import login_required
11
12 from workshops import models
13
14
15 @login_required
16 class EventLookup(ModelLookup):
17 model = models.Event
18 search_fields = ('slug__icontains', )
19
20
21 @login_required
22 class HostLookup(ModelLookup):
23 model = models.Host
24 search_fields = (
25 'domain__icontains',
26 'fullname__icontains'
27 )
28
29
30 @login_required
31 class PersonLookup(ModelLookup):
32 model = models.Person
33 search_fields = (
34 'personal__icontains',
35 'family__icontains',
36 'email__icontains',
37 'username__icontains'
38 )
39
40 def get_query(self, request, term):
41 """Override this method to allow for additional lookup method: """
42 # original code from selectable.base.ModelLookup.get_query:
43 qs = self.get_queryset()
44 if term:
45 search_filters = []
46 if self.search_fields:
47 for field in self.search_fields:
48 search_filters.append(Q(**{field: term}))
49
50 # tokenizing part
51 tokens = re.split('\s+', term)
52 if len(tokens) == 2:
53 name1, name2 = tokens
54 complex_q = (
55 Q(personal__icontains=name1) & Q(family__icontains=name2)
56 ) | (
57 Q(personal__icontains=name2) & Q(family__icontains=name1)
58 )
59 search_filters.append(complex_q)
60
61 # this is brilliant: it applies OR to all search filters
62 qs = qs.filter(reduce(operator.or_, search_filters))
63
64 return qs
65
66
67 @login_required
68 class AdminLookup(ModelLookup):
69 """The same as PersonLookup, but allows only to select administrators.
70
71 Administrator is anyone with superuser power or in "administrators" group.
72 """
73 model = models.Person
74 search_fields = (
75 'personal__icontains',
76 'family__icontains',
77 'email__icontains',
78 'username__icontains'
79 )
80
81 def get_query(self, request, term):
82 results = super().get_query(request, term)
83 admin_group = Group.objects.get(name='administrators')
84 results = results.filter(
85 Q(is_superuser=True) | Q(groups__in=[admin_group])
86 )
87 return results
88
89
90 @login_required
91 class AirportLookup(ModelLookup):
92 model = models.Airport
93 search_fields = (
94 'iata__icontains',
95 'fullname__icontains'
96 )
97
98
99 registry.register(EventLookup)
100 registry.register(HostLookup)
101 registry.register(PersonLookup)
102 registry.register(AdminLookup)
103 registry.register(AirportLookup)
104
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/workshops/lookups.py b/workshops/lookups.py
--- a/workshops/lookups.py
+++ b/workshops/lookups.py
@@ -83,7 +83,7 @@
admin_group = Group.objects.get(name='administrators')
results = results.filter(
Q(is_superuser=True) | Q(groups__in=[admin_group])
- )
+ ).distinct()
return results
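The duplicates come from the OR across the `groups` join: a person who is a superuser and also belongs to one or more groups can be returned once per joined row. A rough ORM sketch of the effect of the fix, with the import path assumed for illustration:

```python
from django.contrib.auth.models import Group
from django.db.models import Q

from workshops.models import Person  # import path assumed for this sketch

admin_group = Group.objects.get(name="administrators")

# ORing across the groups join can return the same Person once per joined
# row (e.g. a superuser who is also in "administrators"), which is how the
# same names appeared several times in the assignment pulldown.
admins = Person.objects.filter(Q(is_superuser=True) | Q(groups__in=[admin_group]))

# distinct() collapses the repeats to one row per person.
admins = admins.distinct()
```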
| {"golden_diff": "diff --git a/workshops/lookups.py b/workshops/lookups.py\n--- a/workshops/lookups.py\n+++ b/workshops/lookups.py\n@@ -83,7 +83,7 @@\n admin_group = Group.objects.get(name='administrators')\n results = results.filter(\n Q(is_superuser=True) | Q(groups__in=[admin_group])\n- )\n+ ).distinct()\n return results\n", "issue": "Names show up multiple times in assignment pulldown\n1. Go to an event.\n2. Try to assign to assign to someone other than yourself.\n3. Selection dialog with pulldown appears so that you can choose person.\n4. Some names (currently Greg Wilson and Tracy Teal, possibly others) show up multiple times in that list.\n\n", "before_files": [{"content": "from functools import reduce\nimport operator\nimport re\n\nfrom django.contrib.auth.models import Group\nfrom django.db.models import Q\n\nfrom selectable.base import ModelLookup\nfrom selectable.registry import registry\nfrom selectable.decorators import login_required\n\nfrom workshops import models\n\n\n@login_required\nclass EventLookup(ModelLookup):\n model = models.Event\n search_fields = ('slug__icontains', )\n\n\n@login_required\nclass HostLookup(ModelLookup):\n model = models.Host\n search_fields = (\n 'domain__icontains',\n 'fullname__icontains'\n )\n\n\n@login_required\nclass PersonLookup(ModelLookup):\n model = models.Person\n search_fields = (\n 'personal__icontains',\n 'family__icontains',\n 'email__icontains',\n 'username__icontains'\n )\n\n def get_query(self, request, term):\n \"\"\"Override this method to allow for additional lookup method: \"\"\"\n # original code from selectable.base.ModelLookup.get_query:\n qs = self.get_queryset()\n if term:\n search_filters = []\n if self.search_fields:\n for field in self.search_fields:\n search_filters.append(Q(**{field: term}))\n\n # tokenizing part\n tokens = re.split('\\s+', term)\n if len(tokens) == 2:\n name1, name2 = tokens\n complex_q = (\n Q(personal__icontains=name1) & Q(family__icontains=name2)\n ) | (\n Q(personal__icontains=name2) & Q(family__icontains=name1)\n )\n search_filters.append(complex_q)\n\n # this is brilliant: it applies OR to all search filters\n qs = qs.filter(reduce(operator.or_, search_filters))\n\n return qs\n\n\n@login_required\nclass AdminLookup(ModelLookup):\n \"\"\"The same as PersonLookup, but allows only to select administrators.\n\n Administrator is anyone with superuser power or in \"administrators\" group.\n \"\"\"\n model = models.Person\n search_fields = (\n 'personal__icontains',\n 'family__icontains',\n 'email__icontains',\n 'username__icontains'\n )\n\n def get_query(self, request, term):\n results = super().get_query(request, term)\n admin_group = Group.objects.get(name='administrators')\n results = results.filter(\n Q(is_superuser=True) | Q(groups__in=[admin_group])\n )\n return results\n\n\n@login_required\nclass AirportLookup(ModelLookup):\n model = models.Airport\n search_fields = (\n 'iata__icontains',\n 'fullname__icontains'\n )\n\n\nregistry.register(EventLookup)\nregistry.register(HostLookup)\nregistry.register(PersonLookup)\nregistry.register(AdminLookup)\nregistry.register(AirportLookup)\n", "path": "workshops/lookups.py"}], "after_files": [{"content": "from functools import reduce\nimport operator\nimport re\n\nfrom django.contrib.auth.models import Group\nfrom django.db.models import Q\n\nfrom selectable.base import ModelLookup\nfrom selectable.registry import registry\nfrom selectable.decorators import login_required\n\nfrom workshops import models\n\n\n@login_required\nclass 
EventLookup(ModelLookup):\n model = models.Event\n search_fields = ('slug__icontains', )\n\n\n@login_required\nclass HostLookup(ModelLookup):\n model = models.Host\n search_fields = (\n 'domain__icontains',\n 'fullname__icontains'\n )\n\n\n@login_required\nclass PersonLookup(ModelLookup):\n model = models.Person\n search_fields = (\n 'personal__icontains',\n 'family__icontains',\n 'email__icontains',\n 'username__icontains'\n )\n\n def get_query(self, request, term):\n \"\"\"Override this method to allow for additional lookup method: \"\"\"\n # original code from selectable.base.ModelLookup.get_query:\n qs = self.get_queryset()\n if term:\n search_filters = []\n if self.search_fields:\n for field in self.search_fields:\n search_filters.append(Q(**{field: term}))\n\n # tokenizing part\n tokens = re.split('\\s+', term)\n if len(tokens) == 2:\n name1, name2 = tokens\n complex_q = (\n Q(personal__icontains=name1) & Q(family__icontains=name2)\n ) | (\n Q(personal__icontains=name2) & Q(family__icontains=name1)\n )\n search_filters.append(complex_q)\n\n # this is brilliant: it applies OR to all search filters\n qs = qs.filter(reduce(operator.or_, search_filters))\n\n return qs\n\n\n@login_required\nclass AdminLookup(ModelLookup):\n \"\"\"The same as PersonLookup, but allows only to select administrators.\n\n Administrator is anyone with superuser power or in \"administrators\" group.\n \"\"\"\n model = models.Person\n search_fields = (\n 'personal__icontains',\n 'family__icontains',\n 'email__icontains',\n 'username__icontains'\n )\n\n def get_query(self, request, term):\n results = super().get_query(request, term)\n admin_group = Group.objects.get(name='administrators')\n results = results.filter(\n Q(is_superuser=True) | Q(groups__in=[admin_group])\n ).distinct()\n return results\n\n\n@login_required\nclass AirportLookup(ModelLookup):\n model = models.Airport\n search_fields = (\n 'iata__icontains',\n 'fullname__icontains'\n )\n\n\nregistry.register(EventLookup)\nregistry.register(HostLookup)\nregistry.register(PersonLookup)\nregistry.register(AdminLookup)\nregistry.register(AirportLookup)\n", "path": "workshops/lookups.py"}]} | 1,118 | 93 |
gh_patches_debug_34157 | rasdani/github-patches | git_diff | Textualize__textual-2095 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Consider updating `DirectoryTree` so that it takes `Path` as well as a `str` as the path to browse
Some people tend to favour using `Path` over `str` for paths and the like, so I feel it would be an idea to accept a `Path` as the path.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/textual/widgets/_directory_tree.py`
Content:
```
1 from __future__ import annotations
2
3 from dataclasses import dataclass
4 from pathlib import Path
5 from typing import ClassVar
6
7 from rich.style import Style
8 from rich.text import Text, TextType
9
10 from .._types import MessageTarget
11 from ..message import Message
12 from ._tree import TOGGLE_STYLE, Tree, TreeNode
13
14
15 @dataclass
16 class DirEntry:
17 """Attaches directory information ot a node."""
18
19 path: str
20 is_dir: bool
21 loaded: bool = False
22
23
24 class DirectoryTree(Tree[DirEntry]):
25 """A Tree widget that presents files and directories.
26
27 Args:
28 path: Path to directory.
29 name: The name of the widget, or None for no name. Defaults to None.
30 id: The ID of the widget in the DOM, or None for no ID. Defaults to None.
31 classes: A space-separated list of classes, or None for no classes. Defaults to None.
32 disabled: Whether the directory tree is disabled or not.
33 """
34
35 COMPONENT_CLASSES: ClassVar[set[str]] = {
36 "directory-tree--folder",
37 "directory-tree--file",
38 "directory-tree--extension",
39 "directory-tree--hidden",
40 }
41 """
42 | Class | Description |
43 | :- | :- |
44 | `directory-tree--extension` | Target the extension of a file name. |
45 | `directory-tree--file` | Target files in the directory structure. |
46 | `directory-tree--folder` | Target folders in the directory structure. |
47 | `directory-tree--hidden` | Target hidden items in the directory structure. |
48
49 See also the [component classes for `Tree`][textual.widgets.Tree.COMPONENT_CLASSES].
50 """
51
52 DEFAULT_CSS = """
53 DirectoryTree > .directory-tree--folder {
54 text-style: bold;
55 }
56
57 DirectoryTree > .directory-tree--file {
58
59 }
60
61 DirectoryTree > .directory-tree--extension {
62 text-style: italic;
63 }
64
65 DirectoryTree > .directory-tree--hidden {
66 color: $text 50%;
67 }
68 """
69
70 class FileSelected(Message, bubble=True):
71 """Posted when a file is selected.
72
73 Can be handled using `on_directory_tree_file_selected` in a subclass of
74 `DirectoryTree` or in a parent widget in the DOM.
75
76 Attributes:
77 path: The path of the file that was selected.
78 """
79
80 def __init__(self, path: str) -> None:
81 self.path: str = path
82 super().__init__()
83
84 def __init__(
85 self,
86 path: str,
87 *,
88 name: str | None = None,
89 id: str | None = None,
90 classes: str | None = None,
91 disabled: bool = False,
92 ) -> None:
93 self.path = path
94 super().__init__(
95 path,
96 data=DirEntry(path, True),
97 name=name,
98 id=id,
99 classes=classes,
100 disabled=disabled,
101 )
102
103 def process_label(self, label: TextType):
104 """Process a str or Text into a label. Maybe overridden in a subclass to modify how labels are rendered.
105
106 Args:
107 label: Label.
108
109 Returns:
110 A Rich Text object.
111 """
112 if isinstance(label, str):
113 text_label = Text(label)
114 else:
115 text_label = label
116 first_line = text_label.split()[0]
117 return first_line
118
119 def render_label(self, node: TreeNode[DirEntry], base_style: Style, style: Style):
120 node_label = node._label.copy()
121 node_label.stylize(style)
122
123 if node._allow_expand:
124 prefix = ("📂 " if node.is_expanded else "📁 ", base_style + TOGGLE_STYLE)
125 node_label.stylize_before(
126 self.get_component_rich_style("directory-tree--folder", partial=True)
127 )
128 else:
129 prefix = (
130 "📄 ",
131 base_style,
132 )
133 node_label.stylize_before(
134 self.get_component_rich_style("directory-tree--file", partial=True),
135 )
136 node_label.highlight_regex(
137 r"\..+$",
138 self.get_component_rich_style(
139 "directory-tree--extension", partial=True
140 ),
141 )
142
143 if node_label.plain.startswith("."):
144 node_label.stylize_before(
145 self.get_component_rich_style("directory-tree--hidden")
146 )
147
148 text = Text.assemble(prefix, node_label)
149 return text
150
151 def load_directory(self, node: TreeNode[DirEntry]) -> None:
152 assert node.data is not None
153 dir_path = Path(node.data.path)
154 node.data.loaded = True
155 directory = sorted(
156 list(dir_path.iterdir()),
157 key=lambda path: (not path.is_dir(), path.name.lower()),
158 )
159 for path in directory:
160 node.add(
161 path.name,
162 data=DirEntry(str(path), path.is_dir()),
163 allow_expand=path.is_dir(),
164 )
165 node.expand()
166
167 def on_mount(self) -> None:
168 self.load_directory(self.root)
169
170 def on_tree_node_expanded(self, event: Tree.NodeSelected) -> None:
171 event.stop()
172 dir_entry = event.node.data
173 if dir_entry is None:
174 return
175 if dir_entry.is_dir:
176 if not dir_entry.loaded:
177 self.load_directory(event.node)
178 else:
179 self.post_message(self.FileSelected(dir_entry.path))
180
181 def on_tree_node_selected(self, event: Tree.NodeSelected) -> None:
182 event.stop()
183 dir_entry = event.node.data
184 if dir_entry is None:
185 return
186 if not dir_entry.is_dir:
187 self.post_message(self.FileSelected(dir_entry.path))
188
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/textual/widgets/_directory_tree.py b/src/textual/widgets/_directory_tree.py
--- a/src/textual/widgets/_directory_tree.py
+++ b/src/textual/widgets/_directory_tree.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import os
from dataclasses import dataclass
from pathlib import Path
from typing import ClassVar
@@ -7,14 +8,13 @@
from rich.style import Style
from rich.text import Text, TextType
-from .._types import MessageTarget
from ..message import Message
from ._tree import TOGGLE_STYLE, Tree, TreeNode
@dataclass
class DirEntry:
- """Attaches directory information ot a node."""
+ """Attaches directory information to a node."""
path: str
is_dir: bool
@@ -26,9 +26,9 @@
Args:
path: Path to directory.
- name: The name of the widget, or None for no name. Defaults to None.
- id: The ID of the widget in the DOM, or None for no ID. Defaults to None.
- classes: A space-separated list of classes, or None for no classes. Defaults to None.
+ name: The name of the widget, or None for no name.
+ id: The ID of the widget in the DOM, or None for no ID.
+ classes: A space-separated list of classes, or None for no classes.
disabled: Whether the directory tree is disabled or not.
"""
@@ -83,17 +83,18 @@
def __init__(
self,
- path: str,
+ path: str | Path,
*,
name: str | None = None,
id: str | None = None,
classes: str | None = None,
disabled: bool = False,
) -> None:
- self.path = path
+ str_path = os.fspath(path)
+ self.path = str_path
super().__init__(
- path,
- data=DirEntry(path, True),
+ str_path,
+ data=DirEntry(str_path, True),
name=name,
id=id,
classes=classes,
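With `os.fspath()` doing the normalisation, both spellings are accepted; a minimal app to confirm, assuming a standard Textual install:

```python
from pathlib import Path

from textual.app import App, ComposeResult
from textual.widgets import DirectoryTree


class PathBrowser(App):
    def compose(self) -> ComposeResult:
        # Both spellings are accepted now; os.fspath() turns the Path into
        # the str that the rest of the widget already works with.
        yield DirectoryTree(Path.home())
        # yield DirectoryTree("/tmp")  # the original str form still works


if __name__ == "__main__":
    PathBrowser().run()
```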
| {"golden_diff": "diff --git a/src/textual/widgets/_directory_tree.py b/src/textual/widgets/_directory_tree.py\n--- a/src/textual/widgets/_directory_tree.py\n+++ b/src/textual/widgets/_directory_tree.py\n@@ -1,5 +1,6 @@\n from __future__ import annotations\n \n+import os\n from dataclasses import dataclass\n from pathlib import Path\n from typing import ClassVar\n@@ -7,14 +8,13 @@\n from rich.style import Style\n from rich.text import Text, TextType\n \n-from .._types import MessageTarget\n from ..message import Message\n from ._tree import TOGGLE_STYLE, Tree, TreeNode\n \n \n @dataclass\n class DirEntry:\n- \"\"\"Attaches directory information ot a node.\"\"\"\n+ \"\"\"Attaches directory information to a node.\"\"\"\n \n path: str\n is_dir: bool\n@@ -26,9 +26,9 @@\n \n Args:\n path: Path to directory.\n- name: The name of the widget, or None for no name. Defaults to None.\n- id: The ID of the widget in the DOM, or None for no ID. Defaults to None.\n- classes: A space-separated list of classes, or None for no classes. Defaults to None.\n+ name: The name of the widget, or None for no name.\n+ id: The ID of the widget in the DOM, or None for no ID.\n+ classes: A space-separated list of classes, or None for no classes.\n disabled: Whether the directory tree is disabled or not.\n \"\"\"\n \n@@ -83,17 +83,18 @@\n \n def __init__(\n self,\n- path: str,\n+ path: str | Path,\n *,\n name: str | None = None,\n id: str | None = None,\n classes: str | None = None,\n disabled: bool = False,\n ) -> None:\n- self.path = path\n+ str_path = os.fspath(path)\n+ self.path = str_path\n super().__init__(\n- path,\n- data=DirEntry(path, True),\n+ str_path,\n+ data=DirEntry(str_path, True),\n name=name,\n id=id,\n classes=classes,\n", "issue": "Consider updating `DirectoryTree` so that it takes `Path` as well as a `str` as the path to browse\nSome people tend to favour using `Path` over `str` for paths and the like, so I feel it would be an idea to accept a `Path` as the path.\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom dataclasses import dataclass\nfrom pathlib import Path\nfrom typing import ClassVar\n\nfrom rich.style import Style\nfrom rich.text import Text, TextType\n\nfrom .._types import MessageTarget\nfrom ..message import Message\nfrom ._tree import TOGGLE_STYLE, Tree, TreeNode\n\n\n@dataclass\nclass DirEntry:\n \"\"\"Attaches directory information ot a node.\"\"\"\n\n path: str\n is_dir: bool\n loaded: bool = False\n\n\nclass DirectoryTree(Tree[DirEntry]):\n \"\"\"A Tree widget that presents files and directories.\n\n Args:\n path: Path to directory.\n name: The name of the widget, or None for no name. Defaults to None.\n id: The ID of the widget in the DOM, or None for no ID. Defaults to None.\n classes: A space-separated list of classes, or None for no classes. Defaults to None.\n disabled: Whether the directory tree is disabled or not.\n \"\"\"\n\n COMPONENT_CLASSES: ClassVar[set[str]] = {\n \"directory-tree--folder\",\n \"directory-tree--file\",\n \"directory-tree--extension\",\n \"directory-tree--hidden\",\n }\n \"\"\"\n | Class | Description |\n | :- | :- |\n | `directory-tree--extension` | Target the extension of a file name. |\n | `directory-tree--file` | Target files in the directory structure. |\n | `directory-tree--folder` | Target folders in the directory structure. |\n | `directory-tree--hidden` | Target hidden items in the directory structure. 
|\n\n See also the [component classes for `Tree`][textual.widgets.Tree.COMPONENT_CLASSES].\n \"\"\"\n\n DEFAULT_CSS = \"\"\"\n DirectoryTree > .directory-tree--folder {\n text-style: bold;\n }\n\n DirectoryTree > .directory-tree--file {\n\n }\n\n DirectoryTree > .directory-tree--extension {\n text-style: italic;\n }\n\n DirectoryTree > .directory-tree--hidden {\n color: $text 50%;\n }\n \"\"\"\n\n class FileSelected(Message, bubble=True):\n \"\"\"Posted when a file is selected.\n\n Can be handled using `on_directory_tree_file_selected` in a subclass of\n `DirectoryTree` or in a parent widget in the DOM.\n\n Attributes:\n path: The path of the file that was selected.\n \"\"\"\n\n def __init__(self, path: str) -> None:\n self.path: str = path\n super().__init__()\n\n def __init__(\n self,\n path: str,\n *,\n name: str | None = None,\n id: str | None = None,\n classes: str | None = None,\n disabled: bool = False,\n ) -> None:\n self.path = path\n super().__init__(\n path,\n data=DirEntry(path, True),\n name=name,\n id=id,\n classes=classes,\n disabled=disabled,\n )\n\n def process_label(self, label: TextType):\n \"\"\"Process a str or Text into a label. Maybe overridden in a subclass to modify how labels are rendered.\n\n Args:\n label: Label.\n\n Returns:\n A Rich Text object.\n \"\"\"\n if isinstance(label, str):\n text_label = Text(label)\n else:\n text_label = label\n first_line = text_label.split()[0]\n return first_line\n\n def render_label(self, node: TreeNode[DirEntry], base_style: Style, style: Style):\n node_label = node._label.copy()\n node_label.stylize(style)\n\n if node._allow_expand:\n prefix = (\"\ud83d\udcc2 \" if node.is_expanded else \"\ud83d\udcc1 \", base_style + TOGGLE_STYLE)\n node_label.stylize_before(\n self.get_component_rich_style(\"directory-tree--folder\", partial=True)\n )\n else:\n prefix = (\n \"\ud83d\udcc4 \",\n base_style,\n )\n node_label.stylize_before(\n self.get_component_rich_style(\"directory-tree--file\", partial=True),\n )\n node_label.highlight_regex(\n r\"\\..+$\",\n self.get_component_rich_style(\n \"directory-tree--extension\", partial=True\n ),\n )\n\n if node_label.plain.startswith(\".\"):\n node_label.stylize_before(\n self.get_component_rich_style(\"directory-tree--hidden\")\n )\n\n text = Text.assemble(prefix, node_label)\n return text\n\n def load_directory(self, node: TreeNode[DirEntry]) -> None:\n assert node.data is not None\n dir_path = Path(node.data.path)\n node.data.loaded = True\n directory = sorted(\n list(dir_path.iterdir()),\n key=lambda path: (not path.is_dir(), path.name.lower()),\n )\n for path in directory:\n node.add(\n path.name,\n data=DirEntry(str(path), path.is_dir()),\n allow_expand=path.is_dir(),\n )\n node.expand()\n\n def on_mount(self) -> None:\n self.load_directory(self.root)\n\n def on_tree_node_expanded(self, event: Tree.NodeSelected) -> None:\n event.stop()\n dir_entry = event.node.data\n if dir_entry is None:\n return\n if dir_entry.is_dir:\n if not dir_entry.loaded:\n self.load_directory(event.node)\n else:\n self.post_message(self.FileSelected(dir_entry.path))\n\n def on_tree_node_selected(self, event: Tree.NodeSelected) -> None:\n event.stop()\n dir_entry = event.node.data\n if dir_entry is None:\n return\n if not dir_entry.is_dir:\n self.post_message(self.FileSelected(dir_entry.path))\n", "path": "src/textual/widgets/_directory_tree.py"}], "after_files": [{"content": "from __future__ import annotations\n\nimport os\nfrom dataclasses import dataclass\nfrom pathlib import Path\nfrom typing import 
ClassVar\n\nfrom rich.style import Style\nfrom rich.text import Text, TextType\n\nfrom ..message import Message\nfrom ._tree import TOGGLE_STYLE, Tree, TreeNode\n\n\n@dataclass\nclass DirEntry:\n \"\"\"Attaches directory information to a node.\"\"\"\n\n path: str\n is_dir: bool\n loaded: bool = False\n\n\nclass DirectoryTree(Tree[DirEntry]):\n \"\"\"A Tree widget that presents files and directories.\n\n Args:\n path: Path to directory.\n name: The name of the widget, or None for no name.\n id: The ID of the widget in the DOM, or None for no ID.\n classes: A space-separated list of classes, or None for no classes.\n disabled: Whether the directory tree is disabled or not.\n \"\"\"\n\n COMPONENT_CLASSES: ClassVar[set[str]] = {\n \"directory-tree--folder\",\n \"directory-tree--file\",\n \"directory-tree--extension\",\n \"directory-tree--hidden\",\n }\n \"\"\"\n | Class | Description |\n | :- | :- |\n | `directory-tree--extension` | Target the extension of a file name. |\n | `directory-tree--file` | Target files in the directory structure. |\n | `directory-tree--folder` | Target folders in the directory structure. |\n | `directory-tree--hidden` | Target hidden items in the directory structure. |\n\n See also the [component classes for `Tree`][textual.widgets.Tree.COMPONENT_CLASSES].\n \"\"\"\n\n DEFAULT_CSS = \"\"\"\n DirectoryTree > .directory-tree--folder {\n text-style: bold;\n }\n\n DirectoryTree > .directory-tree--file {\n\n }\n\n DirectoryTree > .directory-tree--extension {\n text-style: italic;\n }\n\n DirectoryTree > .directory-tree--hidden {\n color: $text 50%;\n }\n \"\"\"\n\n class FileSelected(Message, bubble=True):\n \"\"\"Posted when a file is selected.\n\n Can be handled using `on_directory_tree_file_selected` in a subclass of\n `DirectoryTree` or in a parent widget in the DOM.\n\n Attributes:\n path: The path of the file that was selected.\n \"\"\"\n\n def __init__(self, path: str) -> None:\n self.path: str = path\n super().__init__()\n\n def __init__(\n self,\n path: str | Path,\n *,\n name: str | None = None,\n id: str | None = None,\n classes: str | None = None,\n disabled: bool = False,\n ) -> None:\n str_path = os.fspath(path)\n self.path = str_path\n super().__init__(\n str_path,\n data=DirEntry(str_path, True),\n name=name,\n id=id,\n classes=classes,\n disabled=disabled,\n )\n\n def process_label(self, label: TextType):\n \"\"\"Process a str or Text into a label. 
Maybe overridden in a subclass to modify how labels are rendered.\n\n Args:\n label: Label.\n\n Returns:\n A Rich Text object.\n \"\"\"\n if isinstance(label, str):\n text_label = Text(label)\n else:\n text_label = label\n first_line = text_label.split()[0]\n return first_line\n\n def render_label(self, node: TreeNode[DirEntry], base_style: Style, style: Style):\n node_label = node._label.copy()\n node_label.stylize(style)\n\n if node._allow_expand:\n prefix = (\"\ud83d\udcc2 \" if node.is_expanded else \"\ud83d\udcc1 \", base_style + TOGGLE_STYLE)\n node_label.stylize_before(\n self.get_component_rich_style(\"directory-tree--folder\", partial=True)\n )\n else:\n prefix = (\n \"\ud83d\udcc4 \",\n base_style,\n )\n node_label.stylize_before(\n self.get_component_rich_style(\"directory-tree--file\", partial=True),\n )\n node_label.highlight_regex(\n r\"\\..+$\",\n self.get_component_rich_style(\n \"directory-tree--extension\", partial=True\n ),\n )\n\n if node_label.plain.startswith(\".\"):\n node_label.stylize_before(\n self.get_component_rich_style(\"directory-tree--hidden\")\n )\n\n text = Text.assemble(prefix, node_label)\n return text\n\n def load_directory(self, node: TreeNode[DirEntry]) -> None:\n assert node.data is not None\n dir_path = Path(node.data.path)\n node.data.loaded = True\n directory = sorted(\n list(dir_path.iterdir()),\n key=lambda path: (not path.is_dir(), path.name.lower()),\n )\n for path in directory:\n node.add(\n path.name,\n data=DirEntry(str(path), path.is_dir()),\n allow_expand=path.is_dir(),\n )\n node.expand()\n\n def on_mount(self) -> None:\n self.load_directory(self.root)\n\n def on_tree_node_expanded(self, event: Tree.NodeSelected) -> None:\n event.stop()\n dir_entry = event.node.data\n if dir_entry is None:\n return\n if dir_entry.is_dir:\n if not dir_entry.loaded:\n self.load_directory(event.node)\n else:\n self.post_message(self.FileSelected(dir_entry.path))\n\n def on_tree_node_selected(self, event: Tree.NodeSelected) -> None:\n event.stop()\n dir_entry = event.node.data\n if dir_entry is None:\n return\n if not dir_entry.is_dir:\n self.post_message(self.FileSelected(dir_entry.path))\n", "path": "src/textual/widgets/_directory_tree.py"}]} | 1,999 | 484 |
gh_patches_debug_8739 | rasdani/github-patches | git_diff | cookiecutter__cookiecutter-1989 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add Python 3.12 support
* Cookiecutter version: 2.5.0
* Python version: 3.12
* Operating System: All
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 """cookiecutter distutils configuration."""
2 from pathlib import Path
3
4 from setuptools import setup
5
6
7 def _get_version() -> str:
8 """Read cookiecutter/VERSION.txt and return its contents."""
9 path = Path("cookiecutter").resolve()
10 version_file = path / "VERSION.txt"
11 return version_file.read_text().strip()
12
13
14 version = _get_version()
15
16
17 with open('README.md', encoding='utf-8') as readme_file:
18 readme = readme_file.read()
19
20
21 requirements = [
22 'binaryornot>=0.4.4',
23 'Jinja2>=2.7,<4.0.0',
24 'click>=7.0,<9.0.0',
25 'pyyaml>=5.3.1',
26 'python-slugify>=4.0.0',
27 'requests>=2.23.0',
28 'arrow',
29 'rich',
30 ]
31
32 setup(
33 name='cookiecutter',
34 version=version,
35 description=(
36 'A command-line utility that creates projects from project '
37 'templates, e.g. creating a Python package project from a '
38 'Python package project template.'
39 ),
40 long_description=readme,
41 long_description_content_type='text/markdown',
42 author='Audrey Feldroy',
43 author_email='[email protected]',
44 url='https://github.com/cookiecutter/cookiecutter',
45 project_urls={
46 "Documentation": "https://cookiecutter.readthedocs.io",
47 "Issues": "https://github.com/cookiecutter/cookiecutter/issues",
48 "Discord": "https://discord.gg/9BrxzPKuEW",
49 },
50 packages=['cookiecutter'],
51 package_dir={'cookiecutter': 'cookiecutter'},
52 entry_points={'console_scripts': ['cookiecutter = cookiecutter.__main__:main']},
53 include_package_data=True,
54 python_requires='>=3.7',
55 install_requires=requirements,
56 license='BSD',
57 zip_safe=False,
58 classifiers=[
59 "Development Status :: 5 - Production/Stable",
60 "Environment :: Console",
61 "Intended Audience :: Developers",
62 "Natural Language :: English",
63 "License :: OSI Approved :: BSD License",
64 "Programming Language :: Python :: 3 :: Only",
65 "Programming Language :: Python :: 3",
66 "Programming Language :: Python :: 3.7",
67 "Programming Language :: Python :: 3.8",
68 "Programming Language :: Python :: 3.9",
69 "Programming Language :: Python :: 3.10",
70 "Programming Language :: Python :: 3.11",
71 "Programming Language :: Python :: Implementation :: CPython",
72 "Programming Language :: Python :: Implementation :: PyPy",
73 "Programming Language :: Python",
74 "Topic :: Software Development",
75 ],
76 keywords=[
77 "cookiecutter",
78 "Python",
79 "projects",
80 "project templates",
81 "Jinja2",
82 "skeleton",
83 "scaffolding",
84 "project directory",
85 "package",
86 "packaging",
87 ],
88 )
89
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -68,6 +68,7 @@
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Programming Language :: Python",
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -68,6 +68,7 @@\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n+ \"Programming Language :: Python :: 3.12\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Programming Language :: Python\",\n", "issue": "Add Python 3.12 support\n* Cookiecutter version: 2.5.0\r\n* Python version: 3.12\r\n* Operating System: All\r\n\n", "before_files": [{"content": "\"\"\"cookiecutter distutils configuration.\"\"\"\nfrom pathlib import Path\n\nfrom setuptools import setup\n\n\ndef _get_version() -> str:\n \"\"\"Read cookiecutter/VERSION.txt and return its contents.\"\"\"\n path = Path(\"cookiecutter\").resolve()\n version_file = path / \"VERSION.txt\"\n return version_file.read_text().strip()\n\n\nversion = _get_version()\n\n\nwith open('README.md', encoding='utf-8') as readme_file:\n readme = readme_file.read()\n\n\nrequirements = [\n 'binaryornot>=0.4.4',\n 'Jinja2>=2.7,<4.0.0',\n 'click>=7.0,<9.0.0',\n 'pyyaml>=5.3.1',\n 'python-slugify>=4.0.0',\n 'requests>=2.23.0',\n 'arrow',\n 'rich',\n]\n\nsetup(\n name='cookiecutter',\n version=version,\n description=(\n 'A command-line utility that creates projects from project '\n 'templates, e.g. creating a Python package project from a '\n 'Python package project template.'\n ),\n long_description=readme,\n long_description_content_type='text/markdown',\n author='Audrey Feldroy',\n author_email='[email protected]',\n url='https://github.com/cookiecutter/cookiecutter',\n project_urls={\n \"Documentation\": \"https://cookiecutter.readthedocs.io\",\n \"Issues\": \"https://github.com/cookiecutter/cookiecutter/issues\",\n \"Discord\": \"https://discord.gg/9BrxzPKuEW\",\n },\n packages=['cookiecutter'],\n package_dir={'cookiecutter': 'cookiecutter'},\n entry_points={'console_scripts': ['cookiecutter = cookiecutter.__main__:main']},\n include_package_data=True,\n python_requires='>=3.7',\n install_requires=requirements,\n license='BSD',\n zip_safe=False,\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Console\",\n \"Intended Audience :: Developers\",\n \"Natural Language :: English\",\n \"License :: OSI Approved :: BSD License\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Programming Language :: Python\",\n \"Topic :: Software Development\",\n ],\n keywords=[\n \"cookiecutter\",\n \"Python\",\n \"projects\",\n \"project templates\",\n \"Jinja2\",\n \"skeleton\",\n \"scaffolding\",\n \"project directory\",\n \"package\",\n \"packaging\",\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "\"\"\"cookiecutter distutils configuration.\"\"\"\nfrom pathlib import Path\n\nfrom setuptools import setup\n\n\ndef _get_version() -> str:\n \"\"\"Read cookiecutter/VERSION.txt and return its contents.\"\"\"\n path = Path(\"cookiecutter\").resolve()\n version_file = path / \"VERSION.txt\"\n return version_file.read_text().strip()\n\n\nversion = _get_version()\n\n\nwith 
open('README.md', encoding='utf-8') as readme_file:\n readme = readme_file.read()\n\n\nrequirements = [\n 'binaryornot>=0.4.4',\n 'Jinja2>=2.7,<4.0.0',\n 'click>=7.0,<9.0.0',\n 'pyyaml>=5.3.1',\n 'python-slugify>=4.0.0',\n 'requests>=2.23.0',\n 'arrow',\n 'rich',\n]\n\nsetup(\n name='cookiecutter',\n version=version,\n description=(\n 'A command-line utility that creates projects from project '\n 'templates, e.g. creating a Python package project from a '\n 'Python package project template.'\n ),\n long_description=readme,\n long_description_content_type='text/markdown',\n author='Audrey Feldroy',\n author_email='[email protected]',\n url='https://github.com/cookiecutter/cookiecutter',\n project_urls={\n \"Documentation\": \"https://cookiecutter.readthedocs.io\",\n \"Issues\": \"https://github.com/cookiecutter/cookiecutter/issues\",\n \"Discord\": \"https://discord.gg/9BrxzPKuEW\",\n },\n packages=['cookiecutter'],\n package_dir={'cookiecutter': 'cookiecutter'},\n entry_points={'console_scripts': ['cookiecutter = cookiecutter.__main__:main']},\n include_package_data=True,\n python_requires='>=3.7',\n install_requires=requirements,\n license='BSD',\n zip_safe=False,\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Console\",\n \"Intended Audience :: Developers\",\n \"Natural Language :: English\",\n \"License :: OSI Approved :: BSD License\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n \"Programming Language :: Python :: 3.12\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Programming Language :: Python\",\n \"Topic :: Software Development\",\n ],\n keywords=[\n \"cookiecutter\",\n \"Python\",\n \"projects\",\n \"project templates\",\n \"Jinja2\",\n \"skeleton\",\n \"scaffolding\",\n \"project directory\",\n \"package\",\n \"packaging\",\n ],\n)\n", "path": "setup.py"}]} | 1,120 | 115 |
gh_patches_debug_3825 | rasdani/github-patches | git_diff | conda__conda-5335 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
conda env update : does not support --prefix
The `conda env update` does not support the `--prefix` / `-p` argument.
```
$ conda info | grep version
conda version : 4.2.12
conda-env version : 4.2.12
conda-build version : 1.20.3
python version : 2.7.11.final.0
requests version : 2.10.0
$ conda env update -p ./conda-env
usage: conda-env [-h] {attach,create,export,list,remove,upload,update} ...
conda-env: error: unrecognized arguments: -p
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conda_env/cli/main_update.py`
Content:
```
1 from argparse import RawDescriptionHelpFormatter
2 import os
3 import textwrap
4 import sys
5
6 from conda import config
7 from conda.cli import common
8 from conda.cli import install as cli_install
9 from conda.misc import touch_nonadmin
10 from ..installers.base import get_installer, InvalidInstaller
11 from .. import specs as install_specs
12 from .. import exceptions
13 # for conda env
14 from conda_env.cli.common import get_prefix
15 from ..exceptions import CondaEnvException
16 description = """
17 Update the current environment based on environment file
18 """
19
20 example = """
21 examples:
22 conda env update
23 conda env update -n=foo
24 conda env update -f=/path/to/environment.yml
25 conda env update --name=foo --file=environment.yml
26 conda env update vader/deathstar
27 """
28
29
30 def configure_parser(sub_parsers):
31 p = sub_parsers.add_parser(
32 'update',
33 formatter_class=RawDescriptionHelpFormatter,
34 description=description,
35 help=description,
36 epilog=example,
37 )
38 p.add_argument(
39 '-n', '--name',
40 action='store',
41 help='name of environment (in %s)' % os.pathsep.join(config.envs_dirs),
42 default=None,
43 )
44 p.add_argument(
45 '-f', '--file',
46 action='store',
47 help='environment definition (default: environment.yml)',
48 default='environment.yml',
49 )
50 p.add_argument(
51 '--prune',
52 action='store_true',
53 default=False,
54 help='remove installed packages not defined in environment.yml',
55 )
56 p.add_argument(
57 '-q', '--quiet',
58 action='store_true',
59 default=False,
60 )
61 p.add_argument(
62 'remote_definition',
63 help='remote environment definition / IPython notebook',
64 action='store',
65 default=None,
66 nargs='?'
67 )
68 common.add_parser_json(p)
69 p.set_defaults(func=execute)
70
71
72 def execute(args, parser):
73 name = args.remote_definition or args.name
74
75 try:
76 spec = install_specs.detect(name=name, filename=args.file,
77 directory=os.getcwd())
78 env = spec.environment
79 except exceptions.SpecNotFound:
80 raise
81
82 if not args.name:
83 if not env.name:
84 # Note, this is a hack fofr get_prefix that assumes argparse results
85 # TODO Refactor common.get_prefix
86 name = os.environ.get('CONDA_DEFAULT_ENV', False)
87 if not name:
88 msg = "Unable to determine environment\n\n"
89 msg += textwrap.dedent("""
90 Please re-run this command with one of the following options:
91
92 * Provide an environment name via --name or -n
93 * Re-run this command inside an activated conda environment.""").lstrip()
94 # TODO Add json support
95 raise CondaEnvException(msg)
96
97 # Note: stubbing out the args object as all of the
98 # conda.cli.common code thinks that name will always
99 # be specified.
100 args.name = env.name
101
102 prefix = get_prefix(args, search=False)
103 # CAN'T Check with this function since it assumes we will create prefix.
104 # cli_install.check_prefix(prefix, json=args.json)
105
106 # TODO, add capability
107 # common.ensure_override_channels_requires_channel(args)
108 # channel_urls = args.channel or ()
109
110 for installer_type, specs in env.dependencies.items():
111 try:
112 installer = get_installer(installer_type)
113 installer.install(prefix, specs, args, env, prune=args.prune)
114 except InvalidInstaller:
115 sys.stderr.write(textwrap.dedent("""
116 Unable to install package for {0}.
117
118 Please double check and ensure you dependencies file has
119 the correct spelling. You might also try installing the
120 conda-env-{0} package to see if provides the required
121 installer.
122 """).lstrip().format(installer_type)
123 )
124 return -1
125
126 touch_nonadmin(prefix)
127 if not args.json:
128 print(cli_install.print_activate(args.name if args.name else prefix))
129
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/conda_env/cli/main_update.py b/conda_env/cli/main_update.py
--- a/conda_env/cli/main_update.py
+++ b/conda_env/cli/main_update.py
@@ -79,7 +79,7 @@
except exceptions.SpecNotFound:
raise
- if not args.name:
+ if not (args.name or args.prefix):
if not env.name:
# Note, this is a hack fofr get_prefix that assumes argparse results
# TODO Refactor common.get_prefix
| {"golden_diff": "diff --git a/conda_env/cli/main_update.py b/conda_env/cli/main_update.py\n--- a/conda_env/cli/main_update.py\n+++ b/conda_env/cli/main_update.py\n@@ -79,7 +79,7 @@\n except exceptions.SpecNotFound:\n raise\n \n- if not args.name:\n+ if not (args.name or args.prefix):\n if not env.name:\n # Note, this is a hack fofr get_prefix that assumes argparse results\n # TODO Refactor common.get_prefix\n", "issue": "conda env update : does not support --prefix\nThe `conda env update` does not support the `--prefix` / `-p` argument.\r\n\r\n```\r\n$ conda info | grep version\r\n conda version : 4.2.12\r\n conda-env version : 4.2.12\r\n conda-build version : 1.20.3\r\n python version : 2.7.11.final.0\r\n requests version : 2.10.0\r\n\r\n$ conda env update -p ./conda-env\r\nusage: conda-env [-h] {attach,create,export,list,remove,upload,update} ...\r\nconda-env: error: unrecognized arguments: -p\r\n```\r\n\n", "before_files": [{"content": "from argparse import RawDescriptionHelpFormatter\nimport os\nimport textwrap\nimport sys\n\nfrom conda import config\nfrom conda.cli import common\nfrom conda.cli import install as cli_install\nfrom conda.misc import touch_nonadmin\nfrom ..installers.base import get_installer, InvalidInstaller\nfrom .. import specs as install_specs\nfrom .. import exceptions\n# for conda env\nfrom conda_env.cli.common import get_prefix\nfrom ..exceptions import CondaEnvException\ndescription = \"\"\"\nUpdate the current environment based on environment file\n\"\"\"\n\nexample = \"\"\"\nexamples:\n conda env update\n conda env update -n=foo\n conda env update -f=/path/to/environment.yml\n conda env update --name=foo --file=environment.yml\n conda env update vader/deathstar\n\"\"\"\n\n\ndef configure_parser(sub_parsers):\n p = sub_parsers.add_parser(\n 'update',\n formatter_class=RawDescriptionHelpFormatter,\n description=description,\n help=description,\n epilog=example,\n )\n p.add_argument(\n '-n', '--name',\n action='store',\n help='name of environment (in %s)' % os.pathsep.join(config.envs_dirs),\n default=None,\n )\n p.add_argument(\n '-f', '--file',\n action='store',\n help='environment definition (default: environment.yml)',\n default='environment.yml',\n )\n p.add_argument(\n '--prune',\n action='store_true',\n default=False,\n help='remove installed packages not defined in environment.yml',\n )\n p.add_argument(\n '-q', '--quiet',\n action='store_true',\n default=False,\n )\n p.add_argument(\n 'remote_definition',\n help='remote environment definition / IPython notebook',\n action='store',\n default=None,\n nargs='?'\n )\n common.add_parser_json(p)\n p.set_defaults(func=execute)\n\n\ndef execute(args, parser):\n name = args.remote_definition or args.name\n\n try:\n spec = install_specs.detect(name=name, filename=args.file,\n directory=os.getcwd())\n env = spec.environment\n except exceptions.SpecNotFound:\n raise\n\n if not args.name:\n if not env.name:\n # Note, this is a hack fofr get_prefix that assumes argparse results\n # TODO Refactor common.get_prefix\n name = os.environ.get('CONDA_DEFAULT_ENV', False)\n if not name:\n msg = \"Unable to determine environment\\n\\n\"\n msg += textwrap.dedent(\"\"\"\n Please re-run this command with one of the following options:\n\n * Provide an environment name via --name or -n\n * Re-run this command inside an activated conda environment.\"\"\").lstrip()\n # TODO Add json support\n raise CondaEnvException(msg)\n\n # Note: stubbing out the args object as all of the\n # conda.cli.common code thinks that name will always\n 
# be specified.\n args.name = env.name\n\n prefix = get_prefix(args, search=False)\n # CAN'T Check with this function since it assumes we will create prefix.\n # cli_install.check_prefix(prefix, json=args.json)\n\n # TODO, add capability\n # common.ensure_override_channels_requires_channel(args)\n # channel_urls = args.channel or ()\n\n for installer_type, specs in env.dependencies.items():\n try:\n installer = get_installer(installer_type)\n installer.install(prefix, specs, args, env, prune=args.prune)\n except InvalidInstaller:\n sys.stderr.write(textwrap.dedent(\"\"\"\n Unable to install package for {0}.\n\n Please double check and ensure you dependencies file has\n the correct spelling. You might also try installing the\n conda-env-{0} package to see if provides the required\n installer.\n \"\"\").lstrip().format(installer_type)\n )\n return -1\n\n touch_nonadmin(prefix)\n if not args.json:\n print(cli_install.print_activate(args.name if args.name else prefix))\n", "path": "conda_env/cli/main_update.py"}], "after_files": [{"content": "from argparse import RawDescriptionHelpFormatter\nimport os\nimport textwrap\nimport sys\n\nfrom conda import config\nfrom conda.cli import common\nfrom conda.cli import install as cli_install\nfrom conda.misc import touch_nonadmin\nfrom ..installers.base import get_installer, InvalidInstaller\nfrom .. import specs as install_specs\nfrom .. import exceptions\n# for conda env\nfrom conda_env.cli.common import get_prefix\nfrom ..exceptions import CondaEnvException\ndescription = \"\"\"\nUpdate the current environment based on environment file\n\"\"\"\n\nexample = \"\"\"\nexamples:\n conda env update\n conda env update -n=foo\n conda env update -f=/path/to/environment.yml\n conda env update --name=foo --file=environment.yml\n conda env update vader/deathstar\n\"\"\"\n\n\ndef configure_parser(sub_parsers):\n p = sub_parsers.add_parser(\n 'update',\n formatter_class=RawDescriptionHelpFormatter,\n description=description,\n help=description,\n epilog=example,\n )\n p.add_argument(\n '-n', '--name',\n action='store',\n help='name of environment (in %s)' % os.pathsep.join(config.envs_dirs),\n default=None,\n )\n p.add_argument(\n '-f', '--file',\n action='store',\n help='environment definition (default: environment.yml)',\n default='environment.yml',\n )\n p.add_argument(\n '--prune',\n action='store_true',\n default=False,\n help='remove installed packages not defined in environment.yml',\n )\n p.add_argument(\n '-q', '--quiet',\n action='store_true',\n default=False,\n )\n p.add_argument(\n 'remote_definition',\n help='remote environment definition / IPython notebook',\n action='store',\n default=None,\n nargs='?'\n )\n common.add_parser_json(p)\n p.set_defaults(func=execute)\n\n\ndef execute(args, parser):\n name = args.remote_definition or args.name\n\n try:\n spec = install_specs.detect(name=name, filename=args.file,\n directory=os.getcwd())\n env = spec.environment\n except exceptions.SpecNotFound:\n raise\n\n if not (args.name or args.prefix):\n if not env.name:\n # Note, this is a hack fofr get_prefix that assumes argparse results\n # TODO Refactor common.get_prefix\n name = os.environ.get('CONDA_DEFAULT_ENV', False)\n if not name:\n msg = \"Unable to determine environment\\n\\n\"\n msg += textwrap.dedent(\"\"\"\n Please re-run this command with one of the following options:\n\n * Provide an environment name via --name or -n\n * Re-run this command inside an activated conda environment.\"\"\").lstrip()\n # TODO Add json support\n raise 
CondaEnvException(msg)\n\n # Note: stubbing out the args object as all of the\n # conda.cli.common code thinks that name will always\n # be specified.\n args.name = env.name\n\n prefix = get_prefix(args, search=False)\n # CAN'T Check with this function since it assumes we will create prefix.\n # cli_install.check_prefix(prefix, json=args.json)\n\n # TODO, add capability\n # common.ensure_override_channels_requires_channel(args)\n # channel_urls = args.channel or ()\n\n for installer_type, specs in env.dependencies.items():\n try:\n installer = get_installer(installer_type)\n installer.install(prefix, specs, args, env, prune=args.prune)\n except InvalidInstaller:\n sys.stderr.write(textwrap.dedent(\"\"\"\n Unable to install package for {0}.\n\n Please double check and ensure you dependencies file has\n the correct spelling. You might also try installing the\n conda-env-{0} package to see if provides the required\n installer.\n \"\"\").lstrip().format(installer_type)\n )\n return -1\n\n touch_nonadmin(prefix)\n if not args.json:\n print(cli_install.print_activate(args.name if args.name else prefix))\n", "path": "conda_env/cli/main_update.py"}]} | 1,564 | 112 |
gh_patches_debug_16946 | rasdani/github-patches | git_diff | comic__grand-challenge.org-1375 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Import error DICOM file
**Describe the bug**
When uploading a DICOM file in which the Window Center and Window Width attributes are set to floating-point values (e.g. 2047.0), the file importer raises an error (see screenshot). The issue can be solved by converting the floating-point values to integers and writing a new DICOM file. 
**To Reproduce**
Steps to reproduce the behavior:
1. Go to 'Reader Studies'.
2. Select a reader study.
3. Go to 'Cases'.
4. Click 'Add Cases'.
5. Click 'Choose Files'.
6. Select a DICOM image with Window Center/Width set to a floating-point value. 
7. Click 'Submit'.
8. An error will be raised.
**Screenshots**
<img width="828" alt="import_error" src="https://user-images.githubusercontent.com/47858231/83017756-817de600-a024-11ea-9094-d7e4d60cb01f.PNG">
--- END ISSUE ---
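
As a hedged, editorial sketch (not part of the report above), the workaround the reporter describes -- rewriting the file with integer window values -- could look roughly like this with pydicom; the file names are placeholders and the sketch assumes single-valued attributes, which is not guaranteed for every DICOM file.

```python
# Illustrative workaround sketch (assumes pydicom is installed and that the
# Window Center/Width attributes each hold a single value).
import pydicom

ds = pydicom.dcmread("input.dcm")
if "WindowCenter" in ds:
    ds.WindowCenter = int(float(ds.WindowCenter))
if "WindowWidth" in ds:
    ds.WindowWidth = int(float(ds.WindowWidth))
ds.save_as("fixed.dcm")
```

The accepted fix further down takes the opposite approach and makes the importer tolerant instead, wrapping the `float(...)` conversion of the metadata values in `try/except`.
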
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app/grandchallenge/cases/image_builders/utils.py`
Content:
```
1 from pathlib import Path
2 from tempfile import TemporaryDirectory, TemporaryFile
3 from typing import AnyStr, Optional, Sequence, Tuple
4 from uuid import uuid4
5
6 import SimpleITK
7 from django.conf import settings
8 from django.core.files import File
9
10 from grandchallenge.cases.models import Image, ImageFile
11
12
13 def convert_itk_to_internal(
14 simple_itk_image: SimpleITK.Image,
15 name: Optional[AnyStr] = None,
16 use_spacing: Optional[bool] = True,
17 ) -> Tuple[Image, Sequence[ImageFile]]:
18 color_space = simple_itk_image.GetNumberOfComponentsPerPixel()
19 color_space = {
20 1: Image.COLOR_SPACE_GRAY,
21 3: Image.COLOR_SPACE_RGB,
22 4: Image.COLOR_SPACE_RGBA,
23 }.get(color_space, None)
24 if color_space is None:
25 raise ValueError("Unknown color space for MetaIO image.")
26
27 with TemporaryDirectory() as work_dir:
28 work_dir = Path(work_dir)
29
30 pk = uuid4()
31 if not name:
32 name = str(pk)
33 SimpleITK.WriteImage(
34 simple_itk_image,
35 str(work_dir / f"{pk}.{settings.ITK_INTERNAL_FILE_FORMAT}"),
36 True,
37 )
38
39 if simple_itk_image.GetDimension() == 4:
40 timepoints = simple_itk_image.GetSize()[-1]
41 else:
42 timepoints = None
43 depth = simple_itk_image.GetDepth()
44 metadata_keys = simple_itk_image.GetMetaDataKeys()
45 window_center = (
46 float(simple_itk_image.GetMetaData("WindowCenter"))
47 if "WindowCenter" in metadata_keys
48 else None
49 )
50 window_width = (
51 float(simple_itk_image.GetMetaData("WindowWidth"))
52 if "WindowWidth" in metadata_keys
53 else None
54 )
55 db_image = Image(
56 pk=pk,
57 name=name,
58 width=simple_itk_image.GetWidth(),
59 height=simple_itk_image.GetHeight(),
60 depth=depth if depth else None,
61 window_center=window_center,
62 window_width=window_width,
63 timepoints=timepoints,
64 resolution_levels=None,
65 color_space=color_space,
66 voxel_width_mm=simple_itk_image.GetSpacing()[0]
67 if use_spacing
68 else None,
69 voxel_height_mm=simple_itk_image.GetSpacing()[1]
70 if use_spacing
71 else None,
72 voxel_depth_mm=simple_itk_image.GetSpacing()[2] if depth else None,
73 )
74 db_image_files = []
75 for _file in work_dir.iterdir():
76 temp_file = TemporaryFile()
77 with open(str(_file), "rb") as open_file:
78 buffer = True
79 while buffer:
80 buffer = open_file.read(1024)
81 temp_file.write(buffer)
82 db_image_file = ImageFile(
83 image=db_image,
84 image_type=ImageFile.IMAGE_TYPE_MHD,
85 file=File(temp_file, name=_file.name),
86 )
87 db_image_files.append(db_image_file)
88
89 return db_image, db_image_files
90
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/app/grandchallenge/cases/image_builders/utils.py b/app/grandchallenge/cases/image_builders/utils.py
--- a/app/grandchallenge/cases/image_builders/utils.py
+++ b/app/grandchallenge/cases/image_builders/utils.py
@@ -41,17 +41,16 @@
else:
timepoints = None
depth = simple_itk_image.GetDepth()
- metadata_keys = simple_itk_image.GetMetaDataKeys()
- window_center = (
- float(simple_itk_image.GetMetaData("WindowCenter"))
- if "WindowCenter" in metadata_keys
- else None
- )
- window_width = (
- float(simple_itk_image.GetMetaData("WindowWidth"))
- if "WindowWidth" in metadata_keys
- else None
- )
+
+ try:
+ window_center = float(simple_itk_image.GetMetaData("WindowCenter"))
+ except (RuntimeError, ValueError):
+ window_center = None
+ try:
+ window_width = float(simple_itk_image.GetMetaData("WindowWidth"))
+ except (RuntimeError, ValueError):
+ window_width = None
+
db_image = Image(
pk=pk,
name=name,
| {"golden_diff": "diff --git a/app/grandchallenge/cases/image_builders/utils.py b/app/grandchallenge/cases/image_builders/utils.py\n--- a/app/grandchallenge/cases/image_builders/utils.py\n+++ b/app/grandchallenge/cases/image_builders/utils.py\n@@ -41,17 +41,16 @@\n else:\n timepoints = None\n depth = simple_itk_image.GetDepth()\n- metadata_keys = simple_itk_image.GetMetaDataKeys()\n- window_center = (\n- float(simple_itk_image.GetMetaData(\"WindowCenter\"))\n- if \"WindowCenter\" in metadata_keys\n- else None\n- )\n- window_width = (\n- float(simple_itk_image.GetMetaData(\"WindowWidth\"))\n- if \"WindowWidth\" in metadata_keys\n- else None\n- )\n+\n+ try:\n+ window_center = float(simple_itk_image.GetMetaData(\"WindowCenter\"))\n+ except (RuntimeError, ValueError):\n+ window_center = None\n+ try:\n+ window_width = float(simple_itk_image.GetMetaData(\"WindowWidth\"))\n+ except (RuntimeError, ValueError):\n+ window_width = None\n+\n db_image = Image(\n pk=pk,\n name=name,\n", "issue": "Import error DICOM file\n**Describe the bug**\r\nWhen uploading a DICOM file in which the Window Center and Window Width attributes are set to floating values (e.g. 2047.0), the file importer raises an error (see screenshot). The issue can be solved by converting the floating values to integers and writing a new DICOM file. \r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Go to 'Reader Studies'.\r\n2. Select a reader study. \r\n3. Go to 'Cases'.\r\n4. Click 'Add Cases'.\r\n5. Click 'Choose Files'. \r\n6. Select a DICOM image with Window Center/Width set to a floating value. \r\n7. Click 'Submit'.\r\n8. An error will be raised.\r\n\r\n**Screenshots**\r\n<img width=\"828\" alt=\"import_error\" src=\"https://user-images.githubusercontent.com/47858231/83017756-817de600-a024-11ea-9094-d7e4d60cb01f.PNG\">\r\n\n", "before_files": [{"content": "from pathlib import Path\nfrom tempfile import TemporaryDirectory, TemporaryFile\nfrom typing import AnyStr, Optional, Sequence, Tuple\nfrom uuid import uuid4\n\nimport SimpleITK\nfrom django.conf import settings\nfrom django.core.files import File\n\nfrom grandchallenge.cases.models import Image, ImageFile\n\n\ndef convert_itk_to_internal(\n simple_itk_image: SimpleITK.Image,\n name: Optional[AnyStr] = None,\n use_spacing: Optional[bool] = True,\n) -> Tuple[Image, Sequence[ImageFile]]:\n color_space = simple_itk_image.GetNumberOfComponentsPerPixel()\n color_space = {\n 1: Image.COLOR_SPACE_GRAY,\n 3: Image.COLOR_SPACE_RGB,\n 4: Image.COLOR_SPACE_RGBA,\n }.get(color_space, None)\n if color_space is None:\n raise ValueError(\"Unknown color space for MetaIO image.\")\n\n with TemporaryDirectory() as work_dir:\n work_dir = Path(work_dir)\n\n pk = uuid4()\n if not name:\n name = str(pk)\n SimpleITK.WriteImage(\n simple_itk_image,\n str(work_dir / f\"{pk}.{settings.ITK_INTERNAL_FILE_FORMAT}\"),\n True,\n )\n\n if simple_itk_image.GetDimension() == 4:\n timepoints = simple_itk_image.GetSize()[-1]\n else:\n timepoints = None\n depth = simple_itk_image.GetDepth()\n metadata_keys = simple_itk_image.GetMetaDataKeys()\n window_center = (\n float(simple_itk_image.GetMetaData(\"WindowCenter\"))\n if \"WindowCenter\" in metadata_keys\n else None\n )\n window_width = (\n float(simple_itk_image.GetMetaData(\"WindowWidth\"))\n if \"WindowWidth\" in metadata_keys\n else None\n )\n db_image = Image(\n pk=pk,\n name=name,\n width=simple_itk_image.GetWidth(),\n height=simple_itk_image.GetHeight(),\n depth=depth if depth else None,\n window_center=window_center,\n 
window_width=window_width,\n timepoints=timepoints,\n resolution_levels=None,\n color_space=color_space,\n voxel_width_mm=simple_itk_image.GetSpacing()[0]\n if use_spacing\n else None,\n voxel_height_mm=simple_itk_image.GetSpacing()[1]\n if use_spacing\n else None,\n voxel_depth_mm=simple_itk_image.GetSpacing()[2] if depth else None,\n )\n db_image_files = []\n for _file in work_dir.iterdir():\n temp_file = TemporaryFile()\n with open(str(_file), \"rb\") as open_file:\n buffer = True\n while buffer:\n buffer = open_file.read(1024)\n temp_file.write(buffer)\n db_image_file = ImageFile(\n image=db_image,\n image_type=ImageFile.IMAGE_TYPE_MHD,\n file=File(temp_file, name=_file.name),\n )\n db_image_files.append(db_image_file)\n\n return db_image, db_image_files\n", "path": "app/grandchallenge/cases/image_builders/utils.py"}], "after_files": [{"content": "from pathlib import Path\nfrom tempfile import TemporaryDirectory, TemporaryFile\nfrom typing import AnyStr, Optional, Sequence, Tuple\nfrom uuid import uuid4\n\nimport SimpleITK\nfrom django.conf import settings\nfrom django.core.files import File\n\nfrom grandchallenge.cases.models import Image, ImageFile\n\n\ndef convert_itk_to_internal(\n simple_itk_image: SimpleITK.Image,\n name: Optional[AnyStr] = None,\n use_spacing: Optional[bool] = True,\n) -> Tuple[Image, Sequence[ImageFile]]:\n color_space = simple_itk_image.GetNumberOfComponentsPerPixel()\n color_space = {\n 1: Image.COLOR_SPACE_GRAY,\n 3: Image.COLOR_SPACE_RGB,\n 4: Image.COLOR_SPACE_RGBA,\n }.get(color_space, None)\n if color_space is None:\n raise ValueError(\"Unknown color space for MetaIO image.\")\n\n with TemporaryDirectory() as work_dir:\n work_dir = Path(work_dir)\n\n pk = uuid4()\n if not name:\n name = str(pk)\n SimpleITK.WriteImage(\n simple_itk_image,\n str(work_dir / f\"{pk}.{settings.ITK_INTERNAL_FILE_FORMAT}\"),\n True,\n )\n\n if simple_itk_image.GetDimension() == 4:\n timepoints = simple_itk_image.GetSize()[-1]\n else:\n timepoints = None\n depth = simple_itk_image.GetDepth()\n\n try:\n window_center = float(simple_itk_image.GetMetaData(\"WindowCenter\"))\n except (RuntimeError, ValueError):\n window_center = None\n try:\n window_width = float(simple_itk_image.GetMetaData(\"WindowWidth\"))\n except (RuntimeError, ValueError):\n window_width = None\n\n db_image = Image(\n pk=pk,\n name=name,\n width=simple_itk_image.GetWidth(),\n height=simple_itk_image.GetHeight(),\n depth=depth if depth else None,\n window_center=window_center,\n window_width=window_width,\n timepoints=timepoints,\n resolution_levels=None,\n color_space=color_space,\n voxel_width_mm=simple_itk_image.GetSpacing()[0]\n if use_spacing\n else None,\n voxel_height_mm=simple_itk_image.GetSpacing()[1]\n if use_spacing\n else None,\n voxel_depth_mm=simple_itk_image.GetSpacing()[2] if depth else None,\n )\n db_image_files = []\n for _file in work_dir.iterdir():\n temp_file = TemporaryFile()\n with open(str(_file), \"rb\") as open_file:\n buffer = True\n while buffer:\n buffer = open_file.read(1024)\n temp_file.write(buffer)\n db_image_file = ImageFile(\n image=db_image,\n image_type=ImageFile.IMAGE_TYPE_MHD,\n file=File(temp_file, name=_file.name),\n )\n db_image_files.append(db_image_file)\n\n return db_image, db_image_files\n", "path": "app/grandchallenge/cases/image_builders/utils.py"}]} | 1,321 | 270 |
gh_patches_debug_15339 | rasdani/github-patches | git_diff | ethereum__web3.py-2730 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ipfshttpclient is unmaintained
* Version: not-relevant
* Python: not-relevant
* OS: not-relevant
* `pip freeze` output
```
not-relevant
```
### What was wrong?
According to https://github.com/ipfs-shipyard/py-ipfs-http-client/issues/316 and https://github.com/NixOS/nixpkgs/issues/185294, it appears that `ipfshttpclient` is currently unmaintained, or at least has unresolved breakages at the moment.
I wanted to understand whether you have plans to migrate to another library, or to mark the IPFS backend as an extra module or deprecate it for now?
### How can it be fixed?
- Drop the IPFS backend (deprecation)
- Move it to an extra module (with appropriate documentation)
- Offer up help to maintain the Python IPFS HTTP client
- Move to another library (homegrown or whatever.)
--- END ISSUE ---
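
As an editorial aside (not part of the issue above), moving the backend behind a setuptools extra would typically mean users opt in at install time; the extra name `ipfs` matches the accepted patch further down, while the guarded import below is only an illustrative sketch.

```python
# Hypothetical usage sketch if IPFS support becomes an optional extra,
# installed with:  pip install "web3[ipfs]"
try:
    import ipfshttpclient  # only available when the 'ipfs' extra is installed
except ImportError as exc:
    raise ImportError(
        "IPFS support requires an optional dependency; "
        "install it with: pip install 'web3[ipfs]'"
    ) from exc
```
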
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 from setuptools import (
3 find_packages,
4 setup,
5 )
6
7 extras_require = {
8 "tester": [
9 "eth-tester[py-evm]==v0.7.0-beta.1",
10 "py-geth>=3.10.0",
11 ],
12 "linter": [
13 "black>=22.1.0",
14 "flake8==3.8.3",
15 "isort>=4.2.15,<4.3.5",
16 "mypy==0.910",
17 "types-setuptools>=57.4.4",
18 "types-requests>=2.26.1",
19 "types-protobuf==3.19.13",
20 ],
21 "docs": [
22 "mock",
23 "click>=5.1",
24 "configparser==3.5.0",
25 "contextlib2>=0.5.4",
26 "py-geth>=3.9.1",
27 "py-solc>=0.4.0",
28 "pytest>=6.2.5",
29 "sphinx>=4.2.0",
30 "sphinx_rtd_theme>=0.5.2",
31 "toposort>=1.4",
32 "towncrier==18.5.0",
33 "urllib3",
34 "wheel",
35 ],
36 "dev": [
37 "bumpversion",
38 "flaky>=3.7.0",
39 "hypothesis>=3.31.2",
40 "importlib-metadata<5.0;python_version<'3.8'",
41 "pytest>=6.2.5",
42 "pytest-asyncio>=0.18.1",
43 "pytest-mock>=1.10",
44 "pytest-pythonpath>=0.3",
45 "pytest-watch>=4.2",
46 "pytest-xdist>=1.29",
47 "setuptools>=38.6.0",
48 "tox>=1.8.0",
49 "tqdm>4.32",
50 "twine>=1.13",
51 "pluggy==0.13.1",
52 "when-changed>=0.3.0",
53 ],
54 }
55
56 extras_require["dev"] = (
57 extras_require["tester"]
58 + extras_require["linter"]
59 + extras_require["docs"]
60 + extras_require["dev"]
61 )
62
63 with open("./README.md") as readme:
64 long_description = readme.read()
65
66 setup(
67 name="web3",
68 # *IMPORTANT*: Don't manually change the version here. Use the 'bumpversion' utility.
69 version="6.0.0-beta.8",
70 description="""Web3.py""",
71 long_description_content_type="text/markdown",
72 long_description=long_description,
73 author="Piper Merriam",
74 author_email="[email protected]",
75 url="https://github.com/ethereum/web3.py",
76 include_package_data=True,
77 install_requires=[
78 "aiohttp>=3.7.4.post0",
79 "eth-abi>=3.0.0",
80 "eth-account>=0.7.0",
81 "eth-hash[pycryptodome]>=0.2.0",
82 "eth-typing>=3.0.0",
83 "eth-utils>=2.0.0",
84 "hexbytes>=0.1.0",
85 "ipfshttpclient==0.8.0a2",
86 "jsonschema>=4.0.0",
87 "lru-dict>=1.1.6",
88 "protobuf>=4.21.6",
89 "pywin32>=223;platform_system=='Windows'",
90 "requests>=2.16.0",
91 # remove typing_extensions after python_requires>=3.8, see web3._utils.compat
92 "typing-extensions>=3.7.4.1,<5;python_version<'3.8'",
93 "websockets>=10.0.0",
94 ],
95 python_requires=">=3.7.2",
96 extras_require=extras_require,
97 py_modules=["web3", "ens", "ethpm"],
98 entry_points={"pytest11": ["pytest_ethereum = web3.tools.pytest_ethereum.plugins"]},
99 license="MIT",
100 zip_safe=False,
101 keywords="ethereum",
102 packages=find_packages(exclude=["tests", "tests.*"]),
103 package_data={"web3": ["py.typed"]},
104 classifiers=[
105 "Development Status :: 5 - Production/Stable",
106 "Intended Audience :: Developers",
107 "License :: OSI Approved :: MIT License",
108 "Natural Language :: English",
109 "Programming Language :: Python :: 3",
110 "Programming Language :: Python :: 3.7",
111 "Programming Language :: Python :: 3.8",
112 "Programming Language :: Python :: 3.9",
113 "Programming Language :: Python :: 3.10",
114 ],
115 )
116
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -51,12 +51,16 @@
"pluggy==0.13.1",
"when-changed>=0.3.0",
],
+ "ipfs": [
+ "ipfshttpclient==0.8.0a2",
+ ],
}
extras_require["dev"] = (
extras_require["tester"]
+ extras_require["linter"]
+ extras_require["docs"]
+ + extras_require["ipfs"]
+ extras_require["dev"]
)
@@ -82,7 +86,6 @@
"eth-typing>=3.0.0",
"eth-utils>=2.0.0",
"hexbytes>=0.1.0",
- "ipfshttpclient==0.8.0a2",
"jsonschema>=4.0.0",
"lru-dict>=1.1.6",
"protobuf>=4.21.6",
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -51,12 +51,16 @@\n \"pluggy==0.13.1\",\n \"when-changed>=0.3.0\",\n ],\n+ \"ipfs\": [\n+ \"ipfshttpclient==0.8.0a2\",\n+ ],\n }\n \n extras_require[\"dev\"] = (\n extras_require[\"tester\"]\n + extras_require[\"linter\"]\n + extras_require[\"docs\"]\n+ + extras_require[\"ipfs\"]\n + extras_require[\"dev\"]\n )\n \n@@ -82,7 +86,6 @@\n \"eth-typing>=3.0.0\",\n \"eth-utils>=2.0.0\",\n \"hexbytes>=0.1.0\",\n- \"ipfshttpclient==0.8.0a2\",\n \"jsonschema>=4.0.0\",\n \"lru-dict>=1.1.6\",\n \"protobuf>=4.21.6\",\n", "issue": "ipfshttpclient is unmaintained\n* Version: not-relevant\r\n* Python: not-relevant\r\n* OS: not-relevant\r\n* `pip freeze` output\r\n\r\n```\r\nnot-relevant\r\n```\r\n\r\n\r\n### What was wrong?\r\n\r\nAccording to https://github.com/ipfs-shipyard/py-ipfs-http-client/issues/316 and https://github.com/NixOS/nixpkgs/issues/185294 ; we surmise that `ipfshttpclient` is currently unmaintained or has some breakages unresolved at the moment.\r\n\r\nI wanted to understand if you had plans to migrate away to another library or mark the IPFS backend as an extra module or deprecated for now?\r\n\r\n### How can it be fixed?\r\n\r\n- Drop the IPFS backend (deprecation)\r\n- Move it as an extra module (with appropriate documentation)\r\n- Offer up help to maintain the Python IPFS HTTP client\r\n- Move to another library (homegrown or whatever.)\n", "before_files": [{"content": "#!/usr/bin/env python\nfrom setuptools import (\n find_packages,\n setup,\n)\n\nextras_require = {\n \"tester\": [\n \"eth-tester[py-evm]==v0.7.0-beta.1\",\n \"py-geth>=3.10.0\",\n ],\n \"linter\": [\n \"black>=22.1.0\",\n \"flake8==3.8.3\",\n \"isort>=4.2.15,<4.3.5\",\n \"mypy==0.910\",\n \"types-setuptools>=57.4.4\",\n \"types-requests>=2.26.1\",\n \"types-protobuf==3.19.13\",\n ],\n \"docs\": [\n \"mock\",\n \"click>=5.1\",\n \"configparser==3.5.0\",\n \"contextlib2>=0.5.4\",\n \"py-geth>=3.9.1\",\n \"py-solc>=0.4.0\",\n \"pytest>=6.2.5\",\n \"sphinx>=4.2.0\",\n \"sphinx_rtd_theme>=0.5.2\",\n \"toposort>=1.4\",\n \"towncrier==18.5.0\",\n \"urllib3\",\n \"wheel\",\n ],\n \"dev\": [\n \"bumpversion\",\n \"flaky>=3.7.0\",\n \"hypothesis>=3.31.2\",\n \"importlib-metadata<5.0;python_version<'3.8'\",\n \"pytest>=6.2.5\",\n \"pytest-asyncio>=0.18.1\",\n \"pytest-mock>=1.10\",\n \"pytest-pythonpath>=0.3\",\n \"pytest-watch>=4.2\",\n \"pytest-xdist>=1.29\",\n \"setuptools>=38.6.0\",\n \"tox>=1.8.0\",\n \"tqdm>4.32\",\n \"twine>=1.13\",\n \"pluggy==0.13.1\",\n \"when-changed>=0.3.0\",\n ],\n}\n\nextras_require[\"dev\"] = (\n extras_require[\"tester\"]\n + extras_require[\"linter\"]\n + extras_require[\"docs\"]\n + extras_require[\"dev\"]\n)\n\nwith open(\"./README.md\") as readme:\n long_description = readme.read()\n\nsetup(\n name=\"web3\",\n # *IMPORTANT*: Don't manually change the version here. 
Use the 'bumpversion' utility.\n version=\"6.0.0-beta.8\",\n description=\"\"\"Web3.py\"\"\",\n long_description_content_type=\"text/markdown\",\n long_description=long_description,\n author=\"Piper Merriam\",\n author_email=\"[email protected]\",\n url=\"https://github.com/ethereum/web3.py\",\n include_package_data=True,\n install_requires=[\n \"aiohttp>=3.7.4.post0\",\n \"eth-abi>=3.0.0\",\n \"eth-account>=0.7.0\",\n \"eth-hash[pycryptodome]>=0.2.0\",\n \"eth-typing>=3.0.0\",\n \"eth-utils>=2.0.0\",\n \"hexbytes>=0.1.0\",\n \"ipfshttpclient==0.8.0a2\",\n \"jsonschema>=4.0.0\",\n \"lru-dict>=1.1.6\",\n \"protobuf>=4.21.6\",\n \"pywin32>=223;platform_system=='Windows'\",\n \"requests>=2.16.0\",\n # remove typing_extensions after python_requires>=3.8, see web3._utils.compat\n \"typing-extensions>=3.7.4.1,<5;python_version<'3.8'\",\n \"websockets>=10.0.0\",\n ],\n python_requires=\">=3.7.2\",\n extras_require=extras_require,\n py_modules=[\"web3\", \"ens\", \"ethpm\"],\n entry_points={\"pytest11\": [\"pytest_ethereum = web3.tools.pytest_ethereum.plugins\"]},\n license=\"MIT\",\n zip_safe=False,\n keywords=\"ethereum\",\n packages=find_packages(exclude=[\"tests\", \"tests.*\"]),\n package_data={\"web3\": [\"py.typed\"]},\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Natural Language :: English\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nfrom setuptools import (\n find_packages,\n setup,\n)\n\nextras_require = {\n \"tester\": [\n \"eth-tester[py-evm]==v0.7.0-beta.1\",\n \"py-geth>=3.10.0\",\n ],\n \"linter\": [\n \"black>=22.1.0\",\n \"flake8==3.8.3\",\n \"isort>=4.2.15,<4.3.5\",\n \"mypy==0.910\",\n \"types-setuptools>=57.4.4\",\n \"types-requests>=2.26.1\",\n \"types-protobuf==3.19.13\",\n ],\n \"docs\": [\n \"mock\",\n \"click>=5.1\",\n \"configparser==3.5.0\",\n \"contextlib2>=0.5.4\",\n \"py-geth>=3.9.1\",\n \"py-solc>=0.4.0\",\n \"pytest>=6.2.5\",\n \"sphinx>=4.2.0\",\n \"sphinx_rtd_theme>=0.5.2\",\n \"toposort>=1.4\",\n \"towncrier==18.5.0\",\n \"urllib3\",\n \"wheel\",\n ],\n \"dev\": [\n \"bumpversion\",\n \"flaky>=3.7.0\",\n \"hypothesis>=3.31.2\",\n \"importlib-metadata<5.0;python_version<'3.8'\",\n \"pytest>=6.2.5\",\n \"pytest-asyncio>=0.18.1\",\n \"pytest-mock>=1.10\",\n \"pytest-pythonpath>=0.3\",\n \"pytest-watch>=4.2\",\n \"pytest-xdist>=1.29\",\n \"setuptools>=38.6.0\",\n \"tox>=1.8.0\",\n \"tqdm>4.32\",\n \"twine>=1.13\",\n \"pluggy==0.13.1\",\n \"when-changed>=0.3.0\",\n ],\n \"ipfs\": [\n \"ipfshttpclient==0.8.0a2\",\n ],\n}\n\nextras_require[\"dev\"] = (\n extras_require[\"tester\"]\n + extras_require[\"linter\"]\n + extras_require[\"docs\"]\n + extras_require[\"ipfs\"]\n + extras_require[\"dev\"]\n)\n\nwith open(\"./README.md\") as readme:\n long_description = readme.read()\n\nsetup(\n name=\"web3\",\n # *IMPORTANT*: Don't manually change the version here. 
Use the 'bumpversion' utility.\n version=\"6.0.0-beta.8\",\n description=\"\"\"Web3.py\"\"\",\n long_description_content_type=\"text/markdown\",\n long_description=long_description,\n author=\"Piper Merriam\",\n author_email=\"[email protected]\",\n url=\"https://github.com/ethereum/web3.py\",\n include_package_data=True,\n install_requires=[\n \"aiohttp>=3.7.4.post0\",\n \"eth-abi>=3.0.0\",\n \"eth-account>=0.7.0\",\n \"eth-hash[pycryptodome]>=0.2.0\",\n \"eth-typing>=3.0.0\",\n \"eth-utils>=2.0.0\",\n \"hexbytes>=0.1.0\",\n \"jsonschema>=4.0.0\",\n \"lru-dict>=1.1.6\",\n \"protobuf>=4.21.6\",\n \"pywin32>=223;platform_system=='Windows'\",\n \"requests>=2.16.0\",\n # remove typing_extensions after python_requires>=3.8, see web3._utils.compat\n \"typing-extensions>=3.7.4.1,<5;python_version<'3.8'\",\n \"websockets>=10.0.0\",\n ],\n python_requires=\">=3.7.2\",\n extras_require=extras_require,\n py_modules=[\"web3\", \"ens\", \"ethpm\"],\n entry_points={\"pytest11\": [\"pytest_ethereum = web3.tools.pytest_ethereum.plugins\"]},\n license=\"MIT\",\n zip_safe=False,\n keywords=\"ethereum\",\n packages=find_packages(exclude=[\"tests\", \"tests.*\"]),\n package_data={\"web3\": [\"py.typed\"]},\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Natural Language :: English\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n ],\n)\n", "path": "setup.py"}]} | 1,769 | 232 |
gh_patches_debug_35711 | rasdani/github-patches | git_diff | readthedocs__readthedocs.org-3683 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Link to open an issue from a failed Build with pre-filled details
Several issues have been reported where information is missing from the details section.
To solve this, I was thinking of generating a link inside this message that could contain all the information pre-filled:

I found that GitHub supports _some_ query arguments in the URL, but I didn't find a way to use those keys/arguments inside the template itself.
https://help.github.com/articles/about-automation-for-issues-and-pull-requests-with-query-parameters/
I think it would be awesome if the user could just click a link and have the username, project URL, etc. pre-filled automatically.
--- END ISSUE ---
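
As an editorial sketch (not part of the issue above), GitHub's documented `title` and `body` query parameters can be URL-encoded to pre-fill a new-issue form; the repository, project, and build values below are placeholders.

```python
# Illustrative sketch: build a pre-filled "new issue" URL with URL-quoted
# query parameters. All concrete values here are placeholders.
from urllib.parse import quote

body = (
    "## Details\n\n"
    "* Project URL: https://readthedocs.org/projects/example-project/\n"
    "* Build URL: https://readthedocs.org/projects/example-project/builds/1234/\n"
    "* Read the Docs username: example-user\n"
)
url = (
    "https://github.com/rtfd/readthedocs.org/issues/new"
    f"?title={quote('Build error with build id #1234')}"
    f"&body={quote(body)}"
)
print(url)
```

The accepted change further down assembles a similar URL inside `BuildDetail.get_context_data`, importing `quote` from `requests.utils` for the same purpose.
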
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `readthedocs/builds/views.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 """Views for builds app."""
4
5 import logging
6
7 from django.contrib import messages
8 from django.contrib.auth.decorators import login_required
9 from django.http import (
10 HttpResponseForbidden,
11 HttpResponsePermanentRedirect,
12 HttpResponseRedirect,
13 )
14 from django.shortcuts import get_object_or_404
15 from django.urls import reverse
16 from django.utils.decorators import method_decorator
17 from django.views.generic import DetailView, ListView
18
19 from readthedocs.builds.models import Build, Version
20 from readthedocs.core.permissions import AdminPermission
21 from readthedocs.core.utils import trigger_build
22 from readthedocs.projects.models import Project
23
24
25 log = logging.getLogger(__name__)
26
27
28 class BuildBase:
29 model = Build
30
31 def get_queryset(self):
32 self.project_slug = self.kwargs.get('project_slug', None)
33 self.project = get_object_or_404(
34 Project.objects.protected(self.request.user),
35 slug=self.project_slug,
36 )
37 queryset = Build.objects.public(
38 user=self.request.user,
39 project=self.project,
40 )
41
42 return queryset
43
44
45 class BuildTriggerMixin:
46
47 @method_decorator(login_required)
48 def post(self, request, project_slug):
49 project = get_object_or_404(Project, slug=project_slug)
50
51 if not AdminPermission.is_admin(request.user, project):
52 return HttpResponseForbidden()
53
54 version_slug = request.POST.get('version_slug')
55 version = get_object_or_404(
56 Version,
57 project=project,
58 slug=version_slug,
59 )
60
61 update_docs_task, build = trigger_build(
62 project=project,
63 version=version,
64 )
65 if (update_docs_task, build) == (None, None):
66 # Build was skipped
67 messages.add_message(
68 request,
69 messages.WARNING,
70 "This project is currently disabled and can't trigger new builds.",
71 )
72 return HttpResponseRedirect(
73 reverse('builds_project_list', args=[project.slug]),
74 )
75
76 return HttpResponseRedirect(
77 reverse('builds_detail', args=[project.slug, build.pk]),
78 )
79
80
81 class BuildList(BuildBase, BuildTriggerMixin, ListView):
82
83 def get_context_data(self, **kwargs):
84 context = super().get_context_data(**kwargs)
85
86 active_builds = self.get_queryset().exclude(
87 state='finished',
88 ).values('id')
89
90 context['project'] = self.project
91 context['active_builds'] = active_builds
92 context['versions'] = Version.objects.public(
93 user=self.request.user,
94 project=self.project,
95 )
96 context['build_qs'] = self.get_queryset()
97
98 return context
99
100
101 class BuildDetail(BuildBase, DetailView):
102 pk_url_kwarg = 'build_pk'
103
104 def get_context_data(self, **kwargs):
105 context = super().get_context_data(**kwargs)
106 context['project'] = self.project
107 return context
108
109
110 # Old build view redirects
111
112
113 def builds_redirect_list(request, project_slug): # pylint: disable=unused-argument
114 return HttpResponsePermanentRedirect(
115 reverse('builds_project_list', args=[project_slug]),
116 )
117
118
119 def builds_redirect_detail(request, project_slug, pk): # pylint: disable=unused-argument
120 return HttpResponsePermanentRedirect(
121 reverse('builds_detail', args=[project_slug, pk]),
122 )
123
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/readthedocs/builds/views.py b/readthedocs/builds/views.py
--- a/readthedocs/builds/views.py
+++ b/readthedocs/builds/views.py
@@ -3,6 +3,7 @@
"""Views for builds app."""
import logging
+import textwrap
from django.contrib import messages
from django.contrib.auth.decorators import login_required
@@ -15,7 +16,10 @@
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.views.generic import DetailView, ListView
+from requests.utils import quote
+from urllib.parse import urlparse
+from readthedocs.doc_builder.exceptions import BuildEnvironmentError
from readthedocs.builds.models import Build, Version
from readthedocs.core.permissions import AdminPermission
from readthedocs.core.utils import trigger_build
@@ -104,6 +108,49 @@
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['project'] = self.project
+
+ build = self.get_object()
+ if build.error != BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(build_id=build.pk):
+ # Do not suggest to open an issue if the error is not generic
+ return context
+
+ scheme = (
+ 'https://github.com/rtfd/readthedocs.org/issues/new'
+ '?title={title}{build_id}'
+ '&body={body}'
+ )
+
+ # TODO: we could use ``.github/ISSUE_TEMPLATE.md`` here, but we would
+ # need to add some variables to it which could impact in the UX when
+ # filling an issue from the web
+ body = """
+ ## Details:
+
+ * Project URL: https://readthedocs.org/projects/{project_slug}/
+ * Build URL(if applicable): https://readthedocs.org{build_path}
+ * Read the Docs username(if applicable): {username}
+
+ ## Expected Result
+
+ *A description of what you wanted to happen*
+
+ ## Actual Result
+
+ *A description of what actually happened*""".format(
+ project_slug=self.project,
+ build_path=self.request.path,
+ username=self.request.user,
+ )
+
+ scheme_dict = {
+ 'title': quote('Build error with build id #'),
+ 'build_id': context['build'].id,
+ 'body': quote(textwrap.dedent(body)),
+ }
+
+ issue_url = scheme.format(**scheme_dict)
+ issue_url = urlparse(issue_url).geturl()
+ context['issue_url'] = issue_url
return context
| {"golden_diff": "diff --git a/readthedocs/builds/views.py b/readthedocs/builds/views.py\n--- a/readthedocs/builds/views.py\n+++ b/readthedocs/builds/views.py\n@@ -3,6 +3,7 @@\n \"\"\"Views for builds app.\"\"\"\n \n import logging\n+import textwrap\n \n from django.contrib import messages\n from django.contrib.auth.decorators import login_required\n@@ -15,7 +16,10 @@\n from django.urls import reverse\n from django.utils.decorators import method_decorator\n from django.views.generic import DetailView, ListView\n+from requests.utils import quote\n+from urllib.parse import urlparse\n \n+from readthedocs.doc_builder.exceptions import BuildEnvironmentError\n from readthedocs.builds.models import Build, Version\n from readthedocs.core.permissions import AdminPermission\n from readthedocs.core.utils import trigger_build\n@@ -104,6 +108,49 @@\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['project'] = self.project\n+\n+ build = self.get_object()\n+ if build.error != BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(build_id=build.pk):\n+ # Do not suggest to open an issue if the error is not generic\n+ return context\n+\n+ scheme = (\n+ 'https://github.com/rtfd/readthedocs.org/issues/new'\n+ '?title={title}{build_id}'\n+ '&body={body}'\n+ )\n+\n+ # TODO: we could use ``.github/ISSUE_TEMPLATE.md`` here, but we would\n+ # need to add some variables to it which could impact in the UX when\n+ # filling an issue from the web\n+ body = \"\"\"\n+ ## Details:\n+\n+ * Project URL: https://readthedocs.org/projects/{project_slug}/\n+ * Build URL(if applicable): https://readthedocs.org{build_path}\n+ * Read the Docs username(if applicable): {username}\n+\n+ ## Expected Result\n+\n+ *A description of what you wanted to happen*\n+\n+ ## Actual Result\n+\n+ *A description of what actually happened*\"\"\".format(\n+ project_slug=self.project,\n+ build_path=self.request.path,\n+ username=self.request.user,\n+ )\n+\n+ scheme_dict = {\n+ 'title': quote('Build error with build id #'),\n+ 'build_id': context['build'].id,\n+ 'body': quote(textwrap.dedent(body)),\n+ }\n+\n+ issue_url = scheme.format(**scheme_dict)\n+ issue_url = urlparse(issue_url).geturl()\n+ context['issue_url'] = issue_url\n return context\n", "issue": "Link to open an issue from a failed Build with pre-filled details\nThere are several issues that are reported where there are missing information in the details section.\r\n\r\nTo solve this I was thinking on generating a link inside this message that could contain all the information pre-filled:\r\n\r\n\r\n\r\n\r\nI found that github supports _some_ query arguments in the URL but I didn't found a way to use those key/args inside the template itself.\r\n\r\nhttps://help.github.com/articles/about-automation-for-issues-and-pull-requests-with-query-parameters/\r\n\r\nI think it would be awesome if the user can just click a link and the username, project url, etc could be pre-filled automatically.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Views for builds app.\"\"\"\n\nimport logging\n\nfrom django.contrib import messages\nfrom django.contrib.auth.decorators import login_required\nfrom django.http import (\n HttpResponseForbidden,\n HttpResponsePermanentRedirect,\n HttpResponseRedirect,\n)\nfrom django.shortcuts import get_object_or_404\nfrom django.urls import reverse\nfrom django.utils.decorators import method_decorator\nfrom django.views.generic import DetailView, ListView\n\nfrom readthedocs.builds.models import Build, 
Version\nfrom readthedocs.core.permissions import AdminPermission\nfrom readthedocs.core.utils import trigger_build\nfrom readthedocs.projects.models import Project\n\n\nlog = logging.getLogger(__name__)\n\n\nclass BuildBase:\n model = Build\n\n def get_queryset(self):\n self.project_slug = self.kwargs.get('project_slug', None)\n self.project = get_object_or_404(\n Project.objects.protected(self.request.user),\n slug=self.project_slug,\n )\n queryset = Build.objects.public(\n user=self.request.user,\n project=self.project,\n )\n\n return queryset\n\n\nclass BuildTriggerMixin:\n\n @method_decorator(login_required)\n def post(self, request, project_slug):\n project = get_object_or_404(Project, slug=project_slug)\n\n if not AdminPermission.is_admin(request.user, project):\n return HttpResponseForbidden()\n\n version_slug = request.POST.get('version_slug')\n version = get_object_or_404(\n Version,\n project=project,\n slug=version_slug,\n )\n\n update_docs_task, build = trigger_build(\n project=project,\n version=version,\n )\n if (update_docs_task, build) == (None, None):\n # Build was skipped\n messages.add_message(\n request,\n messages.WARNING,\n \"This project is currently disabled and can't trigger new builds.\",\n )\n return HttpResponseRedirect(\n reverse('builds_project_list', args=[project.slug]),\n )\n\n return HttpResponseRedirect(\n reverse('builds_detail', args=[project.slug, build.pk]),\n )\n\n\nclass BuildList(BuildBase, BuildTriggerMixin, ListView):\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n active_builds = self.get_queryset().exclude(\n state='finished',\n ).values('id')\n\n context['project'] = self.project\n context['active_builds'] = active_builds\n context['versions'] = Version.objects.public(\n user=self.request.user,\n project=self.project,\n )\n context['build_qs'] = self.get_queryset()\n\n return context\n\n\nclass BuildDetail(BuildBase, DetailView):\n pk_url_kwarg = 'build_pk'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['project'] = self.project\n return context\n\n\n# Old build view redirects\n\n\ndef builds_redirect_list(request, project_slug): # pylint: disable=unused-argument\n return HttpResponsePermanentRedirect(\n reverse('builds_project_list', args=[project_slug]),\n )\n\n\ndef builds_redirect_detail(request, project_slug, pk): # pylint: disable=unused-argument\n return HttpResponsePermanentRedirect(\n reverse('builds_detail', args=[project_slug, pk]),\n )\n", "path": "readthedocs/builds/views.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Views for builds app.\"\"\"\n\nimport logging\nimport textwrap\n\nfrom django.contrib import messages\nfrom django.contrib.auth.decorators import login_required\nfrom django.http import (\n HttpResponseForbidden,\n HttpResponsePermanentRedirect,\n HttpResponseRedirect,\n)\nfrom django.shortcuts import get_object_or_404\nfrom django.urls import reverse\nfrom django.utils.decorators import method_decorator\nfrom django.views.generic import DetailView, ListView\nfrom requests.utils import quote\nfrom urllib.parse import urlparse\n\nfrom readthedocs.doc_builder.exceptions import BuildEnvironmentError\nfrom readthedocs.builds.models import Build, Version\nfrom readthedocs.core.permissions import AdminPermission\nfrom readthedocs.core.utils import trigger_build\nfrom readthedocs.projects.models import Project\n\n\nlog = logging.getLogger(__name__)\n\n\nclass BuildBase:\n model = Build\n\n def 
get_queryset(self):\n self.project_slug = self.kwargs.get('project_slug', None)\n self.project = get_object_or_404(\n Project.objects.protected(self.request.user),\n slug=self.project_slug,\n )\n queryset = Build.objects.public(\n user=self.request.user,\n project=self.project,\n )\n\n return queryset\n\n\nclass BuildTriggerMixin:\n\n @method_decorator(login_required)\n def post(self, request, project_slug):\n project = get_object_or_404(Project, slug=project_slug)\n\n if not AdminPermission.is_admin(request.user, project):\n return HttpResponseForbidden()\n\n version_slug = request.POST.get('version_slug')\n version = get_object_or_404(\n Version,\n project=project,\n slug=version_slug,\n )\n\n update_docs_task, build = trigger_build(\n project=project,\n version=version,\n )\n if (update_docs_task, build) == (None, None):\n # Build was skipped\n messages.add_message(\n request,\n messages.WARNING,\n \"This project is currently disabled and can't trigger new builds.\",\n )\n return HttpResponseRedirect(\n reverse('builds_project_list', args=[project.slug]),\n )\n\n return HttpResponseRedirect(\n reverse('builds_detail', args=[project.slug, build.pk]),\n )\n\n\nclass BuildList(BuildBase, BuildTriggerMixin, ListView):\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n active_builds = self.get_queryset().exclude(\n state='finished',\n ).values('id')\n\n context['project'] = self.project\n context['active_builds'] = active_builds\n context['versions'] = Version.objects.public(\n user=self.request.user,\n project=self.project,\n )\n context['build_qs'] = self.get_queryset()\n\n return context\n\n\nclass BuildDetail(BuildBase, DetailView):\n pk_url_kwarg = 'build_pk'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['project'] = self.project\n\n build = self.get_object()\n if build.error != BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(build_id=build.pk):\n # Do not suggest to open an issue if the error is not generic\n return context\n\n scheme = (\n 'https://github.com/rtfd/readthedocs.org/issues/new'\n '?title={title}{build_id}'\n '&body={body}'\n )\n\n # TODO: we could use ``.github/ISSUE_TEMPLATE.md`` here, but we would\n # need to add some variables to it which could impact in the UX when\n # filling an issue from the web\n body = \"\"\"\n ## Details:\n\n * Project URL: https://readthedocs.org/projects/{project_slug}/\n * Build URL(if applicable): https://readthedocs.org{build_path}\n * Read the Docs username(if applicable): {username}\n\n ## Expected Result\n\n *A description of what you wanted to happen*\n\n ## Actual Result\n\n *A description of what actually happened*\"\"\".format(\n project_slug=self.project,\n build_path=self.request.path,\n username=self.request.user,\n )\n\n scheme_dict = {\n 'title': quote('Build error with build id #'),\n 'build_id': context['build'].id,\n 'body': quote(textwrap.dedent(body)),\n }\n\n issue_url = scheme.format(**scheme_dict)\n issue_url = urlparse(issue_url).geturl()\n context['issue_url'] = issue_url\n return context\n\n\n# Old build view redirects\n\n\ndef builds_redirect_list(request, project_slug): # pylint: disable=unused-argument\n return HttpResponsePermanentRedirect(\n reverse('builds_project_list', args=[project_slug]),\n )\n\n\ndef builds_redirect_detail(request, project_slug, pk): # pylint: disable=unused-argument\n return HttpResponsePermanentRedirect(\n reverse('builds_detail', args=[project_slug, pk]),\n )\n", "path": 
"readthedocs/builds/views.py"}]} | 1,449 | 583 |
gh_patches_debug_10866 | rasdani/github-patches | git_diff | zestedesavoir__zds-site-3230 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Un template qui n'est pas variabilisé
Comme on peut le constater dans le template [opensearch.xml](https://github.com/zestedesavoir/zds-site/blob/3e91b083ee882396abf4dc0d508595d9bdb101d7/templates/search/opensearch.xml#L13) il y'a une valeur en dur qui y traine.
Il faut remplacer la ligne :
``` python
<Contact>http://zestedesavoir.com/pages/contact/</Contact>
```
par
``` python
<Contact>{% url "zds.pages.views.contact" %}</Contact>
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `zds/search/views.py`
Content:
```
1 # coding: utf-8
2 from django.db.models import Q
3
4 from django.shortcuts import render
5 from django.core.urlresolvers import reverse
6 from haystack.views import SearchView
7
8 from zds import settings
9 from zds.utils.paginator import paginator_range
10
11
12 class CustomSearchView(SearchView):
13
14 def create_response(self):
15 (paginator, page) = self.build_page()
16
17 page_nbr = int(self.request.GET.get('page', 1))
18
19 context = {
20 'query': self.query,
21 'form': self.form,
22 'page': page,
23 'pages': paginator_range(page_nbr, paginator.num_pages),
24 'nb': page_nbr,
25 'paginator': paginator,
26 'suggestion': None,
27 'model_name': '',
28 'models': self.request.GET.getlist('models', ''),
29 }
30
31 if self.results and hasattr(self.results, 'query') and self.results.query.backend.include_spelling:
32 context['suggestion'] = self.form.get_suggestion()
33
34 context.update(self.extra_context())
35 return render(self.request, self.template, context)
36
37 def get_results(self):
38 queryset = super(CustomSearchView, self).get_results()
39
40 # We want to search only on authorized post and topic
41 if self.request.user.is_authenticated():
42 groups = self.request.user.groups
43
44 if groups.count() > 0:
45 return queryset.filter(Q(permissions="public") |
46 Q(permissions__in=[group.name for group in groups.all()]))
47 else:
48 return queryset.filter(permissions="public")
49 else:
50 return queryset.filter(permissions="public")
51
52
53 def opensearch(request):
54 """Generate OpenSearch Description file"""
55
56 return render(request, 'search/opensearch.xml', {
57 'site_name': settings.ZDS_APP['site']['litteral_name'],
58 'site_url': settings.ZDS_APP['site']['url'],
59 'language': settings.LANGUAGE_CODE,
60 'search_url': settings.ZDS_APP['site']['url'] + reverse('haystack_search')
61 }, content_type='application/opensearchdescription+xml')
62
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/zds/search/views.py b/zds/search/views.py
--- a/zds/search/views.py
+++ b/zds/search/views.py
@@ -56,6 +56,7 @@
return render(request, 'search/opensearch.xml', {
'site_name': settings.ZDS_APP['site']['litteral_name'],
'site_url': settings.ZDS_APP['site']['url'],
+ 'email_contact': settings.ZDS_APP['site']['email_contact'],
'language': settings.LANGUAGE_CODE,
'search_url': settings.ZDS_APP['site']['url'] + reverse('haystack_search')
}, content_type='application/opensearchdescription+xml')
| {"golden_diff": "diff --git a/zds/search/views.py b/zds/search/views.py\n--- a/zds/search/views.py\n+++ b/zds/search/views.py\n@@ -56,6 +56,7 @@\n return render(request, 'search/opensearch.xml', {\n 'site_name': settings.ZDS_APP['site']['litteral_name'],\n 'site_url': settings.ZDS_APP['site']['url'],\n+ 'email_contact': settings.ZDS_APP['site']['email_contact'],\n 'language': settings.LANGUAGE_CODE,\n 'search_url': settings.ZDS_APP['site']['url'] + reverse('haystack_search')\n }, content_type='application/opensearchdescription+xml')\n", "issue": "Un template qui n'est pas variabilis\u00e9\nComme on peut le constater dans le template [opensearch.xml](https://github.com/zestedesavoir/zds-site/blob/3e91b083ee882396abf4dc0d508595d9bdb101d7/templates/search/opensearch.xml#L13) il y'a une valeur en dur qui y traine.\n\nIl faut remplacer la ligne : \n\n``` python\n<Contact>http://zestedesavoir.com/pages/contact/</Contact>\n```\n\npar \n\n``` python\n<Contact>{% url \"zds.pages.views.contact\" %}</Contact>\n```\n\n", "before_files": [{"content": "# coding: utf-8\nfrom django.db.models import Q\n\nfrom django.shortcuts import render\nfrom django.core.urlresolvers import reverse\nfrom haystack.views import SearchView\n\nfrom zds import settings\nfrom zds.utils.paginator import paginator_range\n\n\nclass CustomSearchView(SearchView):\n\n def create_response(self):\n (paginator, page) = self.build_page()\n\n page_nbr = int(self.request.GET.get('page', 1))\n\n context = {\n 'query': self.query,\n 'form': self.form,\n 'page': page,\n 'pages': paginator_range(page_nbr, paginator.num_pages),\n 'nb': page_nbr,\n 'paginator': paginator,\n 'suggestion': None,\n 'model_name': '',\n 'models': self.request.GET.getlist('models', ''),\n }\n\n if self.results and hasattr(self.results, 'query') and self.results.query.backend.include_spelling:\n context['suggestion'] = self.form.get_suggestion()\n\n context.update(self.extra_context())\n return render(self.request, self.template, context)\n\n def get_results(self):\n queryset = super(CustomSearchView, self).get_results()\n\n # We want to search only on authorized post and topic\n if self.request.user.is_authenticated():\n groups = self.request.user.groups\n\n if groups.count() > 0:\n return queryset.filter(Q(permissions=\"public\") |\n Q(permissions__in=[group.name for group in groups.all()]))\n else:\n return queryset.filter(permissions=\"public\")\n else:\n return queryset.filter(permissions=\"public\")\n\n\ndef opensearch(request):\n \"\"\"Generate OpenSearch Description file\"\"\"\n\n return render(request, 'search/opensearch.xml', {\n 'site_name': settings.ZDS_APP['site']['litteral_name'],\n 'site_url': settings.ZDS_APP['site']['url'],\n 'language': settings.LANGUAGE_CODE,\n 'search_url': settings.ZDS_APP['site']['url'] + reverse('haystack_search')\n }, content_type='application/opensearchdescription+xml')\n", "path": "zds/search/views.py"}], "after_files": [{"content": "# coding: utf-8\nfrom django.db.models import Q\n\nfrom django.shortcuts import render\nfrom django.core.urlresolvers import reverse\nfrom haystack.views import SearchView\n\nfrom zds import settings\nfrom zds.utils.paginator import paginator_range\n\n\nclass CustomSearchView(SearchView):\n\n def create_response(self):\n (paginator, page) = self.build_page()\n\n page_nbr = int(self.request.GET.get('page', 1))\n\n context = {\n 'query': self.query,\n 'form': self.form,\n 'page': page,\n 'pages': paginator_range(page_nbr, paginator.num_pages),\n 'nb': page_nbr,\n 'paginator': paginator,\n 
'suggestion': None,\n 'model_name': '',\n 'models': self.request.GET.getlist('models', ''),\n }\n\n if self.results and hasattr(self.results, 'query') and self.results.query.backend.include_spelling:\n context['suggestion'] = self.form.get_suggestion()\n\n context.update(self.extra_context())\n return render(self.request, self.template, context)\n\n def get_results(self):\n queryset = super(CustomSearchView, self).get_results()\n\n # We want to search only on authorized post and topic\n if self.request.user.is_authenticated():\n groups = self.request.user.groups\n\n if groups.count() > 0:\n return queryset.filter(Q(permissions=\"public\") |\n Q(permissions__in=[group.name for group in groups.all()]))\n else:\n return queryset.filter(permissions=\"public\")\n else:\n return queryset.filter(permissions=\"public\")\n\n\ndef opensearch(request):\n \"\"\"Generate OpenSearch Description file\"\"\"\n\n return render(request, 'search/opensearch.xml', {\n 'site_name': settings.ZDS_APP['site']['litteral_name'],\n 'site_url': settings.ZDS_APP['site']['url'],\n 'email_contact': settings.ZDS_APP['site']['email_contact'],\n 'language': settings.LANGUAGE_CODE,\n 'search_url': settings.ZDS_APP['site']['url'] + reverse('haystack_search')\n }, content_type='application/opensearchdescription+xml')\n", "path": "zds/search/views.py"}]} | 967 | 146 |
gh_patches_debug_35022 | rasdani/github-patches | git_diff | ansible__ansible-modules-extras-1175 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
alternatives should allow link priorty as an optional parameter
**Issue Type:** “Feature Idea”
**Ansible Version:** 1.9.1
**Ansible Configuration:** Stock configuration
**Environment:** Ubuntu 14.04 x64 managing an Ubuntu 14.04 x64 machine
**Summary:** The alternatives module should allow you to pass in the link priority as an optional parameter. Currently the link priority is hard coded in the module as highlighted below:
https://github.com/ansible/ansible-modules-extras/blob/devel/system/alternatives.py#L61
Ansible should only report a change when the priority and link stayed the same.
**Steps To Reproduce:** None, feature idea
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `system/alternatives.py`
Content:
```
1 #!/usr/bin/python
2 # -*- coding: utf-8 -*-
3
4 """
5 Ansible module to manage symbolic link alternatives.
6 (c) 2014, Gabe Mulley <[email protected]>
7 (c) 2015, David Wittman <[email protected]>
8
9 This file is part of Ansible
10
11 Ansible is free software: you can redistribute it and/or modify
12 it under the terms of the GNU General Public License as published by
13 the Free Software Foundation, either version 3 of the License, or
14 (at your option) any later version.
15
16 Ansible is distributed in the hope that it will be useful,
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 GNU General Public License for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with Ansible. If not, see <http://www.gnu.org/licenses/>.
23 """
24
25 DOCUMENTATION = '''
26 ---
27 module: alternatives
28 short_description: Manages alternative programs for common commands
29 description:
30 - Manages symbolic links using the 'update-alternatives' tool
31 - Useful when multiple programs are installed but provide similar functionality (e.g. different editors).
32 version_added: "1.6"
33 author:
34 - "David Wittman (@DavidWittman)"
35 - "Gabe Mulley (@mulby)"
36 options:
37 name:
38 description:
39 - The generic name of the link.
40 required: true
41 path:
42 description:
43 - The path to the real executable that the link should point to.
44 required: true
45 link:
46 description:
47 - The path to the symbolic link that should point to the real executable.
48 - This option is required on RHEL-based distributions
49 required: false
50 requirements: [ update-alternatives ]
51 '''
52
53 EXAMPLES = '''
54 - name: correct java version selected
55 alternatives: name=java path=/usr/lib/jvm/java-7-openjdk-amd64/jre/bin/java
56
57 - name: alternatives link created
58 alternatives: name=hadoop-conf link=/etc/hadoop/conf path=/etc/hadoop/conf.ansible
59 '''
60
61 DEFAULT_LINK_PRIORITY = 50
62
63 import re
64 from ansible.module_utils.basic import *
65 from ansible.module_utils.pycompat24 import get_exception
66
67
68 def main():
69
70 module = AnsibleModule(
71 argument_spec = dict(
72 name = dict(required=True),
73 path = dict(required=True, type='path'),
74 link = dict(required=False, type='path'),
75 ),
76 supports_check_mode=True,
77 )
78
79 params = module.params
80 name = params['name']
81 path = params['path']
82 link = params['link']
83
84 UPDATE_ALTERNATIVES = module.get_bin_path('update-alternatives',True)
85
86 current_path = None
87 all_alternatives = []
88
89 # Run `update-alternatives --display <name>` to find existing alternatives
90 (rc, display_output, _) = module.run_command(
91 ['env', 'LC_ALL=C', UPDATE_ALTERNATIVES, '--display', name]
92 )
93
94 if rc == 0:
95 # Alternatives already exist for this link group
96 # Parse the output to determine the current path of the symlink and
97 # available alternatives
98 current_path_regex = re.compile(r'^\s*link currently points to (.*)$',
99 re.MULTILINE)
100 alternative_regex = re.compile(r'^(\/.*)\s-\spriority', re.MULTILINE)
101
102 current_path = current_path_regex.search(display_output).group(1)
103 all_alternatives = alternative_regex.findall(display_output)
104
105 if not link:
106 # Read the current symlink target from `update-alternatives --query`
107 # in case we need to install the new alternative before setting it.
108 #
109 # This is only compatible on Debian-based systems, as the other
110 # alternatives don't have --query available
111 rc, query_output, _ = module.run_command(
112 ['env', 'LC_ALL=C', UPDATE_ALTERNATIVES, '--query', name]
113 )
114 if rc == 0:
115 for line in query_output.splitlines():
116 if line.startswith('Link:'):
117 link = line.split()[1]
118 break
119
120 if current_path != path:
121 if module.check_mode:
122 module.exit_json(changed=True, current_path=current_path)
123 try:
124 # install the requested path if necessary
125 if path not in all_alternatives:
126 if not link:
127 module.fail_json(msg="Needed to install the alternative, but unable to do so as we are missing the link")
128
129 module.run_command(
130 [UPDATE_ALTERNATIVES, '--install', link, name, path, str(DEFAULT_LINK_PRIORITY)],
131 check_rc=True
132 )
133
134 # select the requested path
135 module.run_command(
136 [UPDATE_ALTERNATIVES, '--set', name, path],
137 check_rc=True
138 )
139
140 module.exit_json(changed=True)
141 except subprocess.CalledProcessError:
142 e = get_exception()
143 module.fail_json(msg=str(dir(cpe)))
144 else:
145 module.exit_json(changed=False)
146
147
148 main()
149
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/system/alternatives.py b/system/alternatives.py
--- a/system/alternatives.py
+++ b/system/alternatives.py
@@ -47,6 +47,10 @@
- The path to the symbolic link that should point to the real executable.
- This option is required on RHEL-based distributions
required: false
+ priority:
+ description:
+ - The priority of the alternative
+ required: false
requirements: [ update-alternatives ]
'''
@@ -56,9 +60,10 @@
- name: alternatives link created
alternatives: name=hadoop-conf link=/etc/hadoop/conf path=/etc/hadoop/conf.ansible
-'''
-DEFAULT_LINK_PRIORITY = 50
+- name: make java 32 bit an alternative with low priority
+ alternatives: name=java path=/usr/lib/jvm/java-7-openjdk-i386/jre/bin/java priority=-10
+'''
import re
from ansible.module_utils.basic import *
@@ -72,6 +77,8 @@
name = dict(required=True),
path = dict(required=True, type='path'),
link = dict(required=False, type='path'),
+ priority = dict(required=False, type='int',
+ default=50),
),
supports_check_mode=True,
)
@@ -80,6 +87,7 @@
name = params['name']
path = params['path']
link = params['link']
+ priority = params['priority']
UPDATE_ALTERNATIVES = module.get_bin_path('update-alternatives',True)
@@ -127,7 +135,7 @@
module.fail_json(msg="Needed to install the alternative, but unable to do so as we are missing the link")
module.run_command(
- [UPDATE_ALTERNATIVES, '--install', link, name, path, str(DEFAULT_LINK_PRIORITY)],
+ [UPDATE_ALTERNATIVES, '--install', link, name, path, str(priority)],
check_rc=True
)
| {"golden_diff": "diff --git a/system/alternatives.py b/system/alternatives.py\n--- a/system/alternatives.py\n+++ b/system/alternatives.py\n@@ -47,6 +47,10 @@\n - The path to the symbolic link that should point to the real executable.\n - This option is required on RHEL-based distributions\n required: false\n+ priority:\n+ description:\n+ - The priority of the alternative\n+ required: false\n requirements: [ update-alternatives ]\n '''\n \n@@ -56,9 +60,10 @@\n \n - name: alternatives link created\n alternatives: name=hadoop-conf link=/etc/hadoop/conf path=/etc/hadoop/conf.ansible\n-'''\n \n-DEFAULT_LINK_PRIORITY = 50\n+- name: make java 32 bit an alternative with low priority\n+ alternatives: name=java path=/usr/lib/jvm/java-7-openjdk-i386/jre/bin/java priority=-10\n+'''\n \n import re\n from ansible.module_utils.basic import *\n@@ -72,6 +77,8 @@\n name = dict(required=True),\n path = dict(required=True, type='path'),\n link = dict(required=False, type='path'),\n+ priority = dict(required=False, type='int',\n+ default=50),\n ),\n supports_check_mode=True,\n )\n@@ -80,6 +87,7 @@\n name = params['name']\n path = params['path']\n link = params['link']\n+ priority = params['priority']\n \n UPDATE_ALTERNATIVES = module.get_bin_path('update-alternatives',True)\n \n@@ -127,7 +135,7 @@\n module.fail_json(msg=\"Needed to install the alternative, but unable to do so as we are missing the link\")\n \n module.run_command(\n- [UPDATE_ALTERNATIVES, '--install', link, name, path, str(DEFAULT_LINK_PRIORITY)],\n+ [UPDATE_ALTERNATIVES, '--install', link, name, path, str(priority)],\n check_rc=True\n )\n", "issue": "alternatives should allow link priorty as an optional parameter\n**Issue Type:** \u201cFeature Idea\u201d\n**Ansible Version:** 1.9.1\n**Ansible Configuration:** Stock configuration\n**Environment:** Ubuntu 14.04 x64 managing an Ubuntu 14.04 x64 machine\n\n**Summary:** The alternatives module should allow you to pass in the link priority as an optional parameter. Currently the link priority is hard coded in the module as highlighted below:\n\nhttps://github.com/ansible/ansible-modules-extras/blob/devel/system/alternatives.py#L61\n\nAnsible should only report a change when the priority and link stayed the same.\n\n**Steps To Reproduce:** None, feature idea\n\n", "before_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nAnsible module to manage symbolic link alternatives.\n(c) 2014, Gabe Mulley <[email protected]>\n(c) 2015, David Wittman <[email protected]>\n\nThis file is part of Ansible\n\nAnsible is free software: you can redistribute it and/or modify\nit under the terms of the GNU General Public License as published by\nthe Free Software Foundation, either version 3 of the License, or\n(at your option) any later version.\n\nAnsible is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\nGNU General Public License for more details.\n\nYou should have received a copy of the GNU General Public License\nalong with Ansible. If not, see <http://www.gnu.org/licenses/>.\n\"\"\"\n\nDOCUMENTATION = '''\n---\nmodule: alternatives\nshort_description: Manages alternative programs for common commands\ndescription:\n - Manages symbolic links using the 'update-alternatives' tool\n - Useful when multiple programs are installed but provide similar functionality (e.g. 
different editors).\nversion_added: \"1.6\"\nauthor:\n - \"David Wittman (@DavidWittman)\"\n - \"Gabe Mulley (@mulby)\"\noptions:\n name:\n description:\n - The generic name of the link.\n required: true\n path:\n description:\n - The path to the real executable that the link should point to.\n required: true\n link:\n description:\n - The path to the symbolic link that should point to the real executable.\n - This option is required on RHEL-based distributions\n required: false\nrequirements: [ update-alternatives ]\n'''\n\nEXAMPLES = '''\n- name: correct java version selected\n alternatives: name=java path=/usr/lib/jvm/java-7-openjdk-amd64/jre/bin/java\n\n- name: alternatives link created\n alternatives: name=hadoop-conf link=/etc/hadoop/conf path=/etc/hadoop/conf.ansible\n'''\n\nDEFAULT_LINK_PRIORITY = 50\n\nimport re\nfrom ansible.module_utils.basic import *\nfrom ansible.module_utils.pycompat24 import get_exception\n\n\ndef main():\n\n module = AnsibleModule(\n argument_spec = dict(\n name = dict(required=True),\n path = dict(required=True, type='path'),\n link = dict(required=False, type='path'),\n ),\n supports_check_mode=True,\n )\n\n params = module.params\n name = params['name']\n path = params['path']\n link = params['link']\n\n UPDATE_ALTERNATIVES = module.get_bin_path('update-alternatives',True)\n\n current_path = None\n all_alternatives = []\n\n # Run `update-alternatives --display <name>` to find existing alternatives\n (rc, display_output, _) = module.run_command(\n ['env', 'LC_ALL=C', UPDATE_ALTERNATIVES, '--display', name]\n )\n\n if rc == 0:\n # Alternatives already exist for this link group\n # Parse the output to determine the current path of the symlink and\n # available alternatives\n current_path_regex = re.compile(r'^\\s*link currently points to (.*)$',\n re.MULTILINE)\n alternative_regex = re.compile(r'^(\\/.*)\\s-\\spriority', re.MULTILINE)\n\n current_path = current_path_regex.search(display_output).group(1)\n all_alternatives = alternative_regex.findall(display_output)\n\n if not link:\n # Read the current symlink target from `update-alternatives --query`\n # in case we need to install the new alternative before setting it.\n #\n # This is only compatible on Debian-based systems, as the other\n # alternatives don't have --query available\n rc, query_output, _ = module.run_command(\n ['env', 'LC_ALL=C', UPDATE_ALTERNATIVES, '--query', name]\n )\n if rc == 0:\n for line in query_output.splitlines():\n if line.startswith('Link:'):\n link = line.split()[1]\n break\n\n if current_path != path:\n if module.check_mode:\n module.exit_json(changed=True, current_path=current_path)\n try:\n # install the requested path if necessary\n if path not in all_alternatives:\n if not link:\n module.fail_json(msg=\"Needed to install the alternative, but unable to do so as we are missing the link\")\n\n module.run_command(\n [UPDATE_ALTERNATIVES, '--install', link, name, path, str(DEFAULT_LINK_PRIORITY)],\n check_rc=True\n )\n\n # select the requested path\n module.run_command(\n [UPDATE_ALTERNATIVES, '--set', name, path],\n check_rc=True\n )\n\n module.exit_json(changed=True)\n except subprocess.CalledProcessError:\n e = get_exception()\n module.fail_json(msg=str(dir(cpe)))\n else:\n module.exit_json(changed=False)\n\n\nmain()\n", "path": "system/alternatives.py"}], "after_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nAnsible module to manage symbolic link alternatives.\n(c) 2014, Gabe Mulley <[email protected]>\n(c) 2015, David Wittman <[email 
protected]>\n\nThis file is part of Ansible\n\nAnsible is free software: you can redistribute it and/or modify\nit under the terms of the GNU General Public License as published by\nthe Free Software Foundation, either version 3 of the License, or\n(at your option) any later version.\n\nAnsible is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\nGNU General Public License for more details.\n\nYou should have received a copy of the GNU General Public License\nalong with Ansible. If not, see <http://www.gnu.org/licenses/>.\n\"\"\"\n\nDOCUMENTATION = '''\n---\nmodule: alternatives\nshort_description: Manages alternative programs for common commands\ndescription:\n - Manages symbolic links using the 'update-alternatives' tool\n - Useful when multiple programs are installed but provide similar functionality (e.g. different editors).\nversion_added: \"1.6\"\nauthor:\n - \"David Wittman (@DavidWittman)\"\n - \"Gabe Mulley (@mulby)\"\noptions:\n name:\n description:\n - The generic name of the link.\n required: true\n path:\n description:\n - The path to the real executable that the link should point to.\n required: true\n link:\n description:\n - The path to the symbolic link that should point to the real executable.\n - This option is required on RHEL-based distributions\n required: false\n priority:\n description:\n - The priority of the alternative\n required: false\nrequirements: [ update-alternatives ]\n'''\n\nEXAMPLES = '''\n- name: correct java version selected\n alternatives: name=java path=/usr/lib/jvm/java-7-openjdk-amd64/jre/bin/java\n\n- name: alternatives link created\n alternatives: name=hadoop-conf link=/etc/hadoop/conf path=/etc/hadoop/conf.ansible\n\n- name: make java 32 bit an alternative with low priority\n alternatives: name=java path=/usr/lib/jvm/java-7-openjdk-i386/jre/bin/java priority=-10\n'''\n\nimport re\nfrom ansible.module_utils.basic import *\nfrom ansible.module_utils.pycompat24 import get_exception\n\n\ndef main():\n\n module = AnsibleModule(\n argument_spec = dict(\n name = dict(required=True),\n path = dict(required=True, type='path'),\n link = dict(required=False, type='path'),\n priority = dict(required=False, type='int',\n default=50),\n ),\n supports_check_mode=True,\n )\n\n params = module.params\n name = params['name']\n path = params['path']\n link = params['link']\n priority = params['priority']\n\n UPDATE_ALTERNATIVES = module.get_bin_path('update-alternatives',True)\n\n current_path = None\n all_alternatives = []\n\n # Run `update-alternatives --display <name>` to find existing alternatives\n (rc, display_output, _) = module.run_command(\n ['env', 'LC_ALL=C', UPDATE_ALTERNATIVES, '--display', name]\n )\n\n if rc == 0:\n # Alternatives already exist for this link group\n # Parse the output to determine the current path of the symlink and\n # available alternatives\n current_path_regex = re.compile(r'^\\s*link currently points to (.*)$',\n re.MULTILINE)\n alternative_regex = re.compile(r'^(\\/.*)\\s-\\spriority', re.MULTILINE)\n\n current_path = current_path_regex.search(display_output).group(1)\n all_alternatives = alternative_regex.findall(display_output)\n\n if not link:\n # Read the current symlink target from `update-alternatives --query`\n # in case we need to install the new alternative before setting it.\n #\n # This is only compatible on Debian-based systems, as the other\n # alternatives don't have --query available\n rc, 
query_output, _ = module.run_command(\n ['env', 'LC_ALL=C', UPDATE_ALTERNATIVES, '--query', name]\n )\n if rc == 0:\n for line in query_output.splitlines():\n if line.startswith('Link:'):\n link = line.split()[1]\n break\n\n if current_path != path:\n if module.check_mode:\n module.exit_json(changed=True, current_path=current_path)\n try:\n # install the requested path if necessary\n if path not in all_alternatives:\n if not link:\n module.fail_json(msg=\"Needed to install the alternative, but unable to do so as we are missing the link\")\n\n module.run_command(\n [UPDATE_ALTERNATIVES, '--install', link, name, path, str(priority)],\n check_rc=True\n )\n\n # select the requested path\n module.run_command(\n [UPDATE_ALTERNATIVES, '--set', name, path],\n check_rc=True\n )\n\n module.exit_json(changed=True)\n except subprocess.CalledProcessError:\n e = get_exception()\n module.fail_json(msg=str(dir(cpe)))\n else:\n module.exit_json(changed=False)\n\n\nmain()\n", "path": "system/alternatives.py"}]} | 1,859 | 452 |
gh_patches_debug_32159 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-8238 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Food Warehouse spider missing a handful of stores
The spider for "The Food Warehouse" at https://github.com/alltheplaces/alltheplaces/blob/master/locations/spiders/the_food_warehouse_gb.py appears to be missing a small number (probably around 8) stores. These include:
* https://www.thefoodwarehouse.com/store-locator/exeter
* https://www.thefoodwarehouse.com/store-locator/falkirk
* https://www.thefoodwarehouse.com/store-locator/Norwich-Longwater
* https://www.thefoodwarehouse.com/store-locator/york-foss-islands
The spider gets the store details from https://www.thefoodwarehouse.com/assets/foodwarehouse/ajax/ . The above stores are all listed there, and I can't see any obvious problems with the data for them, but they're not returned in the dataset at https://alltheplaces-data.openaddresses.io/runs/2024-04-13-13-32-00/output/the_food_warehouse_gb.geojson . The number of stores returned by the spider appears stable, and it looks like the stores above are consistently absent from the returned datasets.
There's nothing in the error log to indicate any problems. I don't know what it signifies, but the stats at https://alltheplaces-data.openaddresses.io/runs/2024-04-13-13-32-00/stats/the_food_warehouse_gb.json say item_dropped_count=8 and
item_dropped_reasons_count/DropItem=8. I'm guessing that these could be the missing stores, but I don't know why they've been 'dropped'.
There are currently 187 items in the JSON at https://www.thefoodwarehouse.com/assets/foodwarehouse/ajax/ . 5 include "coming soon" in the name, so (correctly) aren't returned by the spider. There's another two that include "now open", so may have been "coming soon" at the last run. That would leave 180 stores to be returned. The number actually returned at the last run was 172, which probably matches the 8 described as being dropped in the stats.
Can anyone see what's going wrong here, and why these stores aren't being returned?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/spiders/the_food_warehouse_gb.py`
Content:
```
1 from scrapy import Spider
2
3 from locations.categories import Categories
4 from locations.dict_parser import DictParser
5 from locations.hours import OpeningHours
6
7
8 class TheFoodWarehouseGBSpider(Spider):
9 name = "the_food_warehouse_gb"
10 item_attributes = {
11 "brand": "The Food Warehouse",
12 "brand_wikidata": "Q87263899",
13 "extras": Categories.SHOP_SUPERMARKET.value,
14 }
15 allowed_domains = ["www.thefoodwarehouse.com"]
16 start_urls = ["https://www.thefoodwarehouse.com/assets/foodwarehouse/ajax/"]
17
18 def parse(self, response):
19 for store in response.json():
20 item = DictParser.parse(store)
21 if "CLOSED" in item["name"].upper() or "COMING SOON" in item["name"].upper():
22 continue
23 item["ref"] = store["storeNo"]
24 item["website"] = "https://www.thefoodwarehouse.com" + store["url"]
25 item["phone"] = store.get("store-number")
26 item["addr_full"] = (
27 item["addr_full"].replace("<br>", "").replace("<br />", "").replace("<p>", "").replace("</p>", "")
28 )
29 item["opening_hours"] = OpeningHours()
30 item["opening_hours"].add_ranges_from_string(store.get("opening-times", ""))
31 yield item
32
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/locations/spiders/the_food_warehouse_gb.py b/locations/spiders/the_food_warehouse_gb.py
--- a/locations/spiders/the_food_warehouse_gb.py
+++ b/locations/spiders/the_food_warehouse_gb.py
@@ -1,8 +1,9 @@
-from scrapy import Spider
+from scrapy import Selector, Spider
from locations.categories import Categories
from locations.dict_parser import DictParser
from locations.hours import OpeningHours
+from locations.pipelines.address_clean_up import merge_address_lines
class TheFoodWarehouseGBSpider(Spider):
@@ -14,18 +15,18 @@
}
allowed_domains = ["www.thefoodwarehouse.com"]
start_urls = ["https://www.thefoodwarehouse.com/assets/foodwarehouse/ajax/"]
+ no_refs = True # https://github.com/alltheplaces/alltheplaces/issues/8237
def parse(self, response):
for store in response.json():
item = DictParser.parse(store)
if "CLOSED" in item["name"].upper() or "COMING SOON" in item["name"].upper():
continue
- item["ref"] = store["storeNo"]
- item["website"] = "https://www.thefoodwarehouse.com" + store["url"]
+ if store["url"] != "/store-locator/default-store":
+ item["website"] = "https://www.thefoodwarehouse.com" + store["url"]
+ item["branch"] = item.pop("name").removesuffix(" - Now Open")
item["phone"] = store.get("store-number")
- item["addr_full"] = (
- item["addr_full"].replace("<br>", "").replace("<br />", "").replace("<p>", "").replace("</p>", "")
- )
+ item["addr_full"] = merge_address_lines(Selector(text=item["addr_full"]).xpath("//text()").getall())
item["opening_hours"] = OpeningHours()
item["opening_hours"].add_ranges_from_string(store.get("opening-times", ""))
yield item
| {"golden_diff": "diff --git a/locations/spiders/the_food_warehouse_gb.py b/locations/spiders/the_food_warehouse_gb.py\n--- a/locations/spiders/the_food_warehouse_gb.py\n+++ b/locations/spiders/the_food_warehouse_gb.py\n@@ -1,8 +1,9 @@\n-from scrapy import Spider\n+from scrapy import Selector, Spider\n \n from locations.categories import Categories\n from locations.dict_parser import DictParser\n from locations.hours import OpeningHours\n+from locations.pipelines.address_clean_up import merge_address_lines\n \n \n class TheFoodWarehouseGBSpider(Spider):\n@@ -14,18 +15,18 @@\n }\n allowed_domains = [\"www.thefoodwarehouse.com\"]\n start_urls = [\"https://www.thefoodwarehouse.com/assets/foodwarehouse/ajax/\"]\n+ no_refs = True # https://github.com/alltheplaces/alltheplaces/issues/8237\n \n def parse(self, response):\n for store in response.json():\n item = DictParser.parse(store)\n if \"CLOSED\" in item[\"name\"].upper() or \"COMING SOON\" in item[\"name\"].upper():\n continue\n- item[\"ref\"] = store[\"storeNo\"]\n- item[\"website\"] = \"https://www.thefoodwarehouse.com\" + store[\"url\"]\n+ if store[\"url\"] != \"/store-locator/default-store\":\n+ item[\"website\"] = \"https://www.thefoodwarehouse.com\" + store[\"url\"]\n+ item[\"branch\"] = item.pop(\"name\").removesuffix(\" - Now Open\")\n item[\"phone\"] = store.get(\"store-number\")\n- item[\"addr_full\"] = (\n- item[\"addr_full\"].replace(\"<br>\", \"\").replace(\"<br />\", \"\").replace(\"<p>\", \"\").replace(\"</p>\", \"\")\n- )\n+ item[\"addr_full\"] = merge_address_lines(Selector(text=item[\"addr_full\"]).xpath(\"//text()\").getall())\n item[\"opening_hours\"] = OpeningHours()\n item[\"opening_hours\"].add_ranges_from_string(store.get(\"opening-times\", \"\"))\n yield item\n", "issue": "Food Warehouse spider missing a handful of stores\nThe spider for \"The Food Warehouse\" at https://github.com/alltheplaces/alltheplaces/blob/master/locations/spiders/the_food_warehouse_gb.py appears to be missing a small number (probably around 8) stores. These include:\r\n\r\n* https://www.thefoodwarehouse.com/store-locator/exeter\r\n* https://www.thefoodwarehouse.com/store-locator/falkirk\r\n* https://www.thefoodwarehouse.com/store-locator/Norwich-Longwater\r\n* https://www.thefoodwarehouse.com/store-locator/york-foss-islands\r\n\r\nThe spider gets the store details from https://www.thefoodwarehouse.com/assets/foodwarehouse/ajax/ . The above stores are all listed there, and I can't see any obvious problems with the data for them, but they're not returned in the dataset at https://alltheplaces-data.openaddresses.io/runs/2024-04-13-13-32-00/output/the_food_warehouse_gb.geojson . The number of stores returned by the spider appears stable, and it looks like the stores above are consistently absent from the returned datasets.\r\n\r\nThere's nothing in the error log to indicate any problems. I don't know what it signifies, but the stats at https://alltheplaces-data.openaddresses.io/runs/2024-04-13-13-32-00/stats/the_food_warehouse_gb.json say item_dropped_count=8 and \r\nitem_dropped_reasons_count/DropItem=8. I'm guessing that these could be the missing stores, but I don't know why they've been 'dropped'.\r\n\r\nThere are currently 187 items in the JSON at https://www.thefoodwarehouse.com/assets/foodwarehouse/ajax/ . 5 include \"coming soon\" in the name, so (correctly) aren't returned by the spider. There's another two that include \"now open\", so may have been \"coming soon\" at the last run. That would leave 180 stores to be returned. 
The number actually returned at the last run was 172, which probably matches the 8 described as being dropped in the stats.\r\n\r\nCan anyone see what's going wrong here, and why these stores aren't being returned?\n", "before_files": [{"content": "from scrapy import Spider\n\nfrom locations.categories import Categories\nfrom locations.dict_parser import DictParser\nfrom locations.hours import OpeningHours\n\n\nclass TheFoodWarehouseGBSpider(Spider):\n name = \"the_food_warehouse_gb\"\n item_attributes = {\n \"brand\": \"The Food Warehouse\",\n \"brand_wikidata\": \"Q87263899\",\n \"extras\": Categories.SHOP_SUPERMARKET.value,\n }\n allowed_domains = [\"www.thefoodwarehouse.com\"]\n start_urls = [\"https://www.thefoodwarehouse.com/assets/foodwarehouse/ajax/\"]\n\n def parse(self, response):\n for store in response.json():\n item = DictParser.parse(store)\n if \"CLOSED\" in item[\"name\"].upper() or \"COMING SOON\" in item[\"name\"].upper():\n continue\n item[\"ref\"] = store[\"storeNo\"]\n item[\"website\"] = \"https://www.thefoodwarehouse.com\" + store[\"url\"]\n item[\"phone\"] = store.get(\"store-number\")\n item[\"addr_full\"] = (\n item[\"addr_full\"].replace(\"<br>\", \"\").replace(\"<br />\", \"\").replace(\"<p>\", \"\").replace(\"</p>\", \"\")\n )\n item[\"opening_hours\"] = OpeningHours()\n item[\"opening_hours\"].add_ranges_from_string(store.get(\"opening-times\", \"\"))\n yield item\n", "path": "locations/spiders/the_food_warehouse_gb.py"}], "after_files": [{"content": "from scrapy import Selector, Spider\n\nfrom locations.categories import Categories\nfrom locations.dict_parser import DictParser\nfrom locations.hours import OpeningHours\nfrom locations.pipelines.address_clean_up import merge_address_lines\n\n\nclass TheFoodWarehouseGBSpider(Spider):\n name = \"the_food_warehouse_gb\"\n item_attributes = {\n \"brand\": \"The Food Warehouse\",\n \"brand_wikidata\": \"Q87263899\",\n \"extras\": Categories.SHOP_SUPERMARKET.value,\n }\n allowed_domains = [\"www.thefoodwarehouse.com\"]\n start_urls = [\"https://www.thefoodwarehouse.com/assets/foodwarehouse/ajax/\"]\n no_refs = True # https://github.com/alltheplaces/alltheplaces/issues/8237\n\n def parse(self, response):\n for store in response.json():\n item = DictParser.parse(store)\n if \"CLOSED\" in item[\"name\"].upper() or \"COMING SOON\" in item[\"name\"].upper():\n continue\n if store[\"url\"] != \"/store-locator/default-store\":\n item[\"website\"] = \"https://www.thefoodwarehouse.com\" + store[\"url\"]\n item[\"branch\"] = item.pop(\"name\").removesuffix(\" - Now Open\")\n item[\"phone\"] = store.get(\"store-number\")\n item[\"addr_full\"] = merge_address_lines(Selector(text=item[\"addr_full\"]).xpath(\"//text()\").getall())\n item[\"opening_hours\"] = OpeningHours()\n item[\"opening_hours\"].add_ranges_from_string(store.get(\"opening-times\", \"\"))\n yield item\n", "path": "locations/spiders/the_food_warehouse_gb.py"}]} | 1,095 | 446 |
gh_patches_debug_4291 | rasdani/github-patches | git_diff | learningequality__kolibri-3151 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
inconsistent channel ordering
### Observed behavior
In 'Recommended', channel order is based on order of import:'

In 'Channels', order is not the same:

### Expected behavior
In the 'Channels' sub-section, order should be based on order of import.
### User-facing consequences
Administrators are not able to control the order that channels appear in on that page.
### Context
Desired for one VF deployment.
@rtibbles would you mind linking to the change you made to update the 'Recommended' page?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kolibri/__init__.py`
Content:
```
1 from __future__ import absolute_import, print_function, unicode_literals
2
3 # NB! This is not necessarily the version scheme we want, however having a good
4 # tracking of releases once we start doing lots of pre-releases is essential.
5 from .utils.version import get_version
6
7 #: This may not be the exact version as it's subject to modification with
8 #: get_version() - use ``kolibri.__version__`` for the exact version string.
9 VERSION = (0, 6, 1, 'final', 0)
10
11 __author__ = 'Learning Equality'
12 __email__ = '[email protected]'
13 __version__ = str(get_version(VERSION))
14
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kolibri/__init__.py b/kolibri/__init__.py
--- a/kolibri/__init__.py
+++ b/kolibri/__init__.py
@@ -6,7 +6,7 @@
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
-VERSION = (0, 6, 1, 'final', 0)
+VERSION = (0, 6, 2, 'final', 0)
__author__ = 'Learning Equality'
__email__ = '[email protected]'
| {"golden_diff": "diff --git a/kolibri/__init__.py b/kolibri/__init__.py\n--- a/kolibri/__init__.py\n+++ b/kolibri/__init__.py\n@@ -6,7 +6,7 @@\n \n #: This may not be the exact version as it's subject to modification with\n #: get_version() - use ``kolibri.__version__`` for the exact version string.\n-VERSION = (0, 6, 1, 'final', 0)\n+VERSION = (0, 6, 2, 'final', 0)\n \n __author__ = 'Learning Equality'\n __email__ = '[email protected]'\n", "issue": "inconsistent channel ordering\n\r\n### Observed behavior\r\n\r\nIn 'Recommended', channel order is based on order of import:'\r\n\r\n\r\n\r\nIn 'Channels', order is not the same:\r\n\r\n\r\n\r\n\r\n### Expected behavior\r\n\r\nIn the 'Channels' sub-section, order should be based on order of import.\r\n\r\n### User-facing consequences\r\n\r\nAdministrators are not able to control the order that channels appear in on that page.\r\n\r\n### Context\r\n\r\nDesired for one VF deployment.\r\n\r\n@rtibbles would you mind linking to the change you made to update the 'Recommended' page?\r\n\r\n\r\n\n", "before_files": [{"content": "from __future__ import absolute_import, print_function, unicode_literals\n\n# NB! This is not necessarily the version scheme we want, however having a good\n# tracking of releases once we start doing lots of pre-releases is essential.\nfrom .utils.version import get_version\n\n#: This may not be the exact version as it's subject to modification with\n#: get_version() - use ``kolibri.__version__`` for the exact version string.\nVERSION = (0, 6, 1, 'final', 0)\n\n__author__ = 'Learning Equality'\n__email__ = '[email protected]'\n__version__ = str(get_version(VERSION))\n", "path": "kolibri/__init__.py"}], "after_files": [{"content": "from __future__ import absolute_import, print_function, unicode_literals\n\n# NB! This is not necessarily the version scheme we want, however having a good\n# tracking of releases once we start doing lots of pre-releases is essential.\nfrom .utils.version import get_version\n\n#: This may not be the exact version as it's subject to modification with\n#: get_version() - use ``kolibri.__version__`` for the exact version string.\nVERSION = (0, 6, 2, 'final', 0)\n\n__author__ = 'Learning Equality'\n__email__ = '[email protected]'\n__version__ = str(get_version(VERSION))\n", "path": "kolibri/__init__.py"}]} | 656 | 142 |
gh_patches_debug_362 | rasdani/github-patches | git_diff | numpy__numpy-3055 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
2to3 run `execfile` fixer
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setupegg.py`
Content:
```
1 #!/usr/bin/env python
2 """
3 A setup.py script to use setuptools, which gives egg goodness, etc.
4
5 This is used to build installers for OS X through bdist_mpkg.
6
7 Notes
8 -----
9 Using ``python setupegg.py install`` directly results in file permissions being
10 set wrong, with nose refusing to run any tests. To run the tests anyway, use::
11
12 >>> np.test(extra_argv=['--exe'])
13
14 """
15
16 import sys
17 from setuptools import setup
18
19 if sys.version_info[0] >= 3:
20 import imp
21 setupfile = imp.load_source('setupfile', 'setup.py')
22 setupfile.setup_package()
23 else:
24 execfile('setup.py')
25
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setupegg.py b/setupegg.py
--- a/setupegg.py
+++ b/setupegg.py
@@ -21,4 +21,4 @@
setupfile = imp.load_source('setupfile', 'setup.py')
setupfile.setup_package()
else:
- execfile('setup.py')
+ exec(compile(open('setup.py').read(), 'setup.py', 'exec'))
| {"golden_diff": "diff --git a/setupegg.py b/setupegg.py\n--- a/setupegg.py\n+++ b/setupegg.py\n@@ -21,4 +21,4 @@\n setupfile = imp.load_source('setupfile', 'setup.py')\n setupfile.setup_package()\n else:\n- execfile('setup.py')\n+ exec(compile(open('setup.py').read(), 'setup.py', 'exec'))\n", "issue": "2to3 run `execfile` fixer\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\"\"\"\nA setup.py script to use setuptools, which gives egg goodness, etc.\n\nThis is used to build installers for OS X through bdist_mpkg.\n\nNotes\n-----\nUsing ``python setupegg.py install`` directly results in file permissions being\nset wrong, with nose refusing to run any tests. To run the tests anyway, use::\n\n >>> np.test(extra_argv=['--exe'])\n\n\"\"\"\n\nimport sys\nfrom setuptools import setup\n\nif sys.version_info[0] >= 3:\n import imp\n setupfile = imp.load_source('setupfile', 'setup.py')\n setupfile.setup_package()\nelse:\n execfile('setup.py')\n", "path": "setupegg.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\"\"\"\nA setup.py script to use setuptools, which gives egg goodness, etc.\n\nThis is used to build installers for OS X through bdist_mpkg.\n\nNotes\n-----\nUsing ``python setupegg.py install`` directly results in file permissions being\nset wrong, with nose refusing to run any tests. To run the tests anyway, use::\n\n >>> np.test(extra_argv=['--exe'])\n\n\"\"\"\n\nimport sys\nfrom setuptools import setup\n\nif sys.version_info[0] >= 3:\n import imp\n setupfile = imp.load_source('setupfile', 'setup.py')\n setupfile.setup_package()\nelse:\n exec(compile(open('setup.py').read(), 'setup.py', 'exec'))\n", "path": "setupegg.py"}]} | 459 | 94 |
gh_patches_debug_2886 | rasdani/github-patches | git_diff | conda__conda-build-389 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
MD5 checking argument not passed to update_index
I was happy to see that there was a `-c` argument to `conda index` which forces it to use md5 hashes instead of file modification times. However, looks like `main_index.py` never passes that argument on to the `update_index()` function, i.e.,
``` python
...
update_index(path, verbose=(not args.quiet), force=args.force)
...
```
should actually be:
``` python
...
update_index(path, verbose=(not args.quiet), force=args.force, check_md5=args.check_md5)
...
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conda_build/main_index.py`
Content:
```
1 from __future__ import absolute_import, division, print_function
2
3 import argparse
4 import os
5 from locale import getpreferredencoding
6 from os.path import abspath
7
8 from conda.compat import PY3
9
10 from conda_build.index import update_index
11
12
13 def main():
14 p = argparse.ArgumentParser(
15 description="Update package index metadata files in given directories")
16
17 p.add_argument('dir',
18 help='Directory that contains an index to be updated.',
19 nargs='*',
20 default=[os.getcwd()])
21
22 p.add_argument('-c', "--check-md5",
23 action="store_true",
24 help="Use MD5 values instead of file modification times for\
25 determining if a package's metadata needs to be \
26 updated.")
27
28 p.add_argument('-f', "--force",
29 action="store_true",
30 help="force reading all files")
31
32 p.add_argument('-q', "--quiet",
33 action="store_true")
34
35 args = p.parse_args()
36
37 dir_paths = [abspath(path) for path in args.dir]
38 # Don't use byte strings in Python 2
39 if not PY3:
40 dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
41
42 for path in dir_paths:
43 update_index(path, verbose=(not args.quiet), force=args.force)
44
45
46 if __name__ == '__main__':
47 main()
48
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/conda_build/main_index.py b/conda_build/main_index.py
--- a/conda_build/main_index.py
+++ b/conda_build/main_index.py
@@ -40,7 +40,7 @@
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
- update_index(path, verbose=(not args.quiet), force=args.force)
+ update_index(path, verbose=(not args.quiet), force=args.force, check_md5=args.check_md5)
if __name__ == '__main__':
| {"golden_diff": "diff --git a/conda_build/main_index.py b/conda_build/main_index.py\n--- a/conda_build/main_index.py\n+++ b/conda_build/main_index.py\n@@ -40,7 +40,7 @@\n dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]\n \n for path in dir_paths:\n- update_index(path, verbose=(not args.quiet), force=args.force)\n+ update_index(path, verbose=(not args.quiet), force=args.force, check_md5=args.check_md5)\n \n \n if __name__ == '__main__':\n", "issue": "MD5 checking argument not passed to update_index\nI was happy to see that there was a `-c` argument to `conda index` which forces it to use md5 hashes instead of file modification times. However, looks like `main_index.py` never passes that argument on to the `update_index()` function, i.e., \n\n``` python\n...\nupdate_index(path, verbose=(not args.quiet), force=args.force)\n...\n```\n\nshould actually be:\n\n``` python\n...\nupdate_index(path, verbose=(not args.quiet), force=args.force, check_md5=args.check_md5)\n...\n```\n\n", "before_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nimport argparse\nimport os\nfrom locale import getpreferredencoding\nfrom os.path import abspath\n\nfrom conda.compat import PY3\n\nfrom conda_build.index import update_index\n\n\ndef main():\n p = argparse.ArgumentParser(\n description=\"Update package index metadata files in given directories\")\n\n p.add_argument('dir',\n help='Directory that contains an index to be updated.',\n nargs='*',\n default=[os.getcwd()])\n\n p.add_argument('-c', \"--check-md5\",\n action=\"store_true\",\n help=\"Use MD5 values instead of file modification times for\\\n determining if a package's metadata needs to be \\\n updated.\")\n\n p.add_argument('-f', \"--force\",\n action=\"store_true\",\n help=\"force reading all files\")\n\n p.add_argument('-q', \"--quiet\",\n action=\"store_true\")\n\n args = p.parse_args()\n\n dir_paths = [abspath(path) for path in args.dir]\n # Don't use byte strings in Python 2\n if not PY3:\n dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]\n\n for path in dir_paths:\n update_index(path, verbose=(not args.quiet), force=args.force)\n\n\nif __name__ == '__main__':\n main()\n", "path": "conda_build/main_index.py"}], "after_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nimport argparse\nimport os\nfrom locale import getpreferredencoding\nfrom os.path import abspath\n\nfrom conda.compat import PY3\n\nfrom conda_build.index import update_index\n\n\ndef main():\n p = argparse.ArgumentParser(\n description=\"Update package index metadata files in given directories\")\n\n p.add_argument('dir',\n help='Directory that contains an index to be updated.',\n nargs='*',\n default=[os.getcwd()])\n\n p.add_argument('-c', \"--check-md5\",\n action=\"store_true\",\n help=\"Use MD5 values instead of file modification times for\\\n determining if a package's metadata needs to be \\\n updated.\")\n\n p.add_argument('-f', \"--force\",\n action=\"store_true\",\n help=\"force reading all files\")\n\n p.add_argument('-q', \"--quiet\",\n action=\"store_true\")\n\n args = p.parse_args()\n\n dir_paths = [abspath(path) for path in args.dir]\n # Don't use byte strings in Python 2\n if not PY3:\n dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]\n\n for path in dir_paths:\n update_index(path, verbose=(not args.quiet), force=args.force, check_md5=args.check_md5)\n\n\nif __name__ == '__main__':\n main()\n", "path": 
"conda_build/main_index.py"}]} | 762 | 124 |
gh_patches_debug_6448 | rasdani/github-patches | git_diff | ipython__ipython-4915 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
greedy completer bug in terminal console
i'm back from finishing my thesis and i'm awed by the massive progress over the last few months. i'm particularly fond of the terminal 2-process frontend, which is super-useful to me. alas the greedy completer is not working properly in this mode. if i start a console session with greedy on, and then run
``` python
x = [0, 1, 2]
x[0].im<TAB>
```
it will autocomplete `imag`, but the next line will be
``` python
x[0]x[0].imag
```
which is obviously not right. this problem (rightfully) does not occur in the 1-process terminal or the qtconsole mode.
i remember this being an issue before in previous versions of ipython, but with the new structure i cannot see yet where this bug arises and how to solve it. best to ask an expert...
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `IPython/terminal/console/completer.py`
Content:
```
1 """Adapt readline completer interface to make ZMQ request.
2 """
3 # -*- coding: utf-8 -*-
4 import readline
5 try:
6 from queue import Empty # Py 3
7 except ImportError:
8 from Queue import Empty # Py 2
9
10 from IPython.config import Configurable
11 from IPython.utils.traitlets import Float
12
13 class ZMQCompleter(Configurable):
14 """Client-side completion machinery.
15
16 How it works: self.complete will be called multiple times, with
17 state=0,1,2,... When state=0 it should compute ALL the completion matches,
18 and then return them for each value of state."""
19
20 timeout = Float(5.0, config=True, help='timeout before completion abort')
21
22 def __init__(self, shell, client, config=None):
23 super(ZMQCompleter,self).__init__(config=config)
24
25 self.shell = shell
26 self.client = client
27 self.matches = []
28
29 def complete_request(self,text):
30 line = readline.get_line_buffer()
31 cursor_pos = readline.get_endidx()
32
33 # send completion request to kernel
34 # Give the kernel up to 0.5s to respond
35 msg_id = self.client.shell_channel.complete(text=text, line=line,
36 cursor_pos=cursor_pos)
37
38 msg = self.client.shell_channel.get_msg(timeout=self.timeout)
39 if msg['parent_header']['msg_id'] == msg_id:
40 return msg["content"]["matches"]
41 return []
42
43 def rlcomplete(self, text, state):
44 if state == 0:
45 try:
46 self.matches = self.complete_request(text)
47 except Empty:
48 #print('WARNING: Kernel timeout on tab completion.')
49 pass
50
51 try:
52 return self.matches[state]
53 except IndexError:
54 return None
55
56 def complete(self, text, line, cursor_pos=None):
57 return self.rlcomplete(text, 0)
58
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/IPython/terminal/console/completer.py b/IPython/terminal/console/completer.py
--- a/IPython/terminal/console/completer.py
+++ b/IPython/terminal/console/completer.py
@@ -8,9 +8,10 @@
from Queue import Empty # Py 2
from IPython.config import Configurable
+from IPython.core.completer import IPCompleter
from IPython.utils.traitlets import Float
-class ZMQCompleter(Configurable):
+class ZMQCompleter(IPCompleter):
"""Client-side completion machinery.
How it works: self.complete will be called multiple times, with
| {"golden_diff": "diff --git a/IPython/terminal/console/completer.py b/IPython/terminal/console/completer.py\n--- a/IPython/terminal/console/completer.py\n+++ b/IPython/terminal/console/completer.py\n@@ -8,9 +8,10 @@\n from Queue import Empty # Py 2\n \n from IPython.config import Configurable\n+from IPython.core.completer import IPCompleter\n from IPython.utils.traitlets import Float\n \n-class ZMQCompleter(Configurable):\n+class ZMQCompleter(IPCompleter):\n \"\"\"Client-side completion machinery.\n \n How it works: self.complete will be called multiple times, with\n", "issue": "greedy completer bug in terminal console\ni'm back from finishing my thesis and i'm awed by the massive progress over the last few months. i'm particularly fond of the terminal 2-process frontend, which is super-useful to me. alas the greedy completer is not working properly in this mode. if i start a console session with greedy on, and then run\n\n``` python\nx = [0, 1, 2]\nx[0].im<TAB>\n```\n\nit will autocomplete `imag`, but the next line will be\n\n``` python\nx[0]x[0].imag\n```\n\nwhich is obviously not right. this problem (rightfully) does not occur in the 1-process terminal or the qtconsole mode.\n\ni remember this being an issue before in previous versions of ipython, but with the new structure i cannot see yet where this bug arises and how to solve it. best to ask an expert...\n\n", "before_files": [{"content": "\"\"\"Adapt readline completer interface to make ZMQ request.\n\"\"\"\n# -*- coding: utf-8 -*-\nimport readline\ntry:\n from queue import Empty # Py 3\nexcept ImportError:\n from Queue import Empty # Py 2\n\nfrom IPython.config import Configurable\nfrom IPython.utils.traitlets import Float\n\nclass ZMQCompleter(Configurable):\n \"\"\"Client-side completion machinery.\n\n How it works: self.complete will be called multiple times, with\n state=0,1,2,... 
When state=0 it should compute ALL the completion matches,\n and then return them for each value of state.\"\"\"\n\n timeout = Float(5.0, config=True, help='timeout before completion abort')\n \n def __init__(self, shell, client, config=None):\n super(ZMQCompleter,self).__init__(config=config)\n\n self.shell = shell\n self.client = client\n self.matches = []\n \n def complete_request(self,text):\n line = readline.get_line_buffer()\n cursor_pos = readline.get_endidx()\n \n # send completion request to kernel\n # Give the kernel up to 0.5s to respond\n msg_id = self.client.shell_channel.complete(text=text, line=line,\n cursor_pos=cursor_pos)\n \n msg = self.client.shell_channel.get_msg(timeout=self.timeout)\n if msg['parent_header']['msg_id'] == msg_id:\n return msg[\"content\"][\"matches\"]\n return []\n \n def rlcomplete(self, text, state):\n if state == 0:\n try:\n self.matches = self.complete_request(text)\n except Empty:\n #print('WARNING: Kernel timeout on tab completion.')\n pass\n \n try:\n return self.matches[state]\n except IndexError:\n return None\n \n def complete(self, text, line, cursor_pos=None):\n return self.rlcomplete(text, 0)\n", "path": "IPython/terminal/console/completer.py"}], "after_files": [{"content": "\"\"\"Adapt readline completer interface to make ZMQ request.\n\"\"\"\n# -*- coding: utf-8 -*-\nimport readline\ntry:\n from queue import Empty # Py 3\nexcept ImportError:\n from Queue import Empty # Py 2\n\nfrom IPython.config import Configurable\nfrom IPython.core.completer import IPCompleter\nfrom IPython.utils.traitlets import Float\n\nclass ZMQCompleter(IPCompleter):\n \"\"\"Client-side completion machinery.\n\n How it works: self.complete will be called multiple times, with\n state=0,1,2,... When state=0 it should compute ALL the completion matches,\n and then return them for each value of state.\"\"\"\n\n timeout = Float(5.0, config=True, help='timeout before completion abort')\n \n def __init__(self, shell, client, config=None):\n super(ZMQCompleter,self).__init__(config=config)\n\n self.shell = shell\n self.client = client\n self.matches = []\n \n def complete_request(self,text):\n line = readline.get_line_buffer()\n cursor_pos = readline.get_endidx()\n \n # send completion request to kernel\n # Give the kernel up to 0.5s to respond\n msg_id = self.client.shell_channel.complete(text=text, line=line,\n cursor_pos=cursor_pos)\n \n msg = self.client.shell_channel.get_msg(timeout=self.timeout)\n if msg['parent_header']['msg_id'] == msg_id:\n return msg[\"content\"][\"matches\"]\n return []\n \n def rlcomplete(self, text, state):\n if state == 0:\n try:\n self.matches = self.complete_request(text)\n except Empty:\n #print('WARNING: Kernel timeout on tab completion.')\n pass\n \n try:\n return self.matches[state]\n except IndexError:\n return None\n \n def complete(self, text, line, cursor_pos=None):\n return self.rlcomplete(text, 0)\n", "path": "IPython/terminal/console/completer.py"}]} | 970 | 143 |
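The `rlcomplete(text, state)` method in that row follows readline's completer protocol: the function is called repeatedly, computes all matches at `state == 0`, and then returns one match per call until it returns `None`. A minimal self-contained completer using the same convention (no ZMQ involved; the switch to `IPCompleter` as base class in the patch presumably lets the greedy matches be post-processed the same way as in the single-process shell):

```python
import readline

WORDS = ["imag", "imagine", "import", "index"]


class PrefixCompleter:
    def __init__(self, words):
        self.words = words
        self.matches = []

    def complete(self, text, state):
        if state == 0:
            # Compute every match once, then hand them out one per state value.
            self.matches = [w for w in self.words if w.startswith(text)]
        try:
            return self.matches[state]
        except IndexError:
            return None  # signals "no more matches" to readline


readline.set_completer(PrefixCompleter(WORDS).complete)
readline.parse_and_bind("tab: complete")
```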
gh_patches_debug_65242 | rasdani/github-patches | git_diff | streamlink__streamlink-2229 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
powerapp.py No plugin can handle URL
## Error Report
- [X] This is a bug report and I have read the Posting Guidelines.
### Description
powerapp.com.tr should be able to play the stations
### Expected / actual behavior
Inserting the page in the streamlink does not play the stream. About my web browser Firefox I see the picture and hear the sound synonymous
### Reproduction steps / Explicit stream URLs to test
1.www.powerapp.com.tr/tvs/powertv/
streamlink http://www.powerapp.com.tr/tvs/powertv best
### log output
> streamlink http://www.powerapp.com.tr/tvs/powertv best
error: No plugin can handle URL: http://www.powerapp.com.tr/tvs/powertv
> error: No plugin can handle URL: http://www.powerapp.com.tr/tvs/powertv
error:: The term "error:" was not used as the name of a cmdlet, a function, a script file, or a
recognized executable program. Check the spelling of the name, or if the path is correct (provided
contain) and repeat the process.
In line: 1 character: 1
+ error: No plugin can handle URL: http://www.powerapp.com.tr/tvs/power ...
+~~~~~
+ CategoryInfo: ObjectNotFound: (error :: String) [], CommandNotFoundException
+ FullyQualifiedErrorId: CommandNotFoundException
### Additional comments, screenshots, etc.
Screenshot
https://i.ibb.co/g99nXC0/france.jpg
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/streamlink/plugins/powerapp.py`
Content:
```
1 from __future__ import print_function
2 import re
3
4 from streamlink.plugin import Plugin
5 from streamlink.plugin.api import validate
6 from streamlink.stream import HLSStream
7
8
9 class PowerApp(Plugin):
10 url_re = re.compile(r"https?://(?:www.)?powerapp.com.tr/tv/(\w+)")
11 api_url = "http://api.powergroup.com.tr/Channels/{0}/?appRef=iPowerWeb&apiVersion=11"
12 api_schema = validate.Schema(validate.all({
13 "errorCode": 0,
14 "response": {
15 "channel_stream_url": validate.url()
16 }
17 }, validate.get("response")))
18
19 @classmethod
20 def can_handle_url(cls, url):
21 return cls.url_re.match(url) is not None
22
23 def _get_streams(self):
24 channel = self.url_re.match(self.url).group(1)
25
26 res = self.session.http.get(self.api_url.format(channel))
27 data = self.session.http.json(res, schema=self.api_schema)
28
29 return HLSStream.parse_variant_playlist(self.session, data["channel_stream_url"])
30
31
32 __plugin__ = PowerApp
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/streamlink/plugins/powerapp.py b/src/streamlink/plugins/powerapp.py
--- a/src/streamlink/plugins/powerapp.py
+++ b/src/streamlink/plugins/powerapp.py
@@ -7,7 +7,7 @@
class PowerApp(Plugin):
- url_re = re.compile(r"https?://(?:www.)?powerapp.com.tr/tv/(\w+)")
+ url_re = re.compile(r"https?://(?:www.)?powerapp.com.tr/tvs?/(\w+)")
api_url = "http://api.powergroup.com.tr/Channels/{0}/?appRef=iPowerWeb&apiVersion=11"
api_schema = validate.Schema(validate.all({
"errorCode": 0,
| {"golden_diff": "diff --git a/src/streamlink/plugins/powerapp.py b/src/streamlink/plugins/powerapp.py\n--- a/src/streamlink/plugins/powerapp.py\n+++ b/src/streamlink/plugins/powerapp.py\n@@ -7,7 +7,7 @@\n \n \n class PowerApp(Plugin):\n- url_re = re.compile(r\"https?://(?:www.)?powerapp.com.tr/tv/(\\w+)\")\n+ url_re = re.compile(r\"https?://(?:www.)?powerapp.com.tr/tvs?/(\\w+)\")\n api_url = \"http://api.powergroup.com.tr/Channels/{0}/?appRef=iPowerWeb&apiVersion=11\"\n api_schema = validate.Schema(validate.all({\n \"errorCode\": 0,\n", "issue": "powerapp.py No plugin can handle URL\n## Error Report\r\n\r\n- [X] This is a bug report and I have read the Posting Guidelines.\r\n\r\n### Description\r\n\r\n powerapp.com.tr should be able to play the stations\r\n\r\n\r\n### Expected / actual behavior\r\n\r\nInserting the page in the streamlink does not play the stream. About my web browser Firefox I see the picture and hear the sound synonymous\r\n\r\n### Reproduction steps / Explicit stream URLs to test\r\n\r\n1.www.powerapp.com.tr/tvs/powertv/\r\n\r\nstreamlink http://www.powerapp.com.tr/tvs/powertv best\r\n\r\n### log output\r\n\r\n> streamlink http://www.powerapp.com.tr/tvs/powertv best\r\nerror: No plugin can handle URL: http://www.powerapp.com.tr/tvs/powertv\r\n> error: No plugin can handle URL: http://www.powerapp.com.tr/tvs/powertv\r\nerror:: The term \"error:\" was not used as the name of a cmdlet, a function, a script file, or a\r\nrecognized executable program. Check the spelling of the name, or if the path is correct (provided\r\ncontain) and repeat the process.\r\nIn line: 1 character: 1\r\n+ error: No plugin can handle URL: http://www.powerapp.com.tr/tvs/power ...\r\n+~~~~~\r\n + CategoryInfo: ObjectNotFound: (error :: String) [], CommandNotFoundException\r\n + FullyQualifiedErrorId: CommandNotFoundException\r\n\r\n\r\n### Additional comments, screenshots, etc.\r\n\r\n Screenshot\r\n\r\nhttps://i.ibb.co/g99nXC0/france.jpg\n", "before_files": [{"content": "from __future__ import print_function\nimport re\n\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream import HLSStream\n\n\nclass PowerApp(Plugin):\n url_re = re.compile(r\"https?://(?:www.)?powerapp.com.tr/tv/(\\w+)\")\n api_url = \"http://api.powergroup.com.tr/Channels/{0}/?appRef=iPowerWeb&apiVersion=11\"\n api_schema = validate.Schema(validate.all({\n \"errorCode\": 0,\n \"response\": {\n \"channel_stream_url\": validate.url()\n }\n }, validate.get(\"response\")))\n\n @classmethod\n def can_handle_url(cls, url):\n return cls.url_re.match(url) is not None\n\n def _get_streams(self):\n channel = self.url_re.match(self.url).group(1)\n\n res = self.session.http.get(self.api_url.format(channel))\n data = self.session.http.json(res, schema=self.api_schema)\n\n return HLSStream.parse_variant_playlist(self.session, data[\"channel_stream_url\"])\n\n\n__plugin__ = PowerApp\n", "path": "src/streamlink/plugins/powerapp.py"}], "after_files": [{"content": "from __future__ import print_function\nimport re\n\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream import HLSStream\n\n\nclass PowerApp(Plugin):\n url_re = re.compile(r\"https?://(?:www.)?powerapp.com.tr/tvs?/(\\w+)\")\n api_url = \"http://api.powergroup.com.tr/Channels/{0}/?appRef=iPowerWeb&apiVersion=11\"\n api_schema = validate.Schema(validate.all({\n \"errorCode\": 0,\n \"response\": {\n \"channel_stream_url\": validate.url()\n }\n }, validate.get(\"response\")))\n\n 
@classmethod\n def can_handle_url(cls, url):\n return cls.url_re.match(url) is not None\n\n def _get_streams(self):\n channel = self.url_re.match(self.url).group(1)\n\n res = self.session.http.get(self.api_url.format(channel))\n data = self.session.http.json(res, schema=self.api_schema)\n\n return HLSStream.parse_variant_playlist(self.session, data[\"channel_stream_url\"])\n\n\n__plugin__ = PowerApp\n", "path": "src/streamlink/plugins/powerapp.py"}]} | 888 | 156 |
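The one-character regex change in that diff (`/tv/` to `/tvs?/`) is what makes both URL forms match. A quick standalone check of the updated pattern:

```python
import re

url_re = re.compile(r"https?://(?:www.)?powerapp.com.tr/tvs?/(\w+)")

for url in ("http://www.powerapp.com.tr/tv/powertv",
            "http://www.powerapp.com.tr/tvs/powertv"):
    m = url_re.match(url)
    print(url, "->", m.group(1) if m else "no match")

# Both URLs now capture "powertv"; with the old pattern the /tvs/ form did not match.
```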
gh_patches_debug_10436 | rasdani/github-patches | git_diff | privacyidea__privacyidea-1480 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Migrate EMail, TAN and VASCO token to Python 3
This slightly changes behavior: The VASCO token initialization now throws a ``ParameterError`` if the secret couldn't be decoded.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `privacyidea/lib/authcache.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # 2017-08-11 Cornelius Kölbel <[email protected]>
4 # initial writeup
5 #
6 # License: AGPLv3
7 # contact: http://www.privacyidea.org
8 #
9 # This code is free software; you can redistribute it and/or
10 # modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
11 # License as published by the Free Software Foundation; either
12 # version 3 of the License, or any later version.
13 #
14 # This code is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNE7SS FOR A PARTICULAR PURPOSE. See the
17 # GNU AFFERO GENERAL PUBLIC LICENSE for more details.
18 #
19 # You should have received a copy of the GNU Affero General Public
20 # License along with this program. If not, see <http://www.gnu.org/licenses/>.
21 #
22 import binascii
23
24 from ..models import AuthCache, db
25 from sqlalchemy import and_
26 from privacyidea.lib.crypto import hash
27 import datetime
28 import logging
29
30 log = logging.getLogger(__name__)
31
32
33 def _hash_password(password):
34 return binascii.hexlify(hash(password, seed=""))
35
36
37 def add_to_cache(username, realm, resolver, password):
38 # Can not store timezone aware timestamps!
39 first_auth = datetime.datetime.utcnow()
40 auth_hash = _hash_password(password)
41 record = AuthCache(username, realm, resolver, auth_hash, first_auth, first_auth)
42 log.debug('Adding record to auth cache: ({!r}, {!r}, {!r}, {!r})'.format(
43 username, realm, resolver, auth_hash))
44 r = record.save()
45 return r
46
47
48 def update_cache_last_auth(cache_id):
49 last_auth = datetime.datetime.utcnow()
50 AuthCache.query.filter(
51 AuthCache.id == cache_id).update({"last_auth": last_auth})
52 db.session.commit()
53
54
55 def delete_from_cache(username, realm, resolver, password):
56 r = db.session.query(AuthCache).filter(AuthCache.username == username,
57 AuthCache.realm == realm,
58 AuthCache.resolver == resolver,
59 AuthCache.authentication ==
60 _hash_password(password)).delete()
61 db.session.commit()
62 return r
63
64
65 def verify_in_cache(username, realm, resolver, password,
66 first_auth = None,
67 last_auth = None):
68 """
69 Verify if the given credentials are cached and if the time is correct.
70
71 :param username:
72 :param realm:
73 :param resolver:
74 :param password:
75 :param first_auth: The timestamp when the entry was first written to the
76 cache. Only find newer entries
77 :param last_auth: The timestamp when the entry was last successfully
78 verified. Only find newer entries
79 :return:
80 """
81 conditions = []
82 conditions.append(AuthCache.username == username)
83 conditions.append(AuthCache.realm == realm)
84 conditions.append(AuthCache.resolver == resolver)
85 auth_hash = _hash_password(password)
86 conditions.append(AuthCache.authentication == auth_hash)
87
88 if first_auth:
89 conditions.append(AuthCache.first_auth > first_auth)
90 if last_auth:
91 conditions.append(AuthCache.last_auth > last_auth)
92
93 filter_condition = and_(*conditions)
94 r = AuthCache.query.filter(filter_condition).first()
95 result = bool(r)
96
97 if result:
98 # Update the last_auth
99 update_cache_last_auth(r.id)
100
101 else:
102 # Delete older entries
103 delete_from_cache(username, realm, resolver, password)
104
105 return result
106
107
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/privacyidea/lib/authcache.py b/privacyidea/lib/authcache.py
--- a/privacyidea/lib/authcache.py
+++ b/privacyidea/lib/authcache.py
@@ -62,6 +62,21 @@
return r
+def cleanup(minutes):
+ """
+ Will delete all authcache entries, where last_auth column is older than
+ the given minutes.
+
+ :param minutes: Age of the last_authentication in minutes
+ :type minutes: int
+ :return:
+ """
+ cleanuptime = datetime.datetime.utcnow() - datetime.timedelta(minutes=minutes)
+ r = db.session.query(AuthCache).filter(AuthCache.last_auth < cleanuptime).delete()
+ db.session.commit()
+ return r
+
+
def verify_in_cache(username, realm, resolver, password,
first_auth = None,
last_auth = None):
| {"golden_diff": "diff --git a/privacyidea/lib/authcache.py b/privacyidea/lib/authcache.py\n--- a/privacyidea/lib/authcache.py\n+++ b/privacyidea/lib/authcache.py\n@@ -62,6 +62,21 @@\n return r\n \n \n+def cleanup(minutes):\n+ \"\"\"\n+ Will delete all authcache entries, where last_auth column is older than\n+ the given minutes.\n+\n+ :param minutes: Age of the last_authentication in minutes\n+ :type minutes: int\n+ :return:\n+ \"\"\"\n+ cleanuptime = datetime.datetime.utcnow() - datetime.timedelta(minutes=minutes)\n+ r = db.session.query(AuthCache).filter(AuthCache.last_auth < cleanuptime).delete()\n+ db.session.commit()\n+ return r\n+\n+\n def verify_in_cache(username, realm, resolver, password,\n first_auth = None,\n last_auth = None):\n", "issue": "Migrate EMail, TAN and VASCO token to Python 3\nThis slightly changes behavior: The VASCO token initialization now throws a ``ParameterError`` if the secret couldn't be decoded.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# 2017-08-11 Cornelius K\u00f6lbel <[email protected]>\n# initial writeup\n#\n# License: AGPLv3\n# contact: http://www.privacyidea.org\n#\n# This code is free software; you can redistribute it and/or\n# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE\n# License as published by the Free Software Foundation; either\n# version 3 of the License, or any later version.\n#\n# This code is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNE7SS FOR A PARTICULAR PURPOSE. See the\n# GNU AFFERO GENERAL PUBLIC LICENSE for more details.\n#\n# You should have received a copy of the GNU Affero General Public\n# License along with this program. If not, see <http://www.gnu.org/licenses/>.\n#\nimport binascii\n\nfrom ..models import AuthCache, db\nfrom sqlalchemy import and_\nfrom privacyidea.lib.crypto import hash\nimport datetime\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\ndef _hash_password(password):\n return binascii.hexlify(hash(password, seed=\"\"))\n\n\ndef add_to_cache(username, realm, resolver, password):\n # Can not store timezone aware timestamps!\n first_auth = datetime.datetime.utcnow()\n auth_hash = _hash_password(password)\n record = AuthCache(username, realm, resolver, auth_hash, first_auth, first_auth)\n log.debug('Adding record to auth cache: ({!r}, {!r}, {!r}, {!r})'.format(\n username, realm, resolver, auth_hash))\n r = record.save()\n return r\n\n\ndef update_cache_last_auth(cache_id):\n last_auth = datetime.datetime.utcnow()\n AuthCache.query.filter(\n AuthCache.id == cache_id).update({\"last_auth\": last_auth})\n db.session.commit()\n\n\ndef delete_from_cache(username, realm, resolver, password):\n r = db.session.query(AuthCache).filter(AuthCache.username == username,\n AuthCache.realm == realm,\n AuthCache.resolver == resolver,\n AuthCache.authentication ==\n _hash_password(password)).delete()\n db.session.commit()\n return r\n\n\ndef verify_in_cache(username, realm, resolver, password,\n first_auth = None,\n last_auth = None):\n \"\"\"\n Verify if the given credentials are cached and if the time is correct.\n \n :param username: \n :param realm: \n :param resolver: \n :param password: \n :param first_auth: The timestamp when the entry was first written to the \n cache. Only find newer entries \n :param last_auth: The timestamp when the entry was last successfully \n verified. 
Only find newer entries \n :return: \n \"\"\"\n conditions = []\n conditions.append(AuthCache.username == username)\n conditions.append(AuthCache.realm == realm)\n conditions.append(AuthCache.resolver == resolver)\n auth_hash = _hash_password(password)\n conditions.append(AuthCache.authentication == auth_hash)\n\n if first_auth:\n conditions.append(AuthCache.first_auth > first_auth)\n if last_auth:\n conditions.append(AuthCache.last_auth > last_auth)\n\n filter_condition = and_(*conditions)\n r = AuthCache.query.filter(filter_condition).first()\n result = bool(r)\n\n if result:\n # Update the last_auth\n update_cache_last_auth(r.id)\n\n else:\n # Delete older entries\n delete_from_cache(username, realm, resolver, password)\n\n return result\n\n", "path": "privacyidea/lib/authcache.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# 2017-08-11 Cornelius K\u00f6lbel <[email protected]>\n# initial writeup\n#\n# License: AGPLv3\n# contact: http://www.privacyidea.org\n#\n# This code is free software; you can redistribute it and/or\n# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE\n# License as published by the Free Software Foundation; either\n# version 3 of the License, or any later version.\n#\n# This code is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNE7SS FOR A PARTICULAR PURPOSE. See the\n# GNU AFFERO GENERAL PUBLIC LICENSE for more details.\n#\n# You should have received a copy of the GNU Affero General Public\n# License along with this program. If not, see <http://www.gnu.org/licenses/>.\n#\nimport binascii\n\nfrom ..models import AuthCache, db\nfrom sqlalchemy import and_\nfrom privacyidea.lib.crypto import hash\nimport datetime\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\ndef _hash_password(password):\n return binascii.hexlify(hash(password, seed=\"\"))\n\n\ndef add_to_cache(username, realm, resolver, password):\n # Can not store timezone aware timestamps!\n first_auth = datetime.datetime.utcnow()\n auth_hash = _hash_password(password)\n record = AuthCache(username, realm, resolver, auth_hash, first_auth, first_auth)\n log.debug('Adding record to auth cache: ({!r}, {!r}, {!r}, {!r})'.format(\n username, realm, resolver, auth_hash))\n r = record.save()\n return r\n\n\ndef update_cache_last_auth(cache_id):\n last_auth = datetime.datetime.utcnow()\n AuthCache.query.filter(\n AuthCache.id == cache_id).update({\"last_auth\": last_auth})\n db.session.commit()\n\n\ndef delete_from_cache(username, realm, resolver, password):\n r = db.session.query(AuthCache).filter(AuthCache.username == username,\n AuthCache.realm == realm,\n AuthCache.resolver == resolver,\n AuthCache.authentication ==\n _hash_password(password)).delete()\n db.session.commit()\n return r\n\n\ndef cleanup(minutes):\n \"\"\"\n Will delete all authcache entries, where last_auth column is older than\n the given minutes.\n\n :param minutes: Age of the last_authentication in minutes\n :type minutes: int\n :return:\n \"\"\"\n cleanuptime = datetime.datetime.utcnow() - datetime.timedelta(minutes=minutes)\n r = db.session.query(AuthCache).filter(AuthCache.last_auth < cleanuptime).delete()\n db.session.commit()\n return r\n\n\ndef verify_in_cache(username, realm, resolver, password,\n first_auth = None,\n last_auth = None):\n \"\"\"\n Verify if the given credentials are cached and if the time is correct.\n \n :param username: \n :param realm: \n :param resolver: \n :param password: \n :param 
first_auth: The timestamp when the entry was first written to the \n cache. Only find newer entries \n :param last_auth: The timestamp when the entry was last successfully \n verified. Only find newer entries \n :return: \n \"\"\"\n conditions = []\n conditions.append(AuthCache.username == username)\n conditions.append(AuthCache.realm == realm)\n conditions.append(AuthCache.resolver == resolver)\n auth_hash = _hash_password(password)\n conditions.append(AuthCache.authentication == auth_hash)\n\n if first_auth:\n conditions.append(AuthCache.first_auth > first_auth)\n if last_auth:\n conditions.append(AuthCache.last_auth > last_auth)\n\n filter_condition = and_(*conditions)\n r = AuthCache.query.filter(filter_condition).first()\n result = bool(r)\n\n if result:\n # Update the last_auth\n update_cache_last_auth(r.id)\n\n else:\n # Delete older entries\n delete_from_cache(username, realm, resolver, password)\n\n return result\n\n", "path": "privacyidea/lib/authcache.py"}]} | 1,295 | 194 |
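The `cleanup(minutes)` helper added in that diff follows a common SQLAlchemy pattern: compute a UTC cutoff and bulk-delete rows older than it. A generic, self-contained sketch of the same pattern using plain SQLAlchemy 1.4+ (table and column names are invented, not privacyIDEA's models):

```python
import datetime

from sqlalchemy import Column, DateTime, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class CacheEntry(Base):
    __tablename__ = "cache_entry"
    id = Column(Integer, primary_key=True)
    username = Column(String(64))
    last_auth = Column(DateTime)


def cleanup(session, minutes):
    """Delete cache rows whose last_auth is older than `minutes` minutes."""
    cutoff = datetime.datetime.utcnow() - datetime.timedelta(minutes=minutes)
    deleted = session.query(CacheEntry).filter(CacheEntry.last_auth < cutoff).delete()
    session.commit()
    return deleted


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(CacheEntry(username="alice",
                           last_auth=datetime.datetime.utcnow() - datetime.timedelta(hours=2)))
    session.commit()
    print(cleanup(session, minutes=60))  # 1 row removed
```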
gh_patches_debug_17943 | rasdani/github-patches | git_diff | koxudaxi__datamodel-code-generator-1477 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Wrong parent class for pydantic V2 root models
**Describe the bug**
Generator uses `pydantic.BaseModel` as parent class for root model instead of `pydantic.RootModel`
Example schema (`custom_id.yaml`):
```yaml
openapi: 3.0.0
components:
schemas:
CustomId:
description: My custom ID
type: string
format: uuid
```
Used commandline:
```
$ datamodel-codegen --input custom_id.yaml --output-model-type pydantic_v2.BaseModel --output model.py
```
Contents of `model.py`:
```python
from __future__ import annotations
from uuid import UUID
from pydantic import BaseModel, Field
class CustomId(BaseModel):
root: UUID = Field(..., description='My custom ID')
```
**Expected behavior**
```python
from __future__ import annotations
from uuid import UUID
from pydantic import RootModel, Field
class CustomId(RootModel):
root: UUID = Field(..., description='My custom ID')
```
**Version:**
- OS: [e.g. iOS]
- Python version: 3.10.8
- datamodel-code-generator version: 0.21.1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `datamodel_code_generator/model/pydantic_v2/root_model.py`
Content:
```
1 from __future__ import annotations
2
3 from typing import ClassVar
4
5 from datamodel_code_generator.model.pydantic_v2.base_model import BaseModel
6
7
8 class RootModel(BaseModel):
9 TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic_v2/RootModel.jinja2'
10 BASE_CLASS: ClassVar[str] = 'pydantic.RootModel'
11
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/datamodel_code_generator/model/pydantic_v2/root_model.py b/datamodel_code_generator/model/pydantic_v2/root_model.py
--- a/datamodel_code_generator/model/pydantic_v2/root_model.py
+++ b/datamodel_code_generator/model/pydantic_v2/root_model.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import ClassVar
+from typing import Any, ClassVar
from datamodel_code_generator.model.pydantic_v2.base_model import BaseModel
@@ -8,3 +8,14 @@
class RootModel(BaseModel):
TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic_v2/RootModel.jinja2'
BASE_CLASS: ClassVar[str] = 'pydantic.RootModel'
+
+ def __init__(
+ self,
+ **kwargs: Any,
+ ) -> None:
+ # Remove custom_base_class for Pydantic V2 models; behaviour is different from Pydantic V1 as it will not
+ # be treated as a root model. custom_base_class cannot both implement BaseModel and RootModel!
+ if 'custom_base_class' in kwargs:
+ kwargs.pop('custom_base_class')
+
+ super().__init__(**kwargs)
| {"golden_diff": "diff --git a/datamodel_code_generator/model/pydantic_v2/root_model.py b/datamodel_code_generator/model/pydantic_v2/root_model.py\n--- a/datamodel_code_generator/model/pydantic_v2/root_model.py\n+++ b/datamodel_code_generator/model/pydantic_v2/root_model.py\n@@ -1,6 +1,6 @@\n from __future__ import annotations\n \n-from typing import ClassVar\n+from typing import Any, ClassVar\n \n from datamodel_code_generator.model.pydantic_v2.base_model import BaseModel\n \n@@ -8,3 +8,14 @@\n class RootModel(BaseModel):\n TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic_v2/RootModel.jinja2'\n BASE_CLASS: ClassVar[str] = 'pydantic.RootModel'\n+\n+ def __init__(\n+ self,\n+ **kwargs: Any,\n+ ) -> None:\n+ # Remove custom_base_class for Pydantic V2 models; behaviour is different from Pydantic V1 as it will not\n+ # be treated as a root model. custom_base_class cannot both implement BaseModel and RootModel!\n+ if 'custom_base_class' in kwargs:\n+ kwargs.pop('custom_base_class')\n+\n+ super().__init__(**kwargs)\n", "issue": "Wrong parent class for pydantic V2 root models \n**Describe the bug**\r\nGenerator uses `pydantic.BaseModel` as parent class for root model instead of `pydantic.RootModel`\r\n\r\nExample schema (`custom_id.yaml`):\r\n```yaml\r\nopenapi: 3.0.0\r\ncomponents:\r\n schemas:\r\n CustomId:\r\n description: My custom ID\r\n type: string\r\n format: uuid\r\n```\r\nUsed commandline:\r\n```\r\n$ datamodel-codegen --input custom_id.yaml --output-model-type pydantic_v2.BaseModel --output model.py\r\n```\r\nContents of `model.py`:\r\n```python\r\nfrom __future__ import annotations\r\n\r\nfrom uuid import UUID\r\n\r\nfrom pydantic import BaseModel, Field\r\n\r\n\r\nclass CustomId(BaseModel):\r\n root: UUID = Field(..., description='My custom ID')\r\n```\r\n\r\n**Expected behavior**\r\n```python\r\nfrom __future__ import annotations\r\n\r\nfrom uuid import UUID\r\n\r\nfrom pydantic import RootModel, Field\r\n\r\n\r\nclass CustomId(RootModel):\r\n root: UUID = Field(..., description='My custom ID')\r\n```\r\n\r\n**Version:**\r\n - OS: [e.g. iOS]\r\n - Python version: 3.10.8\r\n - datamodel-code-generator version: 0.21.1\r\n\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom typing import ClassVar\n\nfrom datamodel_code_generator.model.pydantic_v2.base_model import BaseModel\n\n\nclass RootModel(BaseModel):\n TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic_v2/RootModel.jinja2'\n BASE_CLASS: ClassVar[str] = 'pydantic.RootModel'\n", "path": "datamodel_code_generator/model/pydantic_v2/root_model.py"}], "after_files": [{"content": "from __future__ import annotations\n\nfrom typing import Any, ClassVar\n\nfrom datamodel_code_generator.model.pydantic_v2.base_model import BaseModel\n\n\nclass RootModel(BaseModel):\n TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic_v2/RootModel.jinja2'\n BASE_CLASS: ClassVar[str] = 'pydantic.RootModel'\n\n def __init__(\n self,\n **kwargs: Any,\n ) -> None:\n # Remove custom_base_class for Pydantic V2 models; behaviour is different from Pydantic V1 as it will not\n # be treated as a root model. custom_base_class cannot both implement BaseModel and RootModel!\n if 'custom_base_class' in kwargs:\n kwargs.pop('custom_base_class')\n\n super().__init__(**kwargs)\n", "path": "datamodel_code_generator/model/pydantic_v2/root_model.py"}]} | 629 | 272 |
gh_patches_debug_13291 | rasdani/github-patches | git_diff | sunpy__sunpy-2956 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Improve our warnings in line with PEP 565
See astropy/astropy#8465
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sunpy/util/exceptions.py`
Content:
```
1 # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 """
3 This module contains errors/exceptions and warnings of general use for
4 sunpy. Exceptions that are specific to a given subpackage should *not*
5 be here, but rather in the particular subpackage.
6 """
7
8 class SunpyWarning(Warning):
9 """
10 The base warning class from which all Sunpy warnings should inherit.
11 """
12
13
14 class SunpyUserWarning(UserWarning, SunpyWarning):
15 """
16 The primary warning class for Sunpy.
17
18 Use this if you do not need a specific sub-class.
19 """
20
21
22 class SunpyDeprecationWarning(SunpyWarning):
23 """
24 A warning class to indicate a deprecated feature.
25 """
26
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sunpy/util/exceptions.py b/sunpy/util/exceptions.py
--- a/sunpy/util/exceptions.py
+++ b/sunpy/util/exceptions.py
@@ -4,6 +4,8 @@
sunpy. Exceptions that are specific to a given subpackage should *not*
be here, but rather in the particular subpackage.
"""
+import sys
+
class SunpyWarning(Warning):
"""
@@ -19,7 +21,11 @@
"""
-class SunpyDeprecationWarning(SunpyWarning):
+# For PEP 565 (https://www.python.org/dev/peps/pep-0565/) compliance.
+DeprecationClass = DeprecationWarning if sys.version_info >= (3, 7) else FutureWarning
+
+
+class SunpyDeprecationWarning(DeprecationClass, SunpyWarning):
"""
A warning class to indicate a deprecated feature.
"""
| {"golden_diff": "diff --git a/sunpy/util/exceptions.py b/sunpy/util/exceptions.py\n--- a/sunpy/util/exceptions.py\n+++ b/sunpy/util/exceptions.py\n@@ -4,6 +4,8 @@\n sunpy. Exceptions that are specific to a given subpackage should *not*\n be here, but rather in the particular subpackage.\n \"\"\"\n+import sys\n+\n \n class SunpyWarning(Warning):\n \"\"\"\n@@ -19,7 +21,11 @@\n \"\"\"\n \n \n-class SunpyDeprecationWarning(SunpyWarning):\n+# For PEP 565 (https://www.python.org/dev/peps/pep-0565/) compliance.\n+DeprecationClass = DeprecationWarning if sys.version_info >= (3, 7) else FutureWarning\n+\n+\n+class SunpyDeprecationWarning(DeprecationClass, SunpyWarning):\n \"\"\"\n A warning class to indicate a deprecated feature.\n \"\"\"\n", "issue": "Improve our warnings in line with PEP 565\nSee astropy/astropy#8465\n", "before_files": [{"content": "# Licensed under a 3-clause BSD style license - see LICENSE.rst\n\"\"\"\nThis module contains errors/exceptions and warnings of general use for\nsunpy. Exceptions that are specific to a given subpackage should *not*\nbe here, but rather in the particular subpackage.\n\"\"\"\n\nclass SunpyWarning(Warning):\n \"\"\"\n The base warning class from which all Sunpy warnings should inherit.\n \"\"\"\n\n\nclass SunpyUserWarning(UserWarning, SunpyWarning):\n \"\"\"\n The primary warning class for Sunpy.\n\n Use this if you do not need a specific sub-class.\n \"\"\"\n\n\nclass SunpyDeprecationWarning(SunpyWarning):\n \"\"\"\n A warning class to indicate a deprecated feature.\n \"\"\"\n", "path": "sunpy/util/exceptions.py"}], "after_files": [{"content": "# Licensed under a 3-clause BSD style license - see LICENSE.rst\n\"\"\"\nThis module contains errors/exceptions and warnings of general use for\nsunpy. Exceptions that are specific to a given subpackage should *not*\nbe here, but rather in the particular subpackage.\n\"\"\"\nimport sys\n\n\nclass SunpyWarning(Warning):\n \"\"\"\n The base warning class from which all Sunpy warnings should inherit.\n \"\"\"\n\n\nclass SunpyUserWarning(UserWarning, SunpyWarning):\n \"\"\"\n The primary warning class for Sunpy.\n\n Use this if you do not need a specific sub-class.\n \"\"\"\n\n\n# For PEP 565 (https://www.python.org/dev/peps/pep-0565/) compliance.\nDeprecationClass = DeprecationWarning if sys.version_info >= (3, 7) else FutureWarning\n\n\nclass SunpyDeprecationWarning(DeprecationClass, SunpyWarning):\n \"\"\"\n A warning class to indicate a deprecated feature.\n \"\"\"\n", "path": "sunpy/util/exceptions.py"}]} | 483 | 209 |
gh_patches_debug_20057 | rasdani/github-patches | git_diff | svthalia__concrexit-1832 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Sales API uses TokenHasScopeForMethod but no scopes per method are provided
### Describe the change
The sales API uses some IsAuthenticatedOrTokenHasScopeForMethod permission classes, but no scopes per method are provided
### Motivation
Simpler code
### Current implementation
See above
### Suggested implementation
Use IsAuthenticatedOrTokenHasScope, or specify scope per method
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/sales/api/v2/admin/views.py`
Content:
```
1 from oauth2_provider.contrib.rest_framework import IsAuthenticatedOrTokenHasScope
2 from rest_framework import filters, exceptions
3 from rest_framework.exceptions import PermissionDenied
4 from rest_framework.generics import get_object_or_404
5 from rest_framework.permissions import DjangoModelPermissions
6
7 from sales.api.v2.admin.permissions import IsManager
8 from sales.api.v2.admin.serializers.order import OrderSerializer, OrderListSerializer
9 from sales.api.v2.admin.serializers.shift import ShiftSerializer
10 from sales.models.order import Order
11 from sales.models.shift import Shift
12 from thaliawebsite.api.v2.admin import (
13 AdminCreateAPIView,
14 AdminListAPIView,
15 AdminRetrieveAPIView,
16 AdminUpdateAPIView,
17 AdminDestroyAPIView,
18 )
19 from thaliawebsite.api.v2.permissions import IsAuthenticatedOrTokenHasScopeForMethod
20
21
22 class ShiftListView(AdminListAPIView):
23 """Returns an overview of all sales shifts."""
24
25 serializer_class = ShiftSerializer
26 queryset = Shift.objects.all()
27 filter_backends = (
28 filters.OrderingFilter,
29 filters.SearchFilter,
30 )
31 ordering_fields = ("start", "end")
32 permission_classes = [IsAuthenticatedOrTokenHasScope, DjangoModelPermissions]
33 required_scopes = ["sales:admin"]
34
35 def get_queryset(self):
36 queryset = super().get_queryset().filter(locked=False)
37
38 if not self.request.member:
39 queryset = queryset.none()
40 elif not self.request.member.has_perm("sales.override_manager"):
41 queryset = queryset.filter(
42 managers__in=self.request.member.get_member_groups()
43 ).distinct()
44
45 queryset = queryset.select_properties(
46 "active",
47 "total_revenue",
48 "total_revenue_paid",
49 "num_orders",
50 "num_orders_paid",
51 )
52 queryset = queryset.prefetch_related("event", "product_list")
53 queryset = queryset.prefetch_related("orders__order_items",)
54 return queryset
55
56
57 class ShiftDetailView(AdminRetrieveAPIView):
58 serializer_class = ShiftSerializer
59 queryset = Shift.objects.all()
60 permission_classes = [
61 IsAuthenticatedOrTokenHasScope,
62 DjangoModelPermissions,
63 IsManager,
64 ]
65 required_scopes = ["sales:admin"]
66
67
68 class OrderListView(AdminListAPIView, AdminCreateAPIView):
69 method_serializer_classes = {
70 ("GET",): OrderListSerializer,
71 ("POST",): OrderSerializer,
72 }
73 permission_classes = [
74 IsAuthenticatedOrTokenHasScopeForMethod,
75 DjangoModelPermissions,
76 IsManager,
77 ]
78 required_scopes = ["sales:admin"]
79 shift_lookup_field = "pk"
80
81 def get_serializer_class(self):
82 for methods, serializer_cls in self.method_serializer_classes.items():
83 if self.request.method in methods:
84 return serializer_cls
85 raise exceptions.MethodNotAllowed(self.request.method)
86
87 def create(self, request, *args, **kwargs):
88 shift = Shift.objects.get(pk=kwargs["pk"])
89 if shift.locked:
90 raise PermissionDenied
91
92 return super(OrderListView, self).create(request, args, kwargs)
93
94 def get_queryset(self):
95 queryset = Order.objects.all()
96
97 pk = self.kwargs.get("pk")
98 if pk:
99 queryset = queryset.filter(shift=pk)
100
101 queryset = queryset.select_properties(
102 "total_amount", "subtotal", "num_items", "age_restricted"
103 )
104 queryset = queryset.prefetch_related(
105 "shift", "shift__event", "shift__product_list"
106 )
107 queryset = queryset.prefetch_related(
108 "order_items", "order_items__product", "order_items__product__product"
109 )
110 queryset = queryset.prefetch_related("payment")
111 return queryset
112
113 def get_serializer_context(self):
114 context = super(OrderListView, self).get_serializer_context()
115 pk = self.kwargs.get("pk")
116 if pk:
117 shift = get_object_or_404(Shift, pk=self.kwargs.get("pk"))
118 context.update({"shift": shift})
119 return context
120
121
122 class OrderDetailView(AdminRetrieveAPIView, AdminUpdateAPIView, AdminDestroyAPIView):
123 serializer_class = OrderSerializer
124 queryset = Order.objects.all()
125 permission_classes = [
126 IsAuthenticatedOrTokenHasScopeForMethod,
127 DjangoModelPermissions,
128 IsManager,
129 ]
130 required_scopes = ["sales:admin"]
131
132 def get_queryset(self):
133 queryset = super().get_queryset()
134
135 if not self.request.member:
136 queryset = queryset.none()
137 elif not self.request.member.has_perm("sales.override_manager"):
138 queryset = queryset.filter(
139 shift__managers__in=self.request.member.get_member_groups()
140 ).distinct()
141
142 queryset = queryset.select_properties(
143 "total_amount", "subtotal", "num_items", "age_restricted"
144 )
145 queryset = queryset.prefetch_related(
146 "shift", "shift__event", "shift__product_list"
147 )
148 queryset = queryset.prefetch_related(
149 "order_items", "order_items__product", "order_items__product__product"
150 )
151 queryset = queryset.prefetch_related("payment")
152 return queryset
153
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/website/sales/api/v2/admin/views.py b/website/sales/api/v2/admin/views.py
--- a/website/sales/api/v2/admin/views.py
+++ b/website/sales/api/v2/admin/views.py
@@ -16,7 +16,6 @@
AdminUpdateAPIView,
AdminDestroyAPIView,
)
-from thaliawebsite.api.v2.permissions import IsAuthenticatedOrTokenHasScopeForMethod
class ShiftListView(AdminListAPIView):
@@ -71,7 +70,7 @@
("POST",): OrderSerializer,
}
permission_classes = [
- IsAuthenticatedOrTokenHasScopeForMethod,
+ IsAuthenticatedOrTokenHasScope,
DjangoModelPermissions,
IsManager,
]
@@ -123,7 +122,7 @@
serializer_class = OrderSerializer
queryset = Order.objects.all()
permission_classes = [
- IsAuthenticatedOrTokenHasScopeForMethod,
+ IsAuthenticatedOrTokenHasScope,
DjangoModelPermissions,
IsManager,
]
| {"golden_diff": "diff --git a/website/sales/api/v2/admin/views.py b/website/sales/api/v2/admin/views.py\n--- a/website/sales/api/v2/admin/views.py\n+++ b/website/sales/api/v2/admin/views.py\n@@ -16,7 +16,6 @@\n AdminUpdateAPIView,\n AdminDestroyAPIView,\n )\n-from thaliawebsite.api.v2.permissions import IsAuthenticatedOrTokenHasScopeForMethod\n \n \n class ShiftListView(AdminListAPIView):\n@@ -71,7 +70,7 @@\n (\"POST\",): OrderSerializer,\n }\n permission_classes = [\n- IsAuthenticatedOrTokenHasScopeForMethod,\n+ IsAuthenticatedOrTokenHasScope,\n DjangoModelPermissions,\n IsManager,\n ]\n@@ -123,7 +122,7 @@\n serializer_class = OrderSerializer\n queryset = Order.objects.all()\n permission_classes = [\n- IsAuthenticatedOrTokenHasScopeForMethod,\n+ IsAuthenticatedOrTokenHasScope,\n DjangoModelPermissions,\n IsManager,\n ]\n", "issue": "Sales API uses TokenHasScopeForMethod but no scopes per method are provided\n### Describe the change\r\nThe sales API uses some IsAuthenticatedOrTokenHasScopeForMethod permission classes, but no scopes per method are provided\r\n\r\n### Motivation\r\nSimpler code\r\n\r\n### Current implementation\r\nSee above\r\n\r\n### Suggested implementation\r\nUse IsAuthenticatedOrTokenHasScope, or specify scope per method \r\n\r\n\n", "before_files": [{"content": "from oauth2_provider.contrib.rest_framework import IsAuthenticatedOrTokenHasScope\nfrom rest_framework import filters, exceptions\nfrom rest_framework.exceptions import PermissionDenied\nfrom rest_framework.generics import get_object_or_404\nfrom rest_framework.permissions import DjangoModelPermissions\n\nfrom sales.api.v2.admin.permissions import IsManager\nfrom sales.api.v2.admin.serializers.order import OrderSerializer, OrderListSerializer\nfrom sales.api.v2.admin.serializers.shift import ShiftSerializer\nfrom sales.models.order import Order\nfrom sales.models.shift import Shift\nfrom thaliawebsite.api.v2.admin import (\n AdminCreateAPIView,\n AdminListAPIView,\n AdminRetrieveAPIView,\n AdminUpdateAPIView,\n AdminDestroyAPIView,\n)\nfrom thaliawebsite.api.v2.permissions import IsAuthenticatedOrTokenHasScopeForMethod\n\n\nclass ShiftListView(AdminListAPIView):\n \"\"\"Returns an overview of all sales shifts.\"\"\"\n\n serializer_class = ShiftSerializer\n queryset = Shift.objects.all()\n filter_backends = (\n filters.OrderingFilter,\n filters.SearchFilter,\n )\n ordering_fields = (\"start\", \"end\")\n permission_classes = [IsAuthenticatedOrTokenHasScope, DjangoModelPermissions]\n required_scopes = [\"sales:admin\"]\n\n def get_queryset(self):\n queryset = super().get_queryset().filter(locked=False)\n\n if not self.request.member:\n queryset = queryset.none()\n elif not self.request.member.has_perm(\"sales.override_manager\"):\n queryset = queryset.filter(\n managers__in=self.request.member.get_member_groups()\n ).distinct()\n\n queryset = queryset.select_properties(\n \"active\",\n \"total_revenue\",\n \"total_revenue_paid\",\n \"num_orders\",\n \"num_orders_paid\",\n )\n queryset = queryset.prefetch_related(\"event\", \"product_list\")\n queryset = queryset.prefetch_related(\"orders__order_items\",)\n return queryset\n\n\nclass ShiftDetailView(AdminRetrieveAPIView):\n serializer_class = ShiftSerializer\n queryset = Shift.objects.all()\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n DjangoModelPermissions,\n IsManager,\n ]\n required_scopes = [\"sales:admin\"]\n\n\nclass OrderListView(AdminListAPIView, AdminCreateAPIView):\n method_serializer_classes = {\n (\"GET\",): OrderListSerializer,\n 
(\"POST\",): OrderSerializer,\n }\n permission_classes = [\n IsAuthenticatedOrTokenHasScopeForMethod,\n DjangoModelPermissions,\n IsManager,\n ]\n required_scopes = [\"sales:admin\"]\n shift_lookup_field = \"pk\"\n\n def get_serializer_class(self):\n for methods, serializer_cls in self.method_serializer_classes.items():\n if self.request.method in methods:\n return serializer_cls\n raise exceptions.MethodNotAllowed(self.request.method)\n\n def create(self, request, *args, **kwargs):\n shift = Shift.objects.get(pk=kwargs[\"pk\"])\n if shift.locked:\n raise PermissionDenied\n\n return super(OrderListView, self).create(request, args, kwargs)\n\n def get_queryset(self):\n queryset = Order.objects.all()\n\n pk = self.kwargs.get(\"pk\")\n if pk:\n queryset = queryset.filter(shift=pk)\n\n queryset = queryset.select_properties(\n \"total_amount\", \"subtotal\", \"num_items\", \"age_restricted\"\n )\n queryset = queryset.prefetch_related(\n \"shift\", \"shift__event\", \"shift__product_list\"\n )\n queryset = queryset.prefetch_related(\n \"order_items\", \"order_items__product\", \"order_items__product__product\"\n )\n queryset = queryset.prefetch_related(\"payment\")\n return queryset\n\n def get_serializer_context(self):\n context = super(OrderListView, self).get_serializer_context()\n pk = self.kwargs.get(\"pk\")\n if pk:\n shift = get_object_or_404(Shift, pk=self.kwargs.get(\"pk\"))\n context.update({\"shift\": shift})\n return context\n\n\nclass OrderDetailView(AdminRetrieveAPIView, AdminUpdateAPIView, AdminDestroyAPIView):\n serializer_class = OrderSerializer\n queryset = Order.objects.all()\n permission_classes = [\n IsAuthenticatedOrTokenHasScopeForMethod,\n DjangoModelPermissions,\n IsManager,\n ]\n required_scopes = [\"sales:admin\"]\n\n def get_queryset(self):\n queryset = super().get_queryset()\n\n if not self.request.member:\n queryset = queryset.none()\n elif not self.request.member.has_perm(\"sales.override_manager\"):\n queryset = queryset.filter(\n shift__managers__in=self.request.member.get_member_groups()\n ).distinct()\n\n queryset = queryset.select_properties(\n \"total_amount\", \"subtotal\", \"num_items\", \"age_restricted\"\n )\n queryset = queryset.prefetch_related(\n \"shift\", \"shift__event\", \"shift__product_list\"\n )\n queryset = queryset.prefetch_related(\n \"order_items\", \"order_items__product\", \"order_items__product__product\"\n )\n queryset = queryset.prefetch_related(\"payment\")\n return queryset\n", "path": "website/sales/api/v2/admin/views.py"}], "after_files": [{"content": "from oauth2_provider.contrib.rest_framework import IsAuthenticatedOrTokenHasScope\nfrom rest_framework import filters, exceptions\nfrom rest_framework.exceptions import PermissionDenied\nfrom rest_framework.generics import get_object_or_404\nfrom rest_framework.permissions import DjangoModelPermissions\n\nfrom sales.api.v2.admin.permissions import IsManager\nfrom sales.api.v2.admin.serializers.order import OrderSerializer, OrderListSerializer\nfrom sales.api.v2.admin.serializers.shift import ShiftSerializer\nfrom sales.models.order import Order\nfrom sales.models.shift import Shift\nfrom thaliawebsite.api.v2.admin import (\n AdminCreateAPIView,\n AdminListAPIView,\n AdminRetrieveAPIView,\n AdminUpdateAPIView,\n AdminDestroyAPIView,\n)\n\n\nclass ShiftListView(AdminListAPIView):\n \"\"\"Returns an overview of all sales shifts.\"\"\"\n\n serializer_class = ShiftSerializer\n queryset = Shift.objects.all()\n filter_backends = (\n filters.OrderingFilter,\n filters.SearchFilter,\n )\n 
ordering_fields = (\"start\", \"end\")\n permission_classes = [IsAuthenticatedOrTokenHasScope, DjangoModelPermissions]\n required_scopes = [\"sales:admin\"]\n\n def get_queryset(self):\n queryset = super().get_queryset().filter(locked=False)\n\n if not self.request.member:\n queryset = queryset.none()\n elif not self.request.member.has_perm(\"sales.override_manager\"):\n queryset = queryset.filter(\n managers__in=self.request.member.get_member_groups()\n ).distinct()\n\n queryset = queryset.select_properties(\n \"active\",\n \"total_revenue\",\n \"total_revenue_paid\",\n \"num_orders\",\n \"num_orders_paid\",\n )\n queryset = queryset.prefetch_related(\"event\", \"product_list\")\n queryset = queryset.prefetch_related(\"orders__order_items\",)\n return queryset\n\n\nclass ShiftDetailView(AdminRetrieveAPIView):\n serializer_class = ShiftSerializer\n queryset = Shift.objects.all()\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n DjangoModelPermissions,\n IsManager,\n ]\n required_scopes = [\"sales:admin\"]\n\n\nclass OrderListView(AdminListAPIView, AdminCreateAPIView):\n method_serializer_classes = {\n (\"GET\",): OrderListSerializer,\n (\"POST\",): OrderSerializer,\n }\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n DjangoModelPermissions,\n IsManager,\n ]\n required_scopes = [\"sales:admin\"]\n shift_lookup_field = \"pk\"\n\n def get_serializer_class(self):\n for methods, serializer_cls in self.method_serializer_classes.items():\n if self.request.method in methods:\n return serializer_cls\n raise exceptions.MethodNotAllowed(self.request.method)\n\n def create(self, request, *args, **kwargs):\n shift = Shift.objects.get(pk=kwargs[\"pk\"])\n if shift.locked:\n raise PermissionDenied\n\n return super(OrderListView, self).create(request, args, kwargs)\n\n def get_queryset(self):\n queryset = Order.objects.all()\n\n pk = self.kwargs.get(\"pk\")\n if pk:\n queryset = queryset.filter(shift=pk)\n\n queryset = queryset.select_properties(\n \"total_amount\", \"subtotal\", \"num_items\", \"age_restricted\"\n )\n queryset = queryset.prefetch_related(\n \"shift\", \"shift__event\", \"shift__product_list\"\n )\n queryset = queryset.prefetch_related(\n \"order_items\", \"order_items__product\", \"order_items__product__product\"\n )\n queryset = queryset.prefetch_related(\"payment\")\n return queryset\n\n def get_serializer_context(self):\n context = super(OrderListView, self).get_serializer_context()\n pk = self.kwargs.get(\"pk\")\n if pk:\n shift = get_object_or_404(Shift, pk=self.kwargs.get(\"pk\"))\n context.update({\"shift\": shift})\n return context\n\n\nclass OrderDetailView(AdminRetrieveAPIView, AdminUpdateAPIView, AdminDestroyAPIView):\n serializer_class = OrderSerializer\n queryset = Order.objects.all()\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n DjangoModelPermissions,\n IsManager,\n ]\n required_scopes = [\"sales:admin\"]\n\n def get_queryset(self):\n queryset = super().get_queryset()\n\n if not self.request.member:\n queryset = queryset.none()\n elif not self.request.member.has_perm(\"sales.override_manager\"):\n queryset = queryset.filter(\n shift__managers__in=self.request.member.get_member_groups()\n ).distinct()\n\n queryset = queryset.select_properties(\n \"total_amount\", \"subtotal\", \"num_items\", \"age_restricted\"\n )\n queryset = queryset.prefetch_related(\n \"shift\", \"shift__event\", \"shift__product_list\"\n )\n queryset = queryset.prefetch_related(\n \"order_items\", \"order_items__product\", \"order_items__product__product\"\n )\n 
queryset = queryset.prefetch_related(\"payment\")\n return queryset\n", "path": "website/sales/api/v2/admin/views.py"}]} | 1,753 | 225 |
gh_patches_debug_14896 | rasdani/github-patches | git_diff | mozilla__telemetry-analysis-service-975 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Don't overwrite creator when saving jobs
Since we can provide view and editing permissions to other users (e.g. admins) with row-level permissions, we should stop overwriting the creator on every save of a scheduled Spark job: overwriting it undermines an effective trail of ownership and has in the past led to inconsistencies when jobs were updated by an admin.
--- END ISSUE ---
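For orientation, the change described here boils down to assigning `created_by` only when the object does not already have a creator. A minimal, hypothetical sketch of that behaviour in a `ModelForm` mixin follows; it mirrors the direction of the diff accompanying this record rather than being a verified drop-in patch.
```python
# Hypothetical sketch: keep the original creator once it has been set.
from django import forms
from django.contrib.auth.models import User


class CreatedByModelFormMixin(forms.ModelForm):
    def __init__(self, user, *args, **kwargs):
        self.created_by = user
        super().__init__(*args, **kwargs)

    def save(self, commit=True):
        obj = super().save(commit=False)
        try:
            obj.created_by  # an existing creator stays untouched
        except User.DoesNotExist:
            obj.created_by = self.created_by  # only filled in on first save
        if commit:
            obj.save()
        return obj
```
Accessing the unset foreign key raises the related model's `DoesNotExist`, which is why the creator is only written inside the `except` branch.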
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `atmo/forms/mixins.py`
Content:
```
1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this
3 # file, you can obtain one at http://mozilla.org/MPL/2.0/.
4 import uuid
5 from collections import OrderedDict
6
7 from django import forms
8
9 from .cache import CachedFileCache
10 from .fields import CachedFileField
11 from .widgets import CachedFileHiddenInput
12
13
14 class AutoClassFormMixin:
15 """
16 A form mixin that adds the 'form-control' to all field widgets
17 automatically
18 """
19 class_names = {
20 'form-control': {
21 'excluded_widgets': ['file'],
22 }
23 }
24
25 def __init__(self, *args, **kwargs):
26 super().__init__(*args, **kwargs)
27 for field in list(self.fields.values()):
28 classes = field.widget.attrs.get('class', '').split(' ')
29 for class_name, options in list(self.class_names.items()):
30 if class_name in classes:
31 continue
32 excluded_widgets = options.get('excluded_widgets', [])
33 if (hasattr(field.widget, 'input_type') and
34 field.widget.input_type in excluded_widgets):
35 continue
36 field.widget.attrs['class'] = ' '.join([class_name] + classes)
37
38
39 class CreatedByModelFormMixin(forms.ModelForm):
40 """
41 Custom Django form mixin that takes a user object and if the provided
42 model form instance has a primary key checks if the given user
43 matches the 'created_by' field.
44 """
45 def __init__(self, user, *args, **kwargs):
46 self.created_by = user
47 super().__init__(*args, **kwargs)
48
49 def save(self, commit=True):
50 # create the object without committing, since we haven't
51 # set the required created_by field yet
52 obj = super().save(commit=False)
53
54 # set the field to the user that created the object
55 obj.created_by = self.created_by
56
57 if commit:
58 # actually start the real object, and return the model object
59 obj.save()
60 return obj
61
62 def clean(self):
63 """
64 only allow deleting clusters that one created
65 """
66 super().clean()
67 if self.instance.id and self.created_by != self.instance.created_by:
68 raise forms.ValidationError(
69 'Access denied to the data of another user'
70 )
71
72
73 class CachedFileModelFormMixin(forms.ModelForm):
74 """
75 A model form mixin that automatically adds additional hidden form fields
76 to store a random value to be used as the cache key for caching FileField
77 files on submission. That is needed to prevent having to reselect files
78 over and over again when form submission fails for the fields other than
79 the file fields.
80 """
81 def __init__(self, *args, **kwargs):
82 super().__init__(*args, **kwargs)
83 self.cache = CachedFileCache()
84 self.cached_filefields = OrderedDict()
85 self.required_filefields = []
86
87 field_order = []
88 for name, field in list(self.fields.items()):
89 # add any found field to the list of order items
90 field_order.append(name)
91
92 # in case it's a file input
93 if isinstance(field, CachedFileField):
94 # we'll use this later in the clean and save step
95 self.cached_filefields[name] = field
96
97 # store the field that are required so we can validate
98 # them optionally in our clean method
99 if field.real_required:
100 self.required_filefields.append(name)
101
102 # get the name of the cache key field
103 cachekey_input_name = self.cachekey_input_name(name)
104 field_order.append(cachekey_input_name)
105
106 # add the cache key field
107 self.fields[cachekey_input_name] = forms.CharField(
108 max_length=32,
109 widget=CachedFileHiddenInput(),
110 initial=uuid.uuid4().hex
111 )
112
113 self.order_fields(field_order)
114
115 def cachekey_input_name(self, name):
116 return name + '-cache'
117
118 def cachekey_input_data(self, field):
119 name = self.cachekey_input_name(field)
120 return self.cleaned_data[name]
121
122 def save(self, *args, **kwargs):
123 # on save get rid of the cache keys
124 for name in self.cached_filefields:
125 self.cache.remove(self.cachekey_input_data(name))
126 return super().save(*args, **kwargs)
127
128 def clean(self):
129 for field_name in self.cached_filefields:
130 # get the name of the cache key field name and its value
131 cache_key = self.cachekey_input_data(field_name)
132
133 # check form data if the file field was submitted
134 submitted_file = self.cleaned_data.get(field_name)
135 if submitted_file is None:
136 # if not, check the cache and update the cleaned data
137 cached_file = self.cache.retrieve(cache_key, field_name)
138 if cached_file is None:
139 # raise a required validation error if nothing was found
140 if field_name in self.required_filefields:
141 field = self.cached_filefields[field_name]
142 self.add_error(
143 field_name,
144 forms.ValidationError(
145 field.error_messages['required'],
146 code='required'
147 )
148 )
149 else:
150 self.cleaned_data[field_name] = cached_file
151 else:
152 # or store the submitted file for later use (or reset after saving)
153 self.cache.store(cache_key, submitted_file)
154
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/atmo/forms/mixins.py b/atmo/forms/mixins.py
--- a/atmo/forms/mixins.py
+++ b/atmo/forms/mixins.py
@@ -5,6 +5,7 @@
from collections import OrderedDict
from django import forms
+from django.contrib.auth.models import User
from .cache import CachedFileCache
from .fields import CachedFileField
@@ -52,7 +53,10 @@
obj = super().save(commit=False)
# set the field to the user that created the object
- obj.created_by = self.created_by
+ try:
+ obj.created_by
+ except User.DoesNotExist:
+ obj.created_by = self.created_by
if commit:
# actually start the real object, and return the model object
| {"golden_diff": "diff --git a/atmo/forms/mixins.py b/atmo/forms/mixins.py\n--- a/atmo/forms/mixins.py\n+++ b/atmo/forms/mixins.py\n@@ -5,6 +5,7 @@\n from collections import OrderedDict\n \n from django import forms\n+from django.contrib.auth.models import User\n \n from .cache import CachedFileCache\n from .fields import CachedFileField\n@@ -52,7 +53,10 @@\n obj = super().save(commit=False)\n \n # set the field to the user that created the object\n- obj.created_by = self.created_by\n+ try:\n+ obj.created_by\n+ except User.DoesNotExist:\n+ obj.created_by = self.created_by\n \n if commit:\n # actually start the real object, and return the model object\n", "issue": "Don't overwrite creator when saving jobs\nSince we can provide view and editing permissions to other users (e.g. admins) with row-level permissions we should stop overwriting the creator on every save of a scheduled Spark job since it would prevent an effective trail of ownership and has in the past led to inconsistencies when updating jobs by an admin.\n", "before_files": [{"content": "# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this\n# file, you can obtain one at http://mozilla.org/MPL/2.0/.\nimport uuid\nfrom collections import OrderedDict\n\nfrom django import forms\n\nfrom .cache import CachedFileCache\nfrom .fields import CachedFileField\nfrom .widgets import CachedFileHiddenInput\n\n\nclass AutoClassFormMixin:\n \"\"\"\n A form mixin that adds the 'form-control' to all field widgets\n automatically\n \"\"\"\n class_names = {\n 'form-control': {\n 'excluded_widgets': ['file'],\n }\n }\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n for field in list(self.fields.values()):\n classes = field.widget.attrs.get('class', '').split(' ')\n for class_name, options in list(self.class_names.items()):\n if class_name in classes:\n continue\n excluded_widgets = options.get('excluded_widgets', [])\n if (hasattr(field.widget, 'input_type') and\n field.widget.input_type in excluded_widgets):\n continue\n field.widget.attrs['class'] = ' '.join([class_name] + classes)\n\n\nclass CreatedByModelFormMixin(forms.ModelForm):\n \"\"\"\n Custom Django form mixin that takes a user object and if the provided\n model form instance has a primary key checks if the given user\n matches the 'created_by' field.\n \"\"\"\n def __init__(self, user, *args, **kwargs):\n self.created_by = user\n super().__init__(*args, **kwargs)\n\n def save(self, commit=True):\n # create the object without committing, since we haven't\n # set the required created_by field yet\n obj = super().save(commit=False)\n\n # set the field to the user that created the object\n obj.created_by = self.created_by\n\n if commit:\n # actually start the real object, and return the model object\n obj.save()\n return obj\n\n def clean(self):\n \"\"\"\n only allow deleting clusters that one created\n \"\"\"\n super().clean()\n if self.instance.id and self.created_by != self.instance.created_by:\n raise forms.ValidationError(\n 'Access denied to the data of another user'\n )\n\n\nclass CachedFileModelFormMixin(forms.ModelForm):\n \"\"\"\n A model form mixin that automatically adds additional hidden form fields\n to store a random value to be used as the cache key for caching FileField\n files on submission. 
That is needed to prevent having to reselect files\n over and over again when form submission fails for the fields other than\n the file fields.\n \"\"\"\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.cache = CachedFileCache()\n self.cached_filefields = OrderedDict()\n self.required_filefields = []\n\n field_order = []\n for name, field in list(self.fields.items()):\n # add any found field to the list of order items\n field_order.append(name)\n\n # in case it's a file input\n if isinstance(field, CachedFileField):\n # we'll use this later in the clean and save step\n self.cached_filefields[name] = field\n\n # store the field that are required so we can validate\n # them optionally in our clean method\n if field.real_required:\n self.required_filefields.append(name)\n\n # get the name of the cache key field\n cachekey_input_name = self.cachekey_input_name(name)\n field_order.append(cachekey_input_name)\n\n # add the cache key field\n self.fields[cachekey_input_name] = forms.CharField(\n max_length=32,\n widget=CachedFileHiddenInput(),\n initial=uuid.uuid4().hex\n )\n\n self.order_fields(field_order)\n\n def cachekey_input_name(self, name):\n return name + '-cache'\n\n def cachekey_input_data(self, field):\n name = self.cachekey_input_name(field)\n return self.cleaned_data[name]\n\n def save(self, *args, **kwargs):\n # on save get rid of the cache keys\n for name in self.cached_filefields:\n self.cache.remove(self.cachekey_input_data(name))\n return super().save(*args, **kwargs)\n\n def clean(self):\n for field_name in self.cached_filefields:\n # get the name of the cache key field name and its value\n cache_key = self.cachekey_input_data(field_name)\n\n # check form data if the file field was submitted\n submitted_file = self.cleaned_data.get(field_name)\n if submitted_file is None:\n # if not, check the cache and update the cleaned data\n cached_file = self.cache.retrieve(cache_key, field_name)\n if cached_file is None:\n # raise a required validation error if nothing was found\n if field_name in self.required_filefields:\n field = self.cached_filefields[field_name]\n self.add_error(\n field_name,\n forms.ValidationError(\n field.error_messages['required'],\n code='required'\n )\n )\n else:\n self.cleaned_data[field_name] = cached_file\n else:\n # or store the submitted file for later use (or reset after saving)\n self.cache.store(cache_key, submitted_file)\n", "path": "atmo/forms/mixins.py"}], "after_files": [{"content": "# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. 
If a copy of the MPL was not distributed with this\n# file, you can obtain one at http://mozilla.org/MPL/2.0/.\nimport uuid\nfrom collections import OrderedDict\n\nfrom django import forms\nfrom django.contrib.auth.models import User\n\nfrom .cache import CachedFileCache\nfrom .fields import CachedFileField\nfrom .widgets import CachedFileHiddenInput\n\n\nclass AutoClassFormMixin:\n \"\"\"\n A form mixin that adds the 'form-control' to all field widgets\n automatically\n \"\"\"\n class_names = {\n 'form-control': {\n 'excluded_widgets': ['file'],\n }\n }\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n for field in list(self.fields.values()):\n classes = field.widget.attrs.get('class', '').split(' ')\n for class_name, options in list(self.class_names.items()):\n if class_name in classes:\n continue\n excluded_widgets = options.get('excluded_widgets', [])\n if (hasattr(field.widget, 'input_type') and\n field.widget.input_type in excluded_widgets):\n continue\n field.widget.attrs['class'] = ' '.join([class_name] + classes)\n\n\nclass CreatedByModelFormMixin(forms.ModelForm):\n \"\"\"\n Custom Django form mixin that takes a user object and if the provided\n model form instance has a primary key checks if the given user\n matches the 'created_by' field.\n \"\"\"\n def __init__(self, user, *args, **kwargs):\n self.created_by = user\n super().__init__(*args, **kwargs)\n\n def save(self, commit=True):\n # create the object without committing, since we haven't\n # set the required created_by field yet\n obj = super().save(commit=False)\n\n # set the field to the user that created the object\n try:\n obj.created_by\n except User.DoesNotExist:\n obj.created_by = self.created_by\n\n if commit:\n # actually start the real object, and return the model object\n obj.save()\n return obj\n\n def clean(self):\n \"\"\"\n only allow deleting clusters that one created\n \"\"\"\n super().clean()\n if self.instance.id and self.created_by != self.instance.created_by:\n raise forms.ValidationError(\n 'Access denied to the data of another user'\n )\n\n\nclass CachedFileModelFormMixin(forms.ModelForm):\n \"\"\"\n A model form mixin that automatically adds additional hidden form fields\n to store a random value to be used as the cache key for caching FileField\n files on submission. 
That is needed to prevent having to reselect files\n over and over again when form submission fails for the fields other than\n the file fields.\n \"\"\"\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.cache = CachedFileCache()\n self.cached_filefields = OrderedDict()\n self.required_filefields = []\n\n field_order = []\n for name, field in list(self.fields.items()):\n # add any found field to the list of order items\n field_order.append(name)\n\n # in case it's a file input\n if isinstance(field, CachedFileField):\n # we'll use this later in the clean and save step\n self.cached_filefields[name] = field\n\n # store the field that are required so we can validate\n # them optionally in our clean method\n if field.real_required:\n self.required_filefields.append(name)\n\n # get the name of the cache key field\n cachekey_input_name = self.cachekey_input_name(name)\n field_order.append(cachekey_input_name)\n\n # add the cache key field\n self.fields[cachekey_input_name] = forms.CharField(\n max_length=32,\n widget=CachedFileHiddenInput(),\n initial=uuid.uuid4().hex\n )\n\n self.order_fields(field_order)\n\n def cachekey_input_name(self, name):\n return name + '-cache'\n\n def cachekey_input_data(self, field):\n name = self.cachekey_input_name(field)\n return self.cleaned_data[name]\n\n def save(self, *args, **kwargs):\n # on save get rid of the cache keys\n for name in self.cached_filefields:\n self.cache.remove(self.cachekey_input_data(name))\n return super().save(*args, **kwargs)\n\n def clean(self):\n for field_name in self.cached_filefields:\n # get the name of the cache key field name and its value\n cache_key = self.cachekey_input_data(field_name)\n\n # check form data if the file field was submitted\n submitted_file = self.cleaned_data.get(field_name)\n if submitted_file is None:\n # if not, check the cache and update the cleaned data\n cached_file = self.cache.retrieve(cache_key, field_name)\n if cached_file is None:\n # raise a required validation error if nothing was found\n if field_name in self.required_filefields:\n field = self.cached_filefields[field_name]\n self.add_error(\n field_name,\n forms.ValidationError(\n field.error_messages['required'],\n code='required'\n )\n )\n else:\n self.cleaned_data[field_name] = cached_file\n else:\n # or store the submitted file for later use (or reset after saving)\n self.cache.store(cache_key, submitted_file)\n", "path": "atmo/forms/mixins.py"}]} | 1,844 | 175 |
gh_patches_debug_3639 | rasdani/github-patches | git_diff | Mailu__Mailu-2958 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Vulnerability in ClamAV
## Environment & Version
### Environment
- [ ] docker compose
- [x] kubernetes
- [ ] docker swarm
### Version
- Version: `master`
## Description
ClamAV version 0.105.2 is vulnerable to [CVE-2023-20197](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-20197)
Unfortunately, ClamAV will not provide any update in the 0.105 branch, as it is EOL; see https://blog.clamav.net/2023/07/2023-08-16-releases.html
## Replication Steps
```
$ docker run --pull=always --rm -it ghcr.io/mailu/clamav:master clamd --version
master: Pulling from mailu/clamav
Digest: sha256:dd088fc80ab063b0588160a69fce034d5d1f33db6d85d57296154fc51cdeaffa
Status: Image is up to date for ghcr.io/mailu/clamav:master
ClamAV 0.105.2
```
## Observed behaviour
ClamAV is in a vulnerable state
## Expected behaviour
I expect ClamAV to be updated to a fixed version (1.1.1 or 1.0.2)
--- END ISSUE ---
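The only code-level change visible in this record is the rspamd exec path; the ClamAV bump itself would live in the image build files, which are not shown here. A sketch of that path change follows; the link between the base-image upgrade and the moved binary is an assumption on my part, taken from the diff accompanying this record.
```python
# Hypothetical sketch: newer images ship rspamd under /usr/bin, not /usr/sbin.
import os

os.execv("/usr/bin/rspamd", ["rspamd", "-f", "-u", "rspamd", "-g", "rspamd"])
```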
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/rspamd/start.py`
Content:
```
1 #!/usr/bin/env python3
2
3 import os
4 import glob
5 import logging as log
6 import requests
7 import shutil
8 import sys
9 import time
10 from socrate import system,conf
11
12 env = system.set_env()
13
14 # Actual startup script
15
16 config_files = []
17 for rspamd_file in glob.glob("/conf/*"):
18 conf.jinja(rspamd_file, env, os.path.join("/etc/rspamd/local.d", os.path.basename(rspamd_file)))
19 config_files.append(os.path.basename(rspamd_file))
20
21 for override_file in glob.glob("/overrides/*"):
22 if os.path.basename(override_file) not in config_files:
23 shutil.copyfile(override_file, os.path.join("/etc/rspamd/local.d", os.path.basename(override_file)))
24
25 # Admin may not be up just yet
26 healthcheck = f'http://{env["ADMIN_ADDRESS"]}:8080/internal/rspamd/local_domains'
27 while True:
28 time.sleep(1)
29 try:
30 if requests.get(healthcheck,timeout=2).ok:
31 break
32 except:
33 pass
34 log.warning("Admin is not up just yet, retrying in 1 second")
35
36 # Run rspamd
37 os.system("mkdir -m 755 -p /run/rspamd")
38 os.system("chown rspamd:rspamd /run/rspamd")
39 os.system("find /var/lib/rspamd | grep -v /filter | xargs -n1 chown rspamd:rspamd")
40 os.execv("/usr/sbin/rspamd", ["rspamd", "-f", "-u", "rspamd", "-g", "rspamd"])
41
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/core/rspamd/start.py b/core/rspamd/start.py
--- a/core/rspamd/start.py
+++ b/core/rspamd/start.py
@@ -37,4 +37,4 @@
os.system("mkdir -m 755 -p /run/rspamd")
os.system("chown rspamd:rspamd /run/rspamd")
os.system("find /var/lib/rspamd | grep -v /filter | xargs -n1 chown rspamd:rspamd")
-os.execv("/usr/sbin/rspamd", ["rspamd", "-f", "-u", "rspamd", "-g", "rspamd"])
+os.execv("/usr/bin/rspamd", ["rspamd", "-f", "-u", "rspamd", "-g", "rspamd"])
| {"golden_diff": "diff --git a/core/rspamd/start.py b/core/rspamd/start.py\n--- a/core/rspamd/start.py\n+++ b/core/rspamd/start.py\n@@ -37,4 +37,4 @@\n os.system(\"mkdir -m 755 -p /run/rspamd\")\n os.system(\"chown rspamd:rspamd /run/rspamd\")\n os.system(\"find /var/lib/rspamd | grep -v /filter | xargs -n1 chown rspamd:rspamd\")\n-os.execv(\"/usr/sbin/rspamd\", [\"rspamd\", \"-f\", \"-u\", \"rspamd\", \"-g\", \"rspamd\"])\n+os.execv(\"/usr/bin/rspamd\", [\"rspamd\", \"-f\", \"-u\", \"rspamd\", \"-g\", \"rspamd\"])\n", "issue": "Vulnerability in ClamAV\n\r\n## Environment & Version\r\n\r\n### Environment\r\n\r\n- [ ] docker compose\r\n- [x] kubernetes\r\n- [ ] docker swarm\r\n\r\n### Version\r\n\r\n- Version: `master`\r\n\r\n## Description\r\nClamAV version 0.105.2 is vulnerable to [CVE-2023-20197](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-20197)\r\nUnfortunately, ClamAV will not provide any update in the 0.105 branch, as it is EOL see https://blog.clamav.net/2023/07/2023-08-16-releases.html\r\n\r\n## Replication Steps\r\n```\r\n$ docker run --pull=always --rm -it ghcr.io/mailu/clamav:master clamd --version\r\nmaster: Pulling from mailu/clamav\r\nDigest: sha256:dd088fc80ab063b0588160a69fce034d5d1f33db6d85d57296154fc51cdeaffa\r\nStatus: Image is up to date for ghcr.io/mailu/clamav:master\r\nClamAV 0.105.2\r\n```\r\n\r\n## Observed behaviour\r\nClamAV is in a vulnerable state\r\n\r\n## Expected behaviour\r\nI expect ClamAV to be updated to a fixed version (1.1.1 or 1.0.2)\r\n\r\n\n", "before_files": [{"content": "#!/usr/bin/env python3\n\nimport os\nimport glob\nimport logging as log\nimport requests\nimport shutil\nimport sys\nimport time\nfrom socrate import system,conf\n\nenv = system.set_env()\n\n# Actual startup script\n\nconfig_files = []\nfor rspamd_file in glob.glob(\"/conf/*\"):\n conf.jinja(rspamd_file, env, os.path.join(\"/etc/rspamd/local.d\", os.path.basename(rspamd_file)))\n config_files.append(os.path.basename(rspamd_file))\n\nfor override_file in glob.glob(\"/overrides/*\"):\n if os.path.basename(override_file) not in config_files:\n shutil.copyfile(override_file, os.path.join(\"/etc/rspamd/local.d\", os.path.basename(override_file)))\n\n# Admin may not be up just yet\nhealthcheck = f'http://{env[\"ADMIN_ADDRESS\"]}:8080/internal/rspamd/local_domains'\nwhile True:\n time.sleep(1)\n try:\n if requests.get(healthcheck,timeout=2).ok:\n break\n except:\n pass\n log.warning(\"Admin is not up just yet, retrying in 1 second\")\n\n# Run rspamd\nos.system(\"mkdir -m 755 -p /run/rspamd\")\nos.system(\"chown rspamd:rspamd /run/rspamd\")\nos.system(\"find /var/lib/rspamd | grep -v /filter | xargs -n1 chown rspamd:rspamd\")\nos.execv(\"/usr/sbin/rspamd\", [\"rspamd\", \"-f\", \"-u\", \"rspamd\", \"-g\", \"rspamd\"])\n", "path": "core/rspamd/start.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\nimport os\nimport glob\nimport logging as log\nimport requests\nimport shutil\nimport sys\nimport time\nfrom socrate import system,conf\n\nenv = system.set_env()\n\n# Actual startup script\n\nconfig_files = []\nfor rspamd_file in glob.glob(\"/conf/*\"):\n conf.jinja(rspamd_file, env, os.path.join(\"/etc/rspamd/local.d\", os.path.basename(rspamd_file)))\n config_files.append(os.path.basename(rspamd_file))\n\nfor override_file in glob.glob(\"/overrides/*\"):\n if os.path.basename(override_file) not in config_files:\n shutil.copyfile(override_file, os.path.join(\"/etc/rspamd/local.d\", os.path.basename(override_file)))\n\n# Admin may not be up just yet\nhealthcheck = 
f'http://{env[\"ADMIN_ADDRESS\"]}:8080/internal/rspamd/local_domains'\nwhile True:\n time.sleep(1)\n try:\n if requests.get(healthcheck,timeout=2).ok:\n break\n except:\n pass\n log.warning(\"Admin is not up just yet, retrying in 1 second\")\n\n# Run rspamd\nos.system(\"mkdir -m 755 -p /run/rspamd\")\nos.system(\"chown rspamd:rspamd /run/rspamd\")\nos.system(\"find /var/lib/rspamd | grep -v /filter | xargs -n1 chown rspamd:rspamd\")\nos.execv(\"/usr/bin/rspamd\", [\"rspamd\", \"-f\", \"-u\", \"rspamd\", \"-g\", \"rspamd\"])\n", "path": "core/rspamd/start.py"}]} | 1,031 | 175 |
gh_patches_debug_7962 | rasdani/github-patches | git_diff | Gallopsled__pwntools-1304 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
from pwn import * changes __name__
Running `pwntools==3.14.0.dev0` from the py3 dev branch.
`from pwn import *` apparently overrides the importing module's `__name__`.
```python
In [1]: __name__
Out[1]: '__main__'
In [2]: from pwn import *
In [3]: __name__
Out[3]: 'pwn.toplevel'
```
--- END ISSUE ---
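For orientation, the mechanism is that a star import re-exports every name listed in a module's `__all__`, dunders included, so an `__all__` built from `globals().keys()` carries the module's own `__name__` into the importer's namespace. A tiny, hypothetical reproduction (not pwntools code):
```python
# leaky.py -- hypothetical module illustrating the mechanism
__all__ = list(globals().keys())   # includes '__name__', whose value is 'leaky'

# main.py -- hypothetical importer
# from leaky import *
# print(__name__)   # now 'leaky' instead of '__main__'
```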
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pwn/toplevel.py`
Content:
```
1 # Get all the modules from pwnlib
2 import collections
3 import logging
4 import math
5 import operator
6 import os
7 import re
8 import socks
9 import signal
10 import string
11 import struct
12 import subprocess
13 import sys
14 import tempfile
15 import threading
16 import time
17
18 from pprint import pprint
19
20 import pwnlib
21 from pwnlib import *
22 from pwnlib.asm import *
23 from pwnlib.context import Thread
24 from pwnlib.context import context
25 from pwnlib.dynelf import DynELF
26 from pwnlib.encoders import *
27 from pwnlib.elf.corefile import Core, Corefile, Coredump
28 from pwnlib.elf.elf import ELF, load
29 from pwnlib.encoders import *
30 from pwnlib.exception import PwnlibException
31 from pwnlib.gdb import attach, debug, debug_assembly, debug_shellcode
32 from pwnlib.flag import *
33 from pwnlib.fmtstr import FmtStr, fmtstr_payload
34 from pwnlib.log import getLogger
35 from pwnlib.memleak import MemLeak, RelativeMemLeak
36 from pwnlib.regsort import *
37 from pwnlib.replacements import *
38 from pwnlib.rop import ROP
39 from pwnlib.rop.srop import SigreturnFrame
40 from pwnlib.runner import *
41 from pwnlib.timeout import Timeout
42 from pwnlib.tubes.listen import listen
43 from pwnlib.tubes.process import process, PTY, PIPE, STDOUT
44 from pwnlib.tubes.remote import remote, tcp, udp, connect
45 from pwnlib.tubes.serialtube import serialtube
46 from pwnlib.tubes.server import server
47 from pwnlib.tubes.ssh import ssh
48 from pwnlib.tubes.tube import tube
49 from pwnlib.ui import *
50 from pwnlib.util import crc
51 from pwnlib.util import iters
52 from pwnlib.util import net
53 from pwnlib.util import proc
54 from pwnlib.util import safeeval
55 from pwnlib.util.crc import BitPolynom
56 from pwnlib.util.cyclic import *
57 from pwnlib.util.fiddling import *
58 from pwnlib.util.getdents import *
59 from pwnlib.util.hashes import *
60 from pwnlib.util.lists import *
61 from pwnlib.util.misc import *
62 from pwnlib.util.packing import *
63 from pwnlib.util.proc import pidof
64 from pwnlib.util.sh_string import sh_string, sh_prepare, sh_command_with
65 from pwnlib.util.splash import *
66 from pwnlib.util.web import *
67
68 # Promote these modules, so that "from pwn import *" will let you access them
69
70 __all__ = ["pickle", "StringIO", "BytesIO"]
71 from six.moves import cPickle as pickle, cStringIO as StringIO
72 from six import BytesIO
73
74 error = log.error
75 warning = log.warning
76 warn = log.warning
77 info = log.info
78 debug = log.debug
79 success = log.success
80
81 __all__ += list(globals().keys())
82
```
--- END FILES ---
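A minimal way to express the fix, assuming the goal is simply to keep `__name__` out of the exported names (this mirrors the direction of the accompanying diff rather than prescribing it):
```python
# Hypothetical sketch: export the module's globals except __name__,
# so a star import can no longer clobber the importer's __name__.
__all__ = [x for x in globals().keys() if x != '__name__']

# A stricter variant (an assumption, not what the accompanying diff does)
# would drop every dunder: [x for x in globals() if not x.startswith('__')]
```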
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pwn/toplevel.py b/pwn/toplevel.py
--- a/pwn/toplevel.py
+++ b/pwn/toplevel.py
@@ -67,7 +67,6 @@
# Promote these modules, so that "from pwn import *" will let you access them
-__all__ = ["pickle", "StringIO", "BytesIO"]
from six.moves import cPickle as pickle, cStringIO as StringIO
from six import BytesIO
@@ -78,4 +77,4 @@
debug = log.debug
success = log.success
-__all__ += list(globals().keys())
+__all__ = [x for x in globals().keys() if x != '__name__']
| {"golden_diff": "diff --git a/pwn/toplevel.py b/pwn/toplevel.py\n--- a/pwn/toplevel.py\n+++ b/pwn/toplevel.py\n@@ -67,7 +67,6 @@\n \n # Promote these modules, so that \"from pwn import *\" will let you access them\n \n-__all__ = [\"pickle\", \"StringIO\", \"BytesIO\"]\n from six.moves import cPickle as pickle, cStringIO as StringIO\n from six import BytesIO\n \n@@ -78,4 +77,4 @@\n debug = log.debug\n success = log.success\n \n-__all__ += list(globals().keys())\n+__all__ = [x for x in globals().keys() if x != '__name__']\n", "issue": "from pwn import * changes __name__\nRunning `pwntools==3.14.0.dev0` from py3 dev branch.\r\n\r\nfrom pwn import * imports __name__ apparently.\r\n\r\n```python\r\nIn [1]: __name__\r\nOut[1]: '__main__'\r\n\r\nIn [2]: from pwn import *\r\n\r\nIn [3]: __name__\r\nOut[3]: 'pwn.toplevel'\r\n```\n", "before_files": [{"content": "# Get all the modules from pwnlib\nimport collections\nimport logging\nimport math\nimport operator\nimport os\nimport re\nimport socks\nimport signal\nimport string\nimport struct\nimport subprocess\nimport sys\nimport tempfile\nimport threading\nimport time\n\nfrom pprint import pprint\n\nimport pwnlib\nfrom pwnlib import *\nfrom pwnlib.asm import *\nfrom pwnlib.context import Thread\nfrom pwnlib.context import context\nfrom pwnlib.dynelf import DynELF\nfrom pwnlib.encoders import *\nfrom pwnlib.elf.corefile import Core, Corefile, Coredump\nfrom pwnlib.elf.elf import ELF, load\nfrom pwnlib.encoders import *\nfrom pwnlib.exception import PwnlibException\nfrom pwnlib.gdb import attach, debug, debug_assembly, debug_shellcode\nfrom pwnlib.flag import *\nfrom pwnlib.fmtstr import FmtStr, fmtstr_payload\nfrom pwnlib.log import getLogger\nfrom pwnlib.memleak import MemLeak, RelativeMemLeak\nfrom pwnlib.regsort import *\nfrom pwnlib.replacements import *\nfrom pwnlib.rop import ROP\nfrom pwnlib.rop.srop import SigreturnFrame\nfrom pwnlib.runner import *\nfrom pwnlib.timeout import Timeout\nfrom pwnlib.tubes.listen import listen\nfrom pwnlib.tubes.process import process, PTY, PIPE, STDOUT\nfrom pwnlib.tubes.remote import remote, tcp, udp, connect\nfrom pwnlib.tubes.serialtube import serialtube\nfrom pwnlib.tubes.server import server\nfrom pwnlib.tubes.ssh import ssh\nfrom pwnlib.tubes.tube import tube\nfrom pwnlib.ui import *\nfrom pwnlib.util import crc\nfrom pwnlib.util import iters\nfrom pwnlib.util import net\nfrom pwnlib.util import proc\nfrom pwnlib.util import safeeval\nfrom pwnlib.util.crc import BitPolynom\nfrom pwnlib.util.cyclic import *\nfrom pwnlib.util.fiddling import *\nfrom pwnlib.util.getdents import *\nfrom pwnlib.util.hashes import *\nfrom pwnlib.util.lists import *\nfrom pwnlib.util.misc import *\nfrom pwnlib.util.packing import *\nfrom pwnlib.util.proc import pidof\nfrom pwnlib.util.sh_string import sh_string, sh_prepare, sh_command_with\nfrom pwnlib.util.splash import *\nfrom pwnlib.util.web import *\n\n# Promote these modules, so that \"from pwn import *\" will let you access them\n\n__all__ = [\"pickle\", \"StringIO\", \"BytesIO\"]\nfrom six.moves import cPickle as pickle, cStringIO as StringIO\nfrom six import BytesIO\n\nerror = log.error\nwarning = log.warning\nwarn = log.warning\ninfo = log.info\ndebug = log.debug\nsuccess = log.success\n\n__all__ += list(globals().keys())\n", "path": "pwn/toplevel.py"}], "after_files": [{"content": "# Get all the modules from pwnlib\nimport collections\nimport logging\nimport math\nimport operator\nimport os\nimport re\nimport socks\nimport signal\nimport string\nimport struct\nimport 
subprocess\nimport sys\nimport tempfile\nimport threading\nimport time\n\nfrom pprint import pprint\n\nimport pwnlib\nfrom pwnlib import *\nfrom pwnlib.asm import *\nfrom pwnlib.context import Thread\nfrom pwnlib.context import context\nfrom pwnlib.dynelf import DynELF\nfrom pwnlib.encoders import *\nfrom pwnlib.elf.corefile import Core, Corefile, Coredump\nfrom pwnlib.elf.elf import ELF, load\nfrom pwnlib.encoders import *\nfrom pwnlib.exception import PwnlibException\nfrom pwnlib.gdb import attach, debug, debug_assembly, debug_shellcode\nfrom pwnlib.flag import *\nfrom pwnlib.fmtstr import FmtStr, fmtstr_payload\nfrom pwnlib.log import getLogger\nfrom pwnlib.memleak import MemLeak, RelativeMemLeak\nfrom pwnlib.regsort import *\nfrom pwnlib.replacements import *\nfrom pwnlib.rop import ROP\nfrom pwnlib.rop.srop import SigreturnFrame\nfrom pwnlib.runner import *\nfrom pwnlib.timeout import Timeout\nfrom pwnlib.tubes.listen import listen\nfrom pwnlib.tubes.process import process, PTY, PIPE, STDOUT\nfrom pwnlib.tubes.remote import remote, tcp, udp, connect\nfrom pwnlib.tubes.serialtube import serialtube\nfrom pwnlib.tubes.server import server\nfrom pwnlib.tubes.ssh import ssh\nfrom pwnlib.tubes.tube import tube\nfrom pwnlib.ui import *\nfrom pwnlib.util import crc\nfrom pwnlib.util import iters\nfrom pwnlib.util import net\nfrom pwnlib.util import proc\nfrom pwnlib.util import safeeval\nfrom pwnlib.util.crc import BitPolynom\nfrom pwnlib.util.cyclic import *\nfrom pwnlib.util.fiddling import *\nfrom pwnlib.util.getdents import *\nfrom pwnlib.util.hashes import *\nfrom pwnlib.util.lists import *\nfrom pwnlib.util.misc import *\nfrom pwnlib.util.packing import *\nfrom pwnlib.util.proc import pidof\nfrom pwnlib.util.sh_string import sh_string, sh_prepare, sh_command_with\nfrom pwnlib.util.splash import *\nfrom pwnlib.util.web import *\n\n# Promote these modules, so that \"from pwn import *\" will let you access them\n\nfrom six.moves import cPickle as pickle, cStringIO as StringIO\nfrom six import BytesIO\n\nerror = log.error\nwarning = log.warning\nwarn = log.warning\ninfo = log.info\ndebug = log.debug\nsuccess = log.success\n\n__all__ = [x for x in globals().keys() if x != '__name__']\n", "path": "pwn/toplevel.py"}]} | 1,151 | 158 |
gh_patches_debug_29868 | rasdani/github-patches | git_diff | piskvorky__gensim-1653 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error while summarizing text
Hi,
I've received the following error when trying to summarize the body of this news article:
https://www.theguardian.com/media/2016/jun/19/sun-times-brexit-in-out-shake-it-all-about
The error follows:
File "/home/apps/comment_parser/venv/local/lib/python2.7/site-packages/gensim/summarization/summarizer.py", line 202, in summarize
most_important_docs = summarize_corpus(corpus, ratio=ratio if word_count is None else 1)
File "/home/apps/comment_parser/venv/local/lib/python2.7/site-packages/gensim/summarization/summarizer.py", line 161, in summarize_corpus
pagerank_scores = _pagerank(graph)
File "/home/apps/comment_parser/venv/local/lib/python2.7/site-packages/gensim/summarization/pagerank_weighted.py", line 24, in pagerank_weighted
vals, vecs = eigs(pagerank_matrix.T, k=1) # TODO raise an error if matrix has complex eigenvectors?
File "/usr/lib/python2.7/dist-packages/scipy/sparse/linalg/eigen/arpack/arpack.py", line 1271, in eigs
ncv, v0, maxiter, which, tol)
File "/usr/lib/python2.7/dist-packages/scipy/sparse/linalg/eigen/arpack/arpack.py", line 685, in __init__
raise ValueError("k must be less than ndim(A)-1, k=%d" % k)
ValueError: k must be less than ndim(A)-1, k=1
Regards,
--- END ISSUE ---
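For orientation, the traceback comes from ARPACK's requirement that `k < n - 1` for an `n x n` problem: a text that reduces to only one or two sentences yields a 1x1 or 2x2 pagerank matrix, and `eigs(..., k=1)` is then rejected. A minimal, hypothetical reproduction of just that constraint:
```python
# Worked check of the ARPACK constraint (k must be less than ndim(A) - 1).
# With n = 2 and k = 1 the call is rejected, matching the traceback above.
import numpy as np
from scipy.sparse.linalg import eigs

a = np.array([[0.5, 0.5],
              [0.5, 0.5]])

try:
    eigs(a, k=1)
except ValueError as exc:
    print(exc)   # "k must be less than ndim(A)-1, k=1" (wording may vary by SciPy version)
```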
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `gensim/summarization/pagerank_weighted.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
5 from numpy import empty as empty_matrix
6 from scipy.sparse import csr_matrix
7 from scipy.sparse.linalg import eigs
8 from six.moves import xrange
9
10 try:
11 from numpy import VisibleDeprecationWarning
12 import warnings
13 warnings.filterwarnings("ignore", category=VisibleDeprecationWarning)
14 except ImportError:
15 pass
16
17
18 def pagerank_weighted(graph, damping=0.85):
19 adjacency_matrix = build_adjacency_matrix(graph)
20 probability_matrix = build_probability_matrix(graph)
21
22 pagerank_matrix = damping * adjacency_matrix.todense() + (1 - damping) * probability_matrix
23
24 vals, vecs = eigs(pagerank_matrix.T, k=1) # TODO raise an error if matrix has complex eigenvectors?
25
26 return process_results(graph, vecs.real)
27
28
29 def build_adjacency_matrix(graph):
30 row = []
31 col = []
32 data = []
33 nodes = graph.nodes()
34 length = len(nodes)
35
36 for i in xrange(length):
37 current_node = nodes[i]
38 neighbors_sum = sum(graph.edge_weight((current_node, neighbor)) for neighbor in graph.neighbors(current_node))
39 for j in xrange(length):
40 edge_weight = float(graph.edge_weight((current_node, nodes[j])))
41 if i != j and edge_weight != 0.0:
42 row.append(i)
43 col.append(j)
44 data.append(edge_weight / neighbors_sum)
45
46 return csr_matrix((data, (row, col)), shape=(length, length))
47
48
49 def build_probability_matrix(graph):
50 dimension = len(graph.nodes())
51 matrix = empty_matrix((dimension, dimension))
52
53 probability = 1.0 / float(dimension)
54 matrix.fill(probability)
55
56 return matrix
57
58
59 def process_results(graph, vecs):
60 scores = {}
61 for i, node in enumerate(graph.nodes()):
62 scores[node] = abs(vecs[i, :])
63
64 return scores
65
```
--- END FILES ---
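One way out, sketched under the assumption that only the principal eigenvector is needed: fall back to the dense solver from `scipy.linalg` when the matrix is too small for ARPACK. This mirrors the direction of the diff accompanying this record and is not a verified drop-in patch.
```python
# Hypothetical sketch: dense fallback for matrices ARPACK cannot handle.
import numpy
from scipy.linalg import eig          # dense solver, fine for tiny matrices
from scipy.sparse.linalg import eigs  # ARPACK, requires k < ndim(A) - 1


def principal_eigenvector(a):
    if len(a) < 3:
        vals, vecs = eig(a)
        ind = numpy.abs(vals).argmax()   # pick the dominant eigenvalue
        return vecs[:, ind]
    vals, vecs = eigs(a, k=1)
    return vecs[:, 0]
```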
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/gensim/summarization/pagerank_weighted.py b/gensim/summarization/pagerank_weighted.py
--- a/gensim/summarization/pagerank_weighted.py
+++ b/gensim/summarization/pagerank_weighted.py
@@ -2,7 +2,9 @@
# -*- coding: utf-8 -*-
#
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
+import numpy
from numpy import empty as empty_matrix
+from scipy.linalg import eig
from scipy.sparse import csr_matrix
from scipy.sparse.linalg import eigs
from six.moves import xrange
@@ -21,9 +23,10 @@
pagerank_matrix = damping * adjacency_matrix.todense() + (1 - damping) * probability_matrix
- vals, vecs = eigs(pagerank_matrix.T, k=1) # TODO raise an error if matrix has complex eigenvectors?
+ vec = principal_eigenvector(pagerank_matrix.T)
- return process_results(graph, vecs.real)
+ # Because pagerank_matrix is positive, vec is always real (i.e. not complex)
+ return process_results(graph, vec.real)
def build_adjacency_matrix(graph):
@@ -56,9 +59,23 @@
return matrix
-def process_results(graph, vecs):
+def principal_eigenvector(a):
+ # Note that we prefer to use `eigs` even for dense matrix
+ # because we need only one eigenvector. See #441, #438 for discussion.
+
+ # But it doesn't work for dim A < 3, so we just handle this special case
+ if len(a) < 3:
+ vals, vecs = eig(a)
+ ind = numpy.abs(vals).argmax()
+ return vecs[:, ind]
+ else:
+ vals, vecs = eigs(a, k=1)
+ return vecs[:, 0]
+
+
+def process_results(graph, vec):
scores = {}
for i, node in enumerate(graph.nodes()):
- scores[node] = abs(vecs[i, :])
+ scores[node] = abs(vec[i])
return scores
| {"golden_diff": "diff --git a/gensim/summarization/pagerank_weighted.py b/gensim/summarization/pagerank_weighted.py\n--- a/gensim/summarization/pagerank_weighted.py\n+++ b/gensim/summarization/pagerank_weighted.py\n@@ -2,7 +2,9 @@\n # -*- coding: utf-8 -*-\n #\n # Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html\n+import numpy\n from numpy import empty as empty_matrix\n+from scipy.linalg import eig\n from scipy.sparse import csr_matrix\n from scipy.sparse.linalg import eigs\n from six.moves import xrange\n@@ -21,9 +23,10 @@\n \n pagerank_matrix = damping * adjacency_matrix.todense() + (1 - damping) * probability_matrix\n \n- vals, vecs = eigs(pagerank_matrix.T, k=1) # TODO raise an error if matrix has complex eigenvectors?\n+ vec = principal_eigenvector(pagerank_matrix.T)\n \n- return process_results(graph, vecs.real)\n+ # Because pagerank_matrix is positive, vec is always real (i.e. not complex)\n+ return process_results(graph, vec.real)\n \n \n def build_adjacency_matrix(graph):\n@@ -56,9 +59,23 @@\n return matrix\n \n \n-def process_results(graph, vecs):\n+def principal_eigenvector(a):\n+ # Note that we prefer to use `eigs` even for dense matrix\n+ # because we need only one eigenvector. See #441, #438 for discussion.\n+\n+ # But it doesn't work for dim A < 3, so we just handle this special case\n+ if len(a) < 3:\n+ vals, vecs = eig(a)\n+ ind = numpy.abs(vals).argmax()\n+ return vecs[:, ind]\n+ else:\n+ vals, vecs = eigs(a, k=1)\n+ return vecs[:, 0]\n+\n+\n+def process_results(graph, vec):\n scores = {}\n for i, node in enumerate(graph.nodes()):\n- scores[node] = abs(vecs[i, :])\n+ scores[node] = abs(vec[i])\n \n return scores\n", "issue": "Error while summarizing text\nHi,\n\nI've received the following error when trying to summarize the body of this news article:\n\nhttps://www.theguardian.com/media/2016/jun/19/sun-times-brexit-in-out-shake-it-all-about\n\nThe error follows:\n\n File \"/home/apps/comment_parser/venv/local/lib/python2.7/site-packages/gensim/summarization/summarizer.py\", line 202, in summarize\n most_important_docs = summarize_corpus(corpus, ratio=ratio if word_count is None else 1)\n File \"/home/apps/comment_parser/venv/local/lib/python2.7/site-packages/gensim/summarization/summarizer.py\", line 161, in summarize_corpus\n pagerank_scores = _pagerank(graph)\n File \"/home/apps/comment_parser/venv/local/lib/python2.7/site-packages/gensim/summarization/pagerank_weighted.py\", line 24, in pagerank_weighted\n vals, vecs = eigs(pagerank_matrix.T, k=1) # TODO raise an error if matrix has complex eigenvectors?\n File \"/usr/lib/python2.7/dist-packages/scipy/sparse/linalg/eigen/arpack/arpack.py\", line 1271, in eigs\n ncv, v0, maxiter, which, tol)\n File \"/usr/lib/python2.7/dist-packages/scipy/sparse/linalg/eigen/arpack/arpack.py\", line 685, in __init__\n raise ValueError(\"k must be less than ndim(A)-1, k=%d\" % k)\nValueError: k must be less than ndim(A)-1, k=1\n\nRegards,\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html\nfrom numpy import empty as empty_matrix\nfrom scipy.sparse import csr_matrix\nfrom scipy.sparse.linalg import eigs\nfrom six.moves import xrange\n\ntry:\n from numpy import VisibleDeprecationWarning\n import warnings\n warnings.filterwarnings(\"ignore\", category=VisibleDeprecationWarning)\nexcept ImportError:\n pass\n\n\ndef pagerank_weighted(graph, damping=0.85):\n adjacency_matrix = build_adjacency_matrix(graph)\n 
probability_matrix = build_probability_matrix(graph)\n\n pagerank_matrix = damping * adjacency_matrix.todense() + (1 - damping) * probability_matrix\n\n vals, vecs = eigs(pagerank_matrix.T, k=1) # TODO raise an error if matrix has complex eigenvectors?\n\n return process_results(graph, vecs.real)\n\n\ndef build_adjacency_matrix(graph):\n row = []\n col = []\n data = []\n nodes = graph.nodes()\n length = len(nodes)\n\n for i in xrange(length):\n current_node = nodes[i]\n neighbors_sum = sum(graph.edge_weight((current_node, neighbor)) for neighbor in graph.neighbors(current_node))\n for j in xrange(length):\n edge_weight = float(graph.edge_weight((current_node, nodes[j])))\n if i != j and edge_weight != 0.0:\n row.append(i)\n col.append(j)\n data.append(edge_weight / neighbors_sum)\n\n return csr_matrix((data, (row, col)), shape=(length, length))\n\n\ndef build_probability_matrix(graph):\n dimension = len(graph.nodes())\n matrix = empty_matrix((dimension, dimension))\n\n probability = 1.0 / float(dimension)\n matrix.fill(probability)\n\n return matrix\n\n\ndef process_results(graph, vecs):\n scores = {}\n for i, node in enumerate(graph.nodes()):\n scores[node] = abs(vecs[i, :])\n\n return scores\n", "path": "gensim/summarization/pagerank_weighted.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html\nimport numpy\nfrom numpy import empty as empty_matrix\nfrom scipy.linalg import eig\nfrom scipy.sparse import csr_matrix\nfrom scipy.sparse.linalg import eigs\nfrom six.moves import xrange\n\ntry:\n from numpy import VisibleDeprecationWarning\n import warnings\n warnings.filterwarnings(\"ignore\", category=VisibleDeprecationWarning)\nexcept ImportError:\n pass\n\n\ndef pagerank_weighted(graph, damping=0.85):\n adjacency_matrix = build_adjacency_matrix(graph)\n probability_matrix = build_probability_matrix(graph)\n\n pagerank_matrix = damping * adjacency_matrix.todense() + (1 - damping) * probability_matrix\n\n vec = principal_eigenvector(pagerank_matrix.T)\n\n # Because pagerank_matrix is positive, vec is always real (i.e. not complex)\n return process_results(graph, vec.real)\n\n\ndef build_adjacency_matrix(graph):\n row = []\n col = []\n data = []\n nodes = graph.nodes()\n length = len(nodes)\n\n for i in xrange(length):\n current_node = nodes[i]\n neighbors_sum = sum(graph.edge_weight((current_node, neighbor)) for neighbor in graph.neighbors(current_node))\n for j in xrange(length):\n edge_weight = float(graph.edge_weight((current_node, nodes[j])))\n if i != j and edge_weight != 0.0:\n row.append(i)\n col.append(j)\n data.append(edge_weight / neighbors_sum)\n\n return csr_matrix((data, (row, col)), shape=(length, length))\n\n\ndef build_probability_matrix(graph):\n dimension = len(graph.nodes())\n matrix = empty_matrix((dimension, dimension))\n\n probability = 1.0 / float(dimension)\n matrix.fill(probability)\n\n return matrix\n\n\ndef principal_eigenvector(a):\n # Note that we prefer to use `eigs` even for dense matrix\n # because we need only one eigenvector. 
See #441, #438 for discussion.\n\n # But it doesn't work for dim A < 3, so we just handle this special case\n if len(a) < 3:\n vals, vecs = eig(a)\n ind = numpy.abs(vals).argmax()\n return vecs[:, ind]\n else:\n vals, vecs = eigs(a, k=1)\n return vecs[:, 0]\n\n\ndef process_results(graph, vec):\n scores = {}\n for i, node in enumerate(graph.nodes()):\n scores[node] = abs(vec[i])\n\n return scores\n", "path": "gensim/summarization/pagerank_weighted.py"}]} | 1,203 | 500 |
gh_patches_debug_41193 | rasdani/github-patches | git_diff | microsoft__botbuilder-python-279 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
NumberPrompt Locale Not Fully Implemented
## Version
v4.5
## Describe the bug
Found this bug while investigating for parity with regards to this [`NumberPrompt` bug filed in the dotnet repo](https://github.com/microsoft/botbuilder-dotnet/issues/2288)
* in the constructor, the `default_locale` attribute is set but never used in `NumberPrompt`'s implementation (neither in `on_prompt()` nor in `on_recognize()`)
* `on_recognize()` does allow you to pass in `locale` via `Activity`; however, the locale will not be used if only `default_locale` is specified
* "`English`" is used as the string to specify the locale, when we should be using the constants provided by the Python recognizers-text repo
* Separately, there's definitely a lack of unit test coverage for this feature (only 2 tests written)
## Expected behavior
* implement use of `default_locale`
* implement use of recognizers-text constants to specify locale
[bug]
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/number_prompt.py`
Content:
```
1 # Copyright (c) Microsoft Corporation. All rights reserved.
2 # Licensed under the MIT License.
3
4 from typing import Dict
5 from recognizers_number import recognize_number
6 from botbuilder.core.turn_context import TurnContext
7 from botbuilder.schema import ActivityTypes
8 from .prompt import Prompt
9 from .prompt_options import PromptOptions
10 from .prompt_recognizer_result import PromptRecognizerResult
11
12
13 class NumberPrompt(Prompt):
14 # TODO: PromptValidator
15 def __init__(self, dialog_id: str, validator: object, default_locale: str):
16 super(NumberPrompt, self).__init__(dialog_id, validator)
17 self.default_locale = default_locale
18
19 async def on_prompt(
20 self,
21 turn_context: TurnContext,
22 state: Dict[str, object],
23 options: PromptOptions,
24 is_retry: bool,
25 ):
26 if not turn_context:
27 raise TypeError("NumberPrompt.on_prompt(): turn_context cannot be None.")
28 if not options:
29 raise TypeError("NumberPrompt.on_prompt(): options cannot be None.")
30
31 if is_retry and options.retry_prompt is not None:
32 turn_context.send_activity(options.retry_prompt)
33 else:
34 if options.prompt is not None:
35 await turn_context.send_activity(options.prompt)
36
37 async def on_recognize(
38 self,
39 turn_context: TurnContext,
40 state: Dict[str, object],
41 options: PromptOptions,
42 ) -> PromptRecognizerResult:
43 if not turn_context:
44 raise TypeError("NumberPrompt.on_recognize(): turn_context cannot be None.")
45
46 result = PromptRecognizerResult()
47 if turn_context.activity.type == ActivityTypes.message:
48 message = turn_context.activity
49
50 # TODO: Fix constant English with correct constant from text recognizer
51 culture = (
52 turn_context.activity.locale
53 if turn_context.activity.locale is not None
54 else "English"
55 )
56
57 results = recognize_number(message.text, culture)
58 if results:
59 result.succeeded = True
60 result.value = results[0].resolution["value"]
61
62 return result
63
```
Path: `libraries/botbuilder-dialogs/setup.py`
Content:
```
1 # Copyright (c) Microsoft Corporation. All rights reserved.
2 # Licensed under the MIT License.
3
4 import os
5 from setuptools import setup
6
7 REQUIRES = [
8 "recognizers-text-date-time>=1.0.1a0",
9 "recognizers-text-number-with-unit>=1.0.1a0",
10 "recognizers-text-number>=1.0.1a0",
11 "recognizers-text>=1.0.1a0",
12 "recognizers-text-choice>=1.0.1a0",
13 "grapheme>=0.5.0",
14 "emoji>=0.5.2",
15 "botbuilder-schema>=4.4.0b1",
16 "botframework-connector>=4.4.0b1",
17 "botbuilder-core>=4.4.0b1",
18 ]
19
20 TEST_REQUIRES = ["aiounittest>=1.1.0"]
21
22 root = os.path.abspath(os.path.dirname(__file__))
23
24 with open(os.path.join(root, "botbuilder", "dialogs", "about.py")) as f:
25 package_info = {}
26 info = f.read()
27 exec(info, package_info)
28
29 with open(os.path.join(root, "README.rst"), encoding="utf-8") as f:
30 long_description = f.read()
31
32 setup(
33 name=package_info["__title__"],
34 version=package_info["__version__"],
35 url=package_info["__uri__"],
36 author=package_info["__author__"],
37 description=package_info["__description__"],
38 keywords=["BotBuilderDialogs", "bots", "ai", "botframework", "botbuilder"],
39 long_description=long_description,
40 long_description_content_type="text/x-rst",
41 license=package_info["__license__"],
42 packages=[
43 "botbuilder.dialogs",
44 "botbuilder.dialogs.prompts",
45 "botbuilder.dialogs.choices",
46 ],
47 install_requires=REQUIRES + TEST_REQUIRES,
48 tests_require=TEST_REQUIRES,
49 include_package_data=True,
50 classifiers=[
51 "Programming Language :: Python :: 3.7",
52 "Intended Audience :: Developers",
53 "License :: OSI Approved :: MIT License",
54 "Operating System :: OS Independent",
55 "Development Status :: 3 - Alpha",
56 "Topic :: Scientific/Engineering :: Artificial Intelligence",
57 ],
58 )
59
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/number_prompt.py b/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/number_prompt.py
--- a/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/number_prompt.py
+++ b/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/number_prompt.py
@@ -1,18 +1,29 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
-from typing import Dict
+from typing import Callable, Dict
+
+from babel.numbers import parse_decimal
from recognizers_number import recognize_number
+from recognizers_text import Culture, ModelResult
+
from botbuilder.core.turn_context import TurnContext
from botbuilder.schema import ActivityTypes
-from .prompt import Prompt
+
+from .prompt import Prompt, PromptValidatorContext
from .prompt_options import PromptOptions
from .prompt_recognizer_result import PromptRecognizerResult
class NumberPrompt(Prompt):
- # TODO: PromptValidator
- def __init__(self, dialog_id: str, validator: object, default_locale: str):
+ # TODO: PromptValidator needs to be fixed
+ # Does not accept answer as intended (times out)
+ def __init__(
+ self,
+ dialog_id: str,
+ validator: Callable[[PromptValidatorContext], bool] = None,
+ default_locale: str = None,
+ ):
super(NumberPrompt, self).__init__(dialog_id, validator)
self.default_locale = default_locale
@@ -30,9 +41,8 @@
if is_retry and options.retry_prompt is not None:
turn_context.send_activity(options.retry_prompt)
- else:
- if options.prompt is not None:
- await turn_context.send_activity(options.prompt)
+ elif options.prompt is not None:
+ await turn_context.send_activity(options.prompt)
async def on_recognize(
self,
@@ -46,17 +56,25 @@
result = PromptRecognizerResult()
if turn_context.activity.type == ActivityTypes.message:
message = turn_context.activity
+ culture = self._get_culture(turn_context)
+ results: [ModelResult] = recognize_number(message.text, culture)
- # TODO: Fix constant English with correct constant from text recognizer
- culture = (
- turn_context.activity.locale
- if turn_context.activity.locale is not None
- else "English"
- )
-
- results = recognize_number(message.text, culture)
if results:
result.succeeded = True
- result.value = results[0].resolution["value"]
+ result.value = parse_decimal(
+ results[0].resolution["value"], locale=culture.replace("-", "_")
+ )
return result
+
+ def _get_culture(self, turn_context: TurnContext):
+ culture = (
+ turn_context.activity.locale
+ if turn_context.activity.locale
+ else self.default_locale
+ )
+
+ if not culture:
+ culture = Culture.English
+
+ return culture
diff --git a/libraries/botbuilder-dialogs/setup.py b/libraries/botbuilder-dialogs/setup.py
--- a/libraries/botbuilder-dialogs/setup.py
+++ b/libraries/botbuilder-dialogs/setup.py
@@ -12,6 +12,7 @@
"recognizers-text-choice>=1.0.1a0",
"grapheme>=0.5.0",
"emoji>=0.5.2",
+ "babel>=2.7.0",
"botbuilder-schema>=4.4.0b1",
"botframework-connector>=4.4.0b1",
"botbuilder-core>=4.4.0b1",
| {"golden_diff": "diff --git a/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/number_prompt.py b/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/number_prompt.py\n--- a/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/number_prompt.py\n+++ b/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/number_prompt.py\n@@ -1,18 +1,29 @@\n # Copyright (c) Microsoft Corporation. All rights reserved.\n # Licensed under the MIT License.\n \n-from typing import Dict\n+from typing import Callable, Dict\n+\n+from babel.numbers import parse_decimal\n from recognizers_number import recognize_number\n+from recognizers_text import Culture, ModelResult\n+\n from botbuilder.core.turn_context import TurnContext\n from botbuilder.schema import ActivityTypes\n-from .prompt import Prompt\n+\n+from .prompt import Prompt, PromptValidatorContext\n from .prompt_options import PromptOptions\n from .prompt_recognizer_result import PromptRecognizerResult\n \n \n class NumberPrompt(Prompt):\n- # TODO: PromptValidator\n- def __init__(self, dialog_id: str, validator: object, default_locale: str):\n+ # TODO: PromptValidator needs to be fixed\n+ # Does not accept answer as intended (times out)\n+ def __init__(\n+ self,\n+ dialog_id: str,\n+ validator: Callable[[PromptValidatorContext], bool] = None,\n+ default_locale: str = None,\n+ ):\n super(NumberPrompt, self).__init__(dialog_id, validator)\n self.default_locale = default_locale\n \n@@ -30,9 +41,8 @@\n \n if is_retry and options.retry_prompt is not None:\n turn_context.send_activity(options.retry_prompt)\n- else:\n- if options.prompt is not None:\n- await turn_context.send_activity(options.prompt)\n+ elif options.prompt is not None:\n+ await turn_context.send_activity(options.prompt)\n \n async def on_recognize(\n self,\n@@ -46,17 +56,25 @@\n result = PromptRecognizerResult()\n if turn_context.activity.type == ActivityTypes.message:\n message = turn_context.activity\n+ culture = self._get_culture(turn_context)\n+ results: [ModelResult] = recognize_number(message.text, culture)\n \n- # TODO: Fix constant English with correct constant from text recognizer\n- culture = (\n- turn_context.activity.locale\n- if turn_context.activity.locale is not None\n- else \"English\"\n- )\n-\n- results = recognize_number(message.text, culture)\n if results:\n result.succeeded = True\n- result.value = results[0].resolution[\"value\"]\n+ result.value = parse_decimal(\n+ results[0].resolution[\"value\"], locale=culture.replace(\"-\", \"_\")\n+ )\n \n return result\n+\n+ def _get_culture(self, turn_context: TurnContext):\n+ culture = (\n+ turn_context.activity.locale\n+ if turn_context.activity.locale\n+ else self.default_locale\n+ )\n+\n+ if not culture:\n+ culture = Culture.English\n+\n+ return culture\ndiff --git a/libraries/botbuilder-dialogs/setup.py b/libraries/botbuilder-dialogs/setup.py\n--- a/libraries/botbuilder-dialogs/setup.py\n+++ b/libraries/botbuilder-dialogs/setup.py\n@@ -12,6 +12,7 @@\n \"recognizers-text-choice>=1.0.1a0\",\n \"grapheme>=0.5.0\",\n \"emoji>=0.5.2\",\n+ \"babel>=2.7.0\",\n \"botbuilder-schema>=4.4.0b1\",\n \"botframework-connector>=4.4.0b1\",\n \"botbuilder-core>=4.4.0b1\",\n", "issue": "NumberPrompt Locale Not Fully Implemented\n## Version\r\nv4.5\r\n\r\n## Describe the bug\r\nFound this bug while investigating for parity with regards to this [`NumberPrompt` bug filed in the dotnet repo](https://github.com/microsoft/botbuilder-dotnet/issues/2288)\r\n* in constructor, `default_locale` attribute is set, but never used in 
`NumberPrompt`'s implementation (not in `on_prompt()` nor `on_recognize()`\r\n* `on_recognize()` does allow you to pass in `locale` via `Activity`, however locale will not be used if only `default_locale` is specified\r\n* \"`English`\" is used as string to specify locale, when we should be using the constants provided by the python recognizers-text repo\r\n\r\n* Separately, there's definitely a lack of unit test coverage for this feature (only 2 tests written)\r\n\r\n## Expected behavior\r\n* implement use of `default_locale`\r\n* implement use of recognizers-text constants to specify locale\r\n\r\n\r\n\r\n[bug]\r\n\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\n\nfrom typing import Dict\nfrom recognizers_number import recognize_number\nfrom botbuilder.core.turn_context import TurnContext\nfrom botbuilder.schema import ActivityTypes\nfrom .prompt import Prompt\nfrom .prompt_options import PromptOptions\nfrom .prompt_recognizer_result import PromptRecognizerResult\n\n\nclass NumberPrompt(Prompt):\n # TODO: PromptValidator\n def __init__(self, dialog_id: str, validator: object, default_locale: str):\n super(NumberPrompt, self).__init__(dialog_id, validator)\n self.default_locale = default_locale\n\n async def on_prompt(\n self,\n turn_context: TurnContext,\n state: Dict[str, object],\n options: PromptOptions,\n is_retry: bool,\n ):\n if not turn_context:\n raise TypeError(\"NumberPrompt.on_prompt(): turn_context cannot be None.\")\n if not options:\n raise TypeError(\"NumberPrompt.on_prompt(): options cannot be None.\")\n\n if is_retry and options.retry_prompt is not None:\n turn_context.send_activity(options.retry_prompt)\n else:\n if options.prompt is not None:\n await turn_context.send_activity(options.prompt)\n\n async def on_recognize(\n self,\n turn_context: TurnContext,\n state: Dict[str, object],\n options: PromptOptions,\n ) -> PromptRecognizerResult:\n if not turn_context:\n raise TypeError(\"NumberPrompt.on_recognize(): turn_context cannot be None.\")\n\n result = PromptRecognizerResult()\n if turn_context.activity.type == ActivityTypes.message:\n message = turn_context.activity\n\n # TODO: Fix constant English with correct constant from text recognizer\n culture = (\n turn_context.activity.locale\n if turn_context.activity.locale is not None\n else \"English\"\n )\n\n results = recognize_number(message.text, culture)\n if results:\n result.succeeded = True\n result.value = results[0].resolution[\"value\"]\n\n return result\n", "path": "libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/number_prompt.py"}, {"content": "# Copyright (c) Microsoft Corporation. 
All rights reserved.\n# Licensed under the MIT License.\n\nimport os\nfrom setuptools import setup\n\nREQUIRES = [\n \"recognizers-text-date-time>=1.0.1a0\",\n \"recognizers-text-number-with-unit>=1.0.1a0\",\n \"recognizers-text-number>=1.0.1a0\",\n \"recognizers-text>=1.0.1a0\",\n \"recognizers-text-choice>=1.0.1a0\",\n \"grapheme>=0.5.0\",\n \"emoji>=0.5.2\",\n \"botbuilder-schema>=4.4.0b1\",\n \"botframework-connector>=4.4.0b1\",\n \"botbuilder-core>=4.4.0b1\",\n]\n\nTEST_REQUIRES = [\"aiounittest>=1.1.0\"]\n\nroot = os.path.abspath(os.path.dirname(__file__))\n\nwith open(os.path.join(root, \"botbuilder\", \"dialogs\", \"about.py\")) as f:\n package_info = {}\n info = f.read()\n exec(info, package_info)\n\nwith open(os.path.join(root, \"README.rst\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nsetup(\n name=package_info[\"__title__\"],\n version=package_info[\"__version__\"],\n url=package_info[\"__uri__\"],\n author=package_info[\"__author__\"],\n description=package_info[\"__description__\"],\n keywords=[\"BotBuilderDialogs\", \"bots\", \"ai\", \"botframework\", \"botbuilder\"],\n long_description=long_description,\n long_description_content_type=\"text/x-rst\",\n license=package_info[\"__license__\"],\n packages=[\n \"botbuilder.dialogs\",\n \"botbuilder.dialogs.prompts\",\n \"botbuilder.dialogs.choices\",\n ],\n install_requires=REQUIRES + TEST_REQUIRES,\n tests_require=TEST_REQUIRES,\n include_package_data=True,\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 3 - Alpha\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n)\n", "path": "libraries/botbuilder-dialogs/setup.py"}], "after_files": [{"content": "# Copyright (c) Microsoft Corporation. 
All rights reserved.\n# Licensed under the MIT License.\n\nfrom typing import Callable, Dict\n\nfrom babel.numbers import parse_decimal\nfrom recognizers_number import recognize_number\nfrom recognizers_text import Culture, ModelResult\n\nfrom botbuilder.core.turn_context import TurnContext\nfrom botbuilder.schema import ActivityTypes\n\nfrom .prompt import Prompt, PromptValidatorContext\nfrom .prompt_options import PromptOptions\nfrom .prompt_recognizer_result import PromptRecognizerResult\n\n\nclass NumberPrompt(Prompt):\n # TODO: PromptValidator needs to be fixed\n # Does not accept answer as intended (times out)\n def __init__(\n self,\n dialog_id: str,\n validator: Callable[[PromptValidatorContext], bool] = None,\n default_locale: str = None,\n ):\n super(NumberPrompt, self).__init__(dialog_id, validator)\n self.default_locale = default_locale\n\n async def on_prompt(\n self,\n turn_context: TurnContext,\n state: Dict[str, object],\n options: PromptOptions,\n is_retry: bool,\n ):\n if not turn_context:\n raise TypeError(\"NumberPrompt.on_prompt(): turn_context cannot be None.\")\n if not options:\n raise TypeError(\"NumberPrompt.on_prompt(): options cannot be None.\")\n\n if is_retry and options.retry_prompt is not None:\n turn_context.send_activity(options.retry_prompt)\n elif options.prompt is not None:\n await turn_context.send_activity(options.prompt)\n\n async def on_recognize(\n self,\n turn_context: TurnContext,\n state: Dict[str, object],\n options: PromptOptions,\n ) -> PromptRecognizerResult:\n if not turn_context:\n raise TypeError(\"NumberPrompt.on_recognize(): turn_context cannot be None.\")\n\n result = PromptRecognizerResult()\n if turn_context.activity.type == ActivityTypes.message:\n message = turn_context.activity\n culture = self._get_culture(turn_context)\n results: [ModelResult] = recognize_number(message.text, culture)\n\n if results:\n result.succeeded = True\n result.value = parse_decimal(\n results[0].resolution[\"value\"], locale=culture.replace(\"-\", \"_\")\n )\n\n return result\n\n def _get_culture(self, turn_context: TurnContext):\n culture = (\n turn_context.activity.locale\n if turn_context.activity.locale\n else self.default_locale\n )\n\n if not culture:\n culture = Culture.English\n\n return culture\n", "path": "libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/number_prompt.py"}, {"content": "# Copyright (c) Microsoft Corporation. 
All rights reserved.\n# Licensed under the MIT License.\n\nimport os\nfrom setuptools import setup\n\nREQUIRES = [\n \"recognizers-text-date-time>=1.0.1a0\",\n \"recognizers-text-number-with-unit>=1.0.1a0\",\n \"recognizers-text-number>=1.0.1a0\",\n \"recognizers-text>=1.0.1a0\",\n \"recognizers-text-choice>=1.0.1a0\",\n \"grapheme>=0.5.0\",\n \"emoji>=0.5.2\",\n \"babel>=2.7.0\",\n \"botbuilder-schema>=4.4.0b1\",\n \"botframework-connector>=4.4.0b1\",\n \"botbuilder-core>=4.4.0b1\",\n]\n\nTEST_REQUIRES = [\"aiounittest>=1.1.0\"]\n\nroot = os.path.abspath(os.path.dirname(__file__))\n\nwith open(os.path.join(root, \"botbuilder\", \"dialogs\", \"about.py\")) as f:\n package_info = {}\n info = f.read()\n exec(info, package_info)\n\nwith open(os.path.join(root, \"README.rst\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nsetup(\n name=package_info[\"__title__\"],\n version=package_info[\"__version__\"],\n url=package_info[\"__uri__\"],\n author=package_info[\"__author__\"],\n description=package_info[\"__description__\"],\n keywords=[\"BotBuilderDialogs\", \"bots\", \"ai\", \"botframework\", \"botbuilder\"],\n long_description=long_description,\n long_description_content_type=\"text/x-rst\",\n license=package_info[\"__license__\"],\n packages=[\n \"botbuilder.dialogs\",\n \"botbuilder.dialogs.prompts\",\n \"botbuilder.dialogs.choices\",\n ],\n install_requires=REQUIRES + TEST_REQUIRES,\n tests_require=TEST_REQUIRES,\n include_package_data=True,\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 3 - Alpha\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n)\n", "path": "libraries/botbuilder-dialogs/setup.py"}]} | 1,651 | 832 |
gh_patches_debug_39763 | rasdani/github-patches | git_diff | frappe__frappe-11643 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Feature Request] Clean failed jobs from queue
Currently, there is no feature that allows cleaning failed jobs from the job queue. Therefore, failed jobs will accumulate. It should be possible to clear failed jobs.
Discussion reference: https://discuss.erpnext.com/t/cleaning-failed-background-jobs/37886

Observed in Frappe 10.1.x
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `frappe/core/page/background_jobs/background_jobs.py`
Content:
```
1 # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
2 # MIT License. See license.txt
3
4 from __future__ import unicode_literals
5 import frappe
6
7 from rq import Queue, Worker
8 from frappe.utils.background_jobs import get_redis_conn
9 from frappe.utils import format_datetime, cint, convert_utc_to_user_timezone
10 from frappe.utils.scheduler import is_scheduler_inactive
11 from frappe import _
12
13 colors = {
14 'queued': 'orange',
15 'failed': 'red',
16 'started': 'blue',
17 'finished': 'green'
18 }
19
20 @frappe.whitelist()
21 def get_info(show_failed=False):
22 conn = get_redis_conn()
23 queues = Queue.all(conn)
24 workers = Worker.all(conn)
25 jobs = []
26
27 def add_job(j, name):
28 if j.kwargs.get('site')==frappe.local.site:
29 jobs.append({
30 'job_name': j.kwargs.get('kwargs', {}).get('playbook_method') \
31 or j.kwargs.get('kwargs', {}).get('job_type') \
32 or str(j.kwargs.get('job_name')),
33 'status': j.get_status(), 'queue': name,
34 'creation': format_datetime(convert_utc_to_user_timezone(j.created_at)),
35 'color': colors[j.get_status()]
36 })
37 if j.exc_info:
38 jobs[-1]['exc_info'] = j.exc_info
39
40 for w in workers:
41 j = w.get_current_job()
42 if j:
43 add_job(j, w.name)
44
45 for q in queues:
46 if q.name != 'failed':
47 for j in q.get_jobs(): add_job(j, q.name)
48
49 if cint(show_failed):
50 for q in queues:
51 if q.name == 'failed':
52 for j in q.get_jobs()[:10]: add_job(j, q.name)
53
54 return jobs
55
56 @frappe.whitelist()
57 def get_scheduler_status():
58 if is_scheduler_inactive():
59 return [_("Inactive"), "red"]
60 return [_("Active"), "green"]
61
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/frappe/core/page/background_jobs/background_jobs.py b/frappe/core/page/background_jobs/background_jobs.py
--- a/frappe/core/page/background_jobs/background_jobs.py
+++ b/frappe/core/page/background_jobs/background_jobs.py
@@ -1,58 +1,88 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
-from __future__ import unicode_literals
-import frappe
+import json
+from typing import TYPE_CHECKING, Dict, List
from rq import Queue, Worker
+
+import frappe
+from frappe import _
+from frappe.utils import convert_utc_to_user_timezone, format_datetime
from frappe.utils.background_jobs import get_redis_conn
-from frappe.utils import format_datetime, cint, convert_utc_to_user_timezone
from frappe.utils.scheduler import is_scheduler_inactive
-from frappe import _
-colors = {
+if TYPE_CHECKING:
+ from rq.job import Job
+
+JOB_COLORS = {
'queued': 'orange',
'failed': 'red',
'started': 'blue',
'finished': 'green'
}
+
@frappe.whitelist()
-def get_info(show_failed=False):
+def get_info(show_failed=False) -> List[Dict]:
+ if isinstance(show_failed, str):
+ show_failed = json.loads(show_failed)
+
conn = get_redis_conn()
queues = Queue.all(conn)
workers = Worker.all(conn)
jobs = []
- def add_job(j, name):
- if j.kwargs.get('site')==frappe.local.site:
- jobs.append({
- 'job_name': j.kwargs.get('kwargs', {}).get('playbook_method') \
- or j.kwargs.get('kwargs', {}).get('job_type') \
- or str(j.kwargs.get('job_name')),
- 'status': j.get_status(), 'queue': name,
- 'creation': format_datetime(convert_utc_to_user_timezone(j.created_at)),
- 'color': colors[j.get_status()]
- })
- if j.exc_info:
- jobs[-1]['exc_info'] = j.exc_info
-
- for w in workers:
- j = w.get_current_job()
- if j:
- add_job(j, w.name)
-
- for q in queues:
- if q.name != 'failed':
- for j in q.get_jobs(): add_job(j, q.name)
-
- if cint(show_failed):
- for q in queues:
- if q.name == 'failed':
- for j in q.get_jobs()[:10]: add_job(j, q.name)
+ def add_job(job: 'Job', name: str) -> None:
+ if job.kwargs.get('site') == frappe.local.site:
+ job_info = {
+ 'job_name': job.kwargs.get('kwargs', {}).get('playbook_method')
+ or job.kwargs.get('kwargs', {}).get('job_type')
+ or str(job.kwargs.get('job_name')),
+ 'status': job.get_status(),
+ 'queue': name,
+ 'creation': format_datetime(convert_utc_to_user_timezone(job.created_at)),
+ 'color': JOB_COLORS[job.get_status()]
+ }
+
+ if job.exc_info:
+ job_info['exc_info'] = job.exc_info
+
+ jobs.append(job_info)
+
+ # show worker jobs
+ for worker in workers:
+ job = worker.get_current_job()
+ if job:
+ add_job(job, worker.name)
+
+ for queue in queues:
+ # show active queued jobs
+ if queue.name != 'failed':
+ for job in queue.jobs:
+ add_job(job, queue.name)
+
+ # show failed jobs, if requested
+ if show_failed:
+ fail_registry = queue.failed_job_registry
+ for job_id in fail_registry.get_job_ids():
+ job = queue.fetch_job(job_id)
+ add_job(job, queue.name)
return jobs
+
[email protected]()
+def remove_failed_jobs():
+ conn = get_redis_conn()
+ queues = Queue.all(conn)
+ for queue in queues:
+ fail_registry = queue.failed_job_registry
+ for job_id in fail_registry.get_job_ids():
+ job = queue.fetch_job(job_id)
+ fail_registry.remove(job, delete_job=True)
+
+
@frappe.whitelist()
def get_scheduler_status():
if is_scheduler_inactive():
| {"golden_diff": "diff --git a/frappe/core/page/background_jobs/background_jobs.py b/frappe/core/page/background_jobs/background_jobs.py\n--- a/frappe/core/page/background_jobs/background_jobs.py\n+++ b/frappe/core/page/background_jobs/background_jobs.py\n@@ -1,58 +1,88 @@\n # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors\n # MIT License. See license.txt\n \n-from __future__ import unicode_literals\n-import frappe\n+import json\n+from typing import TYPE_CHECKING, Dict, List\n \n from rq import Queue, Worker\n+\n+import frappe\n+from frappe import _\n+from frappe.utils import convert_utc_to_user_timezone, format_datetime\n from frappe.utils.background_jobs import get_redis_conn\n-from frappe.utils import format_datetime, cint, convert_utc_to_user_timezone\n from frappe.utils.scheduler import is_scheduler_inactive\n-from frappe import _\n \n-colors = {\n+if TYPE_CHECKING:\n+\tfrom rq.job import Job\n+\n+JOB_COLORS = {\n \t'queued': 'orange',\n \t'failed': 'red',\n \t'started': 'blue',\n \t'finished': 'green'\n }\n \n+\n @frappe.whitelist()\n-def get_info(show_failed=False):\n+def get_info(show_failed=False) -> List[Dict]:\n+\tif isinstance(show_failed, str):\n+\t\tshow_failed = json.loads(show_failed)\n+\n \tconn = get_redis_conn()\n \tqueues = Queue.all(conn)\n \tworkers = Worker.all(conn)\n \tjobs = []\n \n-\tdef add_job(j, name):\n-\t\tif j.kwargs.get('site')==frappe.local.site:\n-\t\t\tjobs.append({\n-\t\t\t\t'job_name': j.kwargs.get('kwargs', {}).get('playbook_method') \\\n-\t\t\t\t\tor j.kwargs.get('kwargs', {}).get('job_type') \\\n-\t\t\t\t\tor str(j.kwargs.get('job_name')),\n-\t\t\t\t'status': j.get_status(), 'queue': name,\n-\t\t\t\t'creation': format_datetime(convert_utc_to_user_timezone(j.created_at)),\n-\t\t\t\t'color': colors[j.get_status()]\n-\t\t\t})\n-\t\t\tif j.exc_info:\n-\t\t\t\tjobs[-1]['exc_info'] = j.exc_info\n-\n-\tfor w in workers:\n-\t\tj = w.get_current_job()\n-\t\tif j:\n-\t\t\tadd_job(j, w.name)\n-\n-\tfor q in queues:\n-\t\tif q.name != 'failed':\n-\t\t\tfor j in q.get_jobs(): add_job(j, q.name)\n-\n-\tif cint(show_failed):\n-\t\tfor q in queues:\n-\t\t\tif q.name == 'failed':\n-\t\t\t\tfor j in q.get_jobs()[:10]: add_job(j, q.name)\n+\tdef add_job(job: 'Job', name: str) -> None:\n+\t\tif job.kwargs.get('site') == frappe.local.site:\n+\t\t\tjob_info = {\n+\t\t\t\t'job_name': job.kwargs.get('kwargs', {}).get('playbook_method')\n+\t\t\t\t\tor job.kwargs.get('kwargs', {}).get('job_type')\n+\t\t\t\t\tor str(job.kwargs.get('job_name')),\n+\t\t\t\t'status': job.get_status(),\n+\t\t\t\t'queue': name,\n+\t\t\t\t'creation': format_datetime(convert_utc_to_user_timezone(job.created_at)),\n+\t\t\t\t'color': JOB_COLORS[job.get_status()]\n+\t\t\t}\n+\n+\t\t\tif job.exc_info:\n+\t\t\t\tjob_info['exc_info'] = job.exc_info\n+\n+\t\t\tjobs.append(job_info)\n+\n+\t# show worker jobs\n+\tfor worker in workers:\n+\t\tjob = worker.get_current_job()\n+\t\tif job:\n+\t\t\tadd_job(job, worker.name)\n+\n+\tfor queue in queues:\n+\t\t# show active queued jobs\n+\t\tif queue.name != 'failed':\n+\t\t\tfor job in queue.jobs:\n+\t\t\t\tadd_job(job, queue.name)\n+\n+\t\t# show failed jobs, if requested\n+\t\tif show_failed:\n+\t\t\tfail_registry = queue.failed_job_registry\n+\t\t\tfor job_id in fail_registry.get_job_ids():\n+\t\t\t\tjob = queue.fetch_job(job_id)\n+\t\t\t\tadd_job(job, queue.name)\n \n \treturn jobs\n \n+\[email protected]()\n+def remove_failed_jobs():\n+\tconn = get_redis_conn()\n+\tqueues = Queue.all(conn)\n+\tfor queue in queues:\n+\t\tfail_registry = 
queue.failed_job_registry\n+\t\tfor job_id in fail_registry.get_job_ids():\n+\t\t\tjob = queue.fetch_job(job_id)\n+\t\t\tfail_registry.remove(job, delete_job=True)\n+\n+\n @frappe.whitelist()\n def get_scheduler_status():\n \tif is_scheduler_inactive():\n", "issue": "[Feature Request] Clean failed jobs from queue\nCurrently, there is no feature that allows to clean failed jobs from the job queue. Therefore, failed jobs will accumulate. It should be possible to clear failed jobs.\r\n\r\nDiscussion reference: https://discuss.erpnext.com/t/cleaning-failed-background-jobs/37886\r\n\r\n\r\n\r\nObserved in Frappe 10.1.x\n", "before_files": [{"content": "# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors\n# MIT License. See license.txt\n\nfrom __future__ import unicode_literals\nimport frappe\n\nfrom rq import Queue, Worker\nfrom frappe.utils.background_jobs import get_redis_conn\nfrom frappe.utils import format_datetime, cint, convert_utc_to_user_timezone\nfrom frappe.utils.scheduler import is_scheduler_inactive\nfrom frappe import _\n\ncolors = {\n\t'queued': 'orange',\n\t'failed': 'red',\n\t'started': 'blue',\n\t'finished': 'green'\n}\n\[email protected]()\ndef get_info(show_failed=False):\n\tconn = get_redis_conn()\n\tqueues = Queue.all(conn)\n\tworkers = Worker.all(conn)\n\tjobs = []\n\n\tdef add_job(j, name):\n\t\tif j.kwargs.get('site')==frappe.local.site:\n\t\t\tjobs.append({\n\t\t\t\t'job_name': j.kwargs.get('kwargs', {}).get('playbook_method') \\\n\t\t\t\t\tor j.kwargs.get('kwargs', {}).get('job_type') \\\n\t\t\t\t\tor str(j.kwargs.get('job_name')),\n\t\t\t\t'status': j.get_status(), 'queue': name,\n\t\t\t\t'creation': format_datetime(convert_utc_to_user_timezone(j.created_at)),\n\t\t\t\t'color': colors[j.get_status()]\n\t\t\t})\n\t\t\tif j.exc_info:\n\t\t\t\tjobs[-1]['exc_info'] = j.exc_info\n\n\tfor w in workers:\n\t\tj = w.get_current_job()\n\t\tif j:\n\t\t\tadd_job(j, w.name)\n\n\tfor q in queues:\n\t\tif q.name != 'failed':\n\t\t\tfor j in q.get_jobs(): add_job(j, q.name)\n\n\tif cint(show_failed):\n\t\tfor q in queues:\n\t\t\tif q.name == 'failed':\n\t\t\t\tfor j in q.get_jobs()[:10]: add_job(j, q.name)\n\n\treturn jobs\n\[email protected]()\ndef get_scheduler_status():\n\tif is_scheduler_inactive():\n\t\treturn [_(\"Inactive\"), \"red\"]\n\treturn [_(\"Active\"), \"green\"]\n", "path": "frappe/core/page/background_jobs/background_jobs.py"}], "after_files": [{"content": "# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors\n# MIT License. 
See license.txt\n\nimport json\nfrom typing import TYPE_CHECKING, Dict, List\n\nfrom rq import Queue, Worker\n\nimport frappe\nfrom frappe import _\nfrom frappe.utils import convert_utc_to_user_timezone, format_datetime\nfrom frappe.utils.background_jobs import get_redis_conn\nfrom frappe.utils.scheduler import is_scheduler_inactive\n\nif TYPE_CHECKING:\n\tfrom rq.job import Job\n\nJOB_COLORS = {\n\t'queued': 'orange',\n\t'failed': 'red',\n\t'started': 'blue',\n\t'finished': 'green'\n}\n\n\[email protected]()\ndef get_info(show_failed=False) -> List[Dict]:\n\tif isinstance(show_failed, str):\n\t\tshow_failed = json.loads(show_failed)\n\n\tconn = get_redis_conn()\n\tqueues = Queue.all(conn)\n\tworkers = Worker.all(conn)\n\tjobs = []\n\n\tdef add_job(job: 'Job', name: str) -> None:\n\t\tif job.kwargs.get('site') == frappe.local.site:\n\t\t\tjob_info = {\n\t\t\t\t'job_name': job.kwargs.get('kwargs', {}).get('playbook_method')\n\t\t\t\t\tor job.kwargs.get('kwargs', {}).get('job_type')\n\t\t\t\t\tor str(job.kwargs.get('job_name')),\n\t\t\t\t'status': job.get_status(),\n\t\t\t\t'queue': name,\n\t\t\t\t'creation': format_datetime(convert_utc_to_user_timezone(job.created_at)),\n\t\t\t\t'color': JOB_COLORS[job.get_status()]\n\t\t\t}\n\n\t\t\tif job.exc_info:\n\t\t\t\tjob_info['exc_info'] = job.exc_info\n\n\t\t\tjobs.append(job_info)\n\n\t# show worker jobs\n\tfor worker in workers:\n\t\tjob = worker.get_current_job()\n\t\tif job:\n\t\t\tadd_job(job, worker.name)\n\n\tfor queue in queues:\n\t\t# show active queued jobs\n\t\tif queue.name != 'failed':\n\t\t\tfor job in queue.jobs:\n\t\t\t\tadd_job(job, queue.name)\n\n\t\t# show failed jobs, if requested\n\t\tif show_failed:\n\t\t\tfail_registry = queue.failed_job_registry\n\t\t\tfor job_id in fail_registry.get_job_ids():\n\t\t\t\tjob = queue.fetch_job(job_id)\n\t\t\t\tadd_job(job, queue.name)\n\n\treturn jobs\n\n\[email protected]()\ndef remove_failed_jobs():\n\tconn = get_redis_conn()\n\tqueues = Queue.all(conn)\n\tfor queue in queues:\n\t\tfail_registry = queue.failed_job_registry\n\t\tfor job_id in fail_registry.get_job_ids():\n\t\t\tjob = queue.fetch_job(job_id)\n\t\t\tfail_registry.remove(job, delete_job=True)\n\n\[email protected]()\ndef get_scheduler_status():\n\tif is_scheduler_inactive():\n\t\treturn [_(\"Inactive\"), \"red\"]\n\treturn [_(\"Active\"), \"green\"]\n", "path": "frappe/core/page/background_jobs/background_jobs.py"}]} | 968 | 985 |
gh_patches_debug_5445 | rasdani/github-patches | git_diff | comic__grand-challenge.org-1152 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add functionality to filter ImageFile objects by RawImageUploadSession from a client
**Is your feature request related to a problem? Please describe.**
I would like to know which ImageFile was generated by a specific RawImageUploadSession from gcapi
**Describe the solution you'd like**
Add an additional item to the existing set of filters in ImageViewSet.get_queryset()
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app/grandchallenge/cases/views.py`
Content:
```
1 from django.contrib import messages
2 from django.http import Http404
3 from django.views.generic import DetailView
4 from guardian.mixins import (
5 LoginRequiredMixin,
6 PermissionRequiredMixin as ObjectPermissionRequiredMixin,
7 )
8 from rest_framework import status
9 from rest_framework.decorators import action
10 from rest_framework.mixins import (
11 CreateModelMixin,
12 ListModelMixin,
13 RetrieveModelMixin,
14 )
15 from rest_framework.permissions import DjangoObjectPermissions
16 from rest_framework.response import Response
17 from rest_framework.viewsets import GenericViewSet, ReadOnlyModelViewSet
18 from rest_framework_guardian.filters import ObjectPermissionsFilter
19
20 from grandchallenge.cases.models import (
21 Image,
22 ImageFile,
23 RawImageFile,
24 RawImageUploadSession,
25 )
26 from grandchallenge.cases.serializers import (
27 ImageSerializer,
28 RawImageFileSerializer,
29 RawImageUploadSessionSerializer,
30 )
31 from grandchallenge.core.permissions.rest_framework import (
32 DjangoObjectOnlyWithCustomPostPermissions,
33 )
34
35
36 class RawImageUploadSessionDetail(
37 LoginRequiredMixin, ObjectPermissionRequiredMixin, DetailView
38 ):
39 model = RawImageUploadSession
40 permission_required = f"{RawImageUploadSession._meta.app_label}.view_{RawImageUploadSession._meta.model_name}"
41 raise_exception = True
42
43
44 class ImageViewSet(ReadOnlyModelViewSet):
45 serializer_class = ImageSerializer
46 queryset = Image.objects.all()
47 permission_classes = [DjangoObjectPermissions]
48 filter_backends = [ObjectPermissionsFilter]
49
50 def get_queryset(self):
51 filters = {
52 "worklist": self.request.query_params.get("worklist", None),
53 "study": self.request.query_params.get("study", None),
54 }
55 filters = {k: v for k, v in filters.items() if v is not None}
56
57 queryset = super().get_queryset().filter(**filters)
58
59 return queryset
60
61
62 def show_image(request, *, pk):
63 from django.shortcuts import render
64
65 try:
66 image_file = ImageFile.objects.select_related("image").get(
67 image=pk, image_type="DZI"
68 )
69 except Image.DoesNotExist:
70 raise Http404("File not found.")
71
72 return render(
73 request,
74 "cases/show_image.html",
75 {"image_file": image_file, "url": image_file.file.url},
76 )
77
78
79 class RawImageUploadSessionViewSet(
80 CreateModelMixin, RetrieveModelMixin, ListModelMixin, GenericViewSet
81 ):
82 serializer_class = RawImageUploadSessionSerializer
83 queryset = RawImageUploadSession.objects.all()
84 permission_classes = [DjangoObjectOnlyWithCustomPostPermissions]
85 filter_backends = [ObjectPermissionsFilter]
86
87 def perform_create(self, serializer):
88 serializer.save(creator=self.request.user)
89
90 @action(detail=True, methods=["patch"])
91 def process_images(self, request, pk=None):
92 upload_session: RawImageUploadSession = self.get_object()
93 if (
94 upload_session.status == upload_session.PENDING
95 and not upload_session.rawimagefile_set.filter(
96 consumed=True
97 ).exists()
98 ):
99 upload_session.process_images()
100 messages.add_message(
101 request, messages.SUCCESS, "Image processing job queued."
102 )
103 return Response(status=status.HTTP_200_OK)
104 else:
105 messages.add_message(
106 request,
107 messages.ERROR,
108 "Image processing job could not be queued.",
109 )
110 return Response(status=status.HTTP_400_BAD_REQUEST)
111
112
113 class RawImageFileViewSet(
114 CreateModelMixin, RetrieveModelMixin, ListModelMixin, GenericViewSet
115 ):
116 serializer_class = RawImageFileSerializer
117 queryset = RawImageFile.objects.all()
118 permission_classes = [DjangoObjectOnlyWithCustomPostPermissions]
119 filter_backends = [ObjectPermissionsFilter]
120
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/app/grandchallenge/cases/views.py b/app/grandchallenge/cases/views.py
--- a/app/grandchallenge/cases/views.py
+++ b/app/grandchallenge/cases/views.py
@@ -51,6 +51,7 @@
filters = {
"worklist": self.request.query_params.get("worklist", None),
"study": self.request.query_params.get("study", None),
+ "origin": self.request.query_params.get("origin", None),
}
filters = {k: v for k, v in filters.items() if v is not None}
| {"golden_diff": "diff --git a/app/grandchallenge/cases/views.py b/app/grandchallenge/cases/views.py\n--- a/app/grandchallenge/cases/views.py\n+++ b/app/grandchallenge/cases/views.py\n@@ -51,6 +51,7 @@\n filters = {\n \"worklist\": self.request.query_params.get(\"worklist\", None),\n \"study\": self.request.query_params.get(\"study\", None),\n+ \"origin\": self.request.query_params.get(\"origin\", None),\n }\n filters = {k: v for k, v in filters.items() if v is not None}\n", "issue": "Add functionality to filter ImageFile objects by RawImageUploadSession from a client\n**Is your feature request related to a problem? Please describe.**\r\nI would like to know which ImageFile was generated by a specific RawImageUploadSession from gcapi\r\n**Describe the solution you'd like**\r\nAdd an additional item to the existing set of filters in ImageViewSet.get_queryset()\r\n\r\n\n", "before_files": [{"content": "from django.contrib import messages\nfrom django.http import Http404\nfrom django.views.generic import DetailView\nfrom guardian.mixins import (\n LoginRequiredMixin,\n PermissionRequiredMixin as ObjectPermissionRequiredMixin,\n)\nfrom rest_framework import status\nfrom rest_framework.decorators import action\nfrom rest_framework.mixins import (\n CreateModelMixin,\n ListModelMixin,\n RetrieveModelMixin,\n)\nfrom rest_framework.permissions import DjangoObjectPermissions\nfrom rest_framework.response import Response\nfrom rest_framework.viewsets import GenericViewSet, ReadOnlyModelViewSet\nfrom rest_framework_guardian.filters import ObjectPermissionsFilter\n\nfrom grandchallenge.cases.models import (\n Image,\n ImageFile,\n RawImageFile,\n RawImageUploadSession,\n)\nfrom grandchallenge.cases.serializers import (\n ImageSerializer,\n RawImageFileSerializer,\n RawImageUploadSessionSerializer,\n)\nfrom grandchallenge.core.permissions.rest_framework import (\n DjangoObjectOnlyWithCustomPostPermissions,\n)\n\n\nclass RawImageUploadSessionDetail(\n LoginRequiredMixin, ObjectPermissionRequiredMixin, DetailView\n):\n model = RawImageUploadSession\n permission_required = f\"{RawImageUploadSession._meta.app_label}.view_{RawImageUploadSession._meta.model_name}\"\n raise_exception = True\n\n\nclass ImageViewSet(ReadOnlyModelViewSet):\n serializer_class = ImageSerializer\n queryset = Image.objects.all()\n permission_classes = [DjangoObjectPermissions]\n filter_backends = [ObjectPermissionsFilter]\n\n def get_queryset(self):\n filters = {\n \"worklist\": self.request.query_params.get(\"worklist\", None),\n \"study\": self.request.query_params.get(\"study\", None),\n }\n filters = {k: v for k, v in filters.items() if v is not None}\n\n queryset = super().get_queryset().filter(**filters)\n\n return queryset\n\n\ndef show_image(request, *, pk):\n from django.shortcuts import render\n\n try:\n image_file = ImageFile.objects.select_related(\"image\").get(\n image=pk, image_type=\"DZI\"\n )\n except Image.DoesNotExist:\n raise Http404(\"File not found.\")\n\n return render(\n request,\n \"cases/show_image.html\",\n {\"image_file\": image_file, \"url\": image_file.file.url},\n )\n\n\nclass RawImageUploadSessionViewSet(\n CreateModelMixin, RetrieveModelMixin, ListModelMixin, GenericViewSet\n):\n serializer_class = RawImageUploadSessionSerializer\n queryset = RawImageUploadSession.objects.all()\n permission_classes = [DjangoObjectOnlyWithCustomPostPermissions]\n filter_backends = [ObjectPermissionsFilter]\n\n def perform_create(self, serializer):\n serializer.save(creator=self.request.user)\n\n @action(detail=True, 
methods=[\"patch\"])\n def process_images(self, request, pk=None):\n upload_session: RawImageUploadSession = self.get_object()\n if (\n upload_session.status == upload_session.PENDING\n and not upload_session.rawimagefile_set.filter(\n consumed=True\n ).exists()\n ):\n upload_session.process_images()\n messages.add_message(\n request, messages.SUCCESS, \"Image processing job queued.\"\n )\n return Response(status=status.HTTP_200_OK)\n else:\n messages.add_message(\n request,\n messages.ERROR,\n \"Image processing job could not be queued.\",\n )\n return Response(status=status.HTTP_400_BAD_REQUEST)\n\n\nclass RawImageFileViewSet(\n CreateModelMixin, RetrieveModelMixin, ListModelMixin, GenericViewSet\n):\n serializer_class = RawImageFileSerializer\n queryset = RawImageFile.objects.all()\n permission_classes = [DjangoObjectOnlyWithCustomPostPermissions]\n filter_backends = [ObjectPermissionsFilter]\n", "path": "app/grandchallenge/cases/views.py"}], "after_files": [{"content": "from django.contrib import messages\nfrom django.http import Http404\nfrom django.views.generic import DetailView\nfrom guardian.mixins import (\n LoginRequiredMixin,\n PermissionRequiredMixin as ObjectPermissionRequiredMixin,\n)\nfrom rest_framework import status\nfrom rest_framework.decorators import action\nfrom rest_framework.mixins import (\n CreateModelMixin,\n ListModelMixin,\n RetrieveModelMixin,\n)\nfrom rest_framework.permissions import DjangoObjectPermissions\nfrom rest_framework.response import Response\nfrom rest_framework.viewsets import GenericViewSet, ReadOnlyModelViewSet\nfrom rest_framework_guardian.filters import ObjectPermissionsFilter\n\nfrom grandchallenge.cases.models import (\n Image,\n ImageFile,\n RawImageFile,\n RawImageUploadSession,\n)\nfrom grandchallenge.cases.serializers import (\n ImageSerializer,\n RawImageFileSerializer,\n RawImageUploadSessionSerializer,\n)\nfrom grandchallenge.core.permissions.rest_framework import (\n DjangoObjectOnlyWithCustomPostPermissions,\n)\n\n\nclass RawImageUploadSessionDetail(\n LoginRequiredMixin, ObjectPermissionRequiredMixin, DetailView\n):\n model = RawImageUploadSession\n permission_required = f\"{RawImageUploadSession._meta.app_label}.view_{RawImageUploadSession._meta.model_name}\"\n raise_exception = True\n\n\nclass ImageViewSet(ReadOnlyModelViewSet):\n serializer_class = ImageSerializer\n queryset = Image.objects.all()\n permission_classes = [DjangoObjectPermissions]\n filter_backends = [ObjectPermissionsFilter]\n\n def get_queryset(self):\n filters = {\n \"worklist\": self.request.query_params.get(\"worklist\", None),\n \"study\": self.request.query_params.get(\"study\", None),\n \"origin\": self.request.query_params.get(\"origin\", None),\n }\n filters = {k: v for k, v in filters.items() if v is not None}\n\n queryset = super().get_queryset().filter(**filters)\n\n return queryset\n\n\ndef show_image(request, *, pk):\n from django.shortcuts import render\n\n try:\n image_file = ImageFile.objects.select_related(\"image\").get(\n image=pk, image_type=\"DZI\"\n )\n except Image.DoesNotExist:\n raise Http404(\"File not found.\")\n\n return render(\n request,\n \"cases/show_image.html\",\n {\"image_file\": image_file, \"url\": image_file.file.url},\n )\n\n\nclass RawImageUploadSessionViewSet(\n CreateModelMixin, RetrieveModelMixin, ListModelMixin, GenericViewSet\n):\n serializer_class = RawImageUploadSessionSerializer\n queryset = RawImageUploadSession.objects.all()\n permission_classes = [DjangoObjectOnlyWithCustomPostPermissions]\n filter_backends = 
[ObjectPermissionsFilter]\n\n def perform_create(self, serializer):\n serializer.save(creator=self.request.user)\n\n @action(detail=True, methods=[\"patch\"])\n def process_images(self, request, pk=None):\n upload_session: RawImageUploadSession = self.get_object()\n if (\n upload_session.status == upload_session.PENDING\n and not upload_session.rawimagefile_set.filter(\n consumed=True\n ).exists()\n ):\n upload_session.process_images()\n messages.add_message(\n request, messages.SUCCESS, \"Image processing job queued.\"\n )\n return Response(status=status.HTTP_200_OK)\n else:\n messages.add_message(\n request,\n messages.ERROR,\n \"Image processing job could not be queued.\",\n )\n return Response(status=status.HTTP_400_BAD_REQUEST)\n\n\nclass RawImageFileViewSet(\n CreateModelMixin, RetrieveModelMixin, ListModelMixin, GenericViewSet\n):\n serializer_class = RawImageFileSerializer\n queryset = RawImageFile.objects.all()\n permission_classes = [DjangoObjectOnlyWithCustomPostPermissions]\n filter_backends = [ObjectPermissionsFilter]\n", "path": "app/grandchallenge/cases/views.py"}]} | 1,368 | 128 |
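For reference, a minimal sketch of the filtering pattern used in the golden diff above: query parameters are collected into a dict, unset values are dropped, and the surviving keys — now including `origin` — are what reach the queryset filter. The function name and sample input are illustrative, not part of the grand-challenge.org codebase.

```python
# Minimal sketch (illustrative names): the patch adds an "origin" query
# parameter alongside "worklist" and "study" so clients can filter Image
# objects by their RawImageUploadSession.
def build_filters(query_params):
    candidates = {
        "worklist": query_params.get("worklist"),
        "study": query_params.get("study"),
        "origin": query_params.get("origin"),  # key added by the patch
    }
    # Drop unset parameters so only supplied filters reach the queryset.
    return {k: v for k, v in candidates.items() if v is not None}


print(build_filters({"origin": "upload-session-uuid"}))
# -> {'origin': 'upload-session-uuid'}
```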
gh_patches_debug_24971 | rasdani/github-patches | git_diff | pyjanitor-devs__pyjanitor-513 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[DOC] PyPI page has no project description
# Brief Description of Fix
The PyPI page for `pyjanitor` has no project description. I'm not sure if it previously did, and was lost in a recent version update. I'm not sure how to fix it, but I assume it's something that @ericmjl would be able to change.
# Relevant Context
- [Link to PyPI page](https://pypi.org/project/pyjanitor/)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import setup
2
3
4 def requirements():
5 with open("requirements.txt", "r+") as f:
6 return f.read()
7
8
9 setup(
10 name="pyjanitor",
11 version="0.18.0",
12 description="Tools for cleaning pandas DataFrames",
13 author="Eric J. Ma",
14 author_email="[email protected]",
15 url="https://github.com/ericmjl/pyjanitor",
16 packages=["janitor"],
17 install_requires=requirements(),
18 python_requires=">=3.6",
19 )
20
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -1,3 +1,6 @@
+import re
+from pathlib import Path
+
from setuptools import setup
@@ -6,6 +9,36 @@
return f.read()
+def generate_long_description() -> str:
+ """
+ Extra chunks from README for PyPI description.
+
+ Target chunks must be contained within `.. pypi-doc` pair comments,
+ so there must be an even number of comments in README.
+
+ :returns: Extracted description from README
+
+ """
+ # Read the contents of README file
+ this_directory = Path(__file__).parent
+ with open(this_directory / "README.rst", encoding="utf-8") as f:
+ readme = f.read()
+
+ # Find pypi-doc comments in README
+ indices = [m.start() for m in re.finditer(".. pypi-doc", readme)]
+ assert (
+ len(indices) % 2 == 0
+ ), "Odd number of `.. pypi-doc` comments in README"
+
+ # Loop through pairs of comments and save text between pairs
+ long_description = ""
+ for i in range(0, len(indices), 2):
+ start_index = indices[i] + 11
+ end_index = indices[i + 1]
+ long_description += readme[start_index:end_index]
+ return long_description
+
+
setup(
name="pyjanitor",
version="0.18.0",
@@ -16,4 +49,6 @@
packages=["janitor"],
install_requires=requirements(),
python_requires=">=3.6",
+ long_description=generate_long_description(),
+ long_description_content_type="text/x-rst",
)
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -1,3 +1,6 @@\n+import re\n+from pathlib import Path\n+\n from setuptools import setup\n \n \n@@ -6,6 +9,36 @@\n return f.read()\n \n \n+def generate_long_description() -> str:\n+ \"\"\"\n+ Extra chunks from README for PyPI description.\n+\n+ Target chunks must be contained within `.. pypi-doc` pair comments,\n+ so there must be an even number of comments in README.\n+\n+ :returns: Extracted description from README\n+\n+ \"\"\"\n+ # Read the contents of README file\n+ this_directory = Path(__file__).parent\n+ with open(this_directory / \"README.rst\", encoding=\"utf-8\") as f:\n+ readme = f.read()\n+\n+ # Find pypi-doc comments in README\n+ indices = [m.start() for m in re.finditer(\".. pypi-doc\", readme)]\n+ assert (\n+ len(indices) % 2 == 0\n+ ), \"Odd number of `.. pypi-doc` comments in README\"\n+\n+ # Loop through pairs of comments and save text between pairs\n+ long_description = \"\"\n+ for i in range(0, len(indices), 2):\n+ start_index = indices[i] + 11\n+ end_index = indices[i + 1]\n+ long_description += readme[start_index:end_index]\n+ return long_description\n+\n+\n setup(\n name=\"pyjanitor\",\n version=\"0.18.0\",\n@@ -16,4 +49,6 @@\n packages=[\"janitor\"],\n install_requires=requirements(),\n python_requires=\">=3.6\",\n+ long_description=generate_long_description(),\n+ long_description_content_type=\"text/x-rst\",\n )\n", "issue": "[DOC] PyPI page has no project description\n# Brief Description of Fix\r\n\r\nThe PyPI page for `pyjanitor` has no project description. I'm not sure if it previously did, and was lost in a recent version update. I'm not sure how to fix it, but I assume it's something that @ericmjl would be able to change. \r\n\r\n# Relevant Context\r\n- [Link to PyPI page](https://pypi.org/project/pyjanitor/)\r\n\n", "before_files": [{"content": "from setuptools import setup\n\n\ndef requirements():\n with open(\"requirements.txt\", \"r+\") as f:\n return f.read()\n\n\nsetup(\n name=\"pyjanitor\",\n version=\"0.18.0\",\n description=\"Tools for cleaning pandas DataFrames\",\n author=\"Eric J. Ma\",\n author_email=\"[email protected]\",\n url=\"https://github.com/ericmjl/pyjanitor\",\n packages=[\"janitor\"],\n install_requires=requirements(),\n python_requires=\">=3.6\",\n)\n", "path": "setup.py"}], "after_files": [{"content": "import re\nfrom pathlib import Path\n\nfrom setuptools import setup\n\n\ndef requirements():\n with open(\"requirements.txt\", \"r+\") as f:\n return f.read()\n\n\ndef generate_long_description() -> str:\n \"\"\"\n Extra chunks from README for PyPI description.\n\n Target chunks must be contained within `.. pypi-doc` pair comments,\n so there must be an even number of comments in README.\n\n :returns: Extracted description from README\n\n \"\"\"\n # Read the contents of README file\n this_directory = Path(__file__).parent\n with open(this_directory / \"README.rst\", encoding=\"utf-8\") as f:\n readme = f.read()\n\n # Find pypi-doc comments in README\n indices = [m.start() for m in re.finditer(\".. pypi-doc\", readme)]\n assert (\n len(indices) % 2 == 0\n ), \"Odd number of `.. 
pypi-doc` comments in README\"\n\n # Loop through pairs of comments and save text between pairs\n long_description = \"\"\n for i in range(0, len(indices), 2):\n start_index = indices[i] + 11\n end_index = indices[i + 1]\n long_description += readme[start_index:end_index]\n return long_description\n\n\nsetup(\n name=\"pyjanitor\",\n version=\"0.18.0\",\n description=\"Tools for cleaning pandas DataFrames\",\n author=\"Eric J. Ma\",\n author_email=\"[email protected]\",\n url=\"https://github.com/ericmjl/pyjanitor\",\n packages=[\"janitor\"],\n install_requires=requirements(),\n python_requires=\">=3.6\",\n long_description=generate_long_description(),\n long_description_content_type=\"text/x-rst\",\n)\n", "path": "setup.py"}]} | 503 | 403 |
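As a standalone illustration of the `generate_long_description` helper introduced in the patch above, the sketch below runs the same marker-slicing logic against a small in-memory string instead of the real `README.rst`; the sample README text is made up.

```python
# Sketch of the ".. pypi-doc" slicing logic from the patch (sample README text
# is hypothetical). Text between each pair of markers is kept; everything
# outside the pairs is dropped.
import re

readme = (
    ".. pypi-doc\n"
    "pyjanitor: tools for cleaning pandas DataFrames.\n"
    ".. pypi-doc\n"
    "Internal notes that should not reach PyPI.\n"
)

indices = [m.start() for m in re.finditer(r"\.\. pypi-doc", readme)]
assert len(indices) % 2 == 0, "Odd number of `.. pypi-doc` comments"

long_description = ""
for i in range(0, len(indices), 2):
    start = indices[i] + 11  # skip past the 11-character ".. pypi-doc" marker
    end = indices[i + 1]
    long_description += readme[start:end]

print(long_description.strip())
# -> pyjanitor: tools for cleaning pandas DataFrames.
```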
gh_patches_debug_18823 | rasdani/github-patches | git_diff | yt-dlp__yt-dlp-5104 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
bongacams.com moved to bongacams.net
### DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
- [X] I understand that I will be **blocked** if I remove or skip any mandatory\* field
### Checklist
- [X] I'm reporting a broken site
- [X] I've verified that I'm running yt-dlp version **2022.09.01** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- [X] I've checked that all provided URLs are playable in a browser with the same IP and same login details
- [X] I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
- [X] I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
- [X] I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- [X] I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
### Provide a description that is worded well enough to be understood
bongacams.com has been moved to the new bongacams.net domain, please fix the code to work in the bongacams.net domain
bongacams.com moved to bongacams.net
### DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
- [X] I understand that I will be **blocked** if I remove or skip any mandatory\* field
### Checklist
- [X] I'm reporting a broken site
- [X] I've verified that I'm running yt-dlp version **2022.09.01** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- [X] I've checked that all provided URLs are playable in a browser with the same IP and same login details
- [X] I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
- [X] I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
- [X] I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- [X] I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
### Provide a description that is worded well enough to be understood
bongacams.com has been moved to the new bongacams.net domain, please fix the code to work in the bongacams.net domain
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `yt_dlp/extractor/bongacams.py`
Content:
```
1 from .common import InfoExtractor
2 from ..compat import compat_str
3 from ..utils import (
4 int_or_none,
5 try_get,
6 urlencode_postdata,
7 )
8
9
10 class BongaCamsIE(InfoExtractor):
11 _VALID_URL = r'https?://(?P<host>(?:[^/]+\.)?bongacams\d*\.com)/(?P<id>[^/?&#]+)'
12 _TESTS = [{
13 'url': 'https://de.bongacams.com/azumi-8',
14 'only_matching': True,
15 }, {
16 'url': 'https://cn.bongacams.com/azumi-8',
17 'only_matching': True,
18 }]
19
20 def _real_extract(self, url):
21 mobj = self._match_valid_url(url)
22 host = mobj.group('host')
23 channel_id = mobj.group('id')
24
25 amf = self._download_json(
26 'https://%s/tools/amf.php' % host, channel_id,
27 data=urlencode_postdata((
28 ('method', 'getRoomData'),
29 ('args[]', channel_id),
30 ('args[]', 'false'),
31 )), headers={'X-Requested-With': 'XMLHttpRequest'})
32
33 server_url = amf['localData']['videoServerUrl']
34
35 uploader_id = try_get(
36 amf, lambda x: x['performerData']['username'], compat_str) or channel_id
37 uploader = try_get(
38 amf, lambda x: x['performerData']['displayName'], compat_str)
39 like_count = int_or_none(try_get(
40 amf, lambda x: x['performerData']['loversCount']))
41
42 formats = self._extract_m3u8_formats(
43 '%s/hls/stream_%s/playlist.m3u8' % (server_url, uploader_id),
44 channel_id, 'mp4', m3u8_id='hls', live=True)
45 self._sort_formats(formats)
46
47 return {
48 'id': channel_id,
49 'title': uploader or uploader_id,
50 'uploader': uploader,
51 'uploader_id': uploader_id,
52 'like_count': like_count,
53 'age_limit': 18,
54 'is_live': True,
55 'formats': formats,
56 }
57
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/yt_dlp/extractor/bongacams.py b/yt_dlp/extractor/bongacams.py
--- a/yt_dlp/extractor/bongacams.py
+++ b/yt_dlp/extractor/bongacams.py
@@ -8,13 +8,28 @@
class BongaCamsIE(InfoExtractor):
- _VALID_URL = r'https?://(?P<host>(?:[^/]+\.)?bongacams\d*\.com)/(?P<id>[^/?&#]+)'
+ _VALID_URL = r'https?://(?P<host>(?:[^/]+\.)?bongacams\d*\.(?:com|net))/(?P<id>[^/?&#]+)'
_TESTS = [{
'url': 'https://de.bongacams.com/azumi-8',
'only_matching': True,
}, {
'url': 'https://cn.bongacams.com/azumi-8',
'only_matching': True,
+ }, {
+ 'url': 'https://de.bongacams.net/claireashton',
+ 'info_dict': {
+ 'id': 'claireashton',
+ 'ext': 'mp4',
+ 'title': r're:ClaireAshton \d{4}-\d{2}-\d{2} \d{2}:\d{2}',
+ 'age_limit': 18,
+ 'uploader_id': 'ClaireAshton',
+ 'uploader': 'ClaireAshton',
+ 'like_count': int,
+ 'is_live': True,
+ },
+ 'params': {
+ 'skip_download': True,
+ },
}]
def _real_extract(self, url):
| {"golden_diff": "diff --git a/yt_dlp/extractor/bongacams.py b/yt_dlp/extractor/bongacams.py\n--- a/yt_dlp/extractor/bongacams.py\n+++ b/yt_dlp/extractor/bongacams.py\n@@ -8,13 +8,28 @@\n \n \n class BongaCamsIE(InfoExtractor):\n- _VALID_URL = r'https?://(?P<host>(?:[^/]+\\.)?bongacams\\d*\\.com)/(?P<id>[^/?&#]+)'\n+ _VALID_URL = r'https?://(?P<host>(?:[^/]+\\.)?bongacams\\d*\\.(?:com|net))/(?P<id>[^/?&#]+)'\n _TESTS = [{\n 'url': 'https://de.bongacams.com/azumi-8',\n 'only_matching': True,\n }, {\n 'url': 'https://cn.bongacams.com/azumi-8',\n 'only_matching': True,\n+ }, {\n+ 'url': 'https://de.bongacams.net/claireashton',\n+ 'info_dict': {\n+ 'id': 'claireashton',\n+ 'ext': 'mp4',\n+ 'title': r're:ClaireAshton \\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}',\n+ 'age_limit': 18,\n+ 'uploader_id': 'ClaireAshton',\n+ 'uploader': 'ClaireAshton',\n+ 'like_count': int,\n+ 'is_live': True,\n+ },\n+ 'params': {\n+ 'skip_download': True,\n+ },\n }]\n \n def _real_extract(self, url):\n", "issue": "bongacams.com moved to bongacams.net\n### DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE\r\n\r\n- [X] I understand that I will be **blocked** if I remove or skip any mandatory\\* field\r\n\r\n### Checklist\r\n\r\n- [X] I'm reporting a broken site\r\n- [X] I've verified that I'm running yt-dlp version **2022.09.01** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)\r\n- [X] I've checked that all provided URLs are playable in a browser with the same IP and same login details\r\n- [X] I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)\r\n- [X] I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates\r\n- [X] I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)\r\n- [X] I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required\r\n\r\n### Provide a description that is worded well enough to be understood\r\n\r\nbongacams.com has been moved to the new bongacams.net domain, please fix the code to work in the bongacams.net domain\r\n\r\n\nbongacams.com moved to bongacams.net\n### DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE\r\n\r\n- [X] I understand that I will be **blocked** if I remove or skip any mandatory\\* field\r\n\r\n### Checklist\r\n\r\n- [X] I'm reporting a broken site\r\n- [X] I've verified that I'm running yt-dlp version **2022.09.01** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)\r\n- [X] I've checked that all provided URLs are playable in a browser with the same IP and same login details\r\n- [X] I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)\r\n- [X] I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. 
DO NOT post duplicates\r\n- [X] I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)\r\n- [X] I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required\r\n\r\n### Provide a description that is worded well enough to be understood\r\n\r\nbongacams.com has been moved to the new bongacams.net domain, please fix the code to work in the bongacams.net domain\r\n\r\n\n", "before_files": [{"content": "from .common import InfoExtractor\nfrom ..compat import compat_str\nfrom ..utils import (\n int_or_none,\n try_get,\n urlencode_postdata,\n)\n\n\nclass BongaCamsIE(InfoExtractor):\n _VALID_URL = r'https?://(?P<host>(?:[^/]+\\.)?bongacams\\d*\\.com)/(?P<id>[^/?&#]+)'\n _TESTS = [{\n 'url': 'https://de.bongacams.com/azumi-8',\n 'only_matching': True,\n }, {\n 'url': 'https://cn.bongacams.com/azumi-8',\n 'only_matching': True,\n }]\n\n def _real_extract(self, url):\n mobj = self._match_valid_url(url)\n host = mobj.group('host')\n channel_id = mobj.group('id')\n\n amf = self._download_json(\n 'https://%s/tools/amf.php' % host, channel_id,\n data=urlencode_postdata((\n ('method', 'getRoomData'),\n ('args[]', channel_id),\n ('args[]', 'false'),\n )), headers={'X-Requested-With': 'XMLHttpRequest'})\n\n server_url = amf['localData']['videoServerUrl']\n\n uploader_id = try_get(\n amf, lambda x: x['performerData']['username'], compat_str) or channel_id\n uploader = try_get(\n amf, lambda x: x['performerData']['displayName'], compat_str)\n like_count = int_or_none(try_get(\n amf, lambda x: x['performerData']['loversCount']))\n\n formats = self._extract_m3u8_formats(\n '%s/hls/stream_%s/playlist.m3u8' % (server_url, uploader_id),\n channel_id, 'mp4', m3u8_id='hls', live=True)\n self._sort_formats(formats)\n\n return {\n 'id': channel_id,\n 'title': uploader or uploader_id,\n 'uploader': uploader,\n 'uploader_id': uploader_id,\n 'like_count': like_count,\n 'age_limit': 18,\n 'is_live': True,\n 'formats': formats,\n }\n", "path": "yt_dlp/extractor/bongacams.py"}], "after_files": [{"content": "from .common import InfoExtractor\nfrom ..compat import compat_str\nfrom ..utils import (\n int_or_none,\n try_get,\n urlencode_postdata,\n)\n\n\nclass BongaCamsIE(InfoExtractor):\n _VALID_URL = r'https?://(?P<host>(?:[^/]+\\.)?bongacams\\d*\\.(?:com|net))/(?P<id>[^/?&#]+)'\n _TESTS = [{\n 'url': 'https://de.bongacams.com/azumi-8',\n 'only_matching': True,\n }, {\n 'url': 'https://cn.bongacams.com/azumi-8',\n 'only_matching': True,\n }, {\n 'url': 'https://de.bongacams.net/claireashton',\n 'info_dict': {\n 'id': 'claireashton',\n 'ext': 'mp4',\n 'title': r're:ClaireAshton \\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}',\n 'age_limit': 18,\n 'uploader_id': 'ClaireAshton',\n 'uploader': 'ClaireAshton',\n 'like_count': int,\n 'is_live': True,\n },\n 'params': {\n 'skip_download': True,\n },\n }]\n\n def _real_extract(self, url):\n mobj = self._match_valid_url(url)\n host = mobj.group('host')\n channel_id = mobj.group('id')\n\n amf = self._download_json(\n 'https://%s/tools/amf.php' % host, channel_id,\n data=urlencode_postdata((\n ('method', 'getRoomData'),\n ('args[]', channel_id),\n ('args[]', 'false'),\n )), headers={'X-Requested-With': 'XMLHttpRequest'})\n\n server_url = amf['localData']['videoServerUrl']\n\n uploader_id = try_get(\n amf, lambda x: x['performerData']['username'], compat_str) or channel_id\n uploader = 
try_get(\n amf, lambda x: x['performerData']['displayName'], compat_str)\n like_count = int_or_none(try_get(\n amf, lambda x: x['performerData']['loversCount']))\n\n formats = self._extract_m3u8_formats(\n '%s/hls/stream_%s/playlist.m3u8' % (server_url, uploader_id),\n channel_id, 'mp4', m3u8_id='hls', live=True)\n self._sort_formats(formats)\n\n return {\n 'id': channel_id,\n 'title': uploader or uploader_id,\n 'uploader': uploader,\n 'uploader_id': uploader_id,\n 'like_count': like_count,\n 'age_limit': 18,\n 'is_live': True,\n 'formats': formats,\n }\n", "path": "yt_dlp/extractor/bongacams.py"}]} | 1,667 | 395 |
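The core of the fix above is a one-token change to `_VALID_URL`; a quick standalone check of the widened pattern, using the test URLs from the diff, could look like this.

```python
# Sketch: the updated _VALID_URL from the patch, matched against one .com and
# one .net URL taken from the extractor's test cases.
import re

VALID_URL = r'https?://(?P<host>(?:[^/]+\.)?bongacams\d*\.(?:com|net))/(?P<id>[^/?&#]+)'

for url in ('https://de.bongacams.com/azumi-8', 'https://de.bongacams.net/claireashton'):
    m = re.match(VALID_URL, url)
    print(m.group('host'), m.group('id'))
# de.bongacams.com azumi-8
# de.bongacams.net claireashton
```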
gh_patches_debug_23907 | rasdani/github-patches | git_diff | Project-MONAI__MONAI-443 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
model checkpoint saver/loader dictionary
**Describe the bug**
when `save_dict` of `monai.handlers.CheckpointSaver` is a dictionary with a single item,
loading the file with `monai.handlers.CheckpointLoader` raises an error.
**To Reproduce**
To reproduce the issue:
```python
import logging
import sys
import torch
from ignite.engine import Engine
from monai.handlers import CheckpointLoader, CheckpointSaver
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
net = torch.nn.PReLU()
engine = Engine(lambda e, b: None)
CheckpointSaver(save_dir=".", save_dict={"net": net}, save_final=True).attach(engine)
engine.run([0] * 8, max_epochs=5)
CheckpointLoader(load_path="./net_final_iteration=40.pth", load_dict={"net": net}).attach(engine)
engine.run([0] * 8, max_epochs=1)
```
the output (showing loader failure) is:
```
INFO:ignite.engine.engine.Engine:Engine run starting with max_epochs=5.
INFO:ignite.engine.engine.Engine:Epoch[1] Complete. Time taken: 00:00:00
INFO:ignite.engine.engine.Engine:Epoch[2] Complete. Time taken: 00:00:00
INFO:ignite.engine.engine.Engine:Epoch[3] Complete. Time taken: 00:00:00
INFO:ignite.engine.engine.Engine:Epoch[4] Complete. Time taken: 00:00:00
INFO:ignite.engine.engine.Engine:Epoch[5] Complete. Time taken: 00:00:00
INFO:ignite.engine.engine.Engine:Train completed, saved final checkpoint: ./net_final_iteration=40.pth
INFO:ignite.engine.engine.Engine:Engine run complete. Time taken 00:00:00
INFO:ignite.engine.engine.Engine:Engine run starting with max_epochs=1.
ERROR:ignite.engine.engine.Engine:Engine run is terminating due to exception: Object labeled by 'net' from `to_load` is not found in the checkpoint.
INFO:ignite.engine.engine.Engine:Exception_raised, saved exception checkpoint: ./net_final_iteration=40.pth
```
**Expected behavior**
the loader should be able to read this dict structure, to be consistent with the case of `save_dict` where the dict has more than one item, example:
```python
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
net = torch.nn.PReLU()
engine = Engine(lambda e, b: None)
CheckpointSaver(save_dir=".", save_dict={"net": net, 'net1': net}, save_final=True).attach(engine)
engine.run([0] * 8, max_epochs=5)
CheckpointLoader(load_path="./checkpoint_final_iteration=40.pth", load_dict={"net": net}).attach(engine)
engine.run([0] * 8, max_epochs=1)
```
this example uses `{"net": net, 'net1': net}` and the output is expected:
```
INFO:ignite.engine.engine.Engine:Engine run starting with max_epochs=5.
INFO:ignite.engine.engine.Engine:Epoch[1] Complete. Time taken: 00:00:00
INFO:ignite.engine.engine.Engine:Epoch[2] Complete. Time taken: 00:00:00
INFO:ignite.engine.engine.Engine:Epoch[3] Complete. Time taken: 00:00:00
INFO:ignite.engine.engine.Engine:Epoch[4] Complete. Time taken: 00:00:00
INFO:ignite.engine.engine.Engine:Epoch[5] Complete. Time taken: 00:00:00
INFO:ignite.engine.engine.Engine:Train completed, saved final checkpoint: ./checkpoint_final_iteration=40.pth
INFO:ignite.engine.engine.Engine:Engine run complete. Time taken 00:00:00
INFO:ignite.engine.engine.Engine:Engine run starting with max_epochs=1.
INFO:ignite.engine.engine.Engine:Restored all variables from ./checkpoint_final_iteration=40.pth
INFO:ignite.engine.engine.Engine:Epoch[1] Complete. Time taken: 00:00:00
INFO:ignite.engine.engine.Engine:Train completed, saved final checkpoint: ./checkpoint_final_iteration=40.pth
INFO:ignite.engine.engine.Engine:Engine run complete. Time taken 00:00:00
```
**Environment (please complete the following information):**
- OS macos
- Python version 3.6
- MONAI version 1d73f65f3a1c2bf47fb769cf21b0329acfabd114
**Additional context**
see also https://github.com/pytorch/ignite/issues/770
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `monai/handlers/checkpoint_loader.py`
Content:
```
1 # Copyright 2020 MONAI Consortium
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at
5 # http://www.apache.org/licenses/LICENSE-2.0
6 # Unless required by applicable law or agreed to in writing, software
7 # distributed under the License is distributed on an "AS IS" BASIS,
8 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
9 # See the License for the specific language governing permissions and
10 # limitations under the License.
11
12 import logging
13 import torch
14 from ignite.engine import Events
15 from ignite.handlers import Checkpoint
16
17
18 class CheckpointLoader:
19 """
20 CheckpointLoader acts as an Ignite handler to load checkpoint data from file.
21 It can load variables for network, optimizer, lr_scheduler.
22 And also can restore training if load the state_dict of Ignite engine.
23
24 Args:
25 load_path (str): the file path of checkpoint, it should be a PyTorch pth file.
26 load_dict (dict): target objects that load checkpoint to. examples::
27
28 {'network': net, 'optimizer': optimizer, 'engine', engine}
29
30 name (str): identifier of logging.logger to use, if None, defaulting to ``engine.logger``.
31
32 """
33
34 def __init__(self, load_path, load_dict, name=None):
35 assert load_path is not None, "must provide clear path to load checkpoint."
36 self.load_path = load_path
37 assert load_dict is not None and len(load_dict) > 0, "must provide target objects to load."
38 self.logger = None if name is None else logging.getLogger(name)
39 for k, v in load_dict.items():
40 if hasattr(v, "module"):
41 load_dict[k] = v.module
42 self.load_dict = load_dict
43
44 def attach(self, engine):
45 if self.logger is None:
46 self.logger = engine.logger
47 return engine.add_event_handler(Events.STARTED, self)
48
49 def __call__(self, engine):
50 checkpoint = torch.load(self.load_path)
51 Checkpoint.load_objects(to_load=self.load_dict, checkpoint=checkpoint)
52 self.logger.info(f"Restored all variables from {self.load_path}")
53
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/monai/handlers/checkpoint_loader.py b/monai/handlers/checkpoint_loader.py
--- a/monai/handlers/checkpoint_loader.py
+++ b/monai/handlers/checkpoint_loader.py
@@ -19,7 +19,9 @@
"""
CheckpointLoader acts as an Ignite handler to load checkpoint data from file.
It can load variables for network, optimizer, lr_scheduler.
- And also can restore training if load the state_dict of Ignite engine.
+ If saving checkpoint after `torch.nn.DataParallel`, need to save `model.module` instead
+ as PyTorch recommended and then use this loader to load the model.
+ And also can restore training session if load the state_dict of Ignite engine.
Args:
load_path (str): the file path of checkpoint, it should be a PyTorch pth file.
@@ -48,5 +50,10 @@
def __call__(self, engine):
checkpoint = torch.load(self.load_path)
+ if len(self.load_dict) == 1:
+ key = list(self.load_dict.keys())[0]
+ if not (key in checkpoint):
+ checkpoint = {key: checkpoint}
+
Checkpoint.load_objects(to_load=self.load_dict, checkpoint=checkpoint)
self.logger.info(f"Restored all variables from {self.load_path}")
| {"golden_diff": "diff --git a/monai/handlers/checkpoint_loader.py b/monai/handlers/checkpoint_loader.py\n--- a/monai/handlers/checkpoint_loader.py\n+++ b/monai/handlers/checkpoint_loader.py\n@@ -19,7 +19,9 @@\n \"\"\"\n CheckpointLoader acts as an Ignite handler to load checkpoint data from file.\n It can load variables for network, optimizer, lr_scheduler.\n- And also can restore training if load the state_dict of Ignite engine.\n+ If saving checkpoint after `torch.nn.DataParallel`, need to save `model.module` instead\n+ as PyTorch recommended and then use this loader to load the model.\n+ And also can restore training session if load the state_dict of Ignite engine.\n \n Args:\n load_path (str): the file path of checkpoint, it should be a PyTorch pth file.\n@@ -48,5 +50,10 @@\n \n def __call__(self, engine):\n checkpoint = torch.load(self.load_path)\n+ if len(self.load_dict) == 1:\n+ key = list(self.load_dict.keys())[0]\n+ if not (key in checkpoint):\n+ checkpoint = {key: checkpoint}\n+\n Checkpoint.load_objects(to_load=self.load_dict, checkpoint=checkpoint)\n self.logger.info(f\"Restored all variables from {self.load_path}\")\n", "issue": "model checkpoint saver/loader dictionary\n**Describe the bug**\r\nwhen `save_dict` of `monai.handlers.CheckpointSaver` is a dictionary with a single item,\r\nloading the file with `monai.handlers.CheckpointLoader` raises an error. \r\n\r\n**To Reproduce**\r\nTo reproduce the issue:\r\n```python\r\nimport logging\r\nimport sys\r\n\r\nimport torch\r\nfrom ignite.engine import Engine\r\n\r\nfrom monai.handlers import CheckpointLoader, CheckpointSaver\r\n\r\nlogging.basicConfig(stream=sys.stdout, level=logging.INFO)\r\n\r\nnet = torch.nn.PReLU()\r\nengine = Engine(lambda e, b: None)\r\n\r\nCheckpointSaver(save_dir=\".\", save_dict={\"net\": net}, save_final=True).attach(engine)\r\nengine.run([0] * 8, max_epochs=5)\r\n\r\nCheckpointLoader(load_path=\"./net_final_iteration=40.pth\", load_dict={\"net\": net}).attach(engine)\r\nengine.run([0] * 8, max_epochs=1)\r\n```\r\nthe output (showing loader failure) is:\r\n```\r\nINFO:ignite.engine.engine.Engine:Engine run starting with max_epochs=5.\r\nINFO:ignite.engine.engine.Engine:Epoch[1] Complete. Time taken: 00:00:00\r\nINFO:ignite.engine.engine.Engine:Epoch[2] Complete. Time taken: 00:00:00\r\nINFO:ignite.engine.engine.Engine:Epoch[3] Complete. Time taken: 00:00:00\r\nINFO:ignite.engine.engine.Engine:Epoch[4] Complete. Time taken: 00:00:00\r\nINFO:ignite.engine.engine.Engine:Epoch[5] Complete. Time taken: 00:00:00\r\nINFO:ignite.engine.engine.Engine:Train completed, saved final checkpoint: ./net_final_iteration=40.pth\r\nINFO:ignite.engine.engine.Engine:Engine run complete. 
Time taken 00:00:00\r\nINFO:ignite.engine.engine.Engine:Engine run starting with max_epochs=1.\r\nERROR:ignite.engine.engine.Engine:Engine run is terminating due to exception: Object labeled by 'net' from `to_load` is not found in the checkpoint.\r\nINFO:ignite.engine.engine.Engine:Exception_raised, saved exception checkpoint: ./net_final_iteration=40.pth\r\n```\r\n\r\n**Expected behavior**\r\nthe loader should be able to read this dict structure, to be consistent with the case of `save_dict` where the dict has more than one item, example:\r\n```python\r\nlogging.basicConfig(stream=sys.stdout, level=logging.INFO)\r\n\r\nnet = torch.nn.PReLU()\r\nengine = Engine(lambda e, b: None)\r\n\r\nCheckpointSaver(save_dir=\".\", save_dict={\"net\": net, 'net1': net}, save_final=True).attach(engine)\r\nengine.run([0] * 8, max_epochs=5)\r\n\r\nCheckpointLoader(load_path=\"./checkpoint_final_iteration=40.pth\", load_dict={\"net\": net}).attach(engine)\r\nengine.run([0] * 8, max_epochs=1)\r\n```\r\nthis example uses `{\"net\": net, 'net1': net}` and the output is expected:\r\n```\r\nINFO:ignite.engine.engine.Engine:Engine run starting with max_epochs=5.\r\nINFO:ignite.engine.engine.Engine:Epoch[1] Complete. Time taken: 00:00:00\r\nINFO:ignite.engine.engine.Engine:Epoch[2] Complete. Time taken: 00:00:00\r\nINFO:ignite.engine.engine.Engine:Epoch[3] Complete. Time taken: 00:00:00\r\nINFO:ignite.engine.engine.Engine:Epoch[4] Complete. Time taken: 00:00:00\r\nINFO:ignite.engine.engine.Engine:Epoch[5] Complete. Time taken: 00:00:00\r\nINFO:ignite.engine.engine.Engine:Train completed, saved final checkpoint: ./checkpoint_final_iteration=40.pth\r\nINFO:ignite.engine.engine.Engine:Engine run complete. Time taken 00:00:00\r\nINFO:ignite.engine.engine.Engine:Engine run starting with max_epochs=1.\r\nINFO:ignite.engine.engine.Engine:Restored all variables from ./checkpoint_final_iteration=40.pth\r\nINFO:ignite.engine.engine.Engine:Epoch[1] Complete. Time taken: 00:00:00\r\nINFO:ignite.engine.engine.Engine:Train completed, saved final checkpoint: ./checkpoint_final_iteration=40.pth\r\nINFO:ignite.engine.engine.Engine:Engine run complete. 
Time taken 00:00:00\r\n```\r\n\r\n**Environment (please complete the following information):**\r\n - OS macos\r\n - Python version 3.6\r\n - MONAI version 1d73f65f3a1c2bf47fb769cf21b0329acfabd114\r\n\r\n**Additional context**\r\nsee also https://github.com/pytorch/ignite/issues/770\n", "before_files": [{"content": "# Copyright 2020 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\nimport torch\nfrom ignite.engine import Events\nfrom ignite.handlers import Checkpoint\n\n\nclass CheckpointLoader:\n \"\"\"\n CheckpointLoader acts as an Ignite handler to load checkpoint data from file.\n It can load variables for network, optimizer, lr_scheduler.\n And also can restore training if load the state_dict of Ignite engine.\n\n Args:\n load_path (str): the file path of checkpoint, it should be a PyTorch pth file.\n load_dict (dict): target objects that load checkpoint to. examples::\n\n {'network': net, 'optimizer': optimizer, 'engine', engine}\n\n name (str): identifier of logging.logger to use, if None, defaulting to ``engine.logger``.\n\n \"\"\"\n\n def __init__(self, load_path, load_dict, name=None):\n assert load_path is not None, \"must provide clear path to load checkpoint.\"\n self.load_path = load_path\n assert load_dict is not None and len(load_dict) > 0, \"must provide target objects to load.\"\n self.logger = None if name is None else logging.getLogger(name)\n for k, v in load_dict.items():\n if hasattr(v, \"module\"):\n load_dict[k] = v.module\n self.load_dict = load_dict\n\n def attach(self, engine):\n if self.logger is None:\n self.logger = engine.logger\n return engine.add_event_handler(Events.STARTED, self)\n\n def __call__(self, engine):\n checkpoint = torch.load(self.load_path)\n Checkpoint.load_objects(to_load=self.load_dict, checkpoint=checkpoint)\n self.logger.info(f\"Restored all variables from {self.load_path}\")\n", "path": "monai/handlers/checkpoint_loader.py"}], "after_files": [{"content": "# Copyright 2020 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\nimport torch\nfrom ignite.engine import Events\nfrom ignite.handlers import Checkpoint\n\n\nclass CheckpointLoader:\n \"\"\"\n CheckpointLoader acts as an Ignite handler to load checkpoint data from file.\n It can load variables for network, optimizer, lr_scheduler.\n If saving checkpoint after `torch.nn.DataParallel`, need to save `model.module` instead\n as PyTorch recommended and then use this loader to load the model.\n And also can restore training session if load 
the state_dict of Ignite engine.\n\n Args:\n load_path (str): the file path of checkpoint, it should be a PyTorch pth file.\n load_dict (dict): target objects that load checkpoint to. examples::\n\n {'network': net, 'optimizer': optimizer, 'engine', engine}\n\n name (str): identifier of logging.logger to use, if None, defaulting to ``engine.logger``.\n\n \"\"\"\n\n def __init__(self, load_path, load_dict, name=None):\n assert load_path is not None, \"must provide clear path to load checkpoint.\"\n self.load_path = load_path\n assert load_dict is not None and len(load_dict) > 0, \"must provide target objects to load.\"\n self.logger = None if name is None else logging.getLogger(name)\n for k, v in load_dict.items():\n if hasattr(v, \"module\"):\n load_dict[k] = v.module\n self.load_dict = load_dict\n\n def attach(self, engine):\n if self.logger is None:\n self.logger = engine.logger\n return engine.add_event_handler(Events.STARTED, self)\n\n def __call__(self, engine):\n checkpoint = torch.load(self.load_path)\n if len(self.load_dict) == 1:\n key = list(self.load_dict.keys())[0]\n if not (key in checkpoint):\n checkpoint = {key: checkpoint}\n\n Checkpoint.load_objects(to_load=self.load_dict, checkpoint=checkpoint)\n self.logger.info(f\"Restored all variables from {self.load_path}\")\n", "path": "monai/handlers/checkpoint_loader.py"}]} | 1,879 | 302 |
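To show the effect of the patched `__call__` in isolation, here is a simplified sketch that reproduces the single-item case with plain `torch`, skipping Ignite's `Checkpoint.load_objects` that the real handler uses; the file name and objects are illustrative.

```python
# Sketch of the fix: when load_dict has exactly one entry and the saved file is
# that object's raw state_dict (as Ignite writes for a single-item save_dict),
# wrap it under the expected key before loading.
import torch

saved_net = torch.nn.PReLU()
torch.save(saved_net.state_dict(), "net_final.pth")  # single-object checkpoint

load_dict = {"net": torch.nn.PReLU()}
checkpoint = torch.load("net_final.pth")

if len(load_dict) == 1:
    key = list(load_dict.keys())[0]
    if key not in checkpoint:           # raw state_dict, not {"net": ...}
        checkpoint = {key: checkpoint}  # the wrapping added by the patch

load_dict["net"].load_state_dict(checkpoint["net"])
print("restored:", load_dict["net"].weight)
```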