| problem_id | source | task_type | in_source_id | prompt | golden_diff | verification_info | num_tokens_prompt | num_tokens_diff |
|---|---|---|---|---|---|---|---|---|
| stringlengths 18-22 | stringclasses 1 value | stringclasses 1 value | stringlengths 13-58 | stringlengths 1.71k-9.01k | stringlengths 151-4.94k | stringlengths 465-11.3k | int64 557-2.05k | int64 48-1.02k |
gh_patches_debug_39470 | rasdani/github-patches | git_diff | microsoft__onnxscript-120 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Set up linters for the project
It helps if we set up linters early in the development process (fewer big PRs for fixes in the future). We may consider: mypy, pylint, black, isort, pydocstyle, flake8, bandit and xdoctest.
</issue>
<code>
[start of setup.py]
1 # -------------------------------------------------------------------------
2 # Copyright (c) Microsoft Corporation. All rights reserved.
3 # Licensed under the MIT License.
4 # --------------------------------------------------------------------------
5
6 # -*- coding: utf-8 -*-
7
8 from distutils.core import setup
9 from setuptools import find_packages
10 import os
11 this = os.path.dirname(__file__)
12
13 with open(os.path.join(this, "requirements.txt"), "r") as f:
14 requirements = [_ for _ in [_.strip("\r\n ")
15 for _ in f.readlines()] if _ is not None]
16
17 packages = find_packages()
18 assert packages
19
20 # read version from the package file.
21 version_str = '1.0.0'
22 with (open(os.path.join(this, 'onnxscript/__init__.py'), "r")) as f:
23 line = [_ for _ in [_.strip("\r\n ")
24 for _ in f.readlines()] if _.startswith("__version__")]
25 if len(line) > 0:
26 version_str = line[0].split('=')[1].strip('" ')
27
28 README = os.path.join(os.getcwd(), "README.md")
29 with open(README) as f:
30 long_description = f.read()
31 start_pos = long_description.find('## Contributing')
32 if start_pos >= 0:
33 long_description = long_description[:start_pos]
34
35 setup(
36 name='onnx-script',
37 version=version_str,
38 description="Authoring ONNX functions in Python",
39 long_description=long_description,
40 long_description_content_type='text/markdown',
41 license='Apache License v2.0',
42 author='Microsoft Corporation',
43 author_email='[email protected]',
44 url='https://github.com/onnx/onnx-script',
45 packages=packages,
46 include_package_data=True,
47 install_requires=requirements,
48 classifiers=[
49 'Development Status :: 4 - Beta',
50 'Environment :: Console',
51 'Intended Audience :: Developers',
52 'Operating System :: MacOS :: MacOS X',
53 'Operating System :: Microsoft :: Windows',
54 'Programming Language :: Python',
55 'Programming Language :: Python :: 3.7',
56 'Programming Language :: Python :: 3.8',
57 'Programming Language :: Python :: 3.9',
58 'License :: OSI Approved :: Apache Software License'],
59 )
60
[end of setup.py]
[start of onnxscript/__init__.py]
1 # -------------------------------------------------------------------------
2 # Copyright (c) Microsoft Corporation. All rights reserved.
3 # Licensed under the MIT License.
4 # --------------------------------------------------------------------------
5
6 __version__ = '0.1'
7
8 from .main import script, export_onnx_lib, OnnxFunction
9 from .backend.onnx_export import export2python as proto2python
10
11 __all__ = [script, export_onnx_lib, OnnxFunction, proto2python]
12
[end of onnxscript/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/onnxscript/__init__.py b/onnxscript/__init__.py
--- a/onnxscript/__init__.py
+++ b/onnxscript/__init__.py
@@ -3,9 +3,22 @@
# Licensed under the MIT License.
# --------------------------------------------------------------------------
-__version__ = '0.1'
-
+import sys
from .main import script, export_onnx_lib, OnnxFunction
from .backend.onnx_export import export2python as proto2python
-__all__ = [script, export_onnx_lib, OnnxFunction, proto2python]
+if sys.version_info[0:2] >= (3, 8):
+ import importlib.metadata as importlib_metadata
+else:
+ # TODO: Remove this when Python 3.7 is deprecated
+ import importlib_metadata
+
+try:
+ # TODO: should we algin the folder name with package name?
+ # It's onnxscript and onnx-script now. That way, we can use __package__ here.
+ __version__ = importlib_metadata.version("onnx-script")
+except importlib_metadata.PackageNotFoundError:
+ __version__ = None
+
+
+__all__ = ["script", "export_onnx_lib", "OnnxFunction", "proto2python"]
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -5,55 +5,26 @@
# -*- coding: utf-8 -*-
-from distutils.core import setup
-from setuptools import find_packages
import os
-this = os.path.dirname(__file__)
+import setuptools
-with open(os.path.join(this, "requirements.txt"), "r") as f:
- requirements = [_ for _ in [_.strip("\r\n ")
- for _ in f.readlines()] if _ is not None]
+this = os.path.dirname(__file__)
-packages = find_packages()
+packages = setuptools.find_packages()
assert packages
-# read version from the package file.
-version_str = '1.0.0'
-with (open(os.path.join(this, 'onnxscript/__init__.py'), "r")) as f:
- line = [_ for _ in [_.strip("\r\n ")
- for _ in f.readlines()] if _.startswith("__version__")]
- if len(line) > 0:
- version_str = line[0].split('=')[1].strip('" ')
-
README = os.path.join(os.getcwd(), "README.md")
-with open(README) as f:
+with open(README, encoding="utf-8") as f:
long_description = f.read()
start_pos = long_description.find('## Contributing')
if start_pos >= 0:
long_description = long_description[:start_pos]
-setup(
- name='onnx-script',
- version=version_str,
- description="Authoring ONNX functions in Python",
+setuptools.setup(
long_description=long_description,
long_description_content_type='text/markdown',
- license='Apache License v2.0',
- author='Microsoft Corporation',
- author_email='[email protected]',
url='https://github.com/onnx/onnx-script',
packages=packages,
include_package_data=True,
- install_requires=requirements,
- classifiers=[
- 'Development Status :: 4 - Beta',
- 'Environment :: Console',
- 'Intended Audience :: Developers',
- 'Operating System :: MacOS :: MacOS X',
- 'Operating System :: Microsoft :: Windows',
- 'Programming Language :: Python',
- 'Programming Language :: Python :: 3.7',
- 'Programming Language :: Python :: 3.8',
- 'Programming Language :: Python :: 3.9',
- 'License :: OSI Approved :: Apache Software License'],
+ package_data={"onnx-script": ["py.typed"], "onnx": ["py.typed"],},
)
| {"golden_diff": "diff --git a/onnxscript/__init__.py b/onnxscript/__init__.py\n--- a/onnxscript/__init__.py\n+++ b/onnxscript/__init__.py\n@@ -3,9 +3,22 @@\n # Licensed under the MIT License.\n # --------------------------------------------------------------------------\n \n-__version__ = '0.1'\n-\n+import sys\n from .main import script, export_onnx_lib, OnnxFunction\n from .backend.onnx_export import export2python as proto2python\n \n-__all__ = [script, export_onnx_lib, OnnxFunction, proto2python]\n+if sys.version_info[0:2] >= (3, 8):\n+ import importlib.metadata as importlib_metadata\n+else:\n+ # TODO: Remove this when Python 3.7 is deprecated\n+ import importlib_metadata\n+\n+try:\n+ # TODO: should we algin the folder name with package name?\n+ # It's onnxscript and onnx-script now. That way, we can use __package__ here.\n+ __version__ = importlib_metadata.version(\"onnx-script\")\n+except importlib_metadata.PackageNotFoundError:\n+ __version__ = None\n+\n+\n+__all__ = [\"script\", \"export_onnx_lib\", \"OnnxFunction\", \"proto2python\"]\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -5,55 +5,26 @@\n \n # -*- coding: utf-8 -*-\n \n-from distutils.core import setup\n-from setuptools import find_packages\n import os\n-this = os.path.dirname(__file__)\n+import setuptools\n \n-with open(os.path.join(this, \"requirements.txt\"), \"r\") as f:\n- requirements = [_ for _ in [_.strip(\"\\r\\n \")\n- for _ in f.readlines()] if _ is not None]\n+this = os.path.dirname(__file__)\n \n-packages = find_packages()\n+packages = setuptools.find_packages()\n assert packages\n \n-# read version from the package file.\n-version_str = '1.0.0'\n-with (open(os.path.join(this, 'onnxscript/__init__.py'), \"r\")) as f:\n- line = [_ for _ in [_.strip(\"\\r\\n \")\n- for _ in f.readlines()] if _.startswith(\"__version__\")]\n- if len(line) > 0:\n- version_str = line[0].split('=')[1].strip('\" ')\n-\n README = os.path.join(os.getcwd(), \"README.md\")\n-with open(README) as f:\n+with open(README, encoding=\"utf-8\") as f:\n long_description = f.read()\n start_pos = long_description.find('## Contributing')\n if start_pos >= 0:\n long_description = long_description[:start_pos]\n \n-setup(\n- name='onnx-script',\n- version=version_str,\n- description=\"Authoring ONNX functions in Python\",\n+setuptools.setup(\n long_description=long_description,\n long_description_content_type='text/markdown',\n- license='Apache License v2.0',\n- author='Microsoft Corporation',\n- author_email='[email protected]',\n url='https://github.com/onnx/onnx-script',\n packages=packages,\n include_package_data=True,\n- install_requires=requirements,\n- classifiers=[\n- 'Development Status :: 4 - Beta',\n- 'Environment :: Console',\n- 'Intended Audience :: Developers',\n- 'Operating System :: MacOS :: MacOS X',\n- 'Operating System :: Microsoft :: Windows',\n- 'Programming Language :: Python',\n- 'Programming Language :: Python :: 3.7',\n- 'Programming Language :: Python :: 3.8',\n- 'Programming Language :: Python :: 3.9',\n- 'License :: OSI Approved :: Apache Software License'],\n+ package_data={\"onnx-script\": [\"py.typed\"], \"onnx\": [\"py.typed\"],},\n )\n", "issue": "Set up linters for the project\nIt helps if we set up linters early in the development process (less big PRs for fixes in the future). 
We may consider: mypy, pylint, black, isort, pydocstyle, flake8, bandit and xdoctest.\n", "before_files": [{"content": "# -------------------------------------------------------------------------\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\n# --------------------------------------------------------------------------\n\n# -*- coding: utf-8 -*-\n\nfrom distutils.core import setup\nfrom setuptools import find_packages\nimport os\nthis = os.path.dirname(__file__)\n\nwith open(os.path.join(this, \"requirements.txt\"), \"r\") as f:\n requirements = [_ for _ in [_.strip(\"\\r\\n \")\n for _ in f.readlines()] if _ is not None]\n\npackages = find_packages()\nassert packages\n\n# read version from the package file.\nversion_str = '1.0.0'\nwith (open(os.path.join(this, 'onnxscript/__init__.py'), \"r\")) as f:\n line = [_ for _ in [_.strip(\"\\r\\n \")\n for _ in f.readlines()] if _.startswith(\"__version__\")]\n if len(line) > 0:\n version_str = line[0].split('=')[1].strip('\" ')\n\nREADME = os.path.join(os.getcwd(), \"README.md\")\nwith open(README) as f:\n long_description = f.read()\n start_pos = long_description.find('## Contributing')\n if start_pos >= 0:\n long_description = long_description[:start_pos]\n\nsetup(\n name='onnx-script',\n version=version_str,\n description=\"Authoring ONNX functions in Python\",\n long_description=long_description,\n long_description_content_type='text/markdown',\n license='Apache License v2.0',\n author='Microsoft Corporation',\n author_email='[email protected]',\n url='https://github.com/onnx/onnx-script',\n packages=packages,\n include_package_data=True,\n install_requires=requirements,\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: Microsoft :: Windows',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'License :: OSI Approved :: Apache Software License'],\n)\n", "path": "setup.py"}, {"content": "# -------------------------------------------------------------------------\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\n# --------------------------------------------------------------------------\n\n__version__ = '0.1'\n\nfrom .main import script, export_onnx_lib, OnnxFunction\nfrom .backend.onnx_export import export2python as proto2python\n\n__all__ = [script, export_onnx_lib, OnnxFunction, proto2python]\n", "path": "onnxscript/__init__.py"}]} | 1,288 | 845 |
gh_patches_debug_1350 | rasdani/github-patches | git_diff | fossasia__open-event-server-7659 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Preset roles deletion is allowed
**Describe the bug**
Currently, preset roles like "organizer", "coorganizer", etc. should not be deletable from the database, but right now it is possible to delete these entries.
**To Reproduce**
Steps to reproduce the behavior:
1. Hit the delete endpoint for role
2. Choose any of the ids pointing to any of the 7 preset roles
3. Observe that the deletion succeeds
**Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
**Stacktrace**
<!-- If applicable, add stacktrace to help explain your problem. -->
**Additional details (please complete the following information):**
- OS: [e.g. MacOS, Ubuntu, CentOS]
- Python Version [e.g. `3.5`, `3.6`]
- `HEAD` Commit hash [e.g. `4629c62`]
**Additional context**
<!-- Add any other context about the problem here. -->
</issue>
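A likely reason the guard in the code below never fires: flask-rest-jsonapi appears to dispatch lifecycle hooks through the resource's data layer, so a `before_delete_object` defined on the class only takes effect once it is listed under `data_layer['methods']`. Below is a minimal sketch of that registration, using the same imports as the file that follows; the class name and the shortened role list are illustrative only, and this is not the actual patch.

```python
# Sketch, not the actual fix: the hook must be registered on the data layer
# for the framework to call it on DELETE requests.
from flask_rest_jsonapi import ResourceDetail

from app.api.helpers.errors import UnprocessableEntityError
from app.api.schema.roles import RoleSchema
from app.models import db
from app.models.role import Role

PRESET_ROLES = {'owner', 'organizer', 'coorganizer'}  # illustrative subset


class GuardedRoleDetail(ResourceDetail):  # hypothetical name
    def before_delete_object(self, obj, kwargs):
        if obj.name in PRESET_ROLES:
            raise UnprocessableEntityError(
                {'data': 'name'}, "The resource with given name cannot be deleted"
            )

    schema = RoleSchema
    data_layer = {
        'session': db.session,
        'model': Role,
        # Without this entry the method above is never invoked by the framework.
        'methods': {'before_delete_object': before_delete_object},
    }
```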
<code>
[start of app/api/roles.py]
1 from flask_rest_jsonapi import ResourceDetail, ResourceList
2
3 from app.api.bootstrap import api
4 from app.api.helpers.db import safe_query_kwargs
5 from app.api.helpers.errors import UnprocessableEntityError
6 from app.api.schema.roles import RoleSchema
7 from app.models import db
8 from app.models.role import Role
9 from app.models.role_invite import RoleInvite
10 from app.models.users_events_role import UsersEventsRoles
11
12
13 class RoleList(ResourceList):
14 """
15 List and create role
16 """
17
18 decorators = (api.has_permission('is_admin', methods="POST"),)
19 schema = RoleSchema
20 data_layer = {'session': db.session, 'model': Role}
21
22
23 class RoleDetail(ResourceDetail):
24 """
25 Role detail by id
26 """
27
28 def before_get_object(self, view_kwargs):
29 """
30 before get method to get the resource id for fetching details
31 :param view_kwargs:
32 :return:
33 """
34 if view_kwargs.get('role_invite_id') is not None:
35 role_invite = safe_query_kwargs(RoleInvite, view_kwargs, 'role_invite_id')
36 if role_invite.role_id is not None:
37 view_kwargs['id'] = role_invite.role_id
38 else:
39 view_kwargs['id'] = None
40
41 if view_kwargs.get('users_events_roles_id') is not None:
42 users_events_role = safe_query_kwargs(
43 UsersEventsRoles,
44 view_kwargs,
45 'users_events_roles_id',
46 )
47
48 if users_events_role.role_id is not None:
49 view_kwargs['id'] = users_events_role.role_id
50 else:
51 view_kwargs['id'] = None
52
53 def before_update_object(self, role, data, view_kwargs):
54 """
55 Method to edit object
56 :param role:
57 :param data:
58 :param view_kwargs:
59 :return:
60 """
61 if data.get('name'):
62 if data['name'] in [
63 'owner',
64 'organizer',
65 'coorganizer',
66 'registrar',
67 'moderator',
68 'attendee',
69 'track_organizer',
70 ]:
71 raise UnprocessableEntityError(
72 {'data': 'name'}, "The given name cannot be updated"
73 )
74
75 def before_delete_object(self, obj, kwargs):
76 """
77 method to check proper resource name before deleting
78 :param obj:
79 :param kwargs:
80 :return:
81 """
82 if obj.name in [
83 'owner',
84 'organizer',
85 'coorganizer',
86 'registrar',
87 'moderator',
88 'attendee',
89 'track_organizer',
90 ]:
91 raise UnprocessableEntityError(
92 {'data': 'name'}, "The resource with given name cannot be deleted"
93 )
94
95 decorators = (api.has_permission('is_admin', methods="PATCH,DELETE"),)
96 schema = RoleSchema
97 data_layer = {
98 'session': db.session,
99 'model': Role,
100 'methods': {'before_get_object': before_get_object},
101 }
102
[end of app/api/roles.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/app/api/roles.py b/app/api/roles.py
--- a/app/api/roles.py
+++ b/app/api/roles.py
@@ -97,5 +97,8 @@
data_layer = {
'session': db.session,
'model': Role,
- 'methods': {'before_get_object': before_get_object},
+ 'methods': {
+ 'before_get_object': before_get_object,
+ 'before_delete_object': before_delete_object,
+ },
}
| {"golden_diff": "diff --git a/app/api/roles.py b/app/api/roles.py\n--- a/app/api/roles.py\n+++ b/app/api/roles.py\n@@ -97,5 +97,8 @@\n data_layer = {\n 'session': db.session,\n 'model': Role,\n- 'methods': {'before_get_object': before_get_object},\n+ 'methods': {\n+ 'before_get_object': before_get_object,\n+ 'before_delete_object': before_delete_object,\n+ },\n }\n", "issue": "Preset roles deletion is allowed\n**Describe the bug**\r\nCurrently the preset roles like \"organizer, coorganizer etc\" should not be deleted from the db. But right now it is possible to delete these entries.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Hit the delete endpoint for role \r\n2. Choose any of the ids pointing to any of the 7 preset roles\r\n3. You can find deletion to be successful\r\n\r\n**Expected behavior**\r\n<!-- A clear and concise description of what you expected to happen. -->\r\n\r\n**Stacktrace**\r\n<!-- If applicable, add stacktrace to help explain your problem. -->\r\n\r\n**Additional details (please complete the following information):**\r\n - OS: [e.g. MacOS, Ubuntu, CentOS]\r\n - Python Version [e.g. `3.5`, `3.6`]\r\n - `HEAD` Commit hash [e.g. `4629c62`]\r\n\r\n**Additional context**\r\n<!-- Add any other context about the problem here. -->\r\n\n", "before_files": [{"content": "from flask_rest_jsonapi import ResourceDetail, ResourceList\n\nfrom app.api.bootstrap import api\nfrom app.api.helpers.db import safe_query_kwargs\nfrom app.api.helpers.errors import UnprocessableEntityError\nfrom app.api.schema.roles import RoleSchema\nfrom app.models import db\nfrom app.models.role import Role\nfrom app.models.role_invite import RoleInvite\nfrom app.models.users_events_role import UsersEventsRoles\n\n\nclass RoleList(ResourceList):\n \"\"\"\n List and create role\n \"\"\"\n\n decorators = (api.has_permission('is_admin', methods=\"POST\"),)\n schema = RoleSchema\n data_layer = {'session': db.session, 'model': Role}\n\n\nclass RoleDetail(ResourceDetail):\n \"\"\"\n Role detail by id\n \"\"\"\n\n def before_get_object(self, view_kwargs):\n \"\"\"\n before get method to get the resource id for fetching details\n :param view_kwargs:\n :return:\n \"\"\"\n if view_kwargs.get('role_invite_id') is not None:\n role_invite = safe_query_kwargs(RoleInvite, view_kwargs, 'role_invite_id')\n if role_invite.role_id is not None:\n view_kwargs['id'] = role_invite.role_id\n else:\n view_kwargs['id'] = None\n\n if view_kwargs.get('users_events_roles_id') is not None:\n users_events_role = safe_query_kwargs(\n UsersEventsRoles,\n view_kwargs,\n 'users_events_roles_id',\n )\n\n if users_events_role.role_id is not None:\n view_kwargs['id'] = users_events_role.role_id\n else:\n view_kwargs['id'] = None\n\n def before_update_object(self, role, data, view_kwargs):\n \"\"\"\n Method to edit object\n :param role:\n :param data:\n :param view_kwargs:\n :return:\n \"\"\"\n if data.get('name'):\n if data['name'] in [\n 'owner',\n 'organizer',\n 'coorganizer',\n 'registrar',\n 'moderator',\n 'attendee',\n 'track_organizer',\n ]:\n raise UnprocessableEntityError(\n {'data': 'name'}, \"The given name cannot be updated\"\n )\n\n def before_delete_object(self, obj, kwargs):\n \"\"\"\n method to check proper resource name before deleting\n :param obj:\n :param kwargs:\n :return:\n \"\"\"\n if obj.name in [\n 'owner',\n 'organizer',\n 'coorganizer',\n 'registrar',\n 'moderator',\n 'attendee',\n 'track_organizer',\n ]:\n raise UnprocessableEntityError(\n {'data': 'name'}, \"The resource with given name cannot 
be deleted\"\n )\n\n decorators = (api.has_permission('is_admin', methods=\"PATCH,DELETE\"),)\n schema = RoleSchema\n data_layer = {\n 'session': db.session,\n 'model': Role,\n 'methods': {'before_get_object': before_get_object},\n }\n", "path": "app/api/roles.py"}]} | 1,576 | 110 |
gh_patches_debug_7221 | rasdani/github-patches | git_diff | StackStorm__st2-3038 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Cache filters API response for web UI
As the number of actions, rules, etc has grown in our StackStorm cluster, the UI is becoming slower. In particular, the filters view (which we rely on heavily with this many executions) is very slow to become visible and usable. It might help to cache this `/api/v1/executions/views/filters` API response and asynchronously reload this value outside of the request thread, since this is slowly-changing data.
Our typical workflow is to
1. load the main execution page
2. wait for the filters to appear
3. apply some set of filters
4. wait for the next page to load
Here's a waterfall showing a 20s load time for the filters response. This is pretty common for us now.

For reference, we have 572 rules, 1200 actions, 143 triggers, 19 trigger types, and 600k+ executions.
</issue>
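The suggestion above (cache the slowly-changing filter data and refresh it outside the request thread) can be illustrated with a small, framework-free sketch. Everything here is hypothetical: `compute_filters()` stands in for the expensive aggregation and is not a real StackStorm API, and the merged patch shown further down takes a different route (adding MongoDB indexes on the filtered fields) rather than caching.

```python
# Hedged sketch of "serve cached value, refresh asynchronously when stale".
import threading
import time

_TTL_SECONDS = 60.0
_cache = {"value": None, "updated": 0.0}
_lock = threading.Lock()


def compute_filters():
    # Placeholder for the slow aggregation over rules/actions/triggers/executions.
    return {"action": [], "rule": [], "status": []}


def _refresh():
    fresh = compute_filters()
    with _lock:
        _cache.update(value=fresh, updated=time.monotonic())


def get_filters_cached():
    with _lock:
        value = _cache["value"]
        stale = (time.monotonic() - _cache["updated"]) > _TTL_SECONDS
    if value is None:
        # First request: compute synchronously so there is something to return.
        _refresh()
        with _lock:
            return _cache["value"]
    if stale:
        # Serve the stale copy immediately; refresh off the request thread.
        # A production version would also guard against spawning multiple refreshes.
        threading.Thread(target=_refresh, daemon=True).start()
    return value
```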
<code>
[start of st2common/st2common/models/db/execution.py]
1 # Licensed to the StackStorm, Inc ('StackStorm') under one or more
2 # contributor license agreements. See the NOTICE file distributed with
3 # this work for additional information regarding copyright ownership.
4 # The ASF licenses this file to You under the Apache License, Version 2.0
5 # (the "License"); you may not use this file except in compliance with
6 # the License. You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import copy
17
18 import mongoengine as me
19
20 from st2common import log as logging
21 from st2common.models.db import stormbase
22 from st2common.fields import ComplexDateTimeField
23 from st2common.util import date as date_utils
24 from st2common.util.secrets import get_secret_parameters
25 from st2common.util.secrets import mask_secret_parameters
26 from st2common.constants.types import ResourceType
27
28 __all__ = [
29 'ActionExecutionDB'
30 ]
31
32
33 LOG = logging.getLogger(__name__)
34
35
36 class ActionExecutionDB(stormbase.StormFoundationDB):
37 RESOURCE_TYPE = ResourceType.EXECUTION
38 UID_FIELDS = ['id']
39
40 trigger = stormbase.EscapedDictField()
41 trigger_type = stormbase.EscapedDictField()
42 trigger_instance = stormbase.EscapedDictField()
43 rule = stormbase.EscapedDictField()
44 action = stormbase.EscapedDictField(required=True)
45 runner = stormbase.EscapedDictField(required=True)
46 # Only the diff between the liveaction type and what is replicated
47 # in the ActionExecutionDB object.
48 liveaction = stormbase.EscapedDictField(required=True)
49 status = me.StringField(
50 required=True,
51 help_text='The current status of the liveaction.')
52 start_timestamp = ComplexDateTimeField(
53 default=date_utils.get_datetime_utc_now,
54 help_text='The timestamp when the liveaction was created.')
55 end_timestamp = ComplexDateTimeField(
56 help_text='The timestamp when the liveaction has finished.')
57 parameters = stormbase.EscapedDynamicField(
58 default={},
59 help_text='The key-value pairs passed as to the action runner & action.')
60 result = stormbase.EscapedDynamicField(
61 default={},
62 help_text='Action defined result.')
63 context = me.DictField(
64 default={},
65 help_text='Contextual information on the action execution.')
66 parent = me.StringField()
67 children = me.ListField(field=me.StringField())
68 log = me.ListField(field=me.DictField())
69 # Do not use URLField for web_url. If host doesn't have FQDN set, URLField validation blows.
70 web_url = me.StringField(required=False)
71
72 meta = {
73 'indexes': [
74 {'fields': ['rule.ref']},
75 {'fields': ['action.ref']},
76 {'fields': ['liveaction.id']},
77 {'fields': ['start_timestamp']},
78 {'fields': ['end_timestamp']},
79 {'fields': ['status']},
80 {'fields': ['parent']},
81 {'fields': ['-start_timestamp', 'action.ref', 'status']}
82 ]
83 }
84
85 def get_uid(self):
86 # TODO Construct od from non id field:
87 uid = [self.RESOURCE_TYPE, str(self.id)]
88 return ':'.join(uid)
89
90 def mask_secrets(self, value):
91 result = copy.deepcopy(value)
92
93 execution_parameters = value['parameters']
94 parameters = {}
95 # pylint: disable=no-member
96 parameters.update(value.get('action', {}).get('parameters', {}))
97 parameters.update(value.get('runner', {}).get('runner_parameters', {}))
98
99 secret_parameters = get_secret_parameters(parameters=parameters)
100 result['parameters'] = mask_secret_parameters(parameters=execution_parameters,
101 secret_parameters=secret_parameters)
102 return result
103
104 def get_masked_parameters(self):
105 """
106 Retrieve parameters with the secrets masked.
107
108 :rtype: ``dict``
109 """
110 serializable_dict = self.to_serializable_dict(mask_secrets=True)
111 return serializable_dict['parameters']
112
113
114 MODELS = [ActionExecutionDB]
115
[end of st2common/st2common/models/db/execution.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/st2common/st2common/models/db/execution.py b/st2common/st2common/models/db/execution.py
--- a/st2common/st2common/models/db/execution.py
+++ b/st2common/st2common/models/db/execution.py
@@ -78,6 +78,11 @@
{'fields': ['end_timestamp']},
{'fields': ['status']},
{'fields': ['parent']},
+ {'fields': ['rule.name']},
+ {'fields': ['runner.name']},
+ {'fields': ['trigger.name']},
+ {'fields': ['trigger_type.name']},
+ {'fields': ['context.user']},
{'fields': ['-start_timestamp', 'action.ref', 'status']}
]
}
| {"golden_diff": "diff --git a/st2common/st2common/models/db/execution.py b/st2common/st2common/models/db/execution.py\n--- a/st2common/st2common/models/db/execution.py\n+++ b/st2common/st2common/models/db/execution.py\n@@ -78,6 +78,11 @@\n {'fields': ['end_timestamp']},\n {'fields': ['status']},\n {'fields': ['parent']},\n+ {'fields': ['rule.name']},\n+ {'fields': ['runner.name']},\n+ {'fields': ['trigger.name']},\n+ {'fields': ['trigger_type.name']},\n+ {'fields': ['context.user']},\n {'fields': ['-start_timestamp', 'action.ref', 'status']}\n ]\n }\n", "issue": "Cache filters API response for web UI\nAs the number of actions, rules, etc has grown in our StackStorm cluster, the UI is becoming slower. In particular, the filters view (which we rely on heavily with this many executions) is very slow to become visible and usable. It might help to cache this `/api/v1/executions/views/filters` API response and asynchronously reload this value outside of the request thread, since this is slowly-changing data.\n\nOur typical workflow is to\n1. load the main execution page\n2. wait for the filters to appear\n3. apply some set of filters\n4. wait for the next page to load\n\nHere's a waterfall showing a 20s load time for the filters response. This is pretty common for us now.\n\n\nFor reference, we have 572 rules, 1200 actions, 143 triggers, 19 trigger types, and 600k+ executions.\n\n", "before_files": [{"content": "# Licensed to the StackStorm, Inc ('StackStorm') under one or more\n# contributor license agreements. See the NOTICE file distributed with\n# this work for additional information regarding copyright ownership.\n# The ASF licenses this file to You under the Apache License, Version 2.0\n# (the \"License\"); you may not use this file except in compliance with\n# the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport copy\n\nimport mongoengine as me\n\nfrom st2common import log as logging\nfrom st2common.models.db import stormbase\nfrom st2common.fields import ComplexDateTimeField\nfrom st2common.util import date as date_utils\nfrom st2common.util.secrets import get_secret_parameters\nfrom st2common.util.secrets import mask_secret_parameters\nfrom st2common.constants.types import ResourceType\n\n__all__ = [\n 'ActionExecutionDB'\n]\n\n\nLOG = logging.getLogger(__name__)\n\n\nclass ActionExecutionDB(stormbase.StormFoundationDB):\n RESOURCE_TYPE = ResourceType.EXECUTION\n UID_FIELDS = ['id']\n\n trigger = stormbase.EscapedDictField()\n trigger_type = stormbase.EscapedDictField()\n trigger_instance = stormbase.EscapedDictField()\n rule = stormbase.EscapedDictField()\n action = stormbase.EscapedDictField(required=True)\n runner = stormbase.EscapedDictField(required=True)\n # Only the diff between the liveaction type and what is replicated\n # in the ActionExecutionDB object.\n liveaction = stormbase.EscapedDictField(required=True)\n status = me.StringField(\n required=True,\n help_text='The current status of the liveaction.')\n start_timestamp = ComplexDateTimeField(\n default=date_utils.get_datetime_utc_now,\n help_text='The timestamp when the liveaction was created.')\n end_timestamp = ComplexDateTimeField(\n help_text='The timestamp when the liveaction has finished.')\n parameters = stormbase.EscapedDynamicField(\n default={},\n help_text='The key-value pairs passed as to the action runner & action.')\n result = stormbase.EscapedDynamicField(\n default={},\n help_text='Action defined result.')\n context = me.DictField(\n default={},\n help_text='Contextual information on the action execution.')\n parent = me.StringField()\n children = me.ListField(field=me.StringField())\n log = me.ListField(field=me.DictField())\n # Do not use URLField for web_url. 
If host doesn't have FQDN set, URLField validation blows.\n web_url = me.StringField(required=False)\n\n meta = {\n 'indexes': [\n {'fields': ['rule.ref']},\n {'fields': ['action.ref']},\n {'fields': ['liveaction.id']},\n {'fields': ['start_timestamp']},\n {'fields': ['end_timestamp']},\n {'fields': ['status']},\n {'fields': ['parent']},\n {'fields': ['-start_timestamp', 'action.ref', 'status']}\n ]\n }\n\n def get_uid(self):\n # TODO Construct od from non id field:\n uid = [self.RESOURCE_TYPE, str(self.id)]\n return ':'.join(uid)\n\n def mask_secrets(self, value):\n result = copy.deepcopy(value)\n\n execution_parameters = value['parameters']\n parameters = {}\n # pylint: disable=no-member\n parameters.update(value.get('action', {}).get('parameters', {}))\n parameters.update(value.get('runner', {}).get('runner_parameters', {}))\n\n secret_parameters = get_secret_parameters(parameters=parameters)\n result['parameters'] = mask_secret_parameters(parameters=execution_parameters,\n secret_parameters=secret_parameters)\n return result\n\n def get_masked_parameters(self):\n \"\"\"\n Retrieve parameters with the secrets masked.\n\n :rtype: ``dict``\n \"\"\"\n serializable_dict = self.to_serializable_dict(mask_secrets=True)\n return serializable_dict['parameters']\n\n\nMODELS = [ActionExecutionDB]\n", "path": "st2common/st2common/models/db/execution.py"}]} | 1,990 | 157 |
gh_patches_debug_946 | rasdani/github-patches | git_diff | xonsh__xonsh-2332 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
xoreutils: echo fails with KeyError: 'help'
Any `echo` invocation fails:
```shell
$ $XONSH_SHOW_TRACEBACK = True
$ echo
xonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 13061, in run
r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 12896, in proxy_four
return f(args, stdin, stdout, stderr)
File "/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py", line 9, in echo
if opts['help']:
KeyError: 'help'
$ echo foo
xonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 13061, in run
r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 12896, in proxy_four
return f(args, stdin, stdout, stderr)
File "/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py", line 9, in echo
if opts['help']:
KeyError: 'help'
$ echo "foo"
xonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 13061, in run
r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 12896, in proxy_four
return f(args, stdin, stdout, stderr)
File "/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py", line 9, in echo
if opts['help']:
KeyError: 'help'
```
Obviously, the problem is that `help` is looked up but missing: http://xon.sh/_modules/xonsh/xoreutils/echo.html#echo
</issue>
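The tracebacks point at a plain dictionary-key mismatch: `_echo_parse_args` only sets `'help'` when `-h`/`--help` is passed, while `echo` always reads `opts['help']`. A tiny self-contained illustration of the defaulting approach is shown below (the other flags are omitted; this mirrors the direction of the accompanying patch but is not the patch itself).

```python
# Sketch: pre-populate the flag so the lookup can never raise KeyError.
def _echo_parse_args(args):
    out = {'escapes': False, 'end': '\n', 'help': False}  # 'help' is always present now
    if '-h' in args or '--help' in args:
        out['help'] = True
    return out


assert _echo_parse_args([])['help'] is False
assert _echo_parse_args(['--help'])['help'] is True
```

An equivalent one-line alternative would be reading the flag defensively at the call site with `opts.get('help', False)`.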
<code>
[start of xonsh/xoreutils/echo.py]
1 """Implements a simple echo command for xonsh."""
2
3
4 def echo(args, stdin, stdout, stderr):
5 """A simple echo command."""
6 opts = _echo_parse_args(args)
7 if opts is None:
8 return
9 if opts['help']:
10 print(ECHO_HELP, file=stdout)
11 return 0
12 ender = opts['end']
13 args = map(str, args)
14 if opts['escapes']:
15 args = map(lambda x: x.encode().decode('unicode_escape'), args)
16 print(*args, end=ender, file=stdout)
17
18
19 def _echo_parse_args(args):
20 out = {'escapes': False, 'end': '\n'}
21 if '-e' in args:
22 args.remove('-e')
23 out['escapes'] = True
24 if '-E' in args:
25 args.remove('-E')
26 out['escapes'] = False
27 if '-n' in args:
28 args.remove('-n')
29 out['end'] = ''
30 if '-h' in args or '--help' in args:
31 out['help'] = True
32 return out
33
34
35 ECHO_HELP = """Usage: echo [OPTIONS]... [STRING]...
36 Echo the STRING(s) to standard output.
37
38 -n do not include the trailing newline
39 -e enable interpretation of backslash escapes
40 -E disable interpretation of backslash escapes (default)
41 -h --help display this message and exit
42
43 This version of echo was written in Python for the xonsh project: http://xon.sh
44 Based on echo from GNU coreutils: http://www.gnu.org/software/coreutils/"""
45
[end of xonsh/xoreutils/echo.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/xonsh/xoreutils/echo.py b/xonsh/xoreutils/echo.py
--- a/xonsh/xoreutils/echo.py
+++ b/xonsh/xoreutils/echo.py
@@ -17,7 +17,7 @@
def _echo_parse_args(args):
- out = {'escapes': False, 'end': '\n'}
+ out = {'escapes': False, 'end': '\n', 'help': False}
if '-e' in args:
args.remove('-e')
out['escapes'] = True
| {"golden_diff": "diff --git a/xonsh/xoreutils/echo.py b/xonsh/xoreutils/echo.py\n--- a/xonsh/xoreutils/echo.py\n+++ b/xonsh/xoreutils/echo.py\n@@ -17,7 +17,7 @@\n \n \n def _echo_parse_args(args):\n- out = {'escapes': False, 'end': '\\n'}\n+ out = {'escapes': False, 'end': '\\n', 'help': False}\n if '-e' in args:\n args.remove('-e')\n out['escapes'] = True\n", "issue": "xoreutils: echo fails with KeyError: 'help'\nAny `echo` invocation fails:\r\n\r\n```shell\r\n$ $XONSH_SHOW_TRACEBACK = True\r\n$ echo\r\nxonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 13061, in run\r\n r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 12896, in proxy_four\r\n return f(args, stdin, stdout, stderr)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py\", line 9, in echo\r\n if opts['help']:\r\nKeyError: 'help'\r\n$ echo foo\r\nxonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 13061, in run\r\n r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 12896, in proxy_four\r\n return f(args, stdin, stdout, stderr)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py\", line 9, in echo\r\n if opts['help']:\r\nKeyError: 'help'\r\n$ echo \"foo\"\r\nxonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 13061, in run\r\n r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 12896, in proxy_four\r\n return f(args, stdin, stdout, stderr)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py\", line 9, in echo\r\n if opts['help']:\r\nKeyError: 'help'\r\n```\r\n\r\nObviously, the problem is that `help` is looked up but missing: http://xon.sh/_modules/xonsh/xoreutils/echo.html#echo\n", "before_files": [{"content": "\"\"\"Implements a simple echo command for xonsh.\"\"\"\n\n\ndef echo(args, stdin, stdout, stderr):\n \"\"\"A simple echo command.\"\"\"\n opts = _echo_parse_args(args)\n if opts is None:\n return\n if opts['help']:\n print(ECHO_HELP, file=stdout)\n return 0\n ender = opts['end']\n args = map(str, args)\n if opts['escapes']:\n args = map(lambda x: x.encode().decode('unicode_escape'), args)\n print(*args, end=ender, file=stdout)\n\n\ndef _echo_parse_args(args):\n out = {'escapes': False, 'end': '\\n'}\n if '-e' in args:\n args.remove('-e')\n out['escapes'] = True\n if '-E' in args:\n args.remove('-E')\n out['escapes'] = False\n if '-n' in args:\n args.remove('-n')\n out['end'] = ''\n if '-h' in args or '--help' in args:\n out['help'] = True\n return out\n\n\nECHO_HELP = \"\"\"Usage: echo [OPTIONS]... 
[STRING]...\nEcho the STRING(s) to standard output.\n\n -n do not include the trailing newline\n -e enable interpretation of backslash escapes\n -E disable interpretation of backslash escapes (default)\n -h --help display this message and exit\n\nThis version of echo was written in Python for the xonsh project: http://xon.sh\nBased on echo from GNU coreutils: http://www.gnu.org/software/coreutils/\"\"\"\n", "path": "xonsh/xoreutils/echo.py"}]} | 1,575 | 129 |
gh_patches_debug_56453 | rasdani/github-patches | git_diff | netket__netket-506 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ExactSampler is not resetting at construction time
Exact Sampler should call Reset at construction time
</issue>
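Some context on why construction-time reset matters in the code below: `reset()` is the only place `self._prob` gets assigned, yet `__next__` and `generate_samples` read it immediately, so a newly constructed sampler fails on first use. The following is a dependency-free analogue of that failure mode; the class and values are made up for illustration and are not NetKet code.

```python
# Stand-alone illustration: state created only in reset() breaks the first
# use of the object unless __init__ calls reset() itself.
class TinySampler:
    def __init__(self, call_reset=True):
        if call_reset:
            self.reset()  # the one-line remedy: initialise _prob up front

    def reset(self):
        self._prob = [0.25, 0.75]

    def sample(self):
        return max(self._prob)  # touches _prob, like ExactSampler.__next__ does


print(TinySampler().sample())                  # works: 0.75
try:
    TinySampler(call_reset=False).sample()     # AttributeError: no _prob yet
except AttributeError as exc:
    print(exc)
```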
<code>
[start of netket/sampler/exact_sampler.py]
1 import numpy as _np
2 from .abstract_sampler import AbstractSampler
3 from ..machine.density_matrix import AbstractDensityMatrix
4 from ..hilbert import DoubledHilbert
5 import netket.random
6
7
8 class ExactSampler(AbstractSampler):
9 r"""
10 This sampler generates i.i.d. samples from $$|\Psi(s)|^2$$.
11 In order to perform exact sampling, $$|\Psi(s)|^2$$ is precomputed an all
12 the possible values of the quantum numbers $$s$$. This sampler has thus an
13 exponential cost with the number of degrees of freedom, and cannot be used
14 for large systems, where Metropolis-based sampling are instead a viable
15 option.
16 """
17
18 def __init__(self, machine, sample_size=16):
19 r"""
20 Constructs a new ``ExactSampler`` given a machine.
21
22 Args:
23 machine: A machine $$\Psi(s)$$ used for the sampling.
24 The probability distribution being sampled
25 from is $$F(\Psi(s))$$, where the function
26 $$F(X)$$, is arbitrary, by default $$F(X)=|X|^2$$.
27
28 sample_size: The number of independent samples to be generated at each invocation of __next__.
29 """
30 super().__init__(machine, sample_size)
31 if isinstance(machine, AbstractDensityMatrix):
32 self.hilbert = DoubledHilbert(machine.hilbert)
33 else:
34 self.hilbert = machine.hilbert
35 self._machine_pow = 2.0
36
37 def reset(self, init_random=False):
38 self._prob = _np.absolute(self.machine.to_array()) ** self.machine_pow
39 self._prob /= self._prob.sum()
40
41 def __next__(self):
42 numbers = netket.random.choice(
43 self._prob.size, size=self.sample_shape[0], replace=True, p=self._prob
44 )
45 return self.hilbert.numbers_to_states(numbers)
46
47 def generate_samples(self, n_samples, init_random=False, samples=None):
48
49 if samples is None:
50 samples = _np.zeros((n_samples, self.sample_shape[0], self.sample_shape[1]))
51
52 numbers = netket.random.choice(
53 self._prob.size,
54 size=self.sample_shape[0] * n_samples,
55 replace=True,
56 p=self._prob,
57 )
58 samples[:] = self.hilbert.numbers_to_states(numbers).reshape(samples.shape)
59
60 return samples
61
62 @property
63 def machine_pow(self):
64 return self._machine_pow
65
66 @machine_pow.setter
67 def machine_pow(self, m_power):
68 self._machine_pow = m_power
69 self.reset()
70
[end of netket/sampler/exact_sampler.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/netket/sampler/exact_sampler.py b/netket/sampler/exact_sampler.py
--- a/netket/sampler/exact_sampler.py
+++ b/netket/sampler/exact_sampler.py
@@ -33,6 +33,7 @@
else:
self.hilbert = machine.hilbert
self._machine_pow = 2.0
+ self.reset()
def reset(self, init_random=False):
self._prob = _np.absolute(self.machine.to_array()) ** self.machine_pow
| {"golden_diff": "diff --git a/netket/sampler/exact_sampler.py b/netket/sampler/exact_sampler.py\n--- a/netket/sampler/exact_sampler.py\n+++ b/netket/sampler/exact_sampler.py\n@@ -33,6 +33,7 @@\n else:\n self.hilbert = machine.hilbert\n self._machine_pow = 2.0\n+ self.reset()\n \n def reset(self, init_random=False):\n self._prob = _np.absolute(self.machine.to_array()) ** self.machine_pow\n", "issue": "ExactSampler is not resetting at construction time\nExact Sampler should call Reset at construction time \r\n\n", "before_files": [{"content": "import numpy as _np\nfrom .abstract_sampler import AbstractSampler\nfrom ..machine.density_matrix import AbstractDensityMatrix\nfrom ..hilbert import DoubledHilbert\nimport netket.random\n\n\nclass ExactSampler(AbstractSampler):\n r\"\"\"\n This sampler generates i.i.d. samples from $$|\\Psi(s)|^2$$.\n In order to perform exact sampling, $$|\\Psi(s)|^2$$ is precomputed an all\n the possible values of the quantum numbers $$s$$. This sampler has thus an\n exponential cost with the number of degrees of freedom, and cannot be used\n for large systems, where Metropolis-based sampling are instead a viable\n option.\n \"\"\"\n\n def __init__(self, machine, sample_size=16):\n r\"\"\"\n Constructs a new ``ExactSampler`` given a machine.\n\n Args:\n machine: A machine $$\\Psi(s)$$ used for the sampling.\n The probability distribution being sampled\n from is $$F(\\Psi(s))$$, where the function\n $$F(X)$$, is arbitrary, by default $$F(X)=|X|^2$$.\n\n sample_size: The number of independent samples to be generated at each invocation of __next__.\n \"\"\"\n super().__init__(machine, sample_size)\n if isinstance(machine, AbstractDensityMatrix):\n self.hilbert = DoubledHilbert(machine.hilbert)\n else:\n self.hilbert = machine.hilbert\n self._machine_pow = 2.0\n\n def reset(self, init_random=False):\n self._prob = _np.absolute(self.machine.to_array()) ** self.machine_pow\n self._prob /= self._prob.sum()\n\n def __next__(self):\n numbers = netket.random.choice(\n self._prob.size, size=self.sample_shape[0], replace=True, p=self._prob\n )\n return self.hilbert.numbers_to_states(numbers)\n\n def generate_samples(self, n_samples, init_random=False, samples=None):\n\n if samples is None:\n samples = _np.zeros((n_samples, self.sample_shape[0], self.sample_shape[1]))\n\n numbers = netket.random.choice(\n self._prob.size,\n size=self.sample_shape[0] * n_samples,\n replace=True,\n p=self._prob,\n )\n samples[:] = self.hilbert.numbers_to_states(numbers).reshape(samples.shape)\n\n return samples\n\n @property\n def machine_pow(self):\n return self._machine_pow\n\n @machine_pow.setter\n def machine_pow(self, m_power):\n self._machine_pow = m_power\n self.reset()\n", "path": "netket/sampler/exact_sampler.py"}]} | 1,262 | 115 |
gh_patches_debug_1487 | rasdani/github-patches | git_diff | huggingface__diffusers-1149 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[Flax] 🚨 0.7.0 not working 🚨
### Describe the bug

### Reproduction
_No response_
### Logs
_No response_
### System Info
TPU v3-8
</issue>
<code>
[start of src/diffusers/models/embeddings_flax.py]
1 # Copyright 2022 The HuggingFace Team. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 import math
15
16 import flax.linen as nn
17 import jax.numpy as jnp
18
19
20 def get_sinusoidal_embeddings(
21 timesteps: jnp.ndarray,
22 embedding_dim: int,
23 freq_shift: float = 1,
24 min_timescale: float = 1,
25 max_timescale: float = 1.0e4,
26 flip_sin_to_cos: bool = False,
27 scale: float = 1.0,
28 ) -> jnp.ndarray:
29 """Returns the positional encoding (same as Tensor2Tensor).
30 Args:
31 timesteps: a 1-D Tensor of N indices, one per batch element.
32 These may be fractional.
33 embedding_dim: The number of output channels.
34 min_timescale: The smallest time unit (should probably be 0.0).
35 max_timescale: The largest time unit.
36 Returns:
37 a Tensor of timing signals [N, num_channels]
38 """
39 assert timesteps.ndim == 1, "Timesteps should be a 1d-array"
40 assert embedding_dim % 2 == 0, f"Embedding dimension {embedding_dim} should be even"
41 num_timescales = float(embedding_dim // 2)
42 log_timescale_increment = math.log(max_timescale / min_timescale) / (num_timescales - freq_shift)
43 inv_timescales = min_timescale * jnp.exp(jnp.arange(num_timescales, dtype=jnp.float32) * -log_timescale_increment)
44 emb = jnp.expand_dims(timesteps, 1) * jnp.expand_dims(inv_timescales, 0)
45
46 # scale embeddings
47 scaled_time = scale * emb
48
49 if flip_sin_to_cos:
50 signal = jnp.concatenate([jnp.cos(scaled_time), jnp.sin(scaled_time)], axis=1)
51 else:
52 signal = jnp.concatenate([jnp.sin(scaled_time), jnp.cos(scaled_time)], axis=1)
53 signal = jnp.reshape(signal, [jnp.shape(timesteps)[0], embedding_dim])
54 return signal
55
56
57 class FlaxTimestepEmbedding(nn.Module):
58 r"""
59 Time step Embedding Module. Learns embeddings for input time steps.
60
61 Args:
62 time_embed_dim (`int`, *optional*, defaults to `32`):
63 Time step embedding dimension
64 dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):
65 Parameters `dtype`
66 """
67 time_embed_dim: int = 32
68 dtype: jnp.dtype = jnp.float32
69
70 @nn.compact
71 def __call__(self, temb):
72 temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name="linear_1")(temb)
73 temb = nn.silu(temb)
74 temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name="linear_2")(temb)
75 return temb
76
77
78 class FlaxTimesteps(nn.Module):
79 r"""
80 Wrapper Module for sinusoidal Time step Embeddings as described in https://arxiv.org/abs/2006.11239
81
82 Args:
83 dim (`int`, *optional*, defaults to `32`):
84 Time step embedding dimension
85 """
86 dim: int = 32
87 freq_shift: float = 1
88
89 @nn.compact
90 def __call__(self, timesteps):
91 return get_sinusoidal_embeddings(timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift)
92
[end of src/diffusers/models/embeddings_flax.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/diffusers/models/embeddings_flax.py b/src/diffusers/models/embeddings_flax.py
--- a/src/diffusers/models/embeddings_flax.py
+++ b/src/diffusers/models/embeddings_flax.py
@@ -88,4 +88,6 @@
@nn.compact
def __call__(self, timesteps):
- return get_sinusoidal_embeddings(timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift)
+ return get_sinusoidal_embeddings(
+ timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift, flip_sin_to_cos=True
+ )
| {"golden_diff": "diff --git a/src/diffusers/models/embeddings_flax.py b/src/diffusers/models/embeddings_flax.py\n--- a/src/diffusers/models/embeddings_flax.py\n+++ b/src/diffusers/models/embeddings_flax.py\n@@ -88,4 +88,6 @@\n \n @nn.compact\n def __call__(self, timesteps):\n- return get_sinusoidal_embeddings(timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift)\n+ return get_sinusoidal_embeddings(\n+ timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift, flip_sin_to_cos=True\n+ )\n", "issue": "[Flax] \ud83d\udea8 0.7.0 not working \ud83d\udea8\n### Describe the bug\n\n\r\n\n\n### Reproduction\n\n_No response_\n\n### Logs\n\n_No response_\n\n### System Info\n\nTPU v3-8\n", "before_files": [{"content": "# Copyright 2022 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport math\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\n\ndef get_sinusoidal_embeddings(\n timesteps: jnp.ndarray,\n embedding_dim: int,\n freq_shift: float = 1,\n min_timescale: float = 1,\n max_timescale: float = 1.0e4,\n flip_sin_to_cos: bool = False,\n scale: float = 1.0,\n) -> jnp.ndarray:\n \"\"\"Returns the positional encoding (same as Tensor2Tensor).\n Args:\n timesteps: a 1-D Tensor of N indices, one per batch element.\n These may be fractional.\n embedding_dim: The number of output channels.\n min_timescale: The smallest time unit (should probably be 0.0).\n max_timescale: The largest time unit.\n Returns:\n a Tensor of timing signals [N, num_channels]\n \"\"\"\n assert timesteps.ndim == 1, \"Timesteps should be a 1d-array\"\n assert embedding_dim % 2 == 0, f\"Embedding dimension {embedding_dim} should be even\"\n num_timescales = float(embedding_dim // 2)\n log_timescale_increment = math.log(max_timescale / min_timescale) / (num_timescales - freq_shift)\n inv_timescales = min_timescale * jnp.exp(jnp.arange(num_timescales, dtype=jnp.float32) * -log_timescale_increment)\n emb = jnp.expand_dims(timesteps, 1) * jnp.expand_dims(inv_timescales, 0)\n\n # scale embeddings\n scaled_time = scale * emb\n\n if flip_sin_to_cos:\n signal = jnp.concatenate([jnp.cos(scaled_time), jnp.sin(scaled_time)], axis=1)\n else:\n signal = jnp.concatenate([jnp.sin(scaled_time), jnp.cos(scaled_time)], axis=1)\n signal = jnp.reshape(signal, [jnp.shape(timesteps)[0], embedding_dim])\n return signal\n\n\nclass FlaxTimestepEmbedding(nn.Module):\n r\"\"\"\n Time step Embedding Module. 
Learns embeddings for input time steps.\n\n Args:\n time_embed_dim (`int`, *optional*, defaults to `32`):\n Time step embedding dimension\n dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n Parameters `dtype`\n \"\"\"\n time_embed_dim: int = 32\n dtype: jnp.dtype = jnp.float32\n\n @nn.compact\n def __call__(self, temb):\n temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name=\"linear_1\")(temb)\n temb = nn.silu(temb)\n temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name=\"linear_2\")(temb)\n return temb\n\n\nclass FlaxTimesteps(nn.Module):\n r\"\"\"\n Wrapper Module for sinusoidal Time step Embeddings as described in https://arxiv.org/abs/2006.11239\n\n Args:\n dim (`int`, *optional*, defaults to `32`):\n Time step embedding dimension\n \"\"\"\n dim: int = 32\n freq_shift: float = 1\n\n @nn.compact\n def __call__(self, timesteps):\n return get_sinusoidal_embeddings(timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift)\n", "path": "src/diffusers/models/embeddings_flax.py"}]} | 1,740 | 138 |
gh_patches_debug_48383 | rasdani/github-patches | git_diff | DDMAL__CantusDB-900 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
we need to re-add a restart policy to docker-compose.yml
A recent change to docker-compose.yml removed the `restart: always` policy we added to our containers a couple of weeks ago. We should re-instate this.
</issue>
<code>
[start of django/cantusdb_project/main_app/widgets.py]
1 from django.forms.widgets import TextInput, Select, Textarea, CheckboxInput
2 from django.utils.safestring import mark_safe
3
4 class TextInputWidget(TextInput):
5 def __init__(self):
6 self.attrs = {"class": "form-control form-control-sm"}
7
8
9 class SelectWidget(Select):
10 """
11 not used, this widget does work, but we cannot order the choices by name
12 """
13
14 def __init__(self):
15 attrs = {"class": "form-control custom-select custom-select-sm"}
16 super().__init__(attrs=attrs)
17 # super().choices = choices
18 # self.choices = super().choices
19
20
21 class TextAreaWidget(Textarea):
22 def __init__(self):
23 self.attrs = {"class": "form-control", "rows": "3"}
24
25
26 class VolpianoAreaWidget(Textarea):
27 def __init__(self):
28 self.attrs = {
29 "class": "form-control",
30 "rows": "1.5",
31 "style": "font-family: Volpiano; font-size: xx-large",
32 }
33
34
35 class VolpianoInputWidget(TextInput):
36 def __init__(self):
37 self.attrs = {
38 "class": "form-control form-control-sm",
39 "style": "font-family: Volpiano; font-size: xx-large",
40 }
41
42
43 class CheckboxWidget(CheckboxInput):
44 pass
45
46
47 class AdminTextAreaWidget(Textarea):
48 def __init__(self):
49 self.attrs = {"class": "form-control", "rows": 10, "cols": 75}
50
51 def render(self, name, value, attrs=None, renderer=None):
52 return super().render(name, value, attrs=self.attrs) + mark_safe(
53 '<span style="color: red; font-weight: bold;"> * </span>'
54 )
55
56
57 class AdminTextInputWidget(TextInputWidget):
58 def render(self, name, value, attrs=None, renderer=None):
59 return super().render(name, value) + mark_safe(
60 '<span style="color: red; font-weight: bold;"> * </span>'
61 )
62
[end of django/cantusdb_project/main_app/widgets.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/django/cantusdb_project/main_app/widgets.py b/django/cantusdb_project/main_app/widgets.py
--- a/django/cantusdb_project/main_app/widgets.py
+++ b/django/cantusdb_project/main_app/widgets.py
@@ -1,6 +1,7 @@
from django.forms.widgets import TextInput, Select, Textarea, CheckboxInput
from django.utils.safestring import mark_safe
+
class TextInputWidget(TextInput):
def __init__(self):
self.attrs = {"class": "form-control form-control-sm"}
| {"golden_diff": "diff --git a/django/cantusdb_project/main_app/widgets.py b/django/cantusdb_project/main_app/widgets.py\n--- a/django/cantusdb_project/main_app/widgets.py\n+++ b/django/cantusdb_project/main_app/widgets.py\n@@ -1,6 +1,7 @@\n from django.forms.widgets import TextInput, Select, Textarea, CheckboxInput\n from django.utils.safestring import mark_safe\n \n+\n class TextInputWidget(TextInput):\n def __init__(self):\n self.attrs = {\"class\": \"form-control form-control-sm\"}\n", "issue": "we need to re-add a restart policy to docker-compose.yml\nA recent change to docker-compose.yml removed the `restart: always` policy we added to our containers a couple of weeks ago. We should re-instate this.\n", "before_files": [{"content": "from django.forms.widgets import TextInput, Select, Textarea, CheckboxInput\nfrom django.utils.safestring import mark_safe\n\nclass TextInputWidget(TextInput):\n def __init__(self):\n self.attrs = {\"class\": \"form-control form-control-sm\"}\n\n\nclass SelectWidget(Select):\n \"\"\"\n not used, this widget does work, but we cannot order the choices by name\n \"\"\"\n\n def __init__(self):\n attrs = {\"class\": \"form-control custom-select custom-select-sm\"}\n super().__init__(attrs=attrs)\n # super().choices = choices\n # self.choices = super().choices\n\n\nclass TextAreaWidget(Textarea):\n def __init__(self):\n self.attrs = {\"class\": \"form-control\", \"rows\": \"3\"}\n\n\nclass VolpianoAreaWidget(Textarea):\n def __init__(self):\n self.attrs = {\n \"class\": \"form-control\",\n \"rows\": \"1.5\",\n \"style\": \"font-family: Volpiano; font-size: xx-large\",\n }\n\n\nclass VolpianoInputWidget(TextInput):\n def __init__(self):\n self.attrs = {\n \"class\": \"form-control form-control-sm\",\n \"style\": \"font-family: Volpiano; font-size: xx-large\",\n }\n\n\nclass CheckboxWidget(CheckboxInput):\n pass\n\n\nclass AdminTextAreaWidget(Textarea):\n def __init__(self):\n self.attrs = {\"class\": \"form-control\", \"rows\": 10, \"cols\": 75}\n\n def render(self, name, value, attrs=None, renderer=None):\n return super().render(name, value, attrs=self.attrs) + mark_safe(\n '<span style=\"color: red; font-weight: bold;\"> * </span>'\n )\n\n\nclass AdminTextInputWidget(TextInputWidget):\n def render(self, name, value, attrs=None, renderer=None):\n return super().render(name, value) + mark_safe(\n '<span style=\"color: red; font-weight: bold;\"> * </span>'\n )\n", "path": "django/cantusdb_project/main_app/widgets.py"}]} | 1,156 | 120 |
gh_patches_debug_568 | rasdani/github-patches | git_diff | pex-tool__pex-836 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Release 2.1.0
On the docket:
The prime motivator:
+ [x] Pex does not download foreign abi3 wheels correctly #823
Changes to support the above as well as others:
+ [x] Fix pex resolving for foreign platforms. #835
+ [x] Use pypa/packaging. #831
+ [x] Upgrade vendored setuptools to 42.0.2. #832
+ [x] De-vendor pex just once per version. #833
+ [x] Support VCS urls for vendoring. #834
+ [x] Support python 3.8 in CI. #829
+ [x] Fix pex resolution to respect --ignore-errors. #828
+ [x] Kill `pkg_resources` finders monkey-patching. #827
+ [x] Use flit to distribute pex. #826
+ [x] Cleanup extras_require. #825
</issue>
<code>
[start of pex/version.py]
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = '2.0.3'
5
[end of pex/version.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = '2.0.3'
+__version__ = '2.1.0'
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = '2.0.3'\n+__version__ = '2.1.0'\n", "issue": "Release 2.1.0\nOn the docket:\r\n\r\nThe prime motivator:\r\n+ [x] Pex does not download foreign abi3 wheels correctly #823\r\n\r\nChanges to support the above as well as others:\r\n+ [x] Fix pex resolving for foreign platforms. #835 \r\n+ [x] Use pypa/packaging. #831\r\n+ [x] Upgrade vendored setuptools to 42.0.2. #832\r\n+ [x] De-vendor pex just once per version. #833\r\n+ [x] Support VCS urls for vendoring. #834\r\n+ [x] Support python 3.8 in CI. #829\r\n+ [x] Fix pex resolution to respect --ignore-errors. #828\r\n+ [x] Kill `pkg_resources` finders monkey-patching. #827\r\n+ [x] Use flit to distribute pex. #826\r\n+ [x] Cleanup extras_require. #825\r\n\r\n\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '2.0.3'\n", "path": "pex/version.py"}]} | 810 | 95 |
gh_patches_debug_13695 | rasdani/github-patches | git_diff | projectmesa__mesa-373 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Allow fixed seed for replication
Figure out how to best handle running a model with a fixed seed, to ensure that it will produce the same result, and implement that.
</issue>
<code>
[start of mesa/model.py]
1 # -*- coding: utf-8 -*-
2 """
3 The model class for Mesa framework.
4
5 Core Objects: Model
6
7 """
8 import datetime as dt
9 import random
10
11
12 class Model:
13 """ Base class for models. """
14 def __init__(self, seed=None):
15 """ Create a new model. Overload this method with the actual code to
16 start the model.
17
18 Args:
19 seed: seed for the random number generator
20
21 Attributes:
22 schedule: schedule object
23 running: a bool indicating if the model should continue running
24
25 """
26 if seed is None:
27 self.seed = dt.datetime.now()
28 else:
29 self.seed = seed
30 random.seed(seed)
31 self.running = True
32 self.schedule = None
33
34 def run_model(self):
35 """ Run the model until the end condition is reached. Overload as
36 needed.
37
38 """
39 while self.running:
40 self.step()
41
42 def step(self):
43 """ A single step. Fill in here. """
44 pass
45
[end of mesa/model.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mesa/model.py b/mesa/model.py
--- a/mesa/model.py
+++ b/mesa/model.py
@@ -7,6 +7,7 @@
"""
import datetime as dt
import random
+import numpy
class Model:
@@ -23,11 +24,14 @@
running: a bool indicating if the model should continue running
"""
+ # seed both the numpy and Python random number generators
if seed is None:
self.seed = dt.datetime.now()
else:
self.seed = seed
random.seed(seed)
+ numpy.random.seed(seed)
+
self.running = True
self.schedule = None
| {"golden_diff": "diff --git a/mesa/model.py b/mesa/model.py\n--- a/mesa/model.py\n+++ b/mesa/model.py\n@@ -7,6 +7,7 @@\n \"\"\"\n import datetime as dt\n import random\n+import numpy\n \n \n class Model:\n@@ -23,11 +24,14 @@\n running: a bool indicating if the model should continue running\n \n \"\"\"\n+ # seed both the numpy and Python random number generators\n if seed is None:\n self.seed = dt.datetime.now()\n else:\n self.seed = seed\n random.seed(seed)\n+ numpy.random.seed(seed)\n+\n self.running = True\n self.schedule = None\n", "issue": "Allow fixed seed for replication\nFigure out how to best handle running a model with a fixed seed, to ensure that it will produce the same result. and implement that.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nThe model class for Mesa framework.\n\nCore Objects: Model\n\n\"\"\"\nimport datetime as dt\nimport random\n\n\nclass Model:\n \"\"\" Base class for models. \"\"\"\n def __init__(self, seed=None):\n \"\"\" Create a new model. Overload this method with the actual code to\n start the model.\n\n Args:\n seed: seed for the random number generator\n\n Attributes:\n schedule: schedule object\n running: a bool indicating if the model should continue running\n\n \"\"\"\n if seed is None:\n self.seed = dt.datetime.now()\n else:\n self.seed = seed\n random.seed(seed)\n self.running = True\n self.schedule = None\n\n def run_model(self):\n \"\"\" Run the model until the end condition is reached. Overload as\n needed.\n\n \"\"\"\n while self.running:\n self.step()\n\n def step(self):\n \"\"\" A single step. Fill in here. \"\"\"\n pass\n", "path": "mesa/model.py"}]} | 862 | 146 |
gh_patches_debug_29583 | rasdani/github-patches | git_diff | Parsl__parsl-2301 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Remove IPyParallel executor
**Is your feature request related to a problem? Please describe.**
The parsl ipp code isn't really maintained any more. As I try to tidy up some stuff in the test suite I find myself spending more time than I'd like to debugging what is happening inside the IPP parsl executor, while at the same time the folklore is "don't use IPP".
**Describe the solution you'd like**
I'd like to remove all IPP tests from the test suite now. This will inevitably lead to the ipp code rotting fast, and so in addition the IPP executor should probably be removed in its entirety.
**Describe alternatives you've considered**
An alternative is to put developer effort into maintaining IPP as a supported executor.
**Additional context**
The folklore on slack seems to be "don't use IPP". This feature request turns that folklore into reality.
I invite comment in support or against from @kylechard @yadudoc @annawoodard especially as regards how this would affect existing users who have not moved to htex (if any?).
see #1328
</issue>
<code>
[start of parsl/executors/ipp_controller.py]
1 class Controller():
2 """This stub exists to issue a more helpful warning about the IPyParallel
3 executor being removed from parsl some time after v0.9.
4
5 It can eventually be removed entirely - perhaps after v0.10
6 """
7 def __init__(self, *args, **kwargs):
8 raise RuntimeError("The IPyParallel executor has been removed from parsl")
9
[end of parsl/executors/ipp_controller.py]
[start of parsl/executors/__init__.py]
1 from parsl.executors.threads import ThreadPoolExecutor
2 from parsl.executors.ipp import IPyParallelExecutor
3 from parsl.executors.workqueue.executor import WorkQueueExecutor
4 from parsl.executors.high_throughput.executor import HighThroughputExecutor
5 from parsl.executors.extreme_scale.executor import ExtremeScaleExecutor
6 from parsl.executors.low_latency.executor import LowLatencyExecutor
7 from parsl.executors.flux.executor import FluxExecutor
8
9 __all__ = ['IPyParallelExecutor',
10 'ThreadPoolExecutor',
11 'HighThroughputExecutor',
12 'ExtremeScaleExecutor',
13 'LowLatencyExecutor',
14 'WorkQueueExecutor',
15 'FluxExecutor']
16
[end of parsl/executors/__init__.py]
[start of parsl/executors/ipp.py]
1 from parsl.executors.base import ParslExecutor
2
3
4 class IPyParallelExecutor(ParslExecutor):
5 """This stub exists to issue a more helpful warning about the IPyParallel
6 executor being removed from parsl some time after v0.9.
7
8 It can eventually be removed entirely - perhaps after v0.10
9 """
10
11 def __new__(*args, **kwargs):
12 raise RuntimeError("The IPyParallel executor has been removed from parsl")
13
[end of parsl/executors/ipp.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/parsl/executors/__init__.py b/parsl/executors/__init__.py
--- a/parsl/executors/__init__.py
+++ b/parsl/executors/__init__.py
@@ -1,13 +1,11 @@
from parsl.executors.threads import ThreadPoolExecutor
-from parsl.executors.ipp import IPyParallelExecutor
from parsl.executors.workqueue.executor import WorkQueueExecutor
from parsl.executors.high_throughput.executor import HighThroughputExecutor
from parsl.executors.extreme_scale.executor import ExtremeScaleExecutor
from parsl.executors.low_latency.executor import LowLatencyExecutor
from parsl.executors.flux.executor import FluxExecutor
-__all__ = ['IPyParallelExecutor',
- 'ThreadPoolExecutor',
+__all__ = ['ThreadPoolExecutor',
'HighThroughputExecutor',
'ExtremeScaleExecutor',
'LowLatencyExecutor',
diff --git a/parsl/executors/ipp.py b/parsl/executors/ipp.py
deleted file mode 100644
--- a/parsl/executors/ipp.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from parsl.executors.base import ParslExecutor
-
-
-class IPyParallelExecutor(ParslExecutor):
- """This stub exists to issue a more helpful warning about the IPyParallel
- executor being removed from parsl some time after v0.9.
-
- It can eventually be removed entirely - perhaps after v0.10
- """
-
- def __new__(*args, **kwargs):
- raise RuntimeError("The IPyParallel executor has been removed from parsl")
diff --git a/parsl/executors/ipp_controller.py b/parsl/executors/ipp_controller.py
deleted file mode 100644
--- a/parsl/executors/ipp_controller.py
+++ /dev/null
@@ -1,8 +0,0 @@
-class Controller():
- """This stub exists to issue a more helpful warning about the IPyParallel
- executor being removed from parsl some time after v0.9.
-
- It can eventually be removed entirely - perhaps after v0.10
- """
- def __init__(self, *args, **kwargs):
- raise RuntimeError("The IPyParallel executor has been removed from parsl")
| {"golden_diff": "diff --git a/parsl/executors/__init__.py b/parsl/executors/__init__.py\n--- a/parsl/executors/__init__.py\n+++ b/parsl/executors/__init__.py\n@@ -1,13 +1,11 @@\n from parsl.executors.threads import ThreadPoolExecutor\n-from parsl.executors.ipp import IPyParallelExecutor\n from parsl.executors.workqueue.executor import WorkQueueExecutor\n from parsl.executors.high_throughput.executor import HighThroughputExecutor\n from parsl.executors.extreme_scale.executor import ExtremeScaleExecutor\n from parsl.executors.low_latency.executor import LowLatencyExecutor\n from parsl.executors.flux.executor import FluxExecutor\n \n-__all__ = ['IPyParallelExecutor',\n- 'ThreadPoolExecutor',\n+__all__ = ['ThreadPoolExecutor',\n 'HighThroughputExecutor',\n 'ExtremeScaleExecutor',\n 'LowLatencyExecutor',\ndiff --git a/parsl/executors/ipp.py b/parsl/executors/ipp.py\ndeleted file mode 100644\n--- a/parsl/executors/ipp.py\n+++ /dev/null\n@@ -1,12 +0,0 @@\n-from parsl.executors.base import ParslExecutor\n-\n-\n-class IPyParallelExecutor(ParslExecutor):\n- \"\"\"This stub exists to issue a more helpful warning about the IPyParallel\n- executor being removed from parsl some time after v0.9.\n-\n- It can eventually be removed entirely - perhaps after v0.10\n- \"\"\"\n-\n- def __new__(*args, **kwargs):\n- raise RuntimeError(\"The IPyParallel executor has been removed from parsl\")\ndiff --git a/parsl/executors/ipp_controller.py b/parsl/executors/ipp_controller.py\ndeleted file mode 100644\n--- a/parsl/executors/ipp_controller.py\n+++ /dev/null\n@@ -1,8 +0,0 @@\n-class Controller():\n- \"\"\"This stub exists to issue a more helpful warning about the IPyParallel\n- executor being removed from parsl some time after v0.9.\n-\n- It can eventually be removed entirely - perhaps after v0.10\n- \"\"\"\n- def __init__(self, *args, **kwargs):\n- raise RuntimeError(\"The IPyParallel executor has been removed from parsl\")\n", "issue": "Remove IPyParallel executor\n**Is your feature request related to a problem? Please describe.**\r\nThe parsl ipp code isn't really maintained any more. As I try to tidy up some stuff in the test suite I find myself spending more time than I'd like to debugging what is happening inside the IPP parsl executor, while at the same time the folklore is \"don't use IPP\".\r\n\r\n**Describe the solution you'd like**\r\nI'd like to remove all IPP tests from the test suite now. This will inevitably lead to the ipp code rotting fast, and so in addition the IPP executor should probably be removed in its entirely.\r\n\r\n**Describe alternatives you've considered**\r\nAn alternative is to put developer effort into maintaining IPP as a supported executor.\r\n\r\n**Additional context**\r\nThe folklore on slack seems to be \"don't use IPP\". 
This feature request turns that folklore into reality.\r\n\r\nI invite comment in support or against from @kylechard @yadudoc @annawoodard especially as regards how this would affect existing users who have not moved to htex (if any?).\r\n\r\nsee #1328 \n", "before_files": [{"content": "class Controller():\n \"\"\"This stub exists to issue a more helpful warning about the IPyParallel\n executor being removed from parsl some time after v0.9.\n\n It can eventually be removed entirely - perhaps after v0.10\n \"\"\"\n def __init__(self, *args, **kwargs):\n raise RuntimeError(\"The IPyParallel executor has been removed from parsl\")\n", "path": "parsl/executors/ipp_controller.py"}, {"content": "from parsl.executors.threads import ThreadPoolExecutor\nfrom parsl.executors.ipp import IPyParallelExecutor\nfrom parsl.executors.workqueue.executor import WorkQueueExecutor\nfrom parsl.executors.high_throughput.executor import HighThroughputExecutor\nfrom parsl.executors.extreme_scale.executor import ExtremeScaleExecutor\nfrom parsl.executors.low_latency.executor import LowLatencyExecutor\nfrom parsl.executors.flux.executor import FluxExecutor\n\n__all__ = ['IPyParallelExecutor',\n 'ThreadPoolExecutor',\n 'HighThroughputExecutor',\n 'ExtremeScaleExecutor',\n 'LowLatencyExecutor',\n 'WorkQueueExecutor',\n 'FluxExecutor']\n", "path": "parsl/executors/__init__.py"}, {"content": "from parsl.executors.base import ParslExecutor\n\n\nclass IPyParallelExecutor(ParslExecutor):\n \"\"\"This stub exists to issue a more helpful warning about the IPyParallel\n executor being removed from parsl some time after v0.9.\n\n It can eventually be removed entirely - perhaps after v0.10\n \"\"\"\n\n def __new__(*args, **kwargs):\n raise RuntimeError(\"The IPyParallel executor has been removed from parsl\")\n", "path": "parsl/executors/ipp.py"}]} | 1,191 | 517 |
gh_patches_debug_15651 | rasdani/github-patches | git_diff | explosion__spaCy-1389 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Lemmatizer exceptions: `lemma_strings` get sorted anyway
Lemmatization exceptions have been working inconsistently, even when they are added directly in `corpora/en/wordnet/dict/verb.exc`
The minimal test case of `coping` at https://github.com/explosion/spaCy/issues/389 reveals that, at
https://github.com/explosion/spaCy/blob/master/spacy/lemmatizer.py#L94, the lemmatizer's list of potential forms (where the thing in lemmatizer.exceptions is item [0]) is cast to a `set` (and so loses ordering); then in https://github.com/explosion/spaCy/blob/master/spacy/morphology.pyx#L149, that `set` gets sorted. So lemmatizer exceptions only work if they also come first alphabetically!
I've implemented the fix for this, but I need this issue in order to submit the PR!
One question, though, for @honnibal: this can be fixed either as I did it locally -- return the whole list, with exceptions and then anything that comes back from the lemmatizer's `rules` -- or skip the rules altogether if we used an exception. I think it's more useful downstream if we keep all lemma candidates, even if we're not using them in the default pipeline. But it also seems only destructive to do `sorted(set())` on them!
</issue>
<code>
[start of spacy/lemmatizer.py]
1 # coding: utf8
2 from __future__ import unicode_literals
3
4 from .symbols import POS, NOUN, VERB, ADJ, PUNCT
5 from .symbols import VerbForm_inf, VerbForm_none, Number_sing, Degree_pos
6
7
8 class Lemmatizer(object):
9 @classmethod
10 def load(cls, path, index=None, exc=None, rules=None):
11 return cls(index or {}, exc or {}, rules or {})
12
13 def __init__(self, index, exceptions, rules):
14 self.index = index
15 self.exc = exceptions
16 self.rules = rules
17
18 def __call__(self, string, univ_pos, morphology=None):
19 if univ_pos == NOUN:
20 univ_pos = 'noun'
21 elif univ_pos == VERB:
22 univ_pos = 'verb'
23 elif univ_pos == ADJ:
24 univ_pos = 'adj'
25 elif univ_pos == PUNCT:
26 univ_pos = 'punct'
27 # See Issue #435 for example of where this logic is requied.
28 if self.is_base_form(univ_pos, morphology):
29 return set([string.lower()])
30 lemmas = lemmatize(string, self.index.get(univ_pos, {}),
31 self.exc.get(univ_pos, {}),
32 self.rules.get(univ_pos, []))
33 return lemmas
34
35 def is_base_form(self, univ_pos, morphology=None):
36 """
37 Check whether we're dealing with an uninflected paradigm, so we can
38 avoid lemmatization entirely.
39 """
40 morphology = {} if morphology is None else morphology
41 others = [key for key in morphology if key not in (POS, 'number', 'pos', 'verbform')]
42 true_morph_key = morphology.get('morph', 0)
43 if univ_pos == 'noun' and morphology.get('Number') == 'sing':
44 return True
45 elif univ_pos == 'verb' and morphology.get('VerbForm') == 'inf':
46 return True
47 elif univ_pos == 'adj' and morphology.get('Degree') == 'pos':
48 return True
49 elif VerbForm_inf in morphology:
50 return True
51 elif VerbForm_none in morphology:
52 return True
53 elif Number_sing in morphology:
54 return True
55 elif Degree_pos in morphology:
56 return True
57 else:
58 return False
59
60 def noun(self, string, morphology=None):
61 return self(string, 'noun', morphology)
62
63 def verb(self, string, morphology=None):
64 return self(string, 'verb', morphology)
65
66 def adj(self, string, morphology=None):
67 return self(string, 'adj', morphology)
68
69 def punct(self, string, morphology=None):
70 return self(string, 'punct', morphology)
71
72
73 def lemmatize(string, index, exceptions, rules):
74 string = string.lower()
75 forms = []
76 # TODO: Is this correct? See discussion in Issue #435.
77 #if string in index:
78 # forms.append(string)
79 forms.extend(exceptions.get(string, []))
80 oov_forms = []
81 for old, new in rules:
82 if string.endswith(old):
83 form = string[:len(string) - len(old)] + new
84 if not form:
85 pass
86 elif form in index or not form.isalpha():
87 forms.append(form)
88 else:
89 oov_forms.append(form)
90 if not forms:
91 forms.extend(oov_forms)
92 if not forms:
93 forms.append(string)
94 return set(forms)
95
[end of spacy/lemmatizer.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/spacy/lemmatizer.py b/spacy/lemmatizer.py
--- a/spacy/lemmatizer.py
+++ b/spacy/lemmatizer.py
@@ -78,15 +78,16 @@
# forms.append(string)
forms.extend(exceptions.get(string, []))
oov_forms = []
- for old, new in rules:
- if string.endswith(old):
- form = string[:len(string) - len(old)] + new
- if not form:
- pass
- elif form in index or not form.isalpha():
- forms.append(form)
- else:
- oov_forms.append(form)
+ if not forms:
+ for old, new in rules:
+ if string.endswith(old):
+ form = string[:len(string) - len(old)] + new
+ if not form:
+ pass
+ elif form in index or not form.isalpha():
+ forms.append(form)
+ else:
+ oov_forms.append(form)
if not forms:
forms.extend(oov_forms)
if not forms:
| {"golden_diff": "diff --git a/spacy/lemmatizer.py b/spacy/lemmatizer.py\n--- a/spacy/lemmatizer.py\n+++ b/spacy/lemmatizer.py\n@@ -78,15 +78,16 @@\n # forms.append(string)\n forms.extend(exceptions.get(string, []))\n oov_forms = []\n- for old, new in rules:\n- if string.endswith(old):\n- form = string[:len(string) - len(old)] + new\n- if not form:\n- pass\n- elif form in index or not form.isalpha():\n- forms.append(form)\n- else:\n- oov_forms.append(form)\n+ if not forms:\n+ for old, new in rules:\n+ if string.endswith(old):\n+ form = string[:len(string) - len(old)] + new\n+ if not form:\n+ pass\n+ elif form in index or not form.isalpha():\n+ forms.append(form)\n+ else:\n+ oov_forms.append(form)\n if not forms:\n forms.extend(oov_forms)\n if not forms:\n", "issue": "Lemmatizer exceptions: `lemma_strings` get sorted anyway\nLemmatization exceptions have been working inconsistently, even when they are added directly in `corpora/en/wordnet/dict/verb.exc`\r\n\r\nThe minimal test case of `coping` at https://github.com/explosion/spaCy/issues/389 reveals that, at \r\nhttps://github.com/explosion/spaCy/blob/master/spacy/lemmatizer.py#L94, the lemmatizer's list of potential forms (where the thing in lemmatizer.exceptions is item [0]) is cast to a `set` (and so loses ordering); then in https://github.com/explosion/spaCy/blob/master/spacy/morphology.pyx#L149, that `set` gets sorted. So lemmatizer exceptions only work if they also come first alphabetically!\r\n\r\nI've implemented the fix for this, but I need this issue in order to submit the PR!\r\n\r\nOne question, though, for @honnibal: this can be fixed either as I did it locally -- return the whole list, with exceptions and then anything that comes back from the lemmatizer's `rules` -- or skip the rules altogether if we used an exception. I think it's more useful downstream if we keep all lemma candidates, even if we're not using them in the default pipeline. 
But it also seems only destructive to do `sorted(set())` on them!\n", "before_files": [{"content": "# coding: utf8\nfrom __future__ import unicode_literals\n\nfrom .symbols import POS, NOUN, VERB, ADJ, PUNCT\nfrom .symbols import VerbForm_inf, VerbForm_none, Number_sing, Degree_pos\n\n\nclass Lemmatizer(object):\n @classmethod\n def load(cls, path, index=None, exc=None, rules=None):\n return cls(index or {}, exc or {}, rules or {})\n\n def __init__(self, index, exceptions, rules):\n self.index = index\n self.exc = exceptions\n self.rules = rules\n\n def __call__(self, string, univ_pos, morphology=None):\n if univ_pos == NOUN:\n univ_pos = 'noun'\n elif univ_pos == VERB:\n univ_pos = 'verb'\n elif univ_pos == ADJ:\n univ_pos = 'adj'\n elif univ_pos == PUNCT:\n univ_pos = 'punct'\n # See Issue #435 for example of where this logic is requied.\n if self.is_base_form(univ_pos, morphology):\n return set([string.lower()])\n lemmas = lemmatize(string, self.index.get(univ_pos, {}),\n self.exc.get(univ_pos, {}),\n self.rules.get(univ_pos, []))\n return lemmas\n\n def is_base_form(self, univ_pos, morphology=None):\n \"\"\"\n Check whether we're dealing with an uninflected paradigm, so we can\n avoid lemmatization entirely.\n \"\"\"\n morphology = {} if morphology is None else morphology\n others = [key for key in morphology if key not in (POS, 'number', 'pos', 'verbform')]\n true_morph_key = morphology.get('morph', 0)\n if univ_pos == 'noun' and morphology.get('Number') == 'sing':\n return True\n elif univ_pos == 'verb' and morphology.get('VerbForm') == 'inf':\n return True\n elif univ_pos == 'adj' and morphology.get('Degree') == 'pos':\n return True\n elif VerbForm_inf in morphology:\n return True\n elif VerbForm_none in morphology:\n return True\n elif Number_sing in morphology:\n return True\n elif Degree_pos in morphology:\n return True\n else:\n return False\n\n def noun(self, string, morphology=None):\n return self(string, 'noun', morphology)\n\n def verb(self, string, morphology=None):\n return self(string, 'verb', morphology)\n\n def adj(self, string, morphology=None):\n return self(string, 'adj', morphology)\n\n def punct(self, string, morphology=None):\n return self(string, 'punct', morphology)\n\n\ndef lemmatize(string, index, exceptions, rules):\n string = string.lower()\n forms = []\n # TODO: Is this correct? See discussion in Issue #435.\n #if string in index:\n # forms.append(string)\n forms.extend(exceptions.get(string, []))\n oov_forms = []\n for old, new in rules:\n if string.endswith(old):\n form = string[:len(string) - len(old)] + new\n if not form:\n pass\n elif form in index or not form.isalpha():\n forms.append(form)\n else:\n oov_forms.append(form)\n if not forms:\n forms.extend(oov_forms)\n if not forms:\n forms.append(string)\n return set(forms)\n", "path": "spacy/lemmatizer.py"}]} | 1,783 | 245 |
gh_patches_debug_1628 | rasdani/github-patches | git_diff | apache__tvm-12178 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Exercise TVM under minimal configuration in CI
We have seen a couple bugs due to microTVM being presumed-ON in config.cmake. Namely, you get python errors importing TVM right now when USE_MICRO is OFF. We should have a regression test that verifies basic functionality with everything (or nearly everything) OFF.
Context: apache/tvm#9617
And another micro-related issue of the same kind, which I don't have handy right now.
cc @gigiblender
</issue>
<code>
[start of ci/jenkins/generate.py]
1 #!/usr/bin/env python3
2 # Licensed to the Apache Software Foundation (ASF) under one
3 # or more contributor license agreements. See the NOTICE file
4 # distributed with this work for additional information
5 # regarding copyright ownership. The ASF licenses this file
6 # to you under the Apache License, Version 2.0 (the
7 # "License"); you may not use this file except in compliance
8 # with the License. You may obtain a copy of the License at
9 #
10 # http://www.apache.org/licenses/LICENSE-2.0
11 #
12 # Unless required by applicable law or agreed to in writing,
13 # software distributed under the License is distributed on an
14 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 # KIND, either express or implied. See the License for the
16 # specific language governing permissions and limitations
17 # under the License.
18 import jinja2
19 import argparse
20 import difflib
21 import re
22 import datetime
23 import textwrap
24
25 from pathlib import Path
26
27
28 REPO_ROOT = Path(__file__).resolve().parent.parent.parent
29 JENKINSFILE_TEMPLATE = REPO_ROOT / "ci" / "jenkins" / "Jenkinsfile.j2"
30 JENKINSFILE = REPO_ROOT / "Jenkinsfile"
31
32
33 data = {
34 "images": [
35 {
36 "name": "ci_arm",
37 "platform": "ARM",
38 },
39 {
40 "name": "ci_cpu",
41 "platform": "CPU",
42 },
43 {
44 "name": "ci_gpu",
45 "platform": "CPU",
46 },
47 {
48 "name": "ci_hexagon",
49 "platform": "CPU",
50 },
51 {
52 "name": "ci_i386",
53 "platform": "CPU",
54 },
55 {
56 "name": "ci_lint",
57 "platform": "CPU",
58 },
59 {
60 "name": "ci_cortexm",
61 "platform": "CPU",
62 },
63 {
64 "name": "ci_wasm",
65 "platform": "CPU",
66 },
67 ]
68 }
69
70
71 def lines_without_generated_tag(content):
72 return [
73 line for line in content.splitlines(keepends=True) if not line.startswith("// Generated at")
74 ]
75
76
77 if __name__ == "__main__":
78 help = "Regenerate Jenkinsfile from template"
79 parser = argparse.ArgumentParser(description=help)
80 parser.add_argument("--check", action="store_true", help="just verify the output didn't change")
81 args = parser.parse_args()
82
83 with open(JENKINSFILE) as f:
84 content = f.read()
85
86 data["generated_time"] = datetime.datetime.now().isoformat()
87
88 environment = jinja2.Environment(
89 loader=jinja2.FileSystemLoader(REPO_ROOT),
90 undefined=jinja2.StrictUndefined,
91 lstrip_blocks=True,
92 trim_blocks=True,
93 keep_trailing_newline=True,
94 )
95 template = environment.get_template(str(JENKINSFILE_TEMPLATE.relative_to(REPO_ROOT)))
96 new_content = template.render(**data)
97
98 diff = "".join(
99 difflib.unified_diff(
100 lines_without_generated_tag(content), lines_without_generated_tag(new_content)
101 )
102 )
103 if args.check:
104 if not diff:
105 print("Success, the newly generated Jenkinsfile matched the one on disk")
106 exit(0)
107 else:
108 print(
109 textwrap.dedent(
110 """
111 Newly generated Jenkinsfile did not match the one on disk! If you have made
112 edits to the Jenkinsfile, move them to 'jenkins/Jenkinsfile.j2' and
113 regenerate the Jenkinsfile from the template with
114
115 python3 -m pip install -r jenkins/requirements.txt
116 python3 jenkins/generate.py
117
118 Diffed changes:
119 """
120 ).strip()
121 )
122 print(diff)
123 exit(1)
124 else:
125 with open(JENKINSFILE, "w") as f:
126 f.write(new_content)
127 if not diff:
128 print(f"Wrote output to {JENKINSFILE.relative_to(REPO_ROOT)}, no changes made")
129 else:
130 print(f"Wrote output to {JENKINSFILE.relative_to(REPO_ROOT)}, changes:")
131 print(diff)
132
[end of ci/jenkins/generate.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/ci/jenkins/generate.py b/ci/jenkins/generate.py
--- a/ci/jenkins/generate.py
+++ b/ci/jenkins/generate.py
@@ -40,6 +40,10 @@
"name": "ci_cpu",
"platform": "CPU",
},
+ {
+ "name": "ci_minimal",
+ "platform": "CPU",
+ },
{
"name": "ci_gpu",
"platform": "CPU",
| {"golden_diff": "diff --git a/ci/jenkins/generate.py b/ci/jenkins/generate.py\n--- a/ci/jenkins/generate.py\n+++ b/ci/jenkins/generate.py\n@@ -40,6 +40,10 @@\n \"name\": \"ci_cpu\",\n \"platform\": \"CPU\",\n },\n+ {\n+ \"name\": \"ci_minimal\",\n+ \"platform\": \"CPU\",\n+ },\n {\n \"name\": \"ci_gpu\",\n \"platform\": \"CPU\",\n", "issue": "Exercise TVM under minimal configuration in CI\nWe have seen a couple bugs due to microTVM being presumed-ON in config.cmake. Namely, you get python errors importing TVM right now when USE_MICRO is OFF. We should have a regression test that verifies basic functionality with everything (or nearly everything) OFF.\r\n\r\nContext: apache/tvm#9617\r\nAnd another micro-related issue of the same kind, which i don't have handy right now.\r\n\r\ncc @gigiblender \n", "before_files": [{"content": "#!/usr/bin/env python3\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\nimport jinja2\nimport argparse\nimport difflib\nimport re\nimport datetime\nimport textwrap\n\nfrom pathlib import Path\n\n\nREPO_ROOT = Path(__file__).resolve().parent.parent.parent\nJENKINSFILE_TEMPLATE = REPO_ROOT / \"ci\" / \"jenkins\" / \"Jenkinsfile.j2\"\nJENKINSFILE = REPO_ROOT / \"Jenkinsfile\"\n\n\ndata = {\n \"images\": [\n {\n \"name\": \"ci_arm\",\n \"platform\": \"ARM\",\n },\n {\n \"name\": \"ci_cpu\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_gpu\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_hexagon\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_i386\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_lint\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_cortexm\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_wasm\",\n \"platform\": \"CPU\",\n },\n ]\n}\n\n\ndef lines_without_generated_tag(content):\n return [\n line for line in content.splitlines(keepends=True) if not line.startswith(\"// Generated at\")\n ]\n\n\nif __name__ == \"__main__\":\n help = \"Regenerate Jenkinsfile from template\"\n parser = argparse.ArgumentParser(description=help)\n parser.add_argument(\"--check\", action=\"store_true\", help=\"just verify the output didn't change\")\n args = parser.parse_args()\n\n with open(JENKINSFILE) as f:\n content = f.read()\n\n data[\"generated_time\"] = datetime.datetime.now().isoformat()\n\n environment = jinja2.Environment(\n loader=jinja2.FileSystemLoader(REPO_ROOT),\n undefined=jinja2.StrictUndefined,\n lstrip_blocks=True,\n trim_blocks=True,\n keep_trailing_newline=True,\n )\n template = environment.get_template(str(JENKINSFILE_TEMPLATE.relative_to(REPO_ROOT)))\n new_content = template.render(**data)\n\n diff = \"\".join(\n difflib.unified_diff(\n lines_without_generated_tag(content), lines_without_generated_tag(new_content)\n )\n )\n if args.check:\n if not diff:\n print(\"Success, the newly 
generated Jenkinsfile matched the one on disk\")\n exit(0)\n else:\n print(\n textwrap.dedent(\n \"\"\"\n Newly generated Jenkinsfile did not match the one on disk! If you have made\n edits to the Jenkinsfile, move them to 'jenkins/Jenkinsfile.j2' and\n regenerate the Jenkinsfile from the template with\n\n python3 -m pip install -r jenkins/requirements.txt\n python3 jenkins/generate.py\n\n Diffed changes:\n \"\"\"\n ).strip()\n )\n print(diff)\n exit(1)\n else:\n with open(JENKINSFILE, \"w\") as f:\n f.write(new_content)\n if not diff:\n print(f\"Wrote output to {JENKINSFILE.relative_to(REPO_ROOT)}, no changes made\")\n else:\n print(f\"Wrote output to {JENKINSFILE.relative_to(REPO_ROOT)}, changes:\")\n print(diff)\n", "path": "ci/jenkins/generate.py"}]} | 1,834 | 112 |
gh_patches_debug_977 | rasdani/github-patches | git_diff | medtagger__MedTagger-442 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Getting random scan for non-existing task key results in 500
## Current Behavior
Providing a non-existing task key results in a 500 HTTP code.
## Expected Behavior
Backend should handle this situation appropriately and return a 404 HTTP code.
## Steps to Reproduce the Problem
 1. Perform a GET `scans/random?task=<task_key>` and provide a non-existing key.
</issue>
<code>
[start of backend/medtagger/repositories/tasks.py]
1 """Module responsible for definition of TaskRepository."""
2 from typing import List
3
4 from medtagger.database import db_session
5 from medtagger.database.models import Task, LabelTag, Dataset
6 from medtagger.exceptions import InternalErrorException
7
8
9 def get_all_tasks(include_disabled: bool = False) -> List[Task]:
10 """Fetch all tasks from database ordered by key."""
11 query = Task.query
12 if not include_disabled:
13 query = query.filter(~Task.disabled)
14 return query.order_by(Task.key).all()
15
16
17 def get_task_by_key(key: str) -> Task:
18 """Fetch Task from database.
19
20 :param key: key for a Task
21 :return: Task object
22 """
23 with db_session() as session:
24 task = session.query(Task).filter(Task.key == key).one()
25 return task
26
27
28 def add_task(key: str, name: str, image_path: str, datasets_keys: List[str], tags: List[LabelTag]) -> Task:
29 """Add new Task to the database.
30
31 :param key: key that will identify such Task
32 :param name: name that will be used in the Use Interface for such Task
33 :param image_path: path to the image that represents such Task (used in User Interface)
34 :param datasets_keys: Keys of Datasets that Task takes Scans from
35 :param tags: Label Tags that will be created and assigned to Task
36 :return: Task object
37 """
38 with db_session() as session:
39 task = Task(key, name, image_path)
40 datasets = Dataset.query.filter(Dataset.key.in_(datasets_keys)).all() # type: ignore
41 task.datasets = datasets
42 task.available_tags = tags
43 session.add(task)
44 return task
45
46
47 def assign_label_tag(tag: LabelTag, task_key: str) -> None:
48 """Assign existing Label Tag to Task.
49
50 :param tag: tag that should be assigned to Task
51 :param task_key: key that will identify such Task
52 """
53 with db_session():
54 task = Task.query.filter(Task.key == task_key).one()
55 task.available_tags.append(tag)
56 task.save()
57
58
59 def unassign_label_tag(tag: LabelTag, task_key: str) -> None:
60 """Unassign Label Tag from Task.
61
62 :param tag: tag that should be unassigned from Task
63 :param task_key: key that will identify such Task
64 """
65 with db_session():
66 task = Task.query.filter(Task.key == task_key).one()
67 task.available_tags.remove(tag)
68 task.save()
69
70
71 def update(task_key: str, name: str = None, image_path: str = None, datasets_keys: List[str] = None) -> Task:
72 """Update Datasets where this Task will be available.
73
74 :param task_key: key that will identify such Task
75 :param name: (optional) new name for such Task
76 :param image_path: (optional) new path to the image which shows on the UI
77 :param datasets_keys: (optional) keys of Datasets which should have this Task
78 """
79 with db_session():
80 task = Task.query.filter(Task.key == task_key).one()
81 if name:
82 task.name = name
83 if image_path:
84 task.image_path = image_path
85 if datasets_keys:
86 datasets = Dataset.query.filter(Dataset.key.in_(datasets_keys)).all() # type: ignore
87 task.datasets = datasets
88 return task
89
90
91 def disable(task_key: str) -> None:
92 """Disable existing Task."""
93 disabling_query = Task.query.filter(Task.key == task_key)
94 updated = disabling_query.update({'disabled': True}, synchronize_session='fetch')
95 if not updated:
96 raise InternalErrorException(f'Task "{task_key}" was not disabled due to unknown database error.')
97
98
99 def enable(task_key: str) -> None:
100 """Enable existing Task."""
101 enabling_query = Task.query.filter(Task.key == task_key)
102 updated = enabling_query.update({'disabled': False}, synchronize_session='fetch')
103 if not updated:
104 raise InternalErrorException(f'Task "{task_key}" was not enabled due to unknown database error.')
105
[end of backend/medtagger/repositories/tasks.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/backend/medtagger/repositories/tasks.py b/backend/medtagger/repositories/tasks.py
--- a/backend/medtagger/repositories/tasks.py
+++ b/backend/medtagger/repositories/tasks.py
@@ -21,7 +21,7 @@
:return: Task object
"""
with db_session() as session:
- task = session.query(Task).filter(Task.key == key).one()
+ task = session.query(Task).filter(Task.key == key).first()
return task
| {"golden_diff": "diff --git a/backend/medtagger/repositories/tasks.py b/backend/medtagger/repositories/tasks.py\n--- a/backend/medtagger/repositories/tasks.py\n+++ b/backend/medtagger/repositories/tasks.py\n@@ -21,7 +21,7 @@\n :return: Task object\n \"\"\"\n with db_session() as session:\n- task = session.query(Task).filter(Task.key == key).one()\n+ task = session.query(Task).filter(Task.key == key).first()\n return task\n", "issue": "Getting random scan for non-existing task key results in 500\n## Current Behavior\r\n\r\nProviding non existing task key results in 500 HTTP code.\r\n\r\n## Expected Behavior\r\n\r\nBackend should handle this situation appropriate and return 404 HTTP code.\r\n\r\n## Steps to Reproduce the Problem\r\n\r\n 1. Perform a GET `scans/random?task=<task_key>` and provide non existing key.\r\n\n", "before_files": [{"content": "\"\"\"Module responsible for definition of TaskRepository.\"\"\"\nfrom typing import List\n\nfrom medtagger.database import db_session\nfrom medtagger.database.models import Task, LabelTag, Dataset\nfrom medtagger.exceptions import InternalErrorException\n\n\ndef get_all_tasks(include_disabled: bool = False) -> List[Task]:\n \"\"\"Fetch all tasks from database ordered by key.\"\"\"\n query = Task.query\n if not include_disabled:\n query = query.filter(~Task.disabled)\n return query.order_by(Task.key).all()\n\n\ndef get_task_by_key(key: str) -> Task:\n \"\"\"Fetch Task from database.\n\n :param key: key for a Task\n :return: Task object\n \"\"\"\n with db_session() as session:\n task = session.query(Task).filter(Task.key == key).one()\n return task\n\n\ndef add_task(key: str, name: str, image_path: str, datasets_keys: List[str], tags: List[LabelTag]) -> Task:\n \"\"\"Add new Task to the database.\n\n :param key: key that will identify such Task\n :param name: name that will be used in the Use Interface for such Task\n :param image_path: path to the image that represents such Task (used in User Interface)\n :param datasets_keys: Keys of Datasets that Task takes Scans from\n :param tags: Label Tags that will be created and assigned to Task\n :return: Task object\n \"\"\"\n with db_session() as session:\n task = Task(key, name, image_path)\n datasets = Dataset.query.filter(Dataset.key.in_(datasets_keys)).all() # type: ignore\n task.datasets = datasets\n task.available_tags = tags\n session.add(task)\n return task\n\n\ndef assign_label_tag(tag: LabelTag, task_key: str) -> None:\n \"\"\"Assign existing Label Tag to Task.\n\n :param tag: tag that should be assigned to Task\n :param task_key: key that will identify such Task\n \"\"\"\n with db_session():\n task = Task.query.filter(Task.key == task_key).one()\n task.available_tags.append(tag)\n task.save()\n\n\ndef unassign_label_tag(tag: LabelTag, task_key: str) -> None:\n \"\"\"Unassign Label Tag from Task.\n\n :param tag: tag that should be unassigned from Task\n :param task_key: key that will identify such Task\n \"\"\"\n with db_session():\n task = Task.query.filter(Task.key == task_key).one()\n task.available_tags.remove(tag)\n task.save()\n\n\ndef update(task_key: str, name: str = None, image_path: str = None, datasets_keys: List[str] = None) -> Task:\n \"\"\"Update Datasets where this Task will be available.\n\n :param task_key: key that will identify such Task\n :param name: (optional) new name for such Task\n :param image_path: (optional) new path to the image which shows on the UI\n :param datasets_keys: (optional) keys of Datasets which should have this Task\n \"\"\"\n with db_session():\n 
task = Task.query.filter(Task.key == task_key).one()\n if name:\n task.name = name\n if image_path:\n task.image_path = image_path\n if datasets_keys:\n datasets = Dataset.query.filter(Dataset.key.in_(datasets_keys)).all() # type: ignore\n task.datasets = datasets\n return task\n\n\ndef disable(task_key: str) -> None:\n \"\"\"Disable existing Task.\"\"\"\n disabling_query = Task.query.filter(Task.key == task_key)\n updated = disabling_query.update({'disabled': True}, synchronize_session='fetch')\n if not updated:\n raise InternalErrorException(f'Task \"{task_key}\" was not disabled due to unknown database error.')\n\n\ndef enable(task_key: str) -> None:\n \"\"\"Enable existing Task.\"\"\"\n enabling_query = Task.query.filter(Task.key == task_key)\n updated = enabling_query.update({'disabled': False}, synchronize_session='fetch')\n if not updated:\n raise InternalErrorException(f'Task \"{task_key}\" was not enabled due to unknown database error.')\n", "path": "backend/medtagger/repositories/tasks.py"}]} | 1,725 | 114 |
gh_patches_debug_471 | rasdani/github-patches | git_diff | pytorch__rl-402 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[BUG] Loggers registration
## Describe the bug
The [`__init__.py`](https://github.com/facebookresearch/rl/blob/main/torchrl/trainers/loggers/__init__.py) from loggers does not reference the implemented loggers. However, the [docstring from the trainer](https://github.com/facebookresearch/rl/blob/806733f27dfa9a878b75b079de9f18df83f54c2d/torchrl/trainers/helpers/trainers.py#L109) assumes that this kind of import can be run.
</issue>
<code>
[start of torchrl/trainers/loggers/__init__.py]
1 # Copyright (c) Meta Platforms, Inc. and affiliates.
2 #
3 # This source code is licensed under the MIT license found in the
4 # LICENSE file in the root directory of this source tree.
5
6 from .common import Logger
7
[end of torchrl/trainers/loggers/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/torchrl/trainers/loggers/__init__.py b/torchrl/trainers/loggers/__init__.py
--- a/torchrl/trainers/loggers/__init__.py
+++ b/torchrl/trainers/loggers/__init__.py
@@ -4,3 +4,6 @@
# LICENSE file in the root directory of this source tree.
from .common import Logger
+from .csv import CSVLogger
+from .tensorboard import TensorboardLogger
+from .wandb import WandbLogger
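With these re-exports in place, the package-level import that the trainer docstring assumes becomes valid. A minimal sketch (logger constructor arguments are deliberately omitted, since they are not shown here):

```python
# Assumes the re-exports added by the patch above; constructor args intentionally omitted.
from torchrl.trainers.loggers import CSVLogger, TensorboardLogger, WandbLogger
```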
| {"golden_diff": "diff --git a/torchrl/trainers/loggers/__init__.py b/torchrl/trainers/loggers/__init__.py\n--- a/torchrl/trainers/loggers/__init__.py\n+++ b/torchrl/trainers/loggers/__init__.py\n@@ -4,3 +4,6 @@\n # LICENSE file in the root directory of this source tree.\n \n from .common import Logger\n+from .csv import CSVLogger\n+from .tensorboard import TensorboardLogger\n+from .wandb import WandbLogger\n", "issue": "[BUG] Loggers registration\n## Describe the bug\r\n\r\nThe [`__init__.py`](https://github.com/facebookresearch/rl/blob/main/torchrl/trainers/loggers/__init__.py) from loggers does not reference the implemented loggers. However, the [docstring from the trainer](https://github.com/facebookresearch/rl/blob/806733f27dfa9a878b75b079de9f18df83f54c2d/torchrl/trainers/helpers/trainers.py#L109) assumes that this kind of import can be run.\n", "before_files": [{"content": "# Copyright (c) Meta Platforms, Inc. and affiliates.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nfrom .common import Logger\n", "path": "torchrl/trainers/loggers/__init__.py"}]} | 733 | 114 |
gh_patches_debug_16425 | rasdani/github-patches | git_diff | pantsbuild__pants-15979 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
`./pants run` crashes when `remote_cache_eager_fetch = false`
**Describe the bug**
After setting `remote_cache_eager_fetch = false`, a CI step that `./pants run`s a `pex_binary` has started (intermittently) failing with errors like:
```
Exception: Was not present in either the local or remote store: Digest { hash: Fingerprint<46683dec8706b7ac7c4f6011f68b4b8c10ad423ae8ba57745a6f5e01ba5b2f7b>, size_bytes: 11827 }
```
**Pants version**
`PANTS_SHA=5d8a328d72209863986c8959b20305505bc068ba`
**OS**
Linux
**Additional info**
Some BuildSense links where we've seen the failure:
* https://app.toolchain.com/organizations/color/repos/color/builds/pants_run_2022_06_27_19_29_46_827_37a3a5f2d8e440cca373a7fb4a5d3b51/
* https://app.toolchain.com/organizations/color/repos/color/builds/pants_run_2022_06_27_19_31_09_277_3793f53b54914135885f2ac951faf210/
* https://app.toolchain.com/organizations/color/repos/color/builds/pants_run_2022_06_27_19_29_46_827_37a3a5f2d8e440cca373a7fb4a5d3b51/
</issue>
<code>
[start of src/python/pants/base/exceptions.py]
1 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 from __future__ import annotations
5
6
7 class TargetDefinitionException(Exception):
8 """Indicates an invalid target definition.
9
10 :API: public
11 """
12
13 def __init__(self, target, msg):
14 """
15 :param target: the target in question
16 :param string msg: a description of the target misconfiguration
17 """
18 super().__init__(f"Invalid target {target}: {msg}")
19
20
21 class BuildConfigurationError(Exception):
22 """Indicates an error in a pants installation's configuration."""
23
24
25 class BackendConfigurationError(BuildConfigurationError):
26 """Indicates a plugin backend with a missing or malformed register module."""
27
28
29 class MappingError(Exception):
30 """Indicates an error mapping addressable objects."""
31
[end of src/python/pants/base/exceptions.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/python/pants/base/exceptions.py b/src/python/pants/base/exceptions.py
--- a/src/python/pants/base/exceptions.py
+++ b/src/python/pants/base/exceptions.py
@@ -3,6 +3,11 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from pants.engine.internals.native_engine import PyFailure
+
class TargetDefinitionException(Exception):
"""Indicates an invalid target definition.
@@ -28,3 +33,15 @@
class MappingError(Exception):
"""Indicates an error mapping addressable objects."""
+
+
+class NativeEngineFailure(Exception):
+ """A wrapper around a `Failure` instance.
+
+ TODO: This type is defined in Python because pyo3 doesn't support declaring Exceptions with
+ additional fields. See https://github.com/PyO3/pyo3/issues/295
+ """
+
+ def __init__(self, msg: str, failure: PyFailure) -> None:
+ super().__init__(msg)
+ self.failure = failure
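A brief sketch of how calling code might consume the new wrapper; `run_scheduler` and `handle_failure` are placeholder names, not Pants APIs:

```python
# Illustrative only: unwrap the PyFailure carried by the wrapper added in the patch above.
from pants.base.exceptions import NativeEngineFailure

try:
    run_scheduler()  # placeholder for a call into the Rust engine
except NativeEngineFailure as e:
    handle_failure(e.failure)  # e.failure is the attached PyFailure
```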
| {"golden_diff": "diff --git a/src/python/pants/base/exceptions.py b/src/python/pants/base/exceptions.py\n--- a/src/python/pants/base/exceptions.py\n+++ b/src/python/pants/base/exceptions.py\n@@ -3,6 +3,11 @@\n \n from __future__ import annotations\n \n+from typing import TYPE_CHECKING\n+\n+if TYPE_CHECKING:\n+ from pants.engine.internals.native_engine import PyFailure\n+\n \n class TargetDefinitionException(Exception):\n \"\"\"Indicates an invalid target definition.\n@@ -28,3 +33,15 @@\n \n class MappingError(Exception):\n \"\"\"Indicates an error mapping addressable objects.\"\"\"\n+\n+\n+class NativeEngineFailure(Exception):\n+ \"\"\"A wrapper around a `Failure` instance.\n+\n+ TODO: This type is defined in Python because pyo3 doesn't support declaring Exceptions with\n+ additional fields. See https://github.com/PyO3/pyo3/issues/295\n+ \"\"\"\n+\n+ def __init__(self, msg: str, failure: PyFailure) -> None:\n+ super().__init__(msg)\n+ self.failure = failure\n", "issue": "`./pants run` crashes when `remote_cache_eager_fetch = false`\n**Describe the bug**\r\n\r\nAfter setting `remote_cache_eager_fetch = false`, a CI step that `./pants run`s a `pex_binary` has started (intermittently) failing with errors like:\r\n```\r\n Exception: Was not present in either the local or remote store: Digest { hash: Fingerprint<46683dec8706b7ac7c4f6011f68b4b8c10ad423ae8ba57745a6f5e01ba5b2f7b>, size_bytes: 11827 }\r\n```\r\n\r\n**Pants version**\r\n\r\n`PANTS_SHA=5d8a328d72209863986c8959b20305505bc068ba`\r\n\r\n**OS**\r\n\r\nLinux\r\n\r\n**Additional info**\r\n\r\nSome BuildSense links where we've seen the failure:\r\n* https://app.toolchain.com/organizations/color/repos/color/builds/pants_run_2022_06_27_19_29_46_827_37a3a5f2d8e440cca373a7fb4a5d3b51/\r\n* https://app.toolchain.com/organizations/color/repos/color/builds/pants_run_2022_06_27_19_31_09_277_3793f53b54914135885f2ac951faf210/\r\n* https://app.toolchain.com/organizations/color/repos/color/builds/pants_run_2022_06_27_19_29_46_827_37a3a5f2d8e440cca373a7fb4a5d3b51/\r\n\n", "before_files": [{"content": "# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\nfrom __future__ import annotations\n\n\nclass TargetDefinitionException(Exception):\n \"\"\"Indicates an invalid target definition.\n\n :API: public\n \"\"\"\n\n def __init__(self, target, msg):\n \"\"\"\n :param target: the target in question\n :param string msg: a description of the target misconfiguration\n \"\"\"\n super().__init__(f\"Invalid target {target}: {msg}\")\n\n\nclass BuildConfigurationError(Exception):\n \"\"\"Indicates an error in a pants installation's configuration.\"\"\"\n\n\nclass BackendConfigurationError(BuildConfigurationError):\n \"\"\"Indicates a plugin backend with a missing or malformed register module.\"\"\"\n\n\nclass MappingError(Exception):\n \"\"\"Indicates an error mapping addressable objects.\"\"\"\n", "path": "src/python/pants/base/exceptions.py"}]} | 1,222 | 244 |
gh_patches_debug_2672 | rasdani/github-patches | git_diff | e-valuation__EvaP-1666 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Make Typescript code Prettier
We should add automated formatting for our typescript files. I think https://prettier.io/ is pretty good, but the choice is open for discussion. The formatting should be done in `manage.py format` and be checked in CI.
</issue>
<code>
[start of evap/evaluation/management/commands/format.py]
1 import subprocess # nosec
2
3 from django.core.management.base import BaseCommand
4
5
6 class Command(BaseCommand):
7 args = ""
8 help = "Runs the code formatter"
9 requires_migrations_checks = False
10
11 def handle(self, *args, **options):
12 subprocess.run(["black", "evap"], check=False) # nosec
13 subprocess.run(["isort", "."], check=False) # nosec
14
[end of evap/evaluation/management/commands/format.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/evap/evaluation/management/commands/format.py b/evap/evaluation/management/commands/format.py
--- a/evap/evaluation/management/commands/format.py
+++ b/evap/evaluation/management/commands/format.py
@@ -11,3 +11,4 @@
def handle(self, *args, **options):
subprocess.run(["black", "evap"], check=False) # nosec
subprocess.run(["isort", "."], check=False) # nosec
+ subprocess.run(["npx", "prettier", "--write", "evap/static/ts/src"], check=False) # nosec
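For the CI half of the request, the same tool supports a check-only mode; a sketch of what such a step could look like (not part of the patch):

```python
# Assumed CI usage: --check exits non-zero when files are not formatted, failing the job.
import subprocess

subprocess.run(["npx", "prettier", "--check", "evap/static/ts/src"], check=True)  # nosec
```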
| {"golden_diff": "diff --git a/evap/evaluation/management/commands/format.py b/evap/evaluation/management/commands/format.py\n--- a/evap/evaluation/management/commands/format.py\n+++ b/evap/evaluation/management/commands/format.py\n@@ -11,3 +11,4 @@\n def handle(self, *args, **options):\n subprocess.run([\"black\", \"evap\"], check=False) # nosec\n subprocess.run([\"isort\", \".\"], check=False) # nosec\n+ subprocess.run([\"npx\", \"prettier\", \"--write\", \"evap/static/ts/src\"], check=False) # nosec\n", "issue": "Make Typescript code Prettier\nWe should add automated formatting for our typescript files. I think https://prettier.io/ is pretty good, but the choice is open for discussion. The formatting should be done in `manage.py format` and be checked in CI.\n", "before_files": [{"content": "import subprocess # nosec\n\nfrom django.core.management.base import BaseCommand\n\n\nclass Command(BaseCommand):\n args = \"\"\n help = \"Runs the code formatter\"\n requires_migrations_checks = False\n\n def handle(self, *args, **options):\n subprocess.run([\"black\", \"evap\"], check=False) # nosec\n subprocess.run([\"isort\", \".\"], check=False) # nosec\n", "path": "evap/evaluation/management/commands/format.py"}]} | 714 | 146 |
gh_patches_debug_29734 | rasdani/github-patches | git_diff | bridgecrewio__checkov-3007 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
CKV_AZURE_116 fails with Terraform Azure provider >= v.2.97.0
**Describe the issue**
This issue is regarding CKV_AZURE_116.
Since v2.97.0 of the Azure Terraform provider, the Azure policies add-on is enabled by setting
```terraform
azure_policy_enabled = true
```
as a top-level property in a `azurerm_kubernetes_cluster` resource definition, instead of the following
```terraform
addon_profile {
azure_policy {
enabled = true
}
}
```
The check should be updated to reflect the change.
**Examples**
```terraform
resource "azurerm_kubernetes_cluster" "example" {
azure_policy_enabled = true
}
```
should pass the check for CKV_AZURE_116.
**Version:**
2.0.1160
**Additional context**
None
I will submit a PR to fix this.
</issue>
<code>
[start of checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py]
1 from checkov.common.models.enums import CheckCategories
2 from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck
3
4
5 class AKSUsesAzurePoliciesAddon(BaseResourceValueCheck):
6 def __init__(self):
7 name = "Ensure that AKS uses Azure Policies Add-on"
8 id = "CKV_AZURE_116"
9 supported_resources = ['azurerm_kubernetes_cluster']
10 categories = [CheckCategories.NETWORKING]
11 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
12
13 def get_inspected_key(self):
14 return "addon_profile/[0]/azure_policy/[0]/enabled"
15
16
17 check = AKSUsesAzurePoliciesAddon()
18
[end of checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py b/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py
--- a/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py
+++ b/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py
@@ -1,8 +1,10 @@
-from checkov.common.models.enums import CheckCategories
-from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck
+from typing import Dict, List, Any
+from checkov.common.models.enums import CheckCategories, CheckResult
+from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
-class AKSUsesAzurePoliciesAddon(BaseResourceValueCheck):
+
+class AKSUsesAzurePoliciesAddon(BaseResourceCheck):
def __init__(self):
name = "Ensure that AKS uses Azure Policies Add-on"
id = "CKV_AZURE_116"
@@ -10,8 +12,20 @@
categories = [CheckCategories.NETWORKING]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
- def get_inspected_key(self):
- return "addon_profile/[0]/azure_policy/[0]/enabled"
+ def scan_resource_conf(self, conf: Dict[str, List[Any]]) -> CheckResult:
+ # since Azure provider v2.97.0
+ azure_policy_enabled = conf.get("azure_policy_enabled", [None])[0]
+ if azure_policy_enabled:
+ self.evaluated_keys = ["azure_policy_enabled"]
+ return CheckResult.PASSED
+ # up to and including Azure provider v2.96.0
+ self.evaluated_keys = ["addon_profile/[0]/azure_policy/[0]/enabled"]
+ addon_profile = conf.get("addon_profile", [None])[0]
+ if addon_profile and isinstance(addon_profile, dict):
+ azure_policy = addon_profile.get("azure_policy", [None])[0]
+ if azure_policy and isinstance(azure_policy, dict) and azure_policy.get("enabled", [None])[0]:
+ return CheckResult.PASSED
+ return CheckResult.FAILED
check = AKSUsesAzurePoliciesAddon()
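For reference, the two provider-dependent conf shapes the rewritten check is meant to accept; the values below are illustrative, not taken from a real plan:

```python
# Illustrative conf dicts as Checkov would pass them to scan_resource_conf.
conf_new = {"azure_policy_enabled": [True]}  # Azure provider >= 2.97.0
conf_old = {"addon_profile": [{"azure_policy": [{"enabled": [True]}]}]}  # provider <= 2.96.0
# Both shapes should now evaluate to CheckResult.PASSED.
```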
| {"golden_diff": "diff --git a/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py b/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py\n--- a/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py\n+++ b/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py\n@@ -1,8 +1,10 @@\n-from checkov.common.models.enums import CheckCategories\n-from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n+from typing import Dict, List, Any\n \n+from checkov.common.models.enums import CheckCategories, CheckResult\n+from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\n \n-class AKSUsesAzurePoliciesAddon(BaseResourceValueCheck):\n+\n+class AKSUsesAzurePoliciesAddon(BaseResourceCheck):\n def __init__(self):\n name = \"Ensure that AKS uses Azure Policies Add-on\"\n id = \"CKV_AZURE_116\"\n@@ -10,8 +12,20 @@\n categories = [CheckCategories.NETWORKING]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n \n- def get_inspected_key(self):\n- return \"addon_profile/[0]/azure_policy/[0]/enabled\"\n+ def scan_resource_conf(self, conf: Dict[str, List[Any]]) -> CheckResult:\n+ # since Azure provider v2.97.0\n+ azure_policy_enabled = conf.get(\"azure_policy_enabled\", [None])[0]\n+ if azure_policy_enabled:\n+ self.evaluated_keys = [\"azure_policy_enabled\"]\n+ return CheckResult.PASSED\n+ # up to and including Azure provider v2.96.0\n+ self.evaluated_keys = [\"addon_profile/[0]/azure_policy/[0]/enabled\"]\n+ addon_profile = conf.get(\"addon_profile\", [None])[0]\n+ if addon_profile and isinstance(addon_profile, dict):\n+ azure_policy = addon_profile.get(\"azure_policy\", [None])[0]\n+ if azure_policy and isinstance(azure_policy, dict) and azure_policy.get(\"enabled\", [None])[0]:\n+ return CheckResult.PASSED\n+ return CheckResult.FAILED\n \n \n check = AKSUsesAzurePoliciesAddon()\n", "issue": "CKV_AZURE_116 fails with Terraform Azure provider >= v.2.97.0\n**Describe the issue**\r\nThis issue is regarding CKV_AZURE_116.\r\nSince v2.97.0 of the Azure Terraform provider, the Azure policies add-on is enabled by setting\r\n\r\n```terraform\r\nazure_policy_enabled = true\r\n```\r\n\r\nas a top-level property in a `azurerm_kubernetes_cluster` resource definition, instead of the following\r\n\r\n```terraform\r\naddon_profile {\r\n azure_policy {\r\n enabled = true\r\n }\r\n}\r\n```\r\nThe check should be updated to reflect the change.\r\n\r\n**Examples**\r\n```terraform\r\nresource \"azurerm_kubernetes_cluster\" \"example\" {\r\n azure_policy_enabled = true\r\n}\r\n```\r\nshould pass the check for CKV_AZURE_116.\r\n\r\n**Version:**\r\n2.0.1160\r\n\r\n**Additional context**\r\nNone\r\n\r\nI will submit a PR to fix this.\n", "before_files": [{"content": "from checkov.common.models.enums import CheckCategories\nfrom checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n\n\nclass AKSUsesAzurePoliciesAddon(BaseResourceValueCheck):\n def __init__(self):\n name = \"Ensure that AKS uses Azure Policies Add-on\"\n id = \"CKV_AZURE_116\"\n supported_resources = ['azurerm_kubernetes_cluster']\n categories = [CheckCategories.NETWORKING]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def get_inspected_key(self):\n return \"addon_profile/[0]/azure_policy/[0]/enabled\"\n\n\ncheck = AKSUsesAzurePoliciesAddon()\n", "path": 
"checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py"}]} | 949 | 510 |
gh_patches_debug_15245 | rasdani/github-patches | git_diff | kornia__kornia-2232 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Cleanup on setup metadata
_Originally posted by @johnnv1 in https://github.com/kornia/kornia/pull/2225#discussion_r1117693700_
-----
right now I think we have some dead lines in the setup, for example:
- https://github.com/kornia/kornia/blob/master/setup.py#L16
- https://github.com/kornia/kornia/blob/master/setup.cfg#L57
- This isn't used anymore https://github.com/kornia/kornia/tree/master/packaging
</issue>
<code>
[start of kornia/__init__.py]
1 # NOTE: kornia filters and geometry must go first since are the core of the library
2 # and by changing the import order you might get into a circular dependencies issue.
3 from . import filters
4 from . import geometry
5 from . import grad_estimator
6
7 # import the other modules for convenience
8 from . import augmentation, color, contrib, core, enhance, feature, io, losses, metrics, morphology, tracking, utils, x
9
10 # NOTE: we are going to expose to top level very few things
11 from kornia.constants import pi
12 from kornia.testing import xla_is_available
13 from kornia.utils import eye_like, vec_like, create_meshgrid, image_to_tensor, tensor_to_image
14
15 # Version variable
16 import sys
17
18 if sys.version_info >= (3, 8): # pragma: >=3.8 cover
19 import importlib.metadata as importlib_metadata
20 else: # pragma: <3.8 cover
21 import importlib_metadata
22
23 __version__ = importlib_metadata.version('kornia')
24
[end of kornia/__init__.py]
[start of kornia/utils/_compat.py]
1 from typing import TYPE_CHECKING, Callable, ContextManager, List, Optional, Tuple, TypeVar
2
3 import torch
4 from torch import Tensor
5
6 from packaging import version
7
8
9 def torch_version() -> str:
10 """Parse the `torch.__version__` variable and removes +cu*/cpu."""
11 return torch.__version__.split('+')[0]
12
13
14 def torch_version_lt(major: int, minor: int, patch: int) -> bool:
15 _version = version.parse(torch_version())
16 return _version < version.parse(f"{major}.{minor}.{patch}")
17
18
19 def torch_version_le(major: int, minor: int, patch: int) -> bool:
20 _version = version.parse(torch_version())
21 return _version <= version.parse(f"{major}.{minor}.{patch}")
22
23
24 def torch_version_ge(major: int, minor: int, patch: Optional[int] = None) -> bool:
25 _version = version.parse(torch_version())
26 if patch is None:
27 return _version >= version.parse(f"{major}.{minor}")
28 else:
29 return _version >= version.parse(f"{major}.{minor}.{patch}")
30
31
32 if TYPE_CHECKING:
33 # TODO: remove this branch when kornia relies on torch >= 1.10.0
34 def torch_meshgrid(tensors: List[Tensor], indexing: Optional[str] = None) -> Tuple[Tensor, ...]:
35 ...
36
37 else:
38 if torch_version_ge(1, 10, 0):
39
40 def torch_meshgrid(tensors: List[Tensor], indexing: str):
41 return torch.meshgrid(tensors, indexing=indexing)
42
43 else:
44 # TODO: remove this branch when kornia relies on torch >= 1.10.0
45 def torch_meshgrid(tensors: List[Tensor], indexing: str):
46 return torch.meshgrid(tensors)
47
48
49 if TYPE_CHECKING:
50 # TODO: remove this branch when kornia relies on torch >= 1.10.0
51 _T = TypeVar('_T')
52 torch_inference_mode: Callable[..., ContextManager[_T]]
53 else:
54 if torch_version_ge(1, 10, 0):
55 torch_inference_mode = torch.inference_mode
56 else:
57 # TODO: remove this branch when kornia relies on torch >= 1.10.0
58 torch_inference_mode = torch.no_grad
59
[end of kornia/utils/_compat.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kornia/__init__.py b/kornia/__init__.py
--- a/kornia/__init__.py
+++ b/kornia/__init__.py
@@ -13,11 +13,4 @@
from kornia.utils import eye_like, vec_like, create_meshgrid, image_to_tensor, tensor_to_image
# Version variable
-import sys
-
-if sys.version_info >= (3, 8): # pragma: >=3.8 cover
- import importlib.metadata as importlib_metadata
-else: # pragma: <3.8 cover
- import importlib_metadata
-
-__version__ = importlib_metadata.version('kornia')
+__version__ = "0.6.11.dev"
diff --git a/kornia/utils/_compat.py b/kornia/utils/_compat.py
--- a/kornia/utils/_compat.py
+++ b/kornia/utils/_compat.py
@@ -1,9 +1,8 @@
from typing import TYPE_CHECKING, Callable, ContextManager, List, Optional, Tuple, TypeVar
import torch
-from torch import Tensor
-
from packaging import version
+from torch import Tensor
def torch_version() -> str:
| {"golden_diff": "diff --git a/kornia/__init__.py b/kornia/__init__.py\n--- a/kornia/__init__.py\n+++ b/kornia/__init__.py\n@@ -13,11 +13,4 @@\n from kornia.utils import eye_like, vec_like, create_meshgrid, image_to_tensor, tensor_to_image\n \n # Version variable\n-import sys\n-\n-if sys.version_info >= (3, 8): # pragma: >=3.8 cover\n- import importlib.metadata as importlib_metadata\n-else: # pragma: <3.8 cover\n- import importlib_metadata\n-\n-__version__ = importlib_metadata.version('kornia')\n+__version__ = \"0.6.11.dev\"\ndiff --git a/kornia/utils/_compat.py b/kornia/utils/_compat.py\n--- a/kornia/utils/_compat.py\n+++ b/kornia/utils/_compat.py\n@@ -1,9 +1,8 @@\n from typing import TYPE_CHECKING, Callable, ContextManager, List, Optional, Tuple, TypeVar\n \n import torch\n-from torch import Tensor\n-\n from packaging import version\n+from torch import Tensor\n \n \n def torch_version() -> str:\n", "issue": "Cleanup on setup metadata\n_Originally posted by @johnnv1 in https://github.com/kornia/kornia/pull/2225#discussion_r1117693700_\r\n\r\n\r\n-----\r\nright now I think we have some deadlines in the setup, for example:\r\n- https://github.com/kornia/kornia/blob/master/setup.py#L16\r\n- https://github.com/kornia/kornia/blob/master/setup.cfg#L57\r\n- This isn't used anymore https://github.com/kornia/kornia/tree/master/packaging \n", "before_files": [{"content": "# NOTE: kornia filters and geometry must go first since are the core of the library\n# and by changing the import order you might get into a circular dependencies issue.\nfrom . import filters\nfrom . import geometry\nfrom . import grad_estimator\n\n# import the other modules for convenience\nfrom . import augmentation, color, contrib, core, enhance, feature, io, losses, metrics, morphology, tracking, utils, x\n\n# NOTE: we are going to expose to top level very few things\nfrom kornia.constants import pi\nfrom kornia.testing import xla_is_available\nfrom kornia.utils import eye_like, vec_like, create_meshgrid, image_to_tensor, tensor_to_image\n\n# Version variable\nimport sys\n\nif sys.version_info >= (3, 8): # pragma: >=3.8 cover\n import importlib.metadata as importlib_metadata\nelse: # pragma: <3.8 cover\n import importlib_metadata\n\n__version__ = importlib_metadata.version('kornia')\n", "path": "kornia/__init__.py"}, {"content": "from typing import TYPE_CHECKING, Callable, ContextManager, List, Optional, Tuple, TypeVar\n\nimport torch\nfrom torch import Tensor\n\nfrom packaging import version\n\n\ndef torch_version() -> str:\n \"\"\"Parse the `torch.__version__` variable and removes +cu*/cpu.\"\"\"\n return torch.__version__.split('+')[0]\n\n\ndef torch_version_lt(major: int, minor: int, patch: int) -> bool:\n _version = version.parse(torch_version())\n return _version < version.parse(f\"{major}.{minor}.{patch}\")\n\n\ndef torch_version_le(major: int, minor: int, patch: int) -> bool:\n _version = version.parse(torch_version())\n return _version <= version.parse(f\"{major}.{minor}.{patch}\")\n\n\ndef torch_version_ge(major: int, minor: int, patch: Optional[int] = None) -> bool:\n _version = version.parse(torch_version())\n if patch is None:\n return _version >= version.parse(f\"{major}.{minor}\")\n else:\n return _version >= version.parse(f\"{major}.{minor}.{patch}\")\n\n\nif TYPE_CHECKING:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n def torch_meshgrid(tensors: List[Tensor], indexing: Optional[str] = None) -> Tuple[Tensor, ...]:\n ...\n\nelse:\n if torch_version_ge(1, 10, 0):\n\n def 
torch_meshgrid(tensors: List[Tensor], indexing: str):\n return torch.meshgrid(tensors, indexing=indexing)\n\n else:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n def torch_meshgrid(tensors: List[Tensor], indexing: str):\n return torch.meshgrid(tensors)\n\n\nif TYPE_CHECKING:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n _T = TypeVar('_T')\n torch_inference_mode: Callable[..., ContextManager[_T]]\nelse:\n if torch_version_ge(1, 10, 0):\n torch_inference_mode = torch.inference_mode\n else:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n torch_inference_mode = torch.no_grad\n", "path": "kornia/utils/_compat.py"}]} | 1,566 | 266 |
gh_patches_debug_20585 | rasdani/github-patches | git_diff | e-valuation__EvaP-794 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Deal with update.sh
update.sh is out of date. We can either
- remove it
- update it
- replace it with something else
having a script would be pretty cool to document what needs to be done when updating a production server.
maybe this can go into a management command.
an idea for additional automation would be a git post-commit-hook that checks out the release branch and updates everything when someone pushes to the production server via ssh. logs of the update could be sent via email to the admins.
</issue>
<code>
[start of evap/evaluation/management/commands/refresh_results_cache.py]
1 from django.core.management.base import BaseCommand
2 from django.core.cache import cache
3
4 from evap.evaluation.models import Course
5 from evap.evaluation.tools import calculate_results
6
7
8 class Command(BaseCommand):
9 args = ''
10 help = 'Clears the cache and pre-warms it with the results of all courses'
11
12 def handle(self, *args, **options):
13 self.stdout.write("Clearing cache...")
14 cache.clear()
15
16 self.stdout.write("Calculating results for all courses...")
17 for course in Course.objects.all():
18 calculate_results(course)
19
20 self.stdout.write("Done with updating cache.")
21
[end of evap/evaluation/management/commands/refresh_results_cache.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/evap/evaluation/management/commands/refresh_results_cache.py b/evap/evaluation/management/commands/refresh_results_cache.py
--- a/evap/evaluation/management/commands/refresh_results_cache.py
+++ b/evap/evaluation/management/commands/refresh_results_cache.py
@@ -1,4 +1,5 @@
from django.core.management.base import BaseCommand
+from django.core.serializers.base import ProgressBar
from django.core.cache import cache
from evap.evaluation.models import Course
@@ -12,9 +13,15 @@
def handle(self, *args, **options):
self.stdout.write("Clearing cache...")
cache.clear()
+ total_count = Course.objects.count()
self.stdout.write("Calculating results for all courses...")
- for course in Course.objects.all():
+
+ self.stdout.ending = None
+ progress_bar = ProgressBar(self.stdout, total_count)
+
+ for counter, course in enumerate(Course.objects.all()):
+ progress_bar.update(counter + 1)
calculate_results(course)
- self.stdout.write("Done with updating cache.")
+ self.stdout.write("Done with updating cache.\n")
| {"golden_diff": "diff --git a/evap/evaluation/management/commands/refresh_results_cache.py b/evap/evaluation/management/commands/refresh_results_cache.py\n--- a/evap/evaluation/management/commands/refresh_results_cache.py\n+++ b/evap/evaluation/management/commands/refresh_results_cache.py\n@@ -1,4 +1,5 @@\n from django.core.management.base import BaseCommand\n+from django.core.serializers.base import ProgressBar\n from django.core.cache import cache\n \n from evap.evaluation.models import Course\n@@ -12,9 +13,15 @@\n def handle(self, *args, **options):\n self.stdout.write(\"Clearing cache...\")\n cache.clear()\n+ total_count = Course.objects.count()\n \n self.stdout.write(\"Calculating results for all courses...\")\n- for course in Course.objects.all():\n+\n+ self.stdout.ending = None\n+ progress_bar = ProgressBar(self.stdout, total_count)\n+\n+ for counter, course in enumerate(Course.objects.all()):\n+ progress_bar.update(counter + 1)\n calculate_results(course)\n \n- self.stdout.write(\"Done with updating cache.\")\n+ self.stdout.write(\"Done with updating cache.\\n\")\n", "issue": "Deal with update.sh\nupdate.sh is out of date. We can either\n- remove it\n- update it\n- replace it with something else\n\nhaving a script would be pretty cool to document what needs to be done when updating a production server.\n\nmaybe this can go into a management command.\n\nan idea for additional automation would be a git post-commit-hook that checks out the release branch and updates everything when someone pushes to the production server via ssh. logs of the update could be sent via email to the admins.\n\nDeal with update.sh\nupdate.sh is out of date. We can either\n- remove it\n- update it\n- replace it with something else\n\nhaving a script would be pretty cool to document what needs to be done when updating a production server.\n\nmaybe this can go into a management command.\n\nan idea for additional automation would be a git post-commit-hook that checks out the release branch and updates everything when someone pushes to the production server via ssh. logs of the update could be sent via email to the admins.\n\n", "before_files": [{"content": "from django.core.management.base import BaseCommand\nfrom django.core.cache import cache\n\nfrom evap.evaluation.models import Course\nfrom evap.evaluation.tools import calculate_results\n\n\nclass Command(BaseCommand):\n args = ''\n help = 'Clears the cache and pre-warms it with the results of all courses'\n\n def handle(self, *args, **options):\n self.stdout.write(\"Clearing cache...\")\n cache.clear()\n\n self.stdout.write(\"Calculating results for all courses...\")\n for course in Course.objects.all():\n calculate_results(course)\n\n self.stdout.write(\"Done with updating cache.\")\n", "path": "evap/evaluation/management/commands/refresh_results_cache.py"}]} | 922 | 258 |
gh_patches_debug_14107 | rasdani/github-patches | git_diff | spack__spack-17427 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Errors untaring source tarballs Docker
Some source tarballs are created in a way that tries to preserve user and group perms.
Yes, it's bad to create a source tarball for release with these properties -- but I suspect it's easy for this to go undetected until you run in a security-constrained Linux.
Cython is one example I hit when trying to build in a Docker container using ubuntu:latest
```
/bin/tar: Cython-0.25.2/.gitrev: Cannot change ownership to uid 96258, gid 5000: Invalid argument
.... (many many more errors)
/bin/tar: Cython-0.25.2/USAGE.txt: Cannot change ownership to uid 96258, gid 5000: Invalid argument
/bin/tar: Cython-0.25.2: Cannot change ownership to uid 96258, gid 5000: Invalid argument
/bin/tar: Exiting with failure status due to previous errors
```
The tar errors cause spack to stop. I am not sure if there is any way to defensively avoid this, but if it's possible it would be a good enhancement to spack.
</issue>
<code>
[start of lib/spack/spack/util/compression.py]
1 # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
2 # Spack Project Developers. See the top-level COPYRIGHT file for details.
3 #
4 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
5
6 import re
7 import os
8 from itertools import product
9 from spack.util.executable import which
10
11 # Supported archive extensions.
12 PRE_EXTS = ["tar", "TAR"]
13 EXTS = ["gz", "bz2", "xz", "Z"]
14 NOTAR_EXTS = ["zip", "tgz", "tbz2", "txz"]
15
16 # Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz
17 ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(
18 PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS
19
20
21 def allowed_archive(path):
22 return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
23
24
25 def decompressor_for(path, extension=None):
26 """Get the appropriate decompressor for a path."""
27 if ((extension and re.match(r'\.?zip$', extension)) or
28 path.endswith('.zip')):
29 unzip = which('unzip', required=True)
30 unzip.add_default_arg('-q')
31 return unzip
32 if extension and re.match(r'gz', extension):
33 gunzip = which('gunzip', required=True)
34 return gunzip
35 if extension and re.match(r'bz2', extension):
36 bunzip2 = which('bunzip2', required=True)
37 return bunzip2
38 tar = which('tar', required=True)
39 tar.add_default_arg('-xf')
40 return tar
41
42
43 def strip_extension(path):
44 """Get the part of a path that does not include its compressed
45 type extension."""
46 for type in ALLOWED_ARCHIVE_TYPES:
47 suffix = r'\.%s$' % type
48 if re.search(suffix, path):
49 return re.sub(suffix, "", path)
50 return path
51
52
53 def extension(path):
54 """Get the archive extension for a path."""
55 if path is None:
56 raise ValueError("Can't call extension() on None")
57
58 # Strip sourceforge suffix.
59 if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path):
60 path = os.path.dirname(path)
61
62 for t in ALLOWED_ARCHIVE_TYPES:
63 suffix = r'\.%s$' % t
64 if re.search(suffix, path):
65 return t
66 return None
67
[end of lib/spack/spack/util/compression.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py
--- a/lib/spack/spack/util/compression.py
+++ b/lib/spack/spack/util/compression.py
@@ -14,7 +14,7 @@
NOTAR_EXTS = ["zip", "tgz", "tbz2", "txz"]
# Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz
-ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(
+ALLOWED_ARCHIVE_TYPES = [".".join(ext) for ext in product(
PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS
@@ -36,7 +36,7 @@
bunzip2 = which('bunzip2', required=True)
return bunzip2
tar = which('tar', required=True)
- tar.add_default_arg('-xf')
+ tar.add_default_arg('-oxf')
return tar
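GNU tar's `-o` is `--no-same-owner` in extract mode, so the returned decompressor stops trying to restore the archived uid/gid. A short sketch of the resulting behaviour (archive name reused from the report above):

```python
# Sketch only: the decompressor now extracts files owned by the invoking user.
from spack.util.compression import decompressor_for

untar = decompressor_for('Cython-0.25.2.tar.gz')  # tar with '-oxf' as its default argument
untar('Cython-0.25.2.tar.gz')
```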
| {"golden_diff": "diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py\n--- a/lib/spack/spack/util/compression.py\n+++ b/lib/spack/spack/util/compression.py\n@@ -14,7 +14,7 @@\n NOTAR_EXTS = [\"zip\", \"tgz\", \"tbz2\", \"txz\"]\n \n # Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz\n-ALLOWED_ARCHIVE_TYPES = [\".\".join(l) for l in product(\n+ALLOWED_ARCHIVE_TYPES = [\".\".join(ext) for ext in product(\n PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS\n \n \n@@ -36,7 +36,7 @@\n bunzip2 = which('bunzip2', required=True)\n return bunzip2\n tar = which('tar', required=True)\n- tar.add_default_arg('-xf')\n+ tar.add_default_arg('-oxf')\n return tar\n", "issue": "Errors untaring source tarballs Docker\nSome source tarballs are created in a way that tries to preserve user and group perms. \r\nYes, it's a bad to create a source tarball for release with these properties -- but I suspect it's easy to go undetected until you run in a security constrained linux.\r\n\r\nCython is one example example I hit when trying to build in a Docker container using ubuntu:latest\r\n\r\n```\r\n/bin/tar: Cython-0.25.2/.gitrev: Cannot change ownership to uid 96258, gid 5000: Invalid argument\r\n.... (many many more errors)\r\n/bin/tar: Cython-0.25.2/USAGE.txt: Cannot change ownership to uid 96258, gid 5000: Invalid argument\r\n/bin/tar: Cython-0.25.2: Cannot change ownership to uid 96258, gid 5000: Invalid argument\r\n/bin/tar: Exiting with failure status due to previous errors\r\n```\r\nThe tar errors cause spack to stop. I am not sure if there is anyway to defensively avoid this, but if its possible it would be a good enhancement to spack. \r\n\n", "before_files": [{"content": "# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other\n# Spack Project Developers. 
See the top-level COPYRIGHT file for details.\n#\n# SPDX-License-Identifier: (Apache-2.0 OR MIT)\n\nimport re\nimport os\nfrom itertools import product\nfrom spack.util.executable import which\n\n# Supported archive extensions.\nPRE_EXTS = [\"tar\", \"TAR\"]\nEXTS = [\"gz\", \"bz2\", \"xz\", \"Z\"]\nNOTAR_EXTS = [\"zip\", \"tgz\", \"tbz2\", \"txz\"]\n\n# Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz\nALLOWED_ARCHIVE_TYPES = [\".\".join(l) for l in product(\n PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS\n\n\ndef allowed_archive(path):\n return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)\n\n\ndef decompressor_for(path, extension=None):\n \"\"\"Get the appropriate decompressor for a path.\"\"\"\n if ((extension and re.match(r'\\.?zip$', extension)) or\n path.endswith('.zip')):\n unzip = which('unzip', required=True)\n unzip.add_default_arg('-q')\n return unzip\n if extension and re.match(r'gz', extension):\n gunzip = which('gunzip', required=True)\n return gunzip\n if extension and re.match(r'bz2', extension):\n bunzip2 = which('bunzip2', required=True)\n return bunzip2\n tar = which('tar', required=True)\n tar.add_default_arg('-xf')\n return tar\n\n\ndef strip_extension(path):\n \"\"\"Get the part of a path that does not include its compressed\n type extension.\"\"\"\n for type in ALLOWED_ARCHIVE_TYPES:\n suffix = r'\\.%s$' % type\n if re.search(suffix, path):\n return re.sub(suffix, \"\", path)\n return path\n\n\ndef extension(path):\n \"\"\"Get the archive extension for a path.\"\"\"\n if path is None:\n raise ValueError(\"Can't call extension() on None\")\n\n # Strip sourceforge suffix.\n if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path):\n path = os.path.dirname(path)\n\n for t in ALLOWED_ARCHIVE_TYPES:\n suffix = r'\\.%s$' % t\n if re.search(suffix, path):\n return t\n return None\n", "path": "lib/spack/spack/util/compression.py"}]} | 1,475 | 228 |
gh_patches_debug_23113 | rasdani/github-patches | git_diff | sopel-irc__sopel-1848 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
xkcd outputs link even if triggered by one
The `xkcd` plugin shouldn't output a link to the comic if it was triggered by a link.
See `wikipedia`, for example. This is an anti-cycling measure in case multiple bots handling the same link(s) are present in the channel.
</issue>
<code>
[start of sopel/modules/xkcd.py]
1 # coding=utf-8
2 """
3 xkcd.py - Sopel xkcd Module
4 Copyright 2010, Michael Yanovich (yanovich.net), and Morgan Goose
5 Copyright 2012, Lior Ramati
6 Copyright 2013, Elsie Powell (embolalia.com)
7 Licensed under the Eiffel Forum License 2.
8
9 https://sopel.chat
10 """
11 from __future__ import unicode_literals, absolute_import, print_function, division
12
13 import random
14 import re
15
16 import requests
17
18 from sopel.modules.search import bing_search
19 from sopel.module import commands, url
20
21
22 ignored_sites = [
23 # For searching the web
24 'almamater.xkcd.com',
25 'blog.xkcd.com',
26 'blag.xkcd.com',
27 'forums.xkcd.com',
28 'fora.xkcd.com',
29 'forums3.xkcd.com',
30 'store.xkcd.com',
31 'wiki.xkcd.com',
32 'what-if.xkcd.com',
33 ]
34 sites_query = ' site:xkcd.com -site:' + ' -site:'.join(ignored_sites)
35
36
37 def get_info(number=None):
38 if number:
39 url = 'https://xkcd.com/{}/info.0.json'.format(number)
40 else:
41 url = 'https://xkcd.com/info.0.json'
42 data = requests.get(url).json()
43 data['url'] = 'https://xkcd.com/' + str(data['num'])
44 return data
45
46
47 def web_search(query):
48 url = bing_search(query + sites_query)
49 if not url:
50 return None
51 match = re.match(r'(?:https?://)?xkcd.com/(\d+)/?', url)
52 if match:
53 return match.group(1)
54
55
56 @commands('xkcd')
57 def xkcd(bot, trigger):
58 """.xkcd - Finds an xkcd comic strip.
59
60 Takes one of 3 inputs:
61
62 * If no input is provided it will return a random comic
63 * If numeric input is provided it will return that comic, or the
64 nth-latest comic if the number is non-positive
65 * If non-numeric input is provided it will return the first search result
66 for those keywords on the xkcd.com site
67 """
68 # get latest comic for rand function and numeric input
69 latest = get_info()
70 max_int = latest['num']
71
72 # if no input is given (pre - lior's edits code)
73 if not trigger.group(2): # get rand comic
74 random.seed()
75 requested = get_info(random.randint(1, max_int + 1))
76 else:
77 query = trigger.group(2).strip()
78
79 numbered = re.match(r"^(#|\+|-)?(\d+)$", query)
80 if numbered:
81 query = int(numbered.group(2))
82 if numbered.group(1) == "-":
83 query = -query
84 return numbered_result(bot, query, latest)
85 else:
86 # Non-number: search the web.
87 if (query.lower() == "latest" or query.lower() == "newest"):
88 requested = latest
89 else:
90 number = web_search(query)
91 if not number:
92 bot.say('Could not find any comics for that query.')
93 return
94 requested = get_info(number)
95
96 say_result(bot, requested)
97
98
99 def numbered_result(bot, query, latest):
100 max_int = latest['num']
101 if query > max_int:
102 bot.say(("Sorry, comic #{} hasn't been posted yet. "
103 "The last comic was #{}").format(query, max_int))
104 return
105 elif query <= -max_int:
106 bot.say(("Sorry, but there were only {} comics "
107 "released yet so far").format(max_int))
108 return
109 elif abs(query) == 0:
110 requested = latest
111 elif query == 404 or max_int + query == 404:
112 bot.say("404 - Not Found") # don't error on that one
113 return
114 elif query > 0:
115 requested = get_info(query)
116 else:
117 # Negative: go back that many from current
118 requested = get_info(max_int + query)
119
120 say_result(bot, requested)
121
122
123 def say_result(bot, result):
124 message = '{} | {} | Alt-text: {}'.format(result['url'], result['title'],
125 result['alt'])
126 bot.say(message)
127
128
129 @url(r'xkcd.com/(\d+)')
130 def get_url(bot, trigger, match):
131 latest = get_info()
132 numbered_result(bot, int(match.group(1)), latest)
133
[end of sopel/modules/xkcd.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/sopel/modules/xkcd.py b/sopel/modules/xkcd.py
--- a/sopel/modules/xkcd.py
+++ b/sopel/modules/xkcd.py
@@ -96,7 +96,7 @@
say_result(bot, requested)
-def numbered_result(bot, query, latest):
+def numbered_result(bot, query, latest, commanded=True):
max_int = latest['num']
if query > max_int:
bot.say(("Sorry, comic #{} hasn't been posted yet. "
@@ -117,16 +117,18 @@
# Negative: go back that many from current
requested = get_info(max_int + query)
- say_result(bot, requested)
+ say_result(bot, requested, commanded)
-def say_result(bot, result):
- message = '{} | {} | Alt-text: {}'.format(result['url'], result['title'],
- result['alt'])
+def say_result(bot, result, commanded=True):
+ message = '{}{} | Alt-text: {}'.format(
+ result['url'] + ' | ' if commanded else '',
+ result['title'], result['alt']
+ )
bot.say(message)
@url(r'xkcd.com/(\d+)')
def get_url(bot, trigger, match):
latest = get_info()
- numbered_result(bot, int(match.group(1)), latest)
+ numbered_result(bot, int(match.group(1)), latest, commanded=False)
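A sketch of the resulting difference in output; comic #327 is used purely as an example:

```python
# Illustrative only: URL-triggered calls pass commanded=False, so the link is not echoed back.
say_result(bot, comic, commanded=True)   # "https://xkcd.com/327 | Exploits of a Mom | Alt-text: ..."
say_result(bot, comic, commanded=False)  # "Exploits of a Mom | Alt-text: ..."
```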
| {"golden_diff": "diff --git a/sopel/modules/xkcd.py b/sopel/modules/xkcd.py\n--- a/sopel/modules/xkcd.py\n+++ b/sopel/modules/xkcd.py\n@@ -96,7 +96,7 @@\n say_result(bot, requested)\n \n \n-def numbered_result(bot, query, latest):\n+def numbered_result(bot, query, latest, commanded=True):\n max_int = latest['num']\n if query > max_int:\n bot.say((\"Sorry, comic #{} hasn't been posted yet. \"\n@@ -117,16 +117,18 @@\n # Negative: go back that many from current\n requested = get_info(max_int + query)\n \n- say_result(bot, requested)\n+ say_result(bot, requested, commanded)\n \n \n-def say_result(bot, result):\n- message = '{} | {} | Alt-text: {}'.format(result['url'], result['title'],\n- result['alt'])\n+def say_result(bot, result, commanded=True):\n+ message = '{}{} | Alt-text: {}'.format(\n+ result['url'] + ' | ' if commanded else '',\n+ result['title'], result['alt']\n+ )\n bot.say(message)\n \n \n @url(r'xkcd.com/(\\d+)')\n def get_url(bot, trigger, match):\n latest = get_info()\n- numbered_result(bot, int(match.group(1)), latest)\n+ numbered_result(bot, int(match.group(1)), latest, commanded=False)\n", "issue": "xkcd outputs link even if triggered by one\nThe `xkcd` plugin shouldn't output a link to the comic if it was triggered by a link.\r\n\r\nSee `wikipedia`, for example. This is an anti-cycling measure in case multiple bots handling the same link(s) are present in the channel.\n", "before_files": [{"content": "# coding=utf-8\n\"\"\"\nxkcd.py - Sopel xkcd Module\nCopyright 2010, Michael Yanovich (yanovich.net), and Morgan Goose\nCopyright 2012, Lior Ramati\nCopyright 2013, Elsie Powell (embolalia.com)\nLicensed under the Eiffel Forum License 2.\n\nhttps://sopel.chat\n\"\"\"\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nimport random\nimport re\n\nimport requests\n\nfrom sopel.modules.search import bing_search\nfrom sopel.module import commands, url\n\n\nignored_sites = [\n # For searching the web\n 'almamater.xkcd.com',\n 'blog.xkcd.com',\n 'blag.xkcd.com',\n 'forums.xkcd.com',\n 'fora.xkcd.com',\n 'forums3.xkcd.com',\n 'store.xkcd.com',\n 'wiki.xkcd.com',\n 'what-if.xkcd.com',\n]\nsites_query = ' site:xkcd.com -site:' + ' -site:'.join(ignored_sites)\n\n\ndef get_info(number=None):\n if number:\n url = 'https://xkcd.com/{}/info.0.json'.format(number)\n else:\n url = 'https://xkcd.com/info.0.json'\n data = requests.get(url).json()\n data['url'] = 'https://xkcd.com/' + str(data['num'])\n return data\n\n\ndef web_search(query):\n url = bing_search(query + sites_query)\n if not url:\n return None\n match = re.match(r'(?:https?://)?xkcd.com/(\\d+)/?', url)\n if match:\n return match.group(1)\n\n\n@commands('xkcd')\ndef xkcd(bot, trigger):\n \"\"\".xkcd - Finds an xkcd comic strip.\n\n Takes one of 3 inputs:\n\n * If no input is provided it will return a random comic\n * If numeric input is provided it will return that comic, or the\n nth-latest comic if the number is non-positive\n * If non-numeric input is provided it will return the first search result\n for those keywords on the xkcd.com site\n \"\"\"\n # get latest comic for rand function and numeric input\n latest = get_info()\n max_int = latest['num']\n\n # if no input is given (pre - lior's edits code)\n if not trigger.group(2): # get rand comic\n random.seed()\n requested = get_info(random.randint(1, max_int + 1))\n else:\n query = trigger.group(2).strip()\n\n numbered = re.match(r\"^(#|\\+|-)?(\\d+)$\", query)\n if numbered:\n query = int(numbered.group(2))\n if numbered.group(1) == 
\"-\":\n query = -query\n return numbered_result(bot, query, latest)\n else:\n # Non-number: search the web.\n if (query.lower() == \"latest\" or query.lower() == \"newest\"):\n requested = latest\n else:\n number = web_search(query)\n if not number:\n bot.say('Could not find any comics for that query.')\n return\n requested = get_info(number)\n\n say_result(bot, requested)\n\n\ndef numbered_result(bot, query, latest):\n max_int = latest['num']\n if query > max_int:\n bot.say((\"Sorry, comic #{} hasn't been posted yet. \"\n \"The last comic was #{}\").format(query, max_int))\n return\n elif query <= -max_int:\n bot.say((\"Sorry, but there were only {} comics \"\n \"released yet so far\").format(max_int))\n return\n elif abs(query) == 0:\n requested = latest\n elif query == 404 or max_int + query == 404:\n bot.say(\"404 - Not Found\") # don't error on that one\n return\n elif query > 0:\n requested = get_info(query)\n else:\n # Negative: go back that many from current\n requested = get_info(max_int + query)\n\n say_result(bot, requested)\n\n\ndef say_result(bot, result):\n message = '{} | {} | Alt-text: {}'.format(result['url'], result['title'],\n result['alt'])\n bot.say(message)\n\n\n@url(r'xkcd.com/(\\d+)')\ndef get_url(bot, trigger, match):\n latest = get_info()\n numbered_result(bot, int(match.group(1)), latest)\n", "path": "sopel/modules/xkcd.py"}]} | 1,914 | 325 |
gh_patches_debug_14423 | rasdani/github-patches | git_diff | translate__pootle-4087 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Elasticsearch 1.7.0 breaks Pootle if ES not set up
With `elasticsearch==1.7.0` doing `es.ping()` on a not-setup server raises `ConnectionError`.
This breaks the default install here:
https://github.com/translate/pootle/blob/master/pootle/core/search/backends/elasticsearch.py#L29
</issue>
<code>
[start of pootle/core/search/backends/elasticsearch.py]
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Copyright (C) Pootle contributors.
5 #
6 # This file is a part of the Pootle project. It is distributed under the GPL3
7 # or later license. See the LICENSE file for a copy of the license and the
8 # AUTHORS file for copyright and authorship information.
9
10 from __future__ import absolute_import
11
12 __all__ = ('ElasticSearchBackend',)
13
14 try:
15 from elasticsearch import Elasticsearch
16 except:
17 Elasticsearch = None
18
19 from ..base import SearchBackend
20
21
22 class ElasticSearchBackend(SearchBackend):
23 def __init__(self, config_name):
24 super(ElasticSearchBackend, self).__init__(config_name)
25 self._es = self._get_es_server()
26 self._create_index_if_missing()
27
28 def _server_setup_and_alive(self):
29 return self._es is not None and self._es.ping()
30
31 def _get_es_server(self):
32 if self._settings is None or Elasticsearch is None:
33 return None
34 return Elasticsearch([
35 {'host': self._settings['HOST'],
36 'port': self._settings['PORT']},
37 ])
38
39 def _create_index_if_missing(self):
40 if self._server_setup_and_alive():
41 if not self._es.indices.exists(self._settings['INDEX_NAME']):
42 self._es.indices.create(self._settings['INDEX_NAME'])
43
44 def _is_valuable_hit(self, unit, hit):
45 return str(unit.id) != hit['_id']
46
47 def search(self, unit):
48 if not self._server_setup_and_alive():
49 return []
50
51 counter = {}
52 res = []
53 language = unit.store.translation_project.language.code
54 es_res = self._es.search(
55 index=self._settings['INDEX_NAME'],
56 doc_type=language,
57 body={
58 "query": {
59 "match": {
60 "source": {
61 "query": unit.source,
62 "fuzziness": self._settings['MIN_SCORE'],
63 }
64 }
65 }
66 }
67 )
68
69 for hit in es_res['hits']['hits']:
70 if self._is_valuable_hit(unit, hit):
71 translation_pair = hit['_source']['source'] + hit['_source']['target']
72 if translation_pair not in counter:
73 counter[translation_pair] = 1
74 res.append({
75 'unit_id': hit['_id'],
76 'source': hit['_source']['source'],
77 'target': hit['_source']['target'],
78 'project': hit['_source']['project'],
79 'path': hit['_source']['path'],
80 'username': hit['_source']['username'],
81 'fullname': hit['_source']['fullname'],
82 'email_md5': hit['_source']['email_md5'],
83 })
84 else:
85 counter[translation_pair] += 1
86
87 for item in res:
88 item['count'] = counter[item['source']+item['target']]
89
90 return res
91
92 def update(self, language, obj):
93 if self._server_setup_and_alive():
94 self._es.index(
95 index=self._settings['INDEX_NAME'],
96 doc_type=language,
97 body=obj,
98 id=obj['id']
99 )
100
[end of pootle/core/search/backends/elasticsearch.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pootle/core/search/backends/elasticsearch.py b/pootle/core/search/backends/elasticsearch.py
--- a/pootle/core/search/backends/elasticsearch.py
+++ b/pootle/core/search/backends/elasticsearch.py
@@ -13,6 +13,7 @@
try:
from elasticsearch import Elasticsearch
+ from elasticsearch.exceptions import ConnectionError
except:
Elasticsearch = None
@@ -26,7 +27,12 @@
self._create_index_if_missing()
def _server_setup_and_alive(self):
- return self._es is not None and self._es.ping()
+ if self._es is None:
+ return False
+ try:
+ return self._es.ping()
+ except ConnectionError:
+ return False
def _get_es_server(self):
if self._settings is None or Elasticsearch is None:
| {"golden_diff": "diff --git a/pootle/core/search/backends/elasticsearch.py b/pootle/core/search/backends/elasticsearch.py\n--- a/pootle/core/search/backends/elasticsearch.py\n+++ b/pootle/core/search/backends/elasticsearch.py\n@@ -13,6 +13,7 @@\n \n try:\n from elasticsearch import Elasticsearch\n+ from elasticsearch.exceptions import ConnectionError\n except:\n Elasticsearch = None\n \n@@ -26,7 +27,12 @@\n self._create_index_if_missing()\n \n def _server_setup_and_alive(self):\n- return self._es is not None and self._es.ping()\n+ if self._es is None:\n+ return False\n+ try:\n+ return self._es.ping()\n+ except ConnectionError:\n+ return False\n \n def _get_es_server(self):\n if self._settings is None or Elasticsearch is None:\n", "issue": "Elasticsearch 1.7.0 breaks Pootle if ES not set up\nWith `elasticsearch==1.7.0` doing `es.ping()` on a not-setup server raises `ConnectionError`.\n\nThis breaks the default install here:\n\nhttps://github.com/translate/pootle/blob/master/pootle/core/search/backends/elasticsearch.py#L29\n\nElasticsearch 1.7.0 breaks Pootle if ES not set up\nWith `elasticsearch==1.7.0` doing `es.ping()` on a not-setup server raises `ConnectionError`.\n\nThis breaks the default install here:\n\nhttps://github.com/translate/pootle/blob/master/pootle/core/search/backends/elasticsearch.py#L29\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom __future__ import absolute_import\n\n__all__ = ('ElasticSearchBackend',)\n\ntry:\n from elasticsearch import Elasticsearch\nexcept:\n Elasticsearch = None\n\nfrom ..base import SearchBackend\n\n\nclass ElasticSearchBackend(SearchBackend):\n def __init__(self, config_name):\n super(ElasticSearchBackend, self).__init__(config_name)\n self._es = self._get_es_server()\n self._create_index_if_missing()\n\n def _server_setup_and_alive(self):\n return self._es is not None and self._es.ping()\n\n def _get_es_server(self):\n if self._settings is None or Elasticsearch is None:\n return None\n return Elasticsearch([\n {'host': self._settings['HOST'],\n 'port': self._settings['PORT']},\n ])\n\n def _create_index_if_missing(self):\n if self._server_setup_and_alive():\n if not self._es.indices.exists(self._settings['INDEX_NAME']):\n self._es.indices.create(self._settings['INDEX_NAME'])\n\n def _is_valuable_hit(self, unit, hit):\n return str(unit.id) != hit['_id']\n\n def search(self, unit):\n if not self._server_setup_and_alive():\n return []\n\n counter = {}\n res = []\n language = unit.store.translation_project.language.code\n es_res = self._es.search(\n index=self._settings['INDEX_NAME'],\n doc_type=language,\n body={\n \"query\": {\n \"match\": {\n \"source\": {\n \"query\": unit.source,\n \"fuzziness\": self._settings['MIN_SCORE'],\n }\n }\n }\n }\n )\n\n for hit in es_res['hits']['hits']:\n if self._is_valuable_hit(unit, hit):\n translation_pair = hit['_source']['source'] + hit['_source']['target']\n if translation_pair not in counter:\n counter[translation_pair] = 1\n res.append({\n 'unit_id': hit['_id'],\n 'source': hit['_source']['source'],\n 'target': hit['_source']['target'],\n 'project': hit['_source']['project'],\n 'path': hit['_source']['path'],\n 'username': hit['_source']['username'],\n 'fullname': hit['_source']['fullname'],\n 'email_md5': 
hit['_source']['email_md5'],\n })\n else:\n counter[translation_pair] += 1\n\n for item in res:\n item['count'] = counter[item['source']+item['target']]\n\n return res\n\n def update(self, language, obj):\n if self._server_setup_and_alive():\n self._es.index(\n index=self._settings['INDEX_NAME'],\n doc_type=language,\n body=obj,\n id=obj['id']\n )\n", "path": "pootle/core/search/backends/elasticsearch.py"}]} | 1,566 | 195 |
gh_patches_debug_199 | rasdani/github-patches | git_diff | twisted__twisted-1695 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Release 22.2.0
|[<img alt="adiroiban's avatar" src="https://avatars.githubusercontent.com/u/204609?s=50" width="50" height="50">](https://github.com/adiroiban)| @adiroiban reported|
|-|-|
|Trac ID|trac#10306|
|Type|enhancement|
|Created|2022-02-08 14:05:11Z|
<details><summary>Searchable metadata</summary>
```
trac-id__10306 10306
type__enhancement enhancement
reporter__adiroiban adiroiban
priority__normal normal
milestone__None None
branch__
branch_author__
status__closed closed
resolution__fixed fixed
component__core core
keywords__None None
time__1644329111193403 1644329111193403
changetime__1646513115841857 1646513115841857
version__None None
owner__None None
```
</details>
</issue>
<code>
[start of src/twisted/_version.py]
1 """
2 Provides Twisted version information.
3 """
4
5 # This file is auto-generated! Do not edit!
6 # Use `python -m incremental.update Twisted` to change this file.
7
8 from incremental import Version
9
10 __version__ = Version("Twisted", 22, 1, 0, post=0)
11 __all__ = ["__version__"]
12
[end of src/twisted/_version.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/twisted/_version.py b/src/twisted/_version.py
--- a/src/twisted/_version.py
+++ b/src/twisted/_version.py
@@ -7,5 +7,5 @@
from incremental import Version
-__version__ = Version("Twisted", 22, 1, 0, post=0)
+__version__ = Version("Twisted", 22, 2, 0, post=0)
__all__ = ["__version__"]
| {"golden_diff": "diff --git a/src/twisted/_version.py b/src/twisted/_version.py\n--- a/src/twisted/_version.py\n+++ b/src/twisted/_version.py\n@@ -7,5 +7,5 @@\n \n from incremental import Version\n \n-__version__ = Version(\"Twisted\", 22, 1, 0, post=0)\n+__version__ = Version(\"Twisted\", 22, 2, 0, post=0)\n __all__ = [\"__version__\"]\n", "issue": "Release 22.2.0\n|[<img alt=\"adiroiban's avatar\" src=\"https://avatars.githubusercontent.com/u/204609?s=50\" width=\"50\" height=\"50\">](https://github.com/adiroiban)| @adiroiban reported|\n|-|-|\n|Trac ID|trac#10306|\n|Type|enhancement|\n|Created|2022-02-08 14:05:11Z|\n\n\n\n<details><summary>Searchable metadata</summary>\n\n```\ntrac-id__10306 10306\ntype__enhancement enhancement\nreporter__adiroiban adiroiban\npriority__normal normal\nmilestone__None None\nbranch__ \nbranch_author__ \nstatus__closed closed\nresolution__fixed fixed\ncomponent__core core\nkeywords__None None\ntime__1644329111193403 1644329111193403\nchangetime__1646513115841857 1646513115841857\nversion__None None\nowner__None None\n\n```\n</details>\n\n", "before_files": [{"content": "\"\"\"\nProvides Twisted version information.\n\"\"\"\n\n# This file is auto-generated! Do not edit!\n# Use `python -m incremental.update Twisted` to change this file.\n\nfrom incremental import Version\n\n__version__ = Version(\"Twisted\", 22, 1, 0, post=0)\n__all__ = [\"__version__\"]\n", "path": "src/twisted/_version.py"}]} | 916 | 114 |
gh_patches_debug_4921 | rasdani/github-patches | git_diff | ocadotechnology__aimmo-123 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Users should see which avatar is theirs
</issue>
<code>
[start of players/views.py]
1 import logging
2
3 from django.http import JsonResponse
4 from django.contrib.auth.decorators import login_required
5 from django.http import HttpResponse
6 from django.views.generic import TemplateView
7
8 import os
9
10 from models import Player
11 from . import app_settings
12
13
14 def _post_code_success_response(message):
15 return create_response("SUCCESS", message)
16
17
18 def create_response(status, message):
19 response = {
20 "status": status,
21 "message": message
22 }
23 return JsonResponse(response)
24
25
26 @login_required
27 def code(request):
28 try:
29 player = request.user.player
30 except Player.DoesNotExist:
31 initial_code_file_name = os.path.join(
32 os.path.abspath(os.path.dirname(__file__)),
33 'avatar_examples/dumb_avatar.py',
34 )
35 with open(initial_code_file_name) as initial_code_file:
36 initial_code = initial_code_file.read()
37 player = Player.objects.create(user=request.user, code=initial_code)
38 if request.method == 'POST':
39 player.code = request.POST['code']
40 player.save()
41
42 return _post_code_success_response("Your code was saved!")
43 else:
44 return HttpResponse(player.code)
45
46
47 def games(request):
48 response = {
49 'main': {
50 'parameters': [],
51 'users': [
52 {
53 'id': player.user.pk,
54 'code': player.code,
55 } for player in Player.objects.all()
56 ]
57 }
58 }
59 return JsonResponse(response)
60
61
62 class WatchView(TemplateView):
63 template_name = 'players/watch.html'
64
65 def get_context_data(self, **kwargs):
66 context = super(WatchView, self).get_context_data(**kwargs)
67 context['game_url_base'], context['game_url_path'] = app_settings.GAME_SERVER_LOCATION_FUNCTION('main')
68 return context
69
[end of players/views.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/players/views.py b/players/views.py
--- a/players/views.py
+++ b/players/views.py
@@ -65,4 +65,5 @@
def get_context_data(self, **kwargs):
context = super(WatchView, self).get_context_data(**kwargs)
context['game_url_base'], context['game_url_path'] = app_settings.GAME_SERVER_LOCATION_FUNCTION('main')
+ context['current_user_player_key'] = self.request.user.pk
return context
| {"golden_diff": "diff --git a/players/views.py b/players/views.py\n--- a/players/views.py\n+++ b/players/views.py\n@@ -65,4 +65,5 @@\n def get_context_data(self, **kwargs):\n context = super(WatchView, self).get_context_data(**kwargs)\n context['game_url_base'], context['game_url_path'] = app_settings.GAME_SERVER_LOCATION_FUNCTION('main')\n+ context['current_user_player_key'] = self.request.user.pk\n return context\n", "issue": "Users should see which avatar is theirs\n\n", "before_files": [{"content": "import logging\n\nfrom django.http import JsonResponse\nfrom django.contrib.auth.decorators import login_required\nfrom django.http import HttpResponse\nfrom django.views.generic import TemplateView\n\nimport os\n\nfrom models import Player\nfrom . import app_settings\n\n\ndef _post_code_success_response(message):\n return create_response(\"SUCCESS\", message)\n\n\ndef create_response(status, message):\n response = {\n \"status\": status,\n \"message\": message\n }\n return JsonResponse(response)\n\n\n@login_required\ndef code(request):\n try:\n player = request.user.player\n except Player.DoesNotExist:\n initial_code_file_name = os.path.join(\n os.path.abspath(os.path.dirname(__file__)),\n 'avatar_examples/dumb_avatar.py',\n )\n with open(initial_code_file_name) as initial_code_file:\n initial_code = initial_code_file.read()\n player = Player.objects.create(user=request.user, code=initial_code)\n if request.method == 'POST':\n player.code = request.POST['code']\n player.save()\n\n return _post_code_success_response(\"Your code was saved!\")\n else:\n return HttpResponse(player.code)\n\n\ndef games(request):\n response = {\n 'main': {\n 'parameters': [],\n 'users': [\n {\n 'id': player.user.pk,\n 'code': player.code,\n } for player in Player.objects.all()\n ]\n }\n }\n return JsonResponse(response)\n\n\nclass WatchView(TemplateView):\n template_name = 'players/watch.html'\n\n def get_context_data(self, **kwargs):\n context = super(WatchView, self).get_context_data(**kwargs)\n context['game_url_base'], context['game_url_path'] = app_settings.GAME_SERVER_LOCATION_FUNCTION('main')\n return context\n", "path": "players/views.py"}]} | 1,042 | 110 |
gh_patches_debug_40748 | rasdani/github-patches | git_diff | vacanza__python-holidays-639 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Ireland considering UK as base class and hence not being a country itself
Issue also opened here:
home-assistant/core#67542
Looks like Ireland is being considered as part of the UK, which is wrong, as not all of the holidays in the UK exist, or necessarily exist, in Ireland.
For reference, see this comment: https://github.com/home-assistant/core/issues/67542#issuecomment-1058079650
</issue>
<code>
[start of holidays/countries/ireland.py]
1 # python-holidays
2 # ---------------
3 # A fast, efficient Python library for generating country, province and state
4 # specific sets of holidays on the fly. It aims to make determining whether a
5 # specific date is a holiday as fast and flexible as possible.
6 #
7 # Authors: dr-prodigy <[email protected]> (c) 2017-2022
8 # ryanss <[email protected]> (c) 2014-2017
9 # Website: https://github.com/dr-prodigy/python-holidays
10 # License: MIT (see LICENSE file)
11
12 from datetime import date
13
14 from dateutil.easter import easter
15 from dateutil.relativedelta import relativedelta as rd, MO
16
17 from holidays.constants import MAR, MAY, JUN, AUG, OCT, DEC
18 from holidays.constants import MON, TUE, WED, THU, FRI, SAT, SUN, WEEKEND
19 from holidays.holiday_base import HolidayBase
20 from .united_kingdom import UnitedKingdom
21
22
23 class Ireland(UnitedKingdom):
24 country = "IE"
25
26 def __init__(self, **kwargs):
27 HolidayBase.__init__(self, **kwargs)
28
29 def _country_specific(self, year):
30 # Ireland exclusive holidays
31
32 # St. Patrick's Day
33 name = "St. Patrick's Day"
34 self[date(year, MAR, 17)] = name
35 if self.observed and date(year, MAR, 17).weekday() in WEEKEND:
36 self[date(year, MAR, 17) + rd(weekday=MO)] = name + " (Observed)"
37
38 # Easter Monday
39 self[easter(year) + rd(weekday=MO)] = "Easter Monday"
40
41 # May Day bank holiday (first Monday in May)
42 if year >= 1978:
43 name = "May Day"
44 if year == 1995:
45 dt = date(year, MAY, 8)
46 else:
47 dt = date(year, MAY, 1)
48 if dt.weekday() == MON:
49 self[dt] = name
50 elif dt.weekday() == TUE:
51 self[dt + rd(days=+6)] = name
52 elif dt.weekday() == WED:
53 self[dt + rd(days=+5)] = name
54 elif dt.weekday() == THU:
55 self[dt + rd(days=+4)] = name
56 elif dt.weekday() == FRI:
57 self[dt + rd(days=+3)] = name
58 elif dt.weekday() == SAT:
59 self[dt + rd(days=+2)] = name
60 elif dt.weekday() == SUN:
61 self[dt + rd(days=+1)] = name
62
63 # June bank holiday (first Monday in June)
64 self[date(year, JUN, 1) + rd(weekday=MO)] = "June Bank Holiday"
65
66 # Summer bank holiday (first Monday in August)
67 self[date(year, AUG, 1) + rd(weekday=MO)] = "Summer Bank Holiday"
68
69 # October Bank Holiday (last Monday in October)
70 self[date(year, OCT, 31) + rd(weekday=MO(-1))] = "October Bank Holiday"
71
72 # St. Stephen's Day
73 name = "St. Stephen's Day"
74 self[date(year, DEC, 26)] = name
75 if self.observed and date(year, DEC, 26).weekday() == SAT:
76 self[date(year, DEC, 28)] = name + " (Observed)"
77 elif self.observed and date(year, DEC, 26).weekday() == SUN:
78 self[date(year, DEC, 28)] = name + " (Observed)"
79
80
81 class IE(Ireland):
82 pass
83
84
85 class IRL(Ireland):
86 pass
87
[end of holidays/countries/ireland.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/holidays/countries/ireland.py b/holidays/countries/ireland.py
--- a/holidays/countries/ireland.py
+++ b/holidays/countries/ireland.py
@@ -16,20 +16,37 @@
from dateutil.easter import easter
from dateutil.relativedelta import relativedelta as rd, MO
-from holidays.constants import MAR, MAY, JUN, AUG, OCT, DEC
+from holidays.constants import FEB, MAR, MAY, JUN, AUG, OCT, DEC
from holidays.constants import MON, TUE, WED, THU, FRI, SAT, SUN, WEEKEND
from holidays.holiday_base import HolidayBase
-from .united_kingdom import UnitedKingdom
+class Ireland(HolidayBase):
+ """
+ Official holidays in Ireland, as declared in the Citizen's Information
+ bulletin:
+ https://www.citizensinformation.ie/en/employment/employment_rights_and_conditions/leave_and_holidays/public_holidays_in_ireland.html
+ """
-class Ireland(UnitedKingdom):
country = "IE"
+ subdivisions = []
def __init__(self, **kwargs):
HolidayBase.__init__(self, **kwargs)
- def _country_specific(self, year):
- # Ireland exclusive holidays
+ def _populate(self, year):
+ self[date(year, JAN, 1)] = "New Year's Day"
+
+ # St. Brigid's Day
+ if year >= 2023:
+ dt = date(year, FEB, 1)
+ self[dt] = "St. Brigid's Day"
+
+ if self.observed and dt.weekday() != FRI:
+ self[date(year, FEB, 1) + rd(weekday=MO)] = "St. Brigid's Day (Observed)"
+
+ # One-off day of rememberance and recognition
+ if year == 2022:
+ self[date(year, MAR, 18)] = "Day of Rememberance and Recognition"
# St. Patrick's Day
name = "St. Patrick's Day"
@@ -40,7 +57,7 @@
# Easter Monday
self[easter(year) + rd(weekday=MO)] = "Easter Monday"
- # May Day bank holiday (first Monday in May)
+ # May bank holiday (first Monday in May)
if year >= 1978:
name = "May Day"
if year == 1995:
@@ -66,18 +83,24 @@
self[date(year, JUN, 1) + rd(weekday=MO)] = "June Bank Holiday"
# Summer bank holiday (first Monday in August)
- self[date(year, AUG, 1) + rd(weekday=MO)] = "Summer Bank Holiday"
+ self[date(year, AUG, 1) + rd(weekday=MO)] = "August Bank Holiday"
# October Bank Holiday (last Monday in October)
self[date(year, OCT, 31) + rd(weekday=MO(-1))] = "October Bank Holiday"
+ # Christmas Day
+ name = "Christmas Day"
+ self[date(year, DEC, 25)] = "Christmas Day"
+ if self.observed and date(year, DEC, 25).weekday() in WEEKEND:
+ self[date(year, DEC, 25) + rd(weekday=MON)] = name + " (Observed)"
+
# St. Stephen's Day
name = "St. Stephen's Day"
self[date(year, DEC, 26)] = name
if self.observed and date(year, DEC, 26).weekday() == SAT:
- self[date(year, DEC, 28)] = name + " (Observed)"
+ self[date(year, DEC, 26) + rd(weekday=MON)] = name + " (Observed)"
elif self.observed and date(year, DEC, 26).weekday() == SUN:
- self[date(year, DEC, 28)] = name + " (Observed)"
+ self[date(year, DEC, 26) + rd(weekday=TUE)] = name + " (Observed)"
class IE(Ireland):
| {"golden_diff": "diff --git a/holidays/countries/ireland.py b/holidays/countries/ireland.py\n--- a/holidays/countries/ireland.py\n+++ b/holidays/countries/ireland.py\n@@ -16,20 +16,37 @@\n from dateutil.easter import easter\n from dateutil.relativedelta import relativedelta as rd, MO\n \n-from holidays.constants import MAR, MAY, JUN, AUG, OCT, DEC\n+from holidays.constants import FEB, MAR, MAY, JUN, AUG, OCT, DEC\n from holidays.constants import MON, TUE, WED, THU, FRI, SAT, SUN, WEEKEND\n from holidays.holiday_base import HolidayBase\n-from .united_kingdom import UnitedKingdom\n \n+class Ireland(HolidayBase):\n+ \"\"\"\n+ Official holidays in Ireland, as declared in the Citizen's Information\n+ bulletin:\n+ https://www.citizensinformation.ie/en/employment/employment_rights_and_conditions/leave_and_holidays/public_holidays_in_ireland.html\n+ \"\"\"\n \n-class Ireland(UnitedKingdom):\n country = \"IE\"\n+ subdivisions = []\n \n def __init__(self, **kwargs):\n HolidayBase.__init__(self, **kwargs)\n \n- def _country_specific(self, year):\n- # Ireland exclusive holidays\n+ def _populate(self, year):\n+ self[date(year, JAN, 1)] = \"New Year's Day\"\n+\n+ # St. Brigid's Day\n+ if year >= 2023:\n+ dt = date(year, FEB, 1)\n+ self[dt] = \"St. Brigid's Day\"\n+\n+ if self.observed and dt.weekday() != FRI:\n+ self[date(year, FEB, 1) + rd(weekday=MO)] = \"St. Brigid's Day (Observed)\"\n+\n+ # One-off day of rememberance and recognition\n+ if year == 2022:\n+ self[date(year, MAR, 18)] = \"Day of Rememberance and Recognition\"\n \n # St. Patrick's Day\n name = \"St. Patrick's Day\"\n@@ -40,7 +57,7 @@\n # Easter Monday\n self[easter(year) + rd(weekday=MO)] = \"Easter Monday\"\n \n- # May Day bank holiday (first Monday in May)\n+ # May bank holiday (first Monday in May)\n if year >= 1978:\n name = \"May Day\"\n if year == 1995:\n@@ -66,18 +83,24 @@\n self[date(year, JUN, 1) + rd(weekday=MO)] = \"June Bank Holiday\"\n \n # Summer bank holiday (first Monday in August)\n- self[date(year, AUG, 1) + rd(weekday=MO)] = \"Summer Bank Holiday\"\n+ self[date(year, AUG, 1) + rd(weekday=MO)] = \"August Bank Holiday\"\n \n # October Bank Holiday (last Monday in October)\n self[date(year, OCT, 31) + rd(weekday=MO(-1))] = \"October Bank Holiday\"\n \n+ # Christmas Day\n+ name = \"Christmas Day\"\n+ self[date(year, DEC, 25)] = \"Christmas Day\"\n+ if self.observed and date(year, DEC, 25).weekday() in WEEKEND:\n+ self[date(year, DEC, 25) + rd(weekday=MON)] = name + \" (Observed)\"\n+\n # St. Stephen's Day\n name = \"St. 
Stephen's Day\"\n self[date(year, DEC, 26)] = name\n if self.observed and date(year, DEC, 26).weekday() == SAT:\n- self[date(year, DEC, 28)] = name + \" (Observed)\"\n+ self[date(year, DEC, 26) + rd(weekday=MON)] = name + \" (Observed)\"\n elif self.observed and date(year, DEC, 26).weekday() == SUN:\n- self[date(year, DEC, 28)] = name + \" (Observed)\"\n+ self[date(year, DEC, 26) + rd(weekday=TUE)] = name + \" (Observed)\"\n \n \n class IE(Ireland):\n", "issue": "Ireland considering UK as base class and hence not being a country itself\nIssue also opened here:\r\nhome-assistant/core#67542\r\n\r\nLooks like Ireland is being considered as being part of the UK which is wrong as not all the holidays in the UK exist, or necessarily exist in Ireland.\r\n\r\nTake a reference on this comment: https://github.com/home-assistant/core/issues/67542#issuecomment-1058079650\n", "before_files": [{"content": "# python-holidays\n# ---------------\n# A fast, efficient Python library for generating country, province and state\n# specific sets of holidays on the fly. It aims to make determining whether a\n# specific date is a holiday as fast and flexible as possible.\n#\n# Authors: dr-prodigy <[email protected]> (c) 2017-2022\n# ryanss <[email protected]> (c) 2014-2017\n# Website: https://github.com/dr-prodigy/python-holidays\n# License: MIT (see LICENSE file)\n\nfrom datetime import date\n\nfrom dateutil.easter import easter\nfrom dateutil.relativedelta import relativedelta as rd, MO\n\nfrom holidays.constants import MAR, MAY, JUN, AUG, OCT, DEC\nfrom holidays.constants import MON, TUE, WED, THU, FRI, SAT, SUN, WEEKEND\nfrom holidays.holiday_base import HolidayBase\nfrom .united_kingdom import UnitedKingdom\n\n\nclass Ireland(UnitedKingdom):\n country = \"IE\"\n\n def __init__(self, **kwargs):\n HolidayBase.__init__(self, **kwargs)\n\n def _country_specific(self, year):\n # Ireland exclusive holidays\n\n # St. Patrick's Day\n name = \"St. Patrick's Day\"\n self[date(year, MAR, 17)] = name\n if self.observed and date(year, MAR, 17).weekday() in WEEKEND:\n self[date(year, MAR, 17) + rd(weekday=MO)] = name + \" (Observed)\"\n\n # Easter Monday\n self[easter(year) + rd(weekday=MO)] = \"Easter Monday\"\n\n # May Day bank holiday (first Monday in May)\n if year >= 1978:\n name = \"May Day\"\n if year == 1995:\n dt = date(year, MAY, 8)\n else:\n dt = date(year, MAY, 1)\n if dt.weekday() == MON:\n self[dt] = name\n elif dt.weekday() == TUE:\n self[dt + rd(days=+6)] = name\n elif dt.weekday() == WED:\n self[dt + rd(days=+5)] = name\n elif dt.weekday() == THU:\n self[dt + rd(days=+4)] = name\n elif dt.weekday() == FRI:\n self[dt + rd(days=+3)] = name\n elif dt.weekday() == SAT:\n self[dt + rd(days=+2)] = name\n elif dt.weekday() == SUN:\n self[dt + rd(days=+1)] = name\n\n # June bank holiday (first Monday in June)\n self[date(year, JUN, 1) + rd(weekday=MO)] = \"June Bank Holiday\"\n\n # Summer bank holiday (first Monday in August)\n self[date(year, AUG, 1) + rd(weekday=MO)] = \"Summer Bank Holiday\"\n\n # October Bank Holiday (last Monday in October)\n self[date(year, OCT, 31) + rd(weekday=MO(-1))] = \"October Bank Holiday\"\n\n # St. Stephen's Day\n name = \"St. 
Stephen's Day\"\n self[date(year, DEC, 26)] = name\n if self.observed and date(year, DEC, 26).weekday() == SAT:\n self[date(year, DEC, 28)] = name + \" (Observed)\"\n elif self.observed and date(year, DEC, 26).weekday() == SUN:\n self[date(year, DEC, 28)] = name + \" (Observed)\"\n\n\nclass IE(Ireland):\n pass\n\n\nclass IRL(Ireland):\n pass\n", "path": "holidays/countries/ireland.py"}]} | 1,669 | 958 |
gh_patches_debug_28460 | rasdani/github-patches | git_diff | mindsdb__mindsdb-2678 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[BUG] Fix scylladb error when connecting with secure bundle
When connecting with `secure_connect_bundle`, users got an unknown `secure_connect_bundle` path error.
</issue>
<code>
[start of mindsdb/integrations/handlers/scylla_handler/scylla_handler.py]
1 import os
2 from mindsdb.integrations.libs.base_handler import DatabaseHandler
3 from mindsdb_sql import parse_sql
4 from mindsdb_sql.render.sqlalchemy_render import SqlalchemyRender
5 from cassandra.cluster import Cluster
6 from cassandra.auth import PlainTextAuthProvider
7 from mindsdb.integrations.libs.response import (
8 HandlerStatusResponse as StatusResponse,
9 HandlerResponse as Response,
10 RESPONSE_TYPE
11 )
12 from mindsdb.utilities.log import log
13 import pandas as pd
14 from mindsdb_sql.parser.ast.base import ASTNode
15
16
17 class ScyllaHandler(DatabaseHandler):
18 """
19 This handler handles connection and execution of the Scylla statements.
20 """
21 name = 'scylla'
22
23 def __init__(self, name=None, **kwargs):
24 super().__init__(name)
25 self.parser = parse_sql
26 self.connection_args = kwargs.get('connection_data')
27 self.session = None
28 self.is_connected = False
29
30 def connect(self):
31 """
32 Handles the connection to a Scylla keystore.
33 """
34 if self.is_connected is True:
35 return self.session
36
37 auth_provider = PlainTextAuthProvider(
38 username=self.connection_args['user'], password=self.connection_args['password']
39 )
40
41 connection_props = {
42 'auth_provider': auth_provider
43 }
44
45 if self.connection_args['protocol_version'] is not None:
46 connection_props['protocol_version'] = self.connection_args['protocol_version']
47
48 secure_connect_bundle = self.connection_args.get('secure_connect_bundle')
49
50 if secure_connect_bundle is not None:
51 if os.path.isfile(self.secure_connect_bundle) is False:
52 raise Exception("Secure_connect_bundle' must be path to the file")
53 connection_props['cloud'] = {
54 'secure_connect_bundle': self.secure_connect_bundle
55 }
56 else:
57 connection_props['contact_points'] = [self.connection_args['host']]
58 connection_props['port'] = int(self.connection_args['port'])
59
60 cluster = Cluster(**connection_props)
61 session = cluster.connect(self.connection_args['keyspace'])
62
63 self.is_connected = True
64 self.session = session
65 return self.session
66
67 def check_connection(self) -> StatusResponse:
68 """
69 Check the connection of the Scylla database
70 :return: success status and error message if error occurs
71 """
72 response = StatusResponse(False)
73
74 try:
75 session = self.connect()
76 # TODO: change the healthcheck
77 session.execute('SELECT release_version FROM system.local').one()
78 response.success = True
79 except Exception as e:
80 log.error(f'Error connecting to Scylla {self.connection_args["keyspace"]}, {e}!')
81 response.error_message = e
82
83 if response.success is False and self.is_connected is True:
84 self.is_connected = False
85
86 return response
87
88 def native_query(self, query: str) -> Response:
89 """
90 Receive SQL query and runs it
91 :param query: The SQL query to run in MySQL
92 :return: returns the records from the current recordset
93 """
94 session = self.connect()
95 try:
96 resp = session.execute(query).all()
97 if resp:
98 response = Response(
99 RESPONSE_TYPE.TABLE,
100 pd.DataFrame(
101 resp
102 )
103 )
104 else:
105 response = Response(RESPONSE_TYPE.OK)
106 except Exception as e:
107 log.error(f'Error running query: {query} on {self.connection_args["keyspace"]}!')
108 response = Response(
109 RESPONSE_TYPE.ERROR,
110 error_message=str(e)
111 )
112 return response
113
114 def query(self, query: ASTNode) -> Response:
115 """
116 Retrieve the data from the SQL statement.
117 """
118 renderer = SqlalchemyRender('mysql')
119 query_str = renderer.get_string(query, with_failback=True)
120 return self.native_query(query_str)
121
122 def get_tables(self) -> Response:
123 """
124 Get a list with all of the tabels in MySQL
125 """
126 q = "DESCRIBE TABLES;"
127 result = self.native_query(q)
128 df = result.data_frame
129 result.data_frame = df.rename(columns={df.columns[0]: 'table_name'})
130 return result
131
132 def get_columns(self, table_name) -> Response:
133 """
134 Show details about the table
135 """
136 q = f"DESCRIBE {table_name};"
137 result = self.native_query(q)
138 return result
139
[end of mindsdb/integrations/handlers/scylla_handler/scylla_handler.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mindsdb/integrations/handlers/scylla_handler/scylla_handler.py b/mindsdb/integrations/handlers/scylla_handler/scylla_handler.py
--- a/mindsdb/integrations/handlers/scylla_handler/scylla_handler.py
+++ b/mindsdb/integrations/handlers/scylla_handler/scylla_handler.py
@@ -41,24 +41,21 @@
connection_props = {
'auth_provider': auth_provider
}
-
- if self.connection_args['protocol_version'] is not None:
- connection_props['protocol_version'] = self.connection_args['protocol_version']
-
+ connection_props['protocol_version'] = self.connection_args.get('protocol_version', 4)
secure_connect_bundle = self.connection_args.get('secure_connect_bundle')
if secure_connect_bundle is not None:
- if os.path.isfile(self.secure_connect_bundle) is False:
+ if os.path.isfile(secure_connect_bundle) is False:
raise Exception("Secure_connect_bundle' must be path to the file")
connection_props['cloud'] = {
- 'secure_connect_bundle': self.secure_connect_bundle
+ 'secure_connect_bundle': secure_connect_bundle
}
else:
connection_props['contact_points'] = [self.connection_args['host']]
connection_props['port'] = int(self.connection_args['port'])
cluster = Cluster(**connection_props)
- session = cluster.connect(self.connection_args['keyspace'])
+ session = cluster.connect(self.connection_args.get('keyspace'))
self.is_connected = True
self.session = session
| {"golden_diff": "diff --git a/mindsdb/integrations/handlers/scylla_handler/scylla_handler.py b/mindsdb/integrations/handlers/scylla_handler/scylla_handler.py\n--- a/mindsdb/integrations/handlers/scylla_handler/scylla_handler.py\n+++ b/mindsdb/integrations/handlers/scylla_handler/scylla_handler.py\n@@ -41,24 +41,21 @@\n connection_props = {\n 'auth_provider': auth_provider\n }\n-\n- if self.connection_args['protocol_version'] is not None:\n- connection_props['protocol_version'] = self.connection_args['protocol_version']\n- \n+ connection_props['protocol_version'] = self.connection_args.get('protocol_version', 4)\n secure_connect_bundle = self.connection_args.get('secure_connect_bundle')\n \n if secure_connect_bundle is not None:\n- if os.path.isfile(self.secure_connect_bundle) is False:\n+ if os.path.isfile(secure_connect_bundle) is False:\n raise Exception(\"Secure_connect_bundle' must be path to the file\")\n connection_props['cloud'] = {\n- 'secure_connect_bundle': self.secure_connect_bundle\n+ 'secure_connect_bundle': secure_connect_bundle\n }\n else:\n connection_props['contact_points'] = [self.connection_args['host']]\n connection_props['port'] = int(self.connection_args['port'])\n \n cluster = Cluster(**connection_props)\n- session = cluster.connect(self.connection_args['keyspace'])\n+ session = cluster.connect(self.connection_args.get('keyspace'))\n \n self.is_connected = True\n self.session = session\n", "issue": "[BUG] Fix scylladb error when connecting with secure bundle\nWhen connecting with `secure_connect_bundle` users got unknown secure_connect_bundle path error.\n", "before_files": [{"content": "import os\nfrom mindsdb.integrations.libs.base_handler import DatabaseHandler\nfrom mindsdb_sql import parse_sql\nfrom mindsdb_sql.render.sqlalchemy_render import SqlalchemyRender\nfrom cassandra.cluster import Cluster\nfrom cassandra.auth import PlainTextAuthProvider\nfrom mindsdb.integrations.libs.response import (\n HandlerStatusResponse as StatusResponse,\n HandlerResponse as Response,\n RESPONSE_TYPE\n)\nfrom mindsdb.utilities.log import log\nimport pandas as pd\nfrom mindsdb_sql.parser.ast.base import ASTNode\n\n\nclass ScyllaHandler(DatabaseHandler):\n \"\"\"\n This handler handles connection and execution of the Scylla statements.\n \"\"\"\n name = 'scylla'\n\n def __init__(self, name=None, **kwargs):\n super().__init__(name)\n self.parser = parse_sql\n self.connection_args = kwargs.get('connection_data')\n self.session = None\n self.is_connected = False\n\n def connect(self):\n \"\"\"\n Handles the connection to a Scylla keystore.\n \"\"\"\n if self.is_connected is True:\n return self.session\n\n auth_provider = PlainTextAuthProvider(\n username=self.connection_args['user'], password=self.connection_args['password']\n )\n\n connection_props = {\n 'auth_provider': auth_provider\n }\n\n if self.connection_args['protocol_version'] is not None:\n connection_props['protocol_version'] = self.connection_args['protocol_version']\n \n secure_connect_bundle = self.connection_args.get('secure_connect_bundle')\n\n if secure_connect_bundle is not None:\n if os.path.isfile(self.secure_connect_bundle) is False:\n raise Exception(\"Secure_connect_bundle' must be path to the file\")\n connection_props['cloud'] = {\n 'secure_connect_bundle': self.secure_connect_bundle\n }\n else:\n connection_props['contact_points'] = [self.connection_args['host']]\n connection_props['port'] = int(self.connection_args['port'])\n\n cluster = Cluster(**connection_props)\n session = 
cluster.connect(self.connection_args['keyspace'])\n\n self.is_connected = True\n self.session = session\n return self.session\n\n def check_connection(self) -> StatusResponse:\n \"\"\"\n Check the connection of the Scylla database\n :return: success status and error message if error occurs\n \"\"\"\n response = StatusResponse(False)\n\n try:\n session = self.connect()\n # TODO: change the healthcheck\n session.execute('SELECT release_version FROM system.local').one()\n response.success = True\n except Exception as e:\n log.error(f'Error connecting to Scylla {self.connection_args[\"keyspace\"]}, {e}!')\n response.error_message = e\n\n if response.success is False and self.is_connected is True:\n self.is_connected = False\n\n return response\n\n def native_query(self, query: str) -> Response:\n \"\"\"\n Receive SQL query and runs it\n :param query: The SQL query to run in MySQL\n :return: returns the records from the current recordset\n \"\"\"\n session = self.connect()\n try:\n resp = session.execute(query).all()\n if resp:\n response = Response(\n RESPONSE_TYPE.TABLE,\n pd.DataFrame(\n resp\n )\n )\n else:\n response = Response(RESPONSE_TYPE.OK)\n except Exception as e:\n log.error(f'Error running query: {query} on {self.connection_args[\"keyspace\"]}!')\n response = Response(\n RESPONSE_TYPE.ERROR,\n error_message=str(e)\n )\n return response\n\n def query(self, query: ASTNode) -> Response:\n \"\"\"\n Retrieve the data from the SQL statement.\n \"\"\"\n renderer = SqlalchemyRender('mysql')\n query_str = renderer.get_string(query, with_failback=True)\n return self.native_query(query_str)\n\n def get_tables(self) -> Response:\n \"\"\"\n Get a list with all of the tabels in MySQL\n \"\"\"\n q = \"DESCRIBE TABLES;\"\n result = self.native_query(q)\n df = result.data_frame\n result.data_frame = df.rename(columns={df.columns[0]: 'table_name'})\n return result\n\n def get_columns(self, table_name) -> Response:\n \"\"\"\n Show details about the table\n \"\"\"\n q = f\"DESCRIBE {table_name};\"\n result = self.native_query(q)\n return result\n", "path": "mindsdb/integrations/handlers/scylla_handler/scylla_handler.py"}]} | 1,834 | 351 |
gh_patches_debug_11687 | rasdani/github-patches | git_diff | pypa__setuptools-2907 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
`distutils` submodules being loaded from the stdlib
It seems the issue is that `distutils.sysconfig` is being loaded from the stdlib, even though [the distutils hack has an explicit check that submodules are loaded from the locally-bundled copy](https://github.com/pypa/setuptools/blob/dd5a2cec373ffe7eefc087c1cd06fb4e491a7e88/_distutils_hack/__init__.py#L55-L57).
_Originally posted by @jaraco in https://github.com/pypa/distutils/issues/16#issuecomment-980043534_
</issue>
<code>
[start of _distutils_hack/__init__.py]
1 import sys
2 import os
3 import re
4 import importlib
5 import warnings
6
7
8 is_pypy = '__pypy__' in sys.builtin_module_names
9
10
11 warnings.filterwarnings('ignore',
12 r'.+ distutils\b.+ deprecated',
13 DeprecationWarning)
14
15
16 def warn_distutils_present():
17 if 'distutils' not in sys.modules:
18 return
19 if is_pypy and sys.version_info < (3, 7):
20 # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
21 # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
22 return
23 warnings.warn(
24 "Distutils was imported before Setuptools, but importing Setuptools "
25 "also replaces the `distutils` module in `sys.modules`. This may lead "
26 "to undesirable behaviors or errors. To avoid these issues, avoid "
27 "using distutils directly, ensure that setuptools is installed in the "
28 "traditional way (e.g. not an editable install), and/or make sure "
29 "that setuptools is always imported before distutils.")
30
31
32 def clear_distutils():
33 if 'distutils' not in sys.modules:
34 return
35 warnings.warn("Setuptools is replacing distutils.")
36 mods = [name for name in sys.modules if re.match(r'distutils\b', name)]
37 for name in mods:
38 del sys.modules[name]
39
40
41 def enabled():
42 """
43 Allow selection of distutils by environment variable.
44 """
45 which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')
46 return which == 'local'
47
48
49 def ensure_local_distutils():
50 clear_distutils()
51 distutils = importlib.import_module('setuptools._distutils')
52 distutils.__name__ = 'distutils'
53 sys.modules['distutils'] = distutils
54
55 # sanity check that submodules load as expected
56 core = importlib.import_module('distutils.core')
57 assert '_distutils' in core.__file__, core.__file__
58
59
60 def do_override():
61 """
62 Ensure that the local copy of distutils is preferred over stdlib.
63
64 See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
65 for more motivation.
66 """
67 if enabled():
68 warn_distutils_present()
69 ensure_local_distutils()
70
71
72 class DistutilsMetaFinder:
73 def find_spec(self, fullname, path, target=None):
74 if path is not None:
75 return
76
77 method_name = 'spec_for_{fullname}'.format(**locals())
78 method = getattr(self, method_name, lambda: None)
79 return method()
80
81 def spec_for_distutils(self):
82 import importlib.abc
83 import importlib.util
84
85 class DistutilsLoader(importlib.abc.Loader):
86
87 def create_module(self, spec):
88 return importlib.import_module('setuptools._distutils')
89
90 def exec_module(self, module):
91 pass
92
93 return importlib.util.spec_from_loader('distutils', DistutilsLoader())
94
95 def spec_for_pip(self):
96 """
97 Ensure stdlib distutils when running under pip.
98 See pypa/pip#8761 for rationale.
99 """
100 if self.pip_imported_during_build():
101 return
102 clear_distutils()
103 self.spec_for_distutils = lambda: None
104
105 @staticmethod
106 def pip_imported_during_build():
107 """
108 Detect if pip is being imported in a build script. Ref #2355.
109 """
110 import traceback
111 return any(
112 frame.f_globals['__file__'].endswith('setup.py')
113 for frame, line in traceback.walk_stack(None)
114 )
115
116
117 DISTUTILS_FINDER = DistutilsMetaFinder()
118
119
120 def add_shim():
121 sys.meta_path.insert(0, DISTUTILS_FINDER)
122
123
124 def remove_shim():
125 try:
126 sys.meta_path.remove(DISTUTILS_FINDER)
127 except ValueError:
128 pass
129
[end of _distutils_hack/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
--- a/_distutils_hack/__init__.py
+++ b/_distutils_hack/__init__.py
@@ -48,11 +48,15 @@
def ensure_local_distutils():
clear_distutils()
- distutils = importlib.import_module('setuptools._distutils')
- distutils.__name__ = 'distutils'
- sys.modules['distutils'] = distutils
- # sanity check that submodules load as expected
+ # With the DistutilsMetaFinder in place,
+ # perform an import to cause distutils to be
+ # loaded from setuptools._distutils. Ref #2906.
+ add_shim()
+ importlib.import_module('distutils')
+ remove_shim()
+
+ # check that submodules load as expected
core = importlib.import_module('distutils.core')
assert '_distutils' in core.__file__, core.__file__
| {"golden_diff": "diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py\n--- a/_distutils_hack/__init__.py\n+++ b/_distutils_hack/__init__.py\n@@ -48,11 +48,15 @@\n \n def ensure_local_distutils():\n clear_distutils()\n- distutils = importlib.import_module('setuptools._distutils')\n- distutils.__name__ = 'distutils'\n- sys.modules['distutils'] = distutils\n \n- # sanity check that submodules load as expected\n+ # With the DistutilsMetaFinder in place,\n+ # perform an import to cause distutils to be\n+ # loaded from setuptools._distutils. Ref #2906.\n+ add_shim()\n+ importlib.import_module('distutils')\n+ remove_shim()\n+\n+ # check that submodules load as expected\n core = importlib.import_module('distutils.core')\n assert '_distutils' in core.__file__, core.__file__\n", "issue": "`distutils` submodules being loaded from the stdlib\nIt seems the issue is that `distutils.sysconfig` is being loaded from the stdlib, even though [the distutils hack has an explicit check that submodules are loaded from the locally-bundled copy](https://github.com/pypa/setuptools/blob/dd5a2cec373ffe7eefc087c1cd06fb4e491a7e88/_distutils_hack/__init__.py#L55-L57).\r\n\r\n_Originally posted by @jaraco in https://github.com/pypa/distutils/issues/16#issuecomment-980043534_\n`distutils` submodules being loaded from the stdlib\nIt seems the issue is that `distutils.sysconfig` is being loaded from the stdlib, even though [the distutils hack has an explicit check that submodules are loaded from the locally-bundled copy](https://github.com/pypa/setuptools/blob/dd5a2cec373ffe7eefc087c1cd06fb4e491a7e88/_distutils_hack/__init__.py#L55-L57).\r\n\r\n_Originally posted by @jaraco in https://github.com/pypa/distutils/issues/16#issuecomment-980043534_\n", "before_files": [{"content": "import sys\nimport os\nimport re\nimport importlib\nimport warnings\n\n\nis_pypy = '__pypy__' in sys.builtin_module_names\n\n\nwarnings.filterwarnings('ignore',\n r'.+ distutils\\b.+ deprecated',\n DeprecationWarning)\n\n\ndef warn_distutils_present():\n if 'distutils' not in sys.modules:\n return\n if is_pypy and sys.version_info < (3, 7):\n # PyPy for 3.6 unconditionally imports distutils, so bypass the warning\n # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250\n return\n warnings.warn(\n \"Distutils was imported before Setuptools, but importing Setuptools \"\n \"also replaces the `distutils` module in `sys.modules`. This may lead \"\n \"to undesirable behaviors or errors. To avoid these issues, avoid \"\n \"using distutils directly, ensure that setuptools is installed in the \"\n \"traditional way (e.g. 
not an editable install), and/or make sure \"\n \"that setuptools is always imported before distutils.\")\n\n\ndef clear_distutils():\n if 'distutils' not in sys.modules:\n return\n warnings.warn(\"Setuptools is replacing distutils.\")\n mods = [name for name in sys.modules if re.match(r'distutils\\b', name)]\n for name in mods:\n del sys.modules[name]\n\n\ndef enabled():\n \"\"\"\n Allow selection of distutils by environment variable.\n \"\"\"\n which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')\n return which == 'local'\n\n\ndef ensure_local_distutils():\n clear_distutils()\n distutils = importlib.import_module('setuptools._distutils')\n distutils.__name__ = 'distutils'\n sys.modules['distutils'] = distutils\n\n # sanity check that submodules load as expected\n core = importlib.import_module('distutils.core')\n assert '_distutils' in core.__file__, core.__file__\n\n\ndef do_override():\n \"\"\"\n Ensure that the local copy of distutils is preferred over stdlib.\n\n See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401\n for more motivation.\n \"\"\"\n if enabled():\n warn_distutils_present()\n ensure_local_distutils()\n\n\nclass DistutilsMetaFinder:\n def find_spec(self, fullname, path, target=None):\n if path is not None:\n return\n\n method_name = 'spec_for_{fullname}'.format(**locals())\n method = getattr(self, method_name, lambda: None)\n return method()\n\n def spec_for_distutils(self):\n import importlib.abc\n import importlib.util\n\n class DistutilsLoader(importlib.abc.Loader):\n\n def create_module(self, spec):\n return importlib.import_module('setuptools._distutils')\n\n def exec_module(self, module):\n pass\n\n return importlib.util.spec_from_loader('distutils', DistutilsLoader())\n\n def spec_for_pip(self):\n \"\"\"\n Ensure stdlib distutils when running under pip.\n See pypa/pip#8761 for rationale.\n \"\"\"\n if self.pip_imported_during_build():\n return\n clear_distutils()\n self.spec_for_distutils = lambda: None\n\n @staticmethod\n def pip_imported_during_build():\n \"\"\"\n Detect if pip is being imported in a build script. Ref #2355.\n \"\"\"\n import traceback\n return any(\n frame.f_globals['__file__'].endswith('setup.py')\n for frame, line in traceback.walk_stack(None)\n )\n\n\nDISTUTILS_FINDER = DistutilsMetaFinder()\n\n\ndef add_shim():\n sys.meta_path.insert(0, DISTUTILS_FINDER)\n\n\ndef remove_shim():\n try:\n sys.meta_path.remove(DISTUTILS_FINDER)\n except ValueError:\n pass\n", "path": "_distutils_hack/__init__.py"}]} | 2,039 | 230 |
gh_patches_debug_64121 | rasdani/github-patches | git_diff | plotly__dash-333 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
The README is in markdown and doesn't render properly on pypi.io
See: https://pypi.org/project/dash/
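(Editorial illustration, not part of the original report.) PyPI assumes reStructuredText for the long description unless a content type is declared, so a Markdown README needs to be flagged as such. A minimal sketch of the kind of `setup()` call involved, using a hypothetical package name:

```python
import io
from setuptools import setup

setup(
    name='example-package',    # hypothetical name, for illustration only
    version='0.0.1',
    long_description=io.open('README.md', encoding='utf-8').read(),
    # Declare the README as Markdown so pypi.org renders it instead of showing raw text.
    long_description_content_type='text/markdown',
)
```

The diff later in this entry makes the equivalent one-line change to the project's own `setup.py`.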
</issue>
<code>
[start of setup.py]
1 import io
2 from setuptools import setup, find_packages
3
4 main_ns = {}
5 exec(open('dash/version.py').read(), main_ns) # pylint: disable=exec-used
6
7 setup(
8 name='dash',
9 version=main_ns['__version__'],
10 author='chris p',
11 author_email='[email protected]',
12 packages=find_packages(exclude=['tests*']),
13 license='MIT',
14 description=('A Python framework for building reactive web-apps. '
15 'Developed by Plotly.'),
16 long_description=io.open('README.md', encoding='utf-8').read(),
17 install_requires=[
18 'Flask>=0.12',
19 'flask-compress',
20 'plotly',
21 'dash_renderer',
22 ],
23 url='https://plot.ly/dash',
24 classifiers=[
25 'Development Status :: 5 - Production/Stable',
26 'Environment :: Web Environment',
27 'Framework :: Flask',
28 'Intended Audience :: Developers',
29 'Intended Audience :: Education',
30 'Intended Audience :: Financial and Insurance Industry',
31 'Intended Audience :: Healthcare Industry',
32 'Intended Audience :: Manufacturing',
33 'Intended Audience :: Science/Research',
34 'License :: OSI Approved :: MIT License',
35 'Programming Language :: Python :: 2.7',
36 'Programming Language :: Python :: 3.3',
37 'Programming Language :: Python :: 3.4',
38 'Programming Language :: Python :: 3.5',
39 'Programming Language :: Python :: 3.6',
40 'Topic :: Database :: Front-Ends',
41 'Topic :: Office/Business :: Financial :: Spreadsheet',
42 'Topic :: Scientific/Engineering :: Visualization',
43 'Topic :: Software Development :: Libraries :: Application Frameworks',
44 'Topic :: Software Development :: Widget Sets'
45 ]
46 )
47
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -14,6 +14,7 @@
description=('A Python framework for building reactive web-apps. '
'Developed by Plotly.'),
long_description=io.open('README.md', encoding='utf-8').read(),
+ long_description_content_type='text/markdown',
install_requires=[
'Flask>=0.12',
'flask-compress',
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -14,6 +14,7 @@\n description=('A Python framework for building reactive web-apps. '\n 'Developed by Plotly.'),\n long_description=io.open('README.md', encoding='utf-8').read(),\n+ long_description_content_type='text/markdown',\n install_requires=[\n 'Flask>=0.12',\n 'flask-compress',\n", "issue": "The README is in markdown and doesn't render properly on pypi.io\nSee: https://pypi.org/project/dash/\r\n\n", "before_files": [{"content": "import io\nfrom setuptools import setup, find_packages\n\nmain_ns = {}\nexec(open('dash/version.py').read(), main_ns) # pylint: disable=exec-used\n\nsetup(\n name='dash',\n version=main_ns['__version__'],\n author='chris p',\n author_email='[email protected]',\n packages=find_packages(exclude=['tests*']),\n license='MIT',\n description=('A Python framework for building reactive web-apps. '\n 'Developed by Plotly.'),\n long_description=io.open('README.md', encoding='utf-8').read(),\n install_requires=[\n 'Flask>=0.12',\n 'flask-compress',\n 'plotly',\n 'dash_renderer',\n ],\n url='https://plot.ly/dash',\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Web Environment',\n 'Framework :: Flask',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Financial and Insurance Industry',\n 'Intended Audience :: Healthcare Industry',\n 'Intended Audience :: Manufacturing',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Topic :: Database :: Front-Ends',\n 'Topic :: Office/Business :: Financial :: Spreadsheet',\n 'Topic :: Scientific/Engineering :: Visualization',\n 'Topic :: Software Development :: Libraries :: Application Frameworks',\n 'Topic :: Software Development :: Widget Sets'\n ]\n)\n", "path": "setup.py"}]} | 1,023 | 105 |
gh_patches_debug_1331 | rasdani/github-patches | git_diff | litestar-org__litestar-1773 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
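(Editorial illustration, not part of the original report.) The behaviour being described can be reproduced with a small pydantic model: `DirectoryPath` validates that each directory exists on the local filesystem, while a plain `Path` field does not, which is what a virtual filesystem would need. The classes below are hypothetical stand-ins, not the actual `StaticFilesConfig`:

```python
from pathlib import Path
from typing import List

from pydantic import BaseModel, DirectoryPath


class StrictConfig(BaseModel):
    directories: List[DirectoryPath]   # pydantic checks that each directory exists locally


class RelaxedConfig(BaseModel):
    directories: List[Path]            # no existence check, so virtual paths are accepted


RelaxedConfig(directories=[Path("zipped_pkg/static")])    # fine even if missing on disk
# StrictConfig(directories=[Path("zipped_pkg/static")])   # would raise a ValidationError
```

Relaxing the annotation along these lines is what would let a `FileSystemProtocol` backed by package data or a zip archive be configured.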
</issue>
<code>
[start of litestar/dto/exceptions.py]
1 from __future__ import annotations
2
3 from litestar.exceptions import ImproperlyConfiguredException
4
5 __all__ = ("DTOException", "UnsupportedType")
6
7
8 class DTOException(ImproperlyConfiguredException):
9 """Base exception for DTO errors."""
10
11
12 class UnsupportedType(DTOException):
13 """Raised when a type is not supported by Litestar."""
14
[end of litestar/dto/exceptions.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/litestar/dto/exceptions.py b/litestar/dto/exceptions.py
deleted file mode 100644
--- a/litestar/dto/exceptions.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from __future__ import annotations
-
-from litestar.exceptions import ImproperlyConfiguredException
-
-__all__ = ("DTOException", "UnsupportedType")
-
-
-class DTOException(ImproperlyConfiguredException):
- """Base exception for DTO errors."""
-
-
-class UnsupportedType(DTOException):
- """Raised when a type is not supported by Litestar."""
| {"golden_diff": "diff --git a/litestar/dto/exceptions.py b/litestar/dto/exceptions.py\ndeleted file mode 100644\n--- a/litestar/dto/exceptions.py\n+++ /dev/null\n@@ -1,13 +0,0 @@\n-from __future__ import annotations\n-\n-from litestar.exceptions import ImproperlyConfiguredException\n-\n-__all__ = (\"DTOException\", \"UnsupportedType\")\n-\n-\n-class DTOException(ImproperlyConfiguredException):\n- \"\"\"Base exception for DTO errors.\"\"\"\n-\n-\n-class UnsupportedType(DTOException):\n- \"\"\"Raised when a type is not supported by Litestar.\"\"\"\n", "issue": "StaticFilesConfig and virtual directories\nI'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem. \r\n\r\nThis is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.\r\n\r\nhttps://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom litestar.exceptions import ImproperlyConfiguredException\n\n__all__ = (\"DTOException\", \"UnsupportedType\")\n\n\nclass DTOException(ImproperlyConfiguredException):\n \"\"\"Base exception for DTO errors.\"\"\"\n\n\nclass UnsupportedType(DTOException):\n \"\"\"Raised when a type is not supported by Litestar.\"\"\"\n", "path": "litestar/dto/exceptions.py"}]} | 802 | 140 |
gh_patches_debug_47653 | rasdani/github-patches | git_diff | DataBiosphere__toil-4528 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
WES ignores host in production
When trying to run `toil server --host 0.0.0.0`, I noticed that it would always only listen on `127.0.0.1` no matter what `--host` is set to but running with `--debug` didn't have this problem.
```
❯ toil server --host 0.0.0.0
...
[2022-11-11 16:50:46 +0000] [7173] [INFO] Starting gunicorn 20.1.0
[2022-11-11 16:50:46 +0000] [7173] [INFO] Listening at: http://127.0.0.1:8000
...
```
vs
```
❯ toil server --host 0.0.0.0 --debug
...
INFO:werkzeug:WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead.
* Running on all addresses (0.0.0.0)
* Running on http://127.0.0.1:8080
...
```
I tracked the problem down to [this line](https://github.com/DataBiosphere/toil/blob/master/src/toil/server/wsgi_app.py#L44). It appears to be overwriting the settings taken from the command line with Gunicorn's defaults before checking to see if anything has been set, which `bind` won't be, as it's been set to `None` in the merge.
Swapping the dictionaries around seems to have fixed it.
```python
for key, value in {**vars(env_args), **self.options}.items():
```
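(Editorial illustration, not part of the original report.) The precedence problem is plain dictionary merge order; a self-contained sketch with made-up option values shows why the command-line options have to come last:

```python
# In a {**a, **b} merge, keys from b win.
env_defaults = {"bind": None, "workers": 1}     # stand-in for options parsed from the environment
cli_options = {"bind": "0.0.0.0:8000"}          # stand-in for what --host/--port produced

broken = {**cli_options, **env_defaults}   # {'bind': None, 'workers': 1} -> the host flag is lost
fixed = {**env_defaults, **cli_options}    # {'bind': '0.0.0.0:8000', 'workers': 1}

for key, value in fixed.items():
    if value is not None:                  # mirrors the `value is not None` guard in load_config
        print(f"would set {key} = {value}")
```

With the broken order, `bind` is already `None` when the guard runs, so the listen address silently falls back to Gunicorn's default.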
┆Issue is synchronized with this [Jira Story](https://ucsc-cgl.atlassian.net/browse/TOIL-1242)
┆Issue Number: TOIL-1242
</issue>
<code>
[start of src/toil/server/wsgi_app.py]
1 # Copyright (C) 2015-2021 Regents of the University of California
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 from typing import Any, Dict, Optional
15
16 from gunicorn.app.base import BaseApplication # type: ignore
17
18
19 class GunicornApplication(BaseApplication): # type: ignore
20 """
21 An entry point to integrate a Gunicorn WSGI server in Python. To start a
22 WSGI application with callable `app`, run the following code:
23
24 WSGIApplication(app, options={
25 ...
26 }).run()
27
28 For more details, see: https://docs.gunicorn.org/en/latest/custom.html
29 """
30 def __init__(self, app: object, options: Optional[Dict[str, Any]] = None):
31 self.options = options or {}
32 self.application = app
33 super().__init__()
34
35 def init(self, *args: Any) -> None:
36 pass
37
38 def load_config(self) -> None:
39 parser = self.cfg.parser()
40 env_args = parser.parse_args(self.cfg.get_cmd_args_from_env())
41
42 # TODO: also read from the Gunicorn config file?
43
44 for key, value in {**self.options, **vars(env_args)}.items():
45 if key in self.cfg.settings and value is not None:
46 self.cfg.set(key.lower(), value)
47
48 def load(self) -> object:
49 return self.application
50
51
52 def run_app(app: object, options: Optional[Dict[str, Any]] = None) -> None:
53 """
54 Run a Gunicorn WSGI server.
55 """
56 GunicornApplication(app, options=options).run()
57
[end of src/toil/server/wsgi_app.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/toil/server/wsgi_app.py b/src/toil/server/wsgi_app.py
--- a/src/toil/server/wsgi_app.py
+++ b/src/toil/server/wsgi_app.py
@@ -41,7 +41,7 @@
# TODO: also read from the Gunicorn config file?
- for key, value in {**self.options, **vars(env_args)}.items():
+ for key, value in {**vars(env_args), **self.options}.items():
if key in self.cfg.settings and value is not None:
self.cfg.set(key.lower(), value)
| {"golden_diff": "diff --git a/src/toil/server/wsgi_app.py b/src/toil/server/wsgi_app.py\n--- a/src/toil/server/wsgi_app.py\n+++ b/src/toil/server/wsgi_app.py\n@@ -41,7 +41,7 @@\n \n # TODO: also read from the Gunicorn config file?\n \n- for key, value in {**self.options, **vars(env_args)}.items():\n+ for key, value in {**vars(env_args), **self.options}.items():\n if key in self.cfg.settings and value is not None:\n self.cfg.set(key.lower(), value)\n", "issue": "WES ignores host in production\nWhen trying to run `toil server --host 0.0.0.0`, I noticed that it would always only listen on `127.0.0.1` no matter what `--host` is set to but running with `--debug` didn't have this problem.\n\n```\n\u276f toil server --host 0.0.0.0\n...\n[2022-11-11 16:50:46 +0000] [7173] [INFO] Starting gunicorn 20.1.0\n[2022-11-11 16:50:46 +0000] [7173] [INFO] Listening at: http://127.0.0.1:8000\n...\n```\nvs\n```\n\u276f toil server --host 0.0.0.0 --debug\n...\nINFO:werkzeug:WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead.\n * Running on all addresses (0.0.0.0)\n * Running on http://127.0.0.1:8080\n...\n```\n\nI tracked the problem down to [this line](https://github.com/DataBiosphere/toil/blob/master/src/toil/server/wsgi_app.py#L44). It appears to be overwriting the settings taken from the command line with Gunicorn's defaults before checking to see if anything has been set which `bind` won't be as it's been set to `None` in the merge.\n\nSwapping the dictionaries around seems to have fixed it.\n```python\n for key, value in {**vars(env_args), **self.options}.items():\n```\n\n\u2506Issue is synchronized with this [Jira Story](https://ucsc-cgl.atlassian.net/browse/TOIL-1242)\n\u2506Issue Number: TOIL-1242\n\n", "before_files": [{"content": "# Copyright (C) 2015-2021 Regents of the University of California\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom typing import Any, Dict, Optional\n\nfrom gunicorn.app.base import BaseApplication # type: ignore\n\n\nclass GunicornApplication(BaseApplication): # type: ignore\n \"\"\"\n An entry point to integrate a Gunicorn WSGI server in Python. 
To start a\n WSGI application with callable `app`, run the following code:\n\n WSGIApplication(app, options={\n ...\n }).run()\n\n For more details, see: https://docs.gunicorn.org/en/latest/custom.html\n \"\"\"\n def __init__(self, app: object, options: Optional[Dict[str, Any]] = None):\n self.options = options or {}\n self.application = app\n super().__init__()\n\n def init(self, *args: Any) -> None:\n pass\n\n def load_config(self) -> None:\n parser = self.cfg.parser()\n env_args = parser.parse_args(self.cfg.get_cmd_args_from_env())\n\n # TODO: also read from the Gunicorn config file?\n\n for key, value in {**self.options, **vars(env_args)}.items():\n if key in self.cfg.settings and value is not None:\n self.cfg.set(key.lower(), value)\n\n def load(self) -> object:\n return self.application\n\n\ndef run_app(app: object, options: Optional[Dict[str, Any]] = None) -> None:\n \"\"\"\n Run a Gunicorn WSGI server.\n \"\"\"\n GunicornApplication(app, options=options).run()\n", "path": "src/toil/server/wsgi_app.py"}]} | 1,551 | 132 |
gh_patches_debug_13796 | rasdani/github-patches | git_diff | Mailu__Mailu-1874 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Weblate instance is down
I tried accessing the Weblate instance to potentially add another language, but it appears to be down.
</issue>
<code>
[start of setup/server.py]
1 import flask
2 import flask_bootstrap
3 import redis
4 import json
5 import os
6 import jinja2
7 import uuid
8 import string
9 import random
10 import ipaddress
11 import hashlib
12 import time
13
14
15 version = os.getenv("this_version", "master")
16 static_url_path = "/" + version + "/static"
17 app = flask.Flask(__name__, static_url_path=static_url_path)
18 flask_bootstrap.Bootstrap(app)
19 db = redis.StrictRedis(host='redis', port=6379, db=0)
20
21
22 def render_flavor(flavor, template, data):
23 return flask.render_template(
24 os.path.join(flavor, template),
25 **data
26 )
27
28
29 @app.add_template_global
30 def secret(length=16):
31 charset = string.ascii_uppercase + string.digits
32 return ''.join(
33 random.SystemRandom().choice(charset)
34 for _ in range(length)
35 )
36
37 #Original copied from https://github.com/andrewlkho/ulagen
38 def random_ipv6_subnet():
39 eui64 = uuid.getnode() >> 24 << 48 | 0xfffe000000 | uuid.getnode() & 0xffffff
40 eui64_canon = "-".join([format(eui64, "02X")[i:i+2] for i in range(0, 18, 2)])
41
42 h = hashlib.sha1()
43 h.update((eui64_canon + str(time.time() - time.mktime((1900, 1, 1, 0, 0, 0, 0, 1, -1)))).encode('utf-8'))
44 globalid = h.hexdigest()[0:10]
45
46 prefix = ":".join(("fd" + globalid[0:2], globalid[2:6], globalid[6:10]))
47 return prefix
48
49 def build_app(path):
50
51 app.jinja_env.trim_blocks = True
52 app.jinja_env.lstrip_blocks = True
53
54 @app.context_processor
55 def app_context():
56 return dict(
57 versions=os.getenv("VERSIONS","master").split(','),
58 stable_version = os.getenv("stable_version", "master")
59 )
60
61 prefix_bp = flask.Blueprint(version, __name__)
62 prefix_bp.jinja_loader = jinja2.ChoiceLoader([
63 jinja2.FileSystemLoader(os.path.join(path, "templates")),
64 jinja2.FileSystemLoader(os.path.join(path, "flavors"))
65 ])
66
67 root_bp = flask.Blueprint("root", __name__)
68 root_bp.jinja_loader = jinja2.ChoiceLoader([
69 jinja2.FileSystemLoader(os.path.join(path, "templates")),
70 jinja2.FileSystemLoader(os.path.join(path, "flavors"))
71 ])
72
73 @prefix_bp.context_processor
74 @root_bp.context_processor
75 def bp_context(version=version):
76 return dict(version=version)
77
78 @prefix_bp.route("/")
79 @root_bp.route("/")
80 def wizard():
81 return flask.render_template('wizard.html')
82
83 @prefix_bp.route("/submit_flavor", methods=["POST"])
84 @root_bp.route("/submit_flavor", methods=["POST"])
85 def submit_flavor():
86 data = flask.request.form.copy()
87 subnet6 = random_ipv6_subnet()
88 steps = sorted(os.listdir(os.path.join(path, "templates", "steps", data["flavor"])))
89 return flask.render_template('wizard.html', flavor=data["flavor"], steps=steps, subnet6=subnet6)
90
91 @prefix_bp.route("/submit", methods=["POST"])
92 @root_bp.route("/submit", methods=["POST"])
93 def submit():
94 data = flask.request.form.copy()
95 data['uid'] = str(uuid.uuid4())
96 try:
97 data['dns'] = str(ipaddress.IPv4Network(data['subnet'], strict=False)[-2])
98 except ValueError as err:
99 return "Error while generating files: " + str(err)
100 db.set(data['uid'], json.dumps(data))
101 return flask.redirect(flask.url_for('.setup', uid=data['uid']))
102
103 @prefix_bp.route("/setup/<uid>", methods=["GET"])
104 @root_bp.route("/setup/<uid>", methods=["GET"])
105 def setup(uid):
106 data = json.loads(db.get(uid))
107 flavor = data.get("flavor", "compose")
108 rendered = render_flavor(flavor, "setup.html", data)
109 return flask.render_template("setup.html", contents=rendered)
110
111 @prefix_bp.route("/file/<uid>/<filepath>", methods=["GET"])
112 @root_bp.route("/file/<uid>/<filepath>", methods=["GET"])
113 def file(uid, filepath):
114 data = json.loads(db.get(uid))
115 flavor = data.get("flavor", "compose")
116 return flask.Response(
117 render_flavor(flavor, filepath, data),
118 mimetype="application/text"
119 )
120
121 app.register_blueprint(prefix_bp, url_prefix="/{}".format(version))
122 app.register_blueprint(root_bp)
123
124
125 if __name__ == "__main__":
126 build_app("/tmp/mailutest")
127 app.run(debug=True)
128
[end of setup/server.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup/server.py b/setup/server.py
--- a/setup/server.py
+++ b/setup/server.py
@@ -54,11 +54,11 @@
@app.context_processor
def app_context():
return dict(
- versions=os.getenv("VERSIONS","master").split(','),
+ versions=os.getenv("VERSIONS","master").split(','),
stable_version = os.getenv("stable_version", "master")
)
- prefix_bp = flask.Blueprint(version, __name__)
+ prefix_bp = flask.Blueprint(version.replace(".", "_"), __name__)
prefix_bp.jinja_loader = jinja2.ChoiceLoader([
jinja2.FileSystemLoader(os.path.join(path, "templates")),
jinja2.FileSystemLoader(os.path.join(path, "flavors"))
| {"golden_diff": "diff --git a/setup/server.py b/setup/server.py\n--- a/setup/server.py\n+++ b/setup/server.py\n@@ -54,11 +54,11 @@\n @app.context_processor\n def app_context():\n return dict(\n- versions=os.getenv(\"VERSIONS\",\"master\").split(','), \n+ versions=os.getenv(\"VERSIONS\",\"master\").split(','),\n stable_version = os.getenv(\"stable_version\", \"master\")\n )\n \n- prefix_bp = flask.Blueprint(version, __name__)\n+ prefix_bp = flask.Blueprint(version.replace(\".\", \"_\"), __name__)\n prefix_bp.jinja_loader = jinja2.ChoiceLoader([\n jinja2.FileSystemLoader(os.path.join(path, \"templates\")),\n jinja2.FileSystemLoader(os.path.join(path, \"flavors\"))\n", "issue": "Weblate instance is down\nI tried accessing the Weblate instance and potentially add another language but it looks down.\n", "before_files": [{"content": "import flask\nimport flask_bootstrap\nimport redis\nimport json\nimport os\nimport jinja2\nimport uuid\nimport string\nimport random\nimport ipaddress\nimport hashlib\nimport time\n\n\nversion = os.getenv(\"this_version\", \"master\")\nstatic_url_path = \"/\" + version + \"/static\"\napp = flask.Flask(__name__, static_url_path=static_url_path)\nflask_bootstrap.Bootstrap(app)\ndb = redis.StrictRedis(host='redis', port=6379, db=0)\n\n\ndef render_flavor(flavor, template, data):\n return flask.render_template(\n os.path.join(flavor, template),\n **data\n )\n\n\[email protected]_template_global\ndef secret(length=16):\n charset = string.ascii_uppercase + string.digits\n return ''.join(\n random.SystemRandom().choice(charset)\n for _ in range(length)\n )\n\n#Original copied from https://github.com/andrewlkho/ulagen\ndef random_ipv6_subnet():\n eui64 = uuid.getnode() >> 24 << 48 | 0xfffe000000 | uuid.getnode() & 0xffffff\n eui64_canon = \"-\".join([format(eui64, \"02X\")[i:i+2] for i in range(0, 18, 2)])\n\n h = hashlib.sha1()\n h.update((eui64_canon + str(time.time() - time.mktime((1900, 1, 1, 0, 0, 0, 0, 1, -1)))).encode('utf-8'))\n globalid = h.hexdigest()[0:10]\n\n prefix = \":\".join((\"fd\" + globalid[0:2], globalid[2:6], globalid[6:10]))\n return prefix\n\ndef build_app(path):\n\n app.jinja_env.trim_blocks = True\n app.jinja_env.lstrip_blocks = True\n\n @app.context_processor\n def app_context():\n return dict(\n versions=os.getenv(\"VERSIONS\",\"master\").split(','), \n stable_version = os.getenv(\"stable_version\", \"master\")\n )\n\n prefix_bp = flask.Blueprint(version, __name__)\n prefix_bp.jinja_loader = jinja2.ChoiceLoader([\n jinja2.FileSystemLoader(os.path.join(path, \"templates\")),\n jinja2.FileSystemLoader(os.path.join(path, \"flavors\"))\n ])\n\n root_bp = flask.Blueprint(\"root\", __name__)\n root_bp.jinja_loader = jinja2.ChoiceLoader([\n jinja2.FileSystemLoader(os.path.join(path, \"templates\")),\n jinja2.FileSystemLoader(os.path.join(path, \"flavors\"))\n ])\n\n @prefix_bp.context_processor\n @root_bp.context_processor\n def bp_context(version=version):\n return dict(version=version)\n\n @prefix_bp.route(\"/\")\n @root_bp.route(\"/\")\n def wizard():\n return flask.render_template('wizard.html')\n\n @prefix_bp.route(\"/submit_flavor\", methods=[\"POST\"])\n @root_bp.route(\"/submit_flavor\", methods=[\"POST\"])\n def submit_flavor():\n data = flask.request.form.copy()\n subnet6 = random_ipv6_subnet()\n steps = sorted(os.listdir(os.path.join(path, \"templates\", \"steps\", data[\"flavor\"])))\n return flask.render_template('wizard.html', flavor=data[\"flavor\"], steps=steps, subnet6=subnet6)\n\n @prefix_bp.route(\"/submit\", methods=[\"POST\"])\n 
@root_bp.route(\"/submit\", methods=[\"POST\"])\n def submit():\n data = flask.request.form.copy()\n data['uid'] = str(uuid.uuid4())\n try:\n data['dns'] = str(ipaddress.IPv4Network(data['subnet'], strict=False)[-2])\n except ValueError as err:\n return \"Error while generating files: \" + str(err)\n db.set(data['uid'], json.dumps(data))\n return flask.redirect(flask.url_for('.setup', uid=data['uid']))\n\n @prefix_bp.route(\"/setup/<uid>\", methods=[\"GET\"])\n @root_bp.route(\"/setup/<uid>\", methods=[\"GET\"])\n def setup(uid):\n data = json.loads(db.get(uid))\n flavor = data.get(\"flavor\", \"compose\")\n rendered = render_flavor(flavor, \"setup.html\", data)\n return flask.render_template(\"setup.html\", contents=rendered)\n\n @prefix_bp.route(\"/file/<uid>/<filepath>\", methods=[\"GET\"])\n @root_bp.route(\"/file/<uid>/<filepath>\", methods=[\"GET\"])\n def file(uid, filepath):\n data = json.loads(db.get(uid))\n flavor = data.get(\"flavor\", \"compose\")\n return flask.Response(\n render_flavor(flavor, filepath, data),\n mimetype=\"application/text\"\n )\n\n app.register_blueprint(prefix_bp, url_prefix=\"/{}\".format(version))\n app.register_blueprint(root_bp)\n\n\nif __name__ == \"__main__\":\n build_app(\"/tmp/mailutest\")\n app.run(debug=True)\n", "path": "setup/server.py"}]} | 1,923 | 171 |
gh_patches_debug_6027 | rasdani/github-patches | git_diff | twisted__twisted-12103 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Avoid encode/decode in chat.py for better readability
As discussed in [this comment](https://github.com/twisted/twisted/pull/12070#discussion_r1442784443), it's better to use byte concatenation, as mentioned there, for better readability in [docs/core/howto/listings/servers/chat.py:35](https://github.com/twisted/twisted/pull/12070/files/c59c93ec644a17e0f3a1752ca9ceca31a27a9f5e#diff-0923ff3db530a2e5d28ea8cc2b3a8f91f399792786772c541bf9edf7a0c50126)
```python
message = b'<' + self.name + b'> ' + message
```
</issue>
<code>
[start of docs/core/howto/listings/servers/chat.py]
1 from twisted.internet import reactor
2 from twisted.internet.protocol import Factory
3 from twisted.protocols.basic import LineReceiver
4
5
6 class Chat(LineReceiver):
7 def __init__(self, users):
8 self.users = users
9 self.name = None
10 self.state = "GETNAME"
11
12 def connectionMade(self):
13 self.sendLine(b"What's your name?")
14
15 def connectionLost(self, reason):
16 if self.name in self.users:
17 del self.users[self.name]
18
19 def lineReceived(self, line):
20 if self.state == "GETNAME":
21 self.handle_GETNAME(line)
22 else:
23 self.handle_CHAT(line)
24
25 def handle_GETNAME(self, name):
26 if name in self.users:
27 self.sendLine(b"Name taken, please choose another.")
28 return
29 self.sendLine(f"Welcome, {name.decode('utf-8')}!".encode("utf-8"))
30 self.name = name
31 self.users[name] = self
32 self.state = "CHAT"
33
34 def handle_CHAT(self, message):
35 message = f"<{self.name.decode('utf-8')}> {message.decode('utf-8')}".encode(
36 "utf-8"
37 )
38 for name, protocol in self.users.items():
39 if protocol != self:
40 protocol.sendLine(message)
41
42
43 class ChatFactory(Factory):
44 def __init__(self):
45 self.users = {} # maps user names to Chat instances
46
47 def buildProtocol(self, addr):
48 return Chat(self.users)
49
50
51 reactor.listenTCP(8123, ChatFactory())
52 reactor.run()
53
[end of docs/core/howto/listings/servers/chat.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/docs/core/howto/listings/servers/chat.py b/docs/core/howto/listings/servers/chat.py
--- a/docs/core/howto/listings/servers/chat.py
+++ b/docs/core/howto/listings/servers/chat.py
@@ -32,9 +32,7 @@
self.state = "CHAT"
def handle_CHAT(self, message):
- message = f"<{self.name.decode('utf-8')}> {message.decode('utf-8')}".encode(
- "utf-8"
- )
+ message = b"<" + self.name + b"> " + message
for name, protocol in self.users.items():
if protocol != self:
protocol.sendLine(message)
| {"golden_diff": "diff --git a/docs/core/howto/listings/servers/chat.py b/docs/core/howto/listings/servers/chat.py\n--- a/docs/core/howto/listings/servers/chat.py\n+++ b/docs/core/howto/listings/servers/chat.py\n@@ -32,9 +32,7 @@\n self.state = \"CHAT\"\n \n def handle_CHAT(self, message):\n- message = f\"<{self.name.decode('utf-8')}> {message.decode('utf-8')}\".encode(\n- \"utf-8\"\n- )\n+ message = b\"<\" + self.name + b\"> \" + message\n for name, protocol in self.users.items():\n if protocol != self:\n protocol.sendLine(message)\n", "issue": "Avoid encode/decode in chat.py for better readablity\nAs discussed in [this comment](https://github.com/twisted/twisted/pull/12070#discussion_r1442784443), it's better to use byte concat as mentioned in order of better readability in [docs/core/howto/listings/servers/chat.py:35](https://github.com/twisted/twisted/pull/12070/files/c59c93ec644a17e0f3a1752ca9ceca31a27a9f5e#diff-0923ff3db530a2e5d28ea8cc2b3a8f91f399792786772c541bf9edf7a0c50126)\r\n```python\r\nmessage = b'<' + self.name + b'> ' + message\r\n```\n", "before_files": [{"content": "from twisted.internet import reactor\nfrom twisted.internet.protocol import Factory\nfrom twisted.protocols.basic import LineReceiver\n\n\nclass Chat(LineReceiver):\n def __init__(self, users):\n self.users = users\n self.name = None\n self.state = \"GETNAME\"\n\n def connectionMade(self):\n self.sendLine(b\"What's your name?\")\n\n def connectionLost(self, reason):\n if self.name in self.users:\n del self.users[self.name]\n\n def lineReceived(self, line):\n if self.state == \"GETNAME\":\n self.handle_GETNAME(line)\n else:\n self.handle_CHAT(line)\n\n def handle_GETNAME(self, name):\n if name in self.users:\n self.sendLine(b\"Name taken, please choose another.\")\n return\n self.sendLine(f\"Welcome, {name.decode('utf-8')}!\".encode(\"utf-8\"))\n self.name = name\n self.users[name] = self\n self.state = \"CHAT\"\n\n def handle_CHAT(self, message):\n message = f\"<{self.name.decode('utf-8')}> {message.decode('utf-8')}\".encode(\n \"utf-8\"\n )\n for name, protocol in self.users.items():\n if protocol != self:\n protocol.sendLine(message)\n\n\nclass ChatFactory(Factory):\n def __init__(self):\n self.users = {} # maps user names to Chat instances\n\n def buildProtocol(self, addr):\n return Chat(self.users)\n\n\nreactor.listenTCP(8123, ChatFactory())\nreactor.run()\n", "path": "docs/core/howto/listings/servers/chat.py"}]} | 1,198 | 157 |
gh_patches_debug_548 | rasdani/github-patches | git_diff | Gallopsled__pwntools-532 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Bpython
Hi,
Unfortunately pwntools doesn't seem to work with bpython 0.12 in conjunction with Python 2.7.9.
from pwn import *
results in:
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/pwn/**init**.py", line 2, in <module>
from .toplevel import *
File "/usr/local/lib/python2.7/dist-packages/pwn/toplevel.py", line 2, in <module>
from pwnlib import *
File "/usr/local/lib/python2.7/dist-packages/pwnlib/**init**.py", line 10, in <module>
from . import \
File "/usr/local/lib/python2.7/dist-packages/pwnlib/asm.py", line 45, in <module>
from . import log
File "/usr/local/lib/python2.7/dist-packages/pwnlib/log.py", line 69, in <module>
from .term import spinners, text
File "/usr/local/lib/python2.7/dist-packages/pwnlib/term/**init**.py", line 1, in <module>
from . import key, readline, text, termcap, keymap, term
File "/usr/local/lib/python2.7/dist-packages/pwnlib/term/readline.py", line 2, in <module>
from . import term, text
File "/usr/local/lib/python2.7/dist-packages/pwnlib/term/text.py", line 111, in <module>
sys.modules[__name__] = Module()
File "/usr/local/lib/python2.7/dist-packages/pwnlib/term/text.py", line 22, in **init**
self.num_colors = termcap.get('colors', default = 8)
File "/usr/local/lib/python2.7/dist-packages/pwnlib/term/termcap.py", line 15, in get
init()
File "/usr/local/lib/python2.7/dist-packages/pwnlib/term/termcap.py", line 39, in init
curses.setupterm()
TypeError: argument must be an int, or have a fileno() method.
</issue>
<code>
[start of pwnlib/term/termcap.py]
1 __all__ = ['get']
2 import os, curses
3
4 cache = None
5 def get(cap, *args, **kwargs):
6 default = kwargs.pop('default', '')
7
8 if 'PWNLIB_NOTERM' in os.environ:
9 return ''
10
11 if kwargs != {}:
12 raise TypeError("get(): No such argument %r" % kwargs.popitem()[0])
13
14 if cache == None:
15 init()
16 s = cache.get(cap)
17 if not s:
18 s = curses.tigetstr(cap)
19 if s == None:
20 s = curses.tigetnum(cap)
21 if s == -2:
22 s = curses.tigetflag(cap)
23 if s == -1:
24 # default to empty string so tparm doesn't fail
25 s = ''
26 else:
27 s = bool(s)
28 cache[cap] = s
29 # if `s' is not set `curses.tparm' will throw an error if given arguments
30 if args and s:
31 return curses.tparm(s, *args)
32 else:
33 return s
34
35 def init():
36 global cache
37
38 if 'PWNLIB_NOTERM' not in os.environ:
39 curses.setupterm()
40
41 cache = {}
42
[end of pwnlib/term/termcap.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pwnlib/term/termcap.py b/pwnlib/term/termcap.py
--- a/pwnlib/term/termcap.py
+++ b/pwnlib/term/termcap.py
@@ -36,6 +36,10 @@
global cache
if 'PWNLIB_NOTERM' not in os.environ:
- curses.setupterm()
+ # Fix for BPython
+ try:
+ curses.setupterm()
+ except:
+ pass
cache = {}
| {"golden_diff": "diff --git a/pwnlib/term/termcap.py b/pwnlib/term/termcap.py\n--- a/pwnlib/term/termcap.py\n+++ b/pwnlib/term/termcap.py\n@@ -36,6 +36,10 @@\n global cache\n \n if 'PWNLIB_NOTERM' not in os.environ:\n- curses.setupterm()\n+ # Fix for BPython\n+ try:\n+ curses.setupterm()\n+ except:\n+ pass\n \n cache = {}\n", "issue": "Bpython\nHi,\n\nUnfortunately pwntools doesn't seem to work with bpython 0.12 in conjunction of python 2.7.9.\n\nfrom pwn import *\n\nresults in:\n\nTraceback (most recent call last):\n File \"<input>\", line 1, in <module>\n File \"/usr/local/lib/python2.7/dist-packages/pwn/**init**.py\", line 2, in <module>\n from .toplevel import *\n File \"/usr/local/lib/python2.7/dist-packages/pwn/toplevel.py\", line 2, in <module>\n from pwnlib import *\n File \"/usr/local/lib/python2.7/dist-packages/pwnlib/**init**.py\", line 10, in <module>\n from . import \\\n File \"/usr/local/lib/python2.7/dist-packages/pwnlib/asm.py\", line 45, in <module>\n from . import log\n File \"/usr/local/lib/python2.7/dist-packages/pwnlib/log.py\", line 69, in <module>\n from .term import spinners, text\n File \"/usr/local/lib/python2.7/dist-packages/pwnlib/term/**init**.py\", line 1, in <module>\n from . import key, readline, text, termcap, keymap, term\n File \"/usr/local/lib/python2.7/dist-packages/pwnlib/term/readline.py\", line 2, in <module>\n from . import term, text\n File \"/usr/local/lib/python2.7/dist-packages/pwnlib/term/text.py\", line 111, in <module>\n sys.modules[**name**] = Module()\n File \"/usr/local/lib/python2.7/dist-packages/pwnlib/term/text.py\", line 22, in **init**\n self.num_colors = termcap.get('colors', default = 8)\n File \"/usr/local/lib/python2.7/dist-packages/pwnlib/term/termcap.py\", line 15, in get\n init()\n File \"/usr/local/lib/python2.7/dist-packages/pwnlib/term/termcap.py\", line 39, in init\n curses.setupterm()\nTypeError: argument must be an int, or have a fileno() method.\n\n", "before_files": [{"content": "__all__ = ['get']\nimport os, curses\n\ncache = None\ndef get(cap, *args, **kwargs):\n default = kwargs.pop('default', '')\n\n if 'PWNLIB_NOTERM' in os.environ:\n return ''\n\n if kwargs != {}:\n raise TypeError(\"get(): No such argument %r\" % kwargs.popitem()[0])\n\n if cache == None:\n init()\n s = cache.get(cap)\n if not s:\n s = curses.tigetstr(cap)\n if s == None:\n s = curses.tigetnum(cap)\n if s == -2:\n s = curses.tigetflag(cap)\n if s == -1:\n # default to empty string so tparm doesn't fail\n s = ''\n else:\n s = bool(s)\n cache[cap] = s\n # if `s' is not set `curses.tparm' will throw an error if given arguments\n if args and s:\n return curses.tparm(s, *args)\n else:\n return s\n\ndef init():\n global cache\n\n if 'PWNLIB_NOTERM' not in os.environ:\n curses.setupterm()\n\n cache = {}\n", "path": "pwnlib/term/termcap.py"}]} | 1,379 | 118 |
gh_patches_debug_1990 | rasdani/github-patches | git_diff | akvo__akvo-rsr-2137 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Bug in project document category API
## Test plan
The project_document_category should not give an error. E.g. `http://rsr.localdev.akvo.org/rest/v1/project_document_category/` should load.
## Issue description
The project document category API gives an error. See http://sentry.support.akvo-ops.org/rsr/test/group/879/, or on the Test server: http://rsr.test.akvo.org/rest/v1/project_document_category/.
</issue>
<code>
[start of akvo/rest/views/project_document.py]
1 # -*- coding: utf-8 -*-
2
3 # Akvo RSR is covered by the GNU Affero General Public License.
4 # See more details in the license.txt file located at the root folder of the Akvo RSR module.
5 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
6
7
8 from akvo.rsr.models import ProjectDocument, ProjectDocumentCategory
9
10 from ..serializers import ProjectDocumentSerializer, ProjectDocumentCategorySerializer
11 from ..viewsets import PublicProjectViewSet
12
13
14 class ProjectDocumentViewSet(PublicProjectViewSet):
15 """
16 """
17 queryset = ProjectDocument.objects.all()
18 serializer_class = ProjectDocumentSerializer
19
20
21 class ProjectDocumentCategoryViewSet(PublicProjectViewSet):
22 """
23 """
24 queryset = ProjectDocumentCategory.objects.all()
25 serializer_class = ProjectDocumentCategorySerializer
26 filter_fields = ('document__project', 'document', 'category', )
27
[end of akvo/rest/views/project_document.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/akvo/rest/views/project_document.py b/akvo/rest/views/project_document.py
--- a/akvo/rest/views/project_document.py
+++ b/akvo/rest/views/project_document.py
@@ -24,3 +24,4 @@
queryset = ProjectDocumentCategory.objects.all()
serializer_class = ProjectDocumentCategorySerializer
filter_fields = ('document__project', 'document', 'category', )
+ project_relation = 'document__project__'
| {"golden_diff": "diff --git a/akvo/rest/views/project_document.py b/akvo/rest/views/project_document.py\n--- a/akvo/rest/views/project_document.py\n+++ b/akvo/rest/views/project_document.py\n@@ -24,3 +24,4 @@\n queryset = ProjectDocumentCategory.objects.all()\n serializer_class = ProjectDocumentCategorySerializer\n filter_fields = ('document__project', 'document', 'category', )\n+ project_relation = 'document__project__'\n", "issue": "Bug in project document category API\n## Test plan\n\nThe project_document_category should not give an error. E.g. `http://rsr.localdev.akvo.org/rest/v1/project_document_category/` should load.\n## Issue description\n\nThe project document category API gives an error. See http://sentry.support.akvo-ops.org/rsr/test/group/879/, or on the Test server: http://rsr.test.akvo.org/rest/v1/project_document_category/.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\n\nfrom akvo.rsr.models import ProjectDocument, ProjectDocumentCategory\n\nfrom ..serializers import ProjectDocumentSerializer, ProjectDocumentCategorySerializer\nfrom ..viewsets import PublicProjectViewSet\n\n\nclass ProjectDocumentViewSet(PublicProjectViewSet):\n \"\"\"\n \"\"\"\n queryset = ProjectDocument.objects.all()\n serializer_class = ProjectDocumentSerializer\n\n\nclass ProjectDocumentCategoryViewSet(PublicProjectViewSet):\n \"\"\"\n \"\"\"\n queryset = ProjectDocumentCategory.objects.all()\n serializer_class = ProjectDocumentCategorySerializer\n filter_fields = ('document__project', 'document', 'category', )\n", "path": "akvo/rest/views/project_document.py"}]} | 874 | 102 |
gh_patches_debug_7593 | rasdani/github-patches | git_diff | python-pillow__Pillow-1230 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Cannot identify XBM file created with filename including underscore
Pillow 2.8.1, Python 2.7.6 (Anaconda 2.2.0), Windows 7 64bit
When I create git_hub.xbm (with ImageMagick), the created file's header contains lines like this.
``` C
#define git_hub_width 32
#define git_hub_height 32
```
In XbmImagePlugin.py, the regular expression to extract the XBM header doesn't match a defined macro with more than two underscores like the one above. This causes an IOError.
``` python
# XBM header
xbm_head = re.compile(
b"\s*#define[ \t]+[^_]*_width[ \t]+(?P<width>[0-9]+)[\r\n]+"
b"#define[ \t]+[^_]*_height[ \t]+(?P<height>[0-9]+)[\r\n]+"
b"(?P<hotspot>"
b"#define[ \t]+[^_]*_x_hot[ \t]+(?P<xhot>[0-9]+)[\r\n]+"
b"#define[ \t]+[^_]*_y_hot[ \t]+(?P<yhot>[0-9]+)[\r\n]+"
b")?"
b"[\\000-\\377]*_bits\\[\\]"
)
```
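(Editorial illustration, not part of the original report.) The mismatch can be reproduced directly against the header shown above; allowing underscores in the name prefix, as the diff later in this entry does, makes it match:

```python
import re

header = (b"#define git_hub_width 32\n"
          b"#define git_hub_height 32\n"
          b"static char git_hub_bits[] = {\n")

strict = re.compile(rb"\s*#define[ \t]+[^_]*_width[ \t]+(?P<width>[0-9]+)")
relaxed = re.compile(rb"\s*#define[ \t]+.*_width[ \t]+(?P<width>[0-9]+)")

print(strict.match(header))     # None: [^_]* cannot cross the underscore inside 'git_hub'
print(relaxed.match(header))    # matches, with group('width') == b'32'
```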
</issue>
<code>
[start of PIL/XbmImagePlugin.py]
1 #
2 # The Python Imaging Library.
3 # $Id$
4 #
5 # XBM File handling
6 #
7 # History:
8 # 1995-09-08 fl Created
9 # 1996-11-01 fl Added save support
10 # 1997-07-07 fl Made header parser more tolerant
11 # 1997-07-22 fl Fixed yet another parser bug
12 # 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4)
13 # 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog)
14 # 2004-02-24 fl Allow some whitespace before first #define
15 #
16 # Copyright (c) 1997-2004 by Secret Labs AB
17 # Copyright (c) 1996-1997 by Fredrik Lundh
18 #
19 # See the README file for information on usage and redistribution.
20 #
21
22 __version__ = "0.6"
23
24 import re
25 from PIL import Image, ImageFile
26
27 # XBM header
28 xbm_head = re.compile(
29 b"\s*#define[ \t]+[^_]*_width[ \t]+(?P<width>[0-9]+)[\r\n]+"
30 b"#define[ \t]+[^_]*_height[ \t]+(?P<height>[0-9]+)[\r\n]+"
31 b"(?P<hotspot>"
32 b"#define[ \t]+[^_]*_x_hot[ \t]+(?P<xhot>[0-9]+)[\r\n]+"
33 b"#define[ \t]+[^_]*_y_hot[ \t]+(?P<yhot>[0-9]+)[\r\n]+"
34 b")?"
35 b"[\\000-\\377]*_bits\\[\\]"
36 )
37
38
39 def _accept(prefix):
40 return prefix.lstrip()[:7] == b"#define"
41
42
43 ##
44 # Image plugin for X11 bitmaps.
45
46 class XbmImageFile(ImageFile.ImageFile):
47
48 format = "XBM"
49 format_description = "X11 Bitmap"
50
51 def _open(self):
52
53 m = xbm_head.match(self.fp.read(512))
54
55 if m:
56
57 xsize = int(m.group("width"))
58 ysize = int(m.group("height"))
59
60 if m.group("hotspot"):
61 self.info["hotspot"] = (
62 int(m.group("xhot")), int(m.group("yhot"))
63 )
64
65 self.mode = "1"
66 self.size = xsize, ysize
67
68 self.tile = [("xbm", (0, 0)+self.size, m.end(), None)]
69
70
71 def _save(im, fp, filename):
72
73 if im.mode != "1":
74 raise IOError("cannot write mode %s as XBM" % im.mode)
75
76 fp.write(("#define im_width %d\n" % im.size[0]).encode('ascii'))
77 fp.write(("#define im_height %d\n" % im.size[1]).encode('ascii'))
78
79 hotspot = im.encoderinfo.get("hotspot")
80 if hotspot:
81 fp.write(("#define im_x_hot %d\n" % hotspot[0]).encode('ascii'))
82 fp.write(("#define im_y_hot %d\n" % hotspot[1]).encode('ascii'))
83
84 fp.write(b"static char im_bits[] = {\n")
85
86 ImageFile._save(im, fp, [("xbm", (0, 0)+im.size, 0, None)])
87
88 fp.write(b"};\n")
89
90
91 Image.register_open("XBM", XbmImageFile, _accept)
92 Image.register_save("XBM", _save)
93
94 Image.register_extension("XBM", ".xbm")
95
96 Image.register_mime("XBM", "image/xbm")
97
[end of PIL/XbmImagePlugin.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/PIL/XbmImagePlugin.py b/PIL/XbmImagePlugin.py
--- a/PIL/XbmImagePlugin.py
+++ b/PIL/XbmImagePlugin.py
@@ -26,8 +26,8 @@
# XBM header
xbm_head = re.compile(
- b"\s*#define[ \t]+[^_]*_width[ \t]+(?P<width>[0-9]+)[\r\n]+"
- b"#define[ \t]+[^_]*_height[ \t]+(?P<height>[0-9]+)[\r\n]+"
+ b"\s*#define[ \t]+.*_width[ \t]+(?P<width>[0-9]+)[\r\n]+"
+ b"#define[ \t]+.*_height[ \t]+(?P<height>[0-9]+)[\r\n]+"
b"(?P<hotspot>"
b"#define[ \t]+[^_]*_x_hot[ \t]+(?P<xhot>[0-9]+)[\r\n]+"
b"#define[ \t]+[^_]*_y_hot[ \t]+(?P<yhot>[0-9]+)[\r\n]+"
| {"golden_diff": "diff --git a/PIL/XbmImagePlugin.py b/PIL/XbmImagePlugin.py\n--- a/PIL/XbmImagePlugin.py\n+++ b/PIL/XbmImagePlugin.py\n@@ -26,8 +26,8 @@\n \n # XBM header\n xbm_head = re.compile(\n- b\"\\s*#define[ \\t]+[^_]*_width[ \\t]+(?P<width>[0-9]+)[\\r\\n]+\"\n- b\"#define[ \\t]+[^_]*_height[ \\t]+(?P<height>[0-9]+)[\\r\\n]+\"\n+ b\"\\s*#define[ \\t]+.*_width[ \\t]+(?P<width>[0-9]+)[\\r\\n]+\"\n+ b\"#define[ \\t]+.*_height[ \\t]+(?P<height>[0-9]+)[\\r\\n]+\"\n b\"(?P<hotspot>\"\n b\"#define[ \\t]+[^_]*_x_hot[ \\t]+(?P<xhot>[0-9]+)[\\r\\n]+\"\n b\"#define[ \\t]+[^_]*_y_hot[ \\t]+(?P<yhot>[0-9]+)[\\r\\n]+\"\n", "issue": "Cannot identify XBM file created with filename including underscore\nPillow 2.8.1, Python 2.7.6 (Anaconda 2.2.0), Windows 7 64bit\n\nWhen I create git_hub.xbm (with ImageMagick), created file's header contains lines like this.\n\n``` C\n#define git_hub_width 32\n#define git_hub_height 32\n```\n\nIn XbmImagePlugin.py, regular expression to extract XBM header doesn't match defined macro with more than two underscores like above.This causes an IOError.\n\n``` python\n# XBM header\nxbm_head = re.compile(\n b\"\\s*#define[ \\t]+[^_]*_width[ \\t]+(?P<width>[0-9]+)[\\r\\n]+\"\n b\"#define[ \\t]+[^_]*_height[ \\t]+(?P<height>[0-9]+)[\\r\\n]+\"\n b\"(?P<hotspot>\"\n b\"#define[ \\t]+[^_]*_x_hot[ \\t]+(?P<xhot>[0-9]+)[\\r\\n]+\"\n b\"#define[ \\t]+[^_]*_y_hot[ \\t]+(?P<yhot>[0-9]+)[\\r\\n]+\"\n b\")?\"\n b\"[\\\\000-\\\\377]*_bits\\\\[\\\\]\"\n)\n```\n\n", "before_files": [{"content": "#\n# The Python Imaging Library.\n# $Id$\n#\n# XBM File handling\n#\n# History:\n# 1995-09-08 fl Created\n# 1996-11-01 fl Added save support\n# 1997-07-07 fl Made header parser more tolerant\n# 1997-07-22 fl Fixed yet another parser bug\n# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4)\n# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog)\n# 2004-02-24 fl Allow some whitespace before first #define\n#\n# Copyright (c) 1997-2004 by Secret Labs AB\n# Copyright (c) 1996-1997 by Fredrik Lundh\n#\n# See the README file for information on usage and redistribution.\n#\n\n__version__ = \"0.6\"\n\nimport re\nfrom PIL import Image, ImageFile\n\n# XBM header\nxbm_head = re.compile(\n b\"\\s*#define[ \\t]+[^_]*_width[ \\t]+(?P<width>[0-9]+)[\\r\\n]+\"\n b\"#define[ \\t]+[^_]*_height[ \\t]+(?P<height>[0-9]+)[\\r\\n]+\"\n b\"(?P<hotspot>\"\n b\"#define[ \\t]+[^_]*_x_hot[ \\t]+(?P<xhot>[0-9]+)[\\r\\n]+\"\n b\"#define[ \\t]+[^_]*_y_hot[ \\t]+(?P<yhot>[0-9]+)[\\r\\n]+\"\n b\")?\"\n b\"[\\\\000-\\\\377]*_bits\\\\[\\\\]\"\n)\n\n\ndef _accept(prefix):\n return prefix.lstrip()[:7] == b\"#define\"\n\n\n##\n# Image plugin for X11 bitmaps.\n\nclass XbmImageFile(ImageFile.ImageFile):\n\n format = \"XBM\"\n format_description = \"X11 Bitmap\"\n\n def _open(self):\n\n m = xbm_head.match(self.fp.read(512))\n\n if m:\n\n xsize = int(m.group(\"width\"))\n ysize = int(m.group(\"height\"))\n\n if m.group(\"hotspot\"):\n self.info[\"hotspot\"] = (\n int(m.group(\"xhot\")), int(m.group(\"yhot\"))\n )\n\n self.mode = \"1\"\n self.size = xsize, ysize\n\n self.tile = [(\"xbm\", (0, 0)+self.size, m.end(), None)]\n\n\ndef _save(im, fp, filename):\n\n if im.mode != \"1\":\n raise IOError(\"cannot write mode %s as XBM\" % im.mode)\n\n fp.write((\"#define im_width %d\\n\" % im.size[0]).encode('ascii'))\n fp.write((\"#define im_height %d\\n\" % im.size[1]).encode('ascii'))\n\n hotspot = im.encoderinfo.get(\"hotspot\")\n if hotspot:\n fp.write((\"#define im_x_hot %d\\n\" % 
hotspot[0]).encode('ascii'))\n fp.write((\"#define im_y_hot %d\\n\" % hotspot[1]).encode('ascii'))\n\n fp.write(b\"static char im_bits[] = {\\n\")\n\n ImageFile._save(im, fp, [(\"xbm\", (0, 0)+im.size, 0, None)])\n\n fp.write(b\"};\\n\")\n\n\nImage.register_open(\"XBM\", XbmImageFile, _accept)\nImage.register_save(\"XBM\", _save)\n\nImage.register_extension(\"XBM\", \".xbm\")\n\nImage.register_mime(\"XBM\", \"image/xbm\")\n", "path": "PIL/XbmImagePlugin.py"}]} | 1,897 | 275 |
gh_patches_debug_28793 | rasdani/github-patches | git_diff | PaddlePaddle__models-123 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Modify inference script
</issue>
<code>
[start of hsigmoid/infer.py]
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 import os
4 import logging
5 import gzip
6
7 import paddle.v2 as paddle
8 from network_conf import ngram_lm
9
10 logger = logging.getLogger("paddle")
11 logger.setLevel(logging.WARNING)
12
13
14 def decode_res(infer_res, dict_size):
15 """
16 Inferring probabilities are orginized as a complete binary tree.
17 The actual labels are leaves (indices are counted from class number).
18 This function travels paths decoded from inferring results.
19 If the probability >0.5 then go to right child, otherwise go to left child.
20
21 param infer_res: inferring result
22 param dict_size: class number
23 return predict_lbls: actual class
24 """
25 predict_lbls = []
26 infer_res = infer_res > 0.5
27 for i, probs in enumerate(infer_res):
28 idx = 0
29 result = 1
30 while idx < len(probs):
31 result <<= 1
32 if probs[idx]:
33 result |= 1
34 if probs[idx]:
35 idx = idx * 2 + 2 # right child
36 else:
37 idx = idx * 2 + 1 # left child
38
39 predict_lbl = result - dict_size
40 predict_lbls.append(predict_lbl)
41 return predict_lbls
42
43
44 def predict(batch_ins, idx_word_dict, dict_size, prediction_layer, parameters):
45 infer_res = paddle.infer(
46 output_layer=prediction_layer, parameters=parameters, input=batch_ins)
47
48 predict_lbls = decode_res(infer_res, dict_size)
49 predict_words = [idx_word_dict[lbl] for lbl in predict_lbls] # map to word
50
51 # Ouput format: word1 word2 word3 word4 -> predict label
52 for i, ins in enumerate(batch_ins):
53 print(" ".join([idx_word_dict[w]
54 for w in ins]) + " -> " + predict_words[i])
55
56
57 def main(model_path):
58 assert os.path.exists(model_path), "trained model does not exist."
59
60 paddle.init(use_gpu=False, trainer_count=1)
61 word_dict = paddle.dataset.imikolov.build_dict(min_word_freq=2)
62 dict_size = len(word_dict)
63 prediction_layer = ngram_lm(
64 is_train=False, hidden_size=256, embed_size=32, dict_size=dict_size)
65
66 with gzip.open(model_path, "r") as f:
67 parameters = paddle.parameters.Parameters.from_tar(f)
68
69 idx_word_dict = dict((v, k) for k, v in word_dict.items())
70 batch_size = 64
71 batch_ins = []
72 ins_iter = paddle.dataset.imikolov.test(word_dict, 5)
73
74 for ins in ins_iter():
75 batch_ins.append(ins[:-1])
76 if len(batch_ins) == batch_size:
77 predict(batch_ins, idx_word_dict, dict_size, prediction_layer,
78 parameters)
79 batch_ins = []
80
81 if len(batch_ins) > 0:
82 predict(batch_ins, idx_word_dict, dict_size, prediction_layer,
83 parameters)
84
85
86 if __name__ == "__main__":
87 main("models/hsigmoid_batch_00010.tar.gz")
88
[end of hsigmoid/infer.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/hsigmoid/infer.py b/hsigmoid/infer.py
--- a/hsigmoid/infer.py
+++ b/hsigmoid/infer.py
@@ -41,9 +41,8 @@
return predict_lbls
-def predict(batch_ins, idx_word_dict, dict_size, prediction_layer, parameters):
- infer_res = paddle.infer(
- output_layer=prediction_layer, parameters=parameters, input=batch_ins)
+def predict(batch_ins, idx_word_dict, dict_size, inferer):
+ infer_res = inferer.infer(input=batch_ins)
predict_lbls = decode_res(infer_res, dict_size)
predict_words = [idx_word_dict[lbl] for lbl in predict_lbls] # map to word
@@ -66,6 +65,8 @@
with gzip.open(model_path, "r") as f:
parameters = paddle.parameters.Parameters.from_tar(f)
+ inferer = paddle.inference.Inference(
+ output_layer=prediction_layer, parameters=parameters)
idx_word_dict = dict((v, k) for k, v in word_dict.items())
batch_size = 64
batch_ins = []
@@ -74,13 +75,11 @@
for ins in ins_iter():
batch_ins.append(ins[:-1])
if len(batch_ins) == batch_size:
- predict(batch_ins, idx_word_dict, dict_size, prediction_layer,
- parameters)
+ predict(batch_ins, idx_word_dict, dict_size, inferer)
batch_ins = []
if len(batch_ins) > 0:
- predict(batch_ins, idx_word_dict, dict_size, prediction_layer,
- parameters)
+ predict(batch_ins, idx_word_dict, dict_size, inferer)
if __name__ == "__main__":
| {"golden_diff": "diff --git a/hsigmoid/infer.py b/hsigmoid/infer.py\n--- a/hsigmoid/infer.py\n+++ b/hsigmoid/infer.py\n@@ -41,9 +41,8 @@\n return predict_lbls\n \n \n-def predict(batch_ins, idx_word_dict, dict_size, prediction_layer, parameters):\n- infer_res = paddle.infer(\n- output_layer=prediction_layer, parameters=parameters, input=batch_ins)\n+def predict(batch_ins, idx_word_dict, dict_size, inferer):\n+ infer_res = inferer.infer(input=batch_ins)\n \n predict_lbls = decode_res(infer_res, dict_size)\n predict_words = [idx_word_dict[lbl] for lbl in predict_lbls] # map to word\n@@ -66,6 +65,8 @@\n with gzip.open(model_path, \"r\") as f:\n parameters = paddle.parameters.Parameters.from_tar(f)\n \n+ inferer = paddle.inference.Inference(\n+ output_layer=prediction_layer, parameters=parameters)\n idx_word_dict = dict((v, k) for k, v in word_dict.items())\n batch_size = 64\n batch_ins = []\n@@ -74,13 +75,11 @@\n for ins in ins_iter():\n batch_ins.append(ins[:-1])\n if len(batch_ins) == batch_size:\n- predict(batch_ins, idx_word_dict, dict_size, prediction_layer,\n- parameters)\n+ predict(batch_ins, idx_word_dict, dict_size, inferer)\n batch_ins = []\n \n if len(batch_ins) > 0:\n- predict(batch_ins, idx_word_dict, dict_size, prediction_layer,\n- parameters)\n+ predict(batch_ins, idx_word_dict, dict_size, inferer)\n \n \n if __name__ == \"__main__\":\n", "issue": "Modify inference script\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport os\nimport logging\nimport gzip\n\nimport paddle.v2 as paddle\nfrom network_conf import ngram_lm\n\nlogger = logging.getLogger(\"paddle\")\nlogger.setLevel(logging.WARNING)\n\n\ndef decode_res(infer_res, dict_size):\n \"\"\"\n Inferring probabilities are orginized as a complete binary tree.\n The actual labels are leaves (indices are counted from class number).\n This function travels paths decoded from inferring results.\n If the probability >0.5 then go to right child, otherwise go to left child.\n\n param infer_res: inferring result\n param dict_size: class number\n return predict_lbls: actual class\n \"\"\"\n predict_lbls = []\n infer_res = infer_res > 0.5\n for i, probs in enumerate(infer_res):\n idx = 0\n result = 1\n while idx < len(probs):\n result <<= 1\n if probs[idx]:\n result |= 1\n if probs[idx]:\n idx = idx * 2 + 2 # right child\n else:\n idx = idx * 2 + 1 # left child\n\n predict_lbl = result - dict_size\n predict_lbls.append(predict_lbl)\n return predict_lbls\n\n\ndef predict(batch_ins, idx_word_dict, dict_size, prediction_layer, parameters):\n infer_res = paddle.infer(\n output_layer=prediction_layer, parameters=parameters, input=batch_ins)\n\n predict_lbls = decode_res(infer_res, dict_size)\n predict_words = [idx_word_dict[lbl] for lbl in predict_lbls] # map to word\n\n # Ouput format: word1 word2 word3 word4 -> predict label\n for i, ins in enumerate(batch_ins):\n print(\" \".join([idx_word_dict[w]\n for w in ins]) + \" -> \" + predict_words[i])\n\n\ndef main(model_path):\n assert os.path.exists(model_path), \"trained model does not exist.\"\n\n paddle.init(use_gpu=False, trainer_count=1)\n word_dict = paddle.dataset.imikolov.build_dict(min_word_freq=2)\n dict_size = len(word_dict)\n prediction_layer = ngram_lm(\n is_train=False, hidden_size=256, embed_size=32, dict_size=dict_size)\n\n with gzip.open(model_path, \"r\") as f:\n parameters = paddle.parameters.Parameters.from_tar(f)\n\n idx_word_dict = dict((v, k) for k, v in word_dict.items())\n batch_size = 64\n batch_ins = []\n ins_iter = 
paddle.dataset.imikolov.test(word_dict, 5)\n\n for ins in ins_iter():\n batch_ins.append(ins[:-1])\n if len(batch_ins) == batch_size:\n predict(batch_ins, idx_word_dict, dict_size, prediction_layer,\n parameters)\n batch_ins = []\n\n if len(batch_ins) > 0:\n predict(batch_ins, idx_word_dict, dict_size, prediction_layer,\n parameters)\n\n\nif __name__ == \"__main__\":\n main(\"models/hsigmoid_batch_00010.tar.gz\")\n", "path": "hsigmoid/infer.py"}]} | 1,401 | 396 |
gh_patches_debug_14264 | rasdani/github-patches | git_diff | CTFd__CTFd-1233 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Theme reset from CLI
There needs to be a way to reset the theme and other kinds of unrecoverable errors from the CLI.
Context: https://community.majorleaguecyber.org/t/ctfd-templatenotfound/51
</issue>
<code>
[start of manage.py]
1 from flask import Flask
2 from flask_sqlalchemy import SQLAlchemy
3 from flask_script import Manager
4 from flask_migrate import Migrate, MigrateCommand
5 from CTFd import create_app
6 from CTFd.models import *
7
8 app = create_app()
9
10 manager = Manager(app)
11 manager.add_command('db', MigrateCommand)
12
13 if __name__ == '__main__':
14 manager.run()
15
[end of manage.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/manage.py b/manage.py
--- a/manage.py
+++ b/manage.py
@@ -3,6 +3,7 @@
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from CTFd import create_app
+from CTFd.utils import get_config as get_config_util, set_config as set_config_util
from CTFd.models import *
app = create_app()
@@ -10,5 +11,18 @@
manager = Manager(app)
manager.add_command('db', MigrateCommand)
-if __name__ == '__main__':
+
[email protected]
+def get_config(key):
+ with app.app_context():
+ print(get_config_util(key))
+
+
[email protected]
+def set_config(key, value):
+ with app.app_context():
+ print(set_config_util(key, value).value)
+
+
+if __name__ == "__main__":
manager.run()
| {"golden_diff": "diff --git a/manage.py b/manage.py\n--- a/manage.py\n+++ b/manage.py\n@@ -3,6 +3,7 @@\n from flask_script import Manager\n from flask_migrate import Migrate, MigrateCommand\n from CTFd import create_app\n+from CTFd.utils import get_config as get_config_util, set_config as set_config_util\n from CTFd.models import *\n \n app = create_app()\n@@ -10,5 +11,18 @@\n manager = Manager(app)\n manager.add_command('db', MigrateCommand)\n \n-if __name__ == '__main__':\n+\[email protected]\n+def get_config(key):\n+ with app.app_context():\n+ print(get_config_util(key))\n+\n+\[email protected]\n+def set_config(key, value):\n+ with app.app_context():\n+ print(set_config_util(key, value).value)\n+\n+\n+if __name__ == \"__main__\":\n manager.run()\n", "issue": "Theme reset from CLI\nThere needs to be a way to reset the theme and other kinds of unrecoverable errors from the CLI. \r\n\r\nContext: https://community.majorleaguecyber.org/t/ctfd-templatenotfound/51\n", "before_files": [{"content": "from flask import Flask\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_script import Manager\nfrom flask_migrate import Migrate, MigrateCommand\nfrom CTFd import create_app\nfrom CTFd.models import *\n\napp = create_app()\n\nmanager = Manager(app)\nmanager.add_command('db', MigrateCommand)\n\nif __name__ == '__main__':\n manager.run()\n", "path": "manage.py"}]} | 682 | 204 |
gh_patches_debug_40764 | rasdani/github-patches | git_diff | svthalia__concrexit-3115 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Members API doesn't list all members
### Describe the bug
Luko, pk=23 on staging, is listed as member on the website, but is not a member according to the API
### How to reproduce
No idea.
check https://staging.thalia.nu/members/profile/23 vs https://staging.thalia.nu/api/v2/members/23/
### Expected behaviour
These urls should agree on the memberness of Luko
</issue>
<code>
[start of website/members/api/v2/views.py]
1 """API views of the activemembers app."""
2
3 from django.shortcuts import get_object_or_404
4
5 from oauth2_provider.contrib.rest_framework import IsAuthenticatedOrTokenHasScope
6 from rest_framework import filters as framework_filters
7 from rest_framework.generics import ListAPIView, RetrieveAPIView, UpdateAPIView
8
9 from members.api.v2 import filters
10 from members.api.v2.serializers.member import (
11 MemberCurrentSerializer,
12 MemberListSerializer,
13 MemberSerializer,
14 )
15 from members.models import Member
16 from thaliawebsite.api.openapi import OAuthAutoSchema
17 from thaliawebsite.api.v2.permissions import IsAuthenticatedOrTokenHasScopeForMethod
18 from utils.media.services import fetch_thumbnails_db
19
20
21 class MemberListView(ListAPIView):
22 """Returns an overview of all members."""
23
24 serializer_class = MemberListSerializer
25 queryset = (
26 Member.current_members.all()
27 .select_related("profile")
28 .prefetch_related("membership_set")
29 )
30
31 def get_serializer(self, *args, **kwargs):
32 if len(args) > 0:
33 members = args[0]
34 fetch_thumbnails_db([member.profile.photo for member in members])
35 return super().get_serializer(*args, **kwargs)
36
37 permission_classes = [
38 IsAuthenticatedOrTokenHasScope,
39 ]
40 required_scopes = ["members:read"]
41 filter_backends = (
42 framework_filters.OrderingFilter,
43 framework_filters.SearchFilter,
44 filters.MembershipTypeFilter,
45 filters.StartingYearFilter,
46 )
47 ordering_fields = ("first_name", "last_name", "username")
48 search_fields = (
49 "profile__nickname",
50 "profile__starting_year",
51 "first_name",
52 "last_name",
53 "username",
54 )
55
56
57 class MemberDetailView(RetrieveAPIView):
58 """Returns details of a member."""
59
60 serializer_class = MemberSerializer
61 queryset = Member.current_members.all()
62 permission_classes = [
63 IsAuthenticatedOrTokenHasScope,
64 ]
65 required_scopes = ["members:read"]
66
67
68 class MemberCurrentView(MemberDetailView, UpdateAPIView):
69 """Returns details of the authenticated member."""
70
71 serializer_class = MemberCurrentSerializer
72 schema = OAuthAutoSchema(operation_id_base="CurrentMember")
73 permission_classes = [
74 IsAuthenticatedOrTokenHasScopeForMethod,
75 ]
76 required_scopes_per_method = {
77 "GET": ["profile:read"],
78 "PATCH": ["profile:write"],
79 "PUT": ["profile:write"],
80 }
81
82 def get_object(self):
83 return get_object_or_404(Member, pk=self.request.user.pk)
84
[end of website/members/api/v2/views.py]
[start of website/members/api/v2/filters.py]
1 from rest_framework import filters
2
3 from members.models import Membership
4
5
6 class StartingYearFilter(filters.BaseFilterBackend):
7 """Allows you to filter by starting year."""
8
9 def filter_queryset(self, request, queryset, view):
10 starting_year = request.query_params.get("starting_year", None)
11
12 if starting_year:
13 queryset = queryset.filter(profile__starting_year=starting_year)
14
15 return queryset
16
17 def get_schema_operation_parameters(self, view):
18 return [
19 {
20 "name": "starting_year",
21 "required": False,
22 "in": "query",
23 "description": "Filter by starting year",
24 "schema": {
25 "type": "number",
26 },
27 }
28 ]
29
30
31 class MembershipTypeFilter(filters.BaseFilterBackend):
32 """Allows you to filter by membership type."""
33
34 def filter_queryset(self, request, queryset, view):
35 membership_type = request.query_params.get("membership_type", None)
36
37 if membership_type:
38 memberships = Membership.objects.filter(type=membership_type)
39 queryset = queryset.filter(pk__in=memberships.values("user__pk"))
40
41 return queryset
42
43 def get_schema_operation_parameters(self, view):
44 return [
45 {
46 "name": "membership_type",
47 "required": False,
48 "in": "query",
49 "description": "Filter by membership type",
50 "schema": {
51 "type": "string",
52 },
53 }
54 ]
55
[end of website/members/api/v2/filters.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/website/members/api/v2/filters.py b/website/members/api/v2/filters.py
--- a/website/members/api/v2/filters.py
+++ b/website/members/api/v2/filters.py
@@ -1,4 +1,9 @@
-from rest_framework import filters
+from datetime import datetime
+
+from django.db.models import Q
+from django.utils import timezone
+
+from rest_framework import filters, serializers
from members.models import Membership
@@ -28,6 +33,62 @@
]
+class FormerMemberFilter(filters.BaseFilterBackend):
+ def filter_queryset(self, request, queryset, view):
+ former = request.query_params.get("former", "false")
+
+ if former == "false":
+ # Filter out former members
+ return (
+ queryset.exclude(membership=None)
+ .filter(
+ Q(membership__until__isnull=True)
+ | Q(membership__until__gt=timezone.now().date())
+ )
+ .distinct()
+ )
+ elif former == "true":
+ # Filter out current members
+
+ memberships_query = Q(until__gt=datetime.now()) | Q(until=None)
+ members_query = ~Q(id=None)
+
+ # Filter out all current active memberships
+ memberships_query &= Q(type=Membership.MEMBER) | Q(type=Membership.HONORARY)
+ memberships = Membership.objects.filter(memberships_query)
+ members_query &= ~Q(pk__in=memberships.values("user__pk"))
+
+ memberships_query = Q(type=Membership.MEMBER) | Q(type=Membership.HONORARY)
+ memberships = Membership.objects.filter(memberships_query)
+ all_memberships = Membership.objects.all()
+ # Only keep members that were once members, or are legacy users
+ # that do not have any memberships at all
+ members_query &= Q(pk__in=memberships.values("user__pk")) | ~Q(
+ pk__in=all_memberships.values("user__pk")
+ )
+
+ return queryset.filter(members_query)
+ elif former == "any":
+ # Include both former and current members
+ return queryset
+ else:
+ raise serializers.ValidationError("invalid former parameter")
+
+ def get_schema_operation_parameters(self, view):
+ return [
+ {
+ "name": "former",
+ "required": False,
+ "in": "query",
+ "description": "Include former members or only former members",
+ "schema": {
+ "type": "string",
+ "enum": ["true", "false", "any"],
+ },
+ }
+ ]
+
+
class MembershipTypeFilter(filters.BaseFilterBackend):
"""Allows you to filter by membership type."""
diff --git a/website/members/api/v2/views.py b/website/members/api/v2/views.py
--- a/website/members/api/v2/views.py
+++ b/website/members/api/v2/views.py
@@ -23,7 +23,7 @@
serializer_class = MemberListSerializer
queryset = (
- Member.current_members.all()
+ Member.objects.all()
.select_related("profile")
.prefetch_related("membership_set")
)
@@ -43,6 +43,7 @@
framework_filters.SearchFilter,
filters.MembershipTypeFilter,
filters.StartingYearFilter,
+ filters.FormerMemberFilter,
)
ordering_fields = ("first_name", "last_name", "username")
search_fields = (
@@ -58,7 +59,7 @@
"""Returns details of a member."""
serializer_class = MemberSerializer
- queryset = Member.current_members.all()
+ queryset = Member.objects.all()
permission_classes = [
IsAuthenticatedOrTokenHasScope,
]
| {"golden_diff": "diff --git a/website/members/api/v2/filters.py b/website/members/api/v2/filters.py\n--- a/website/members/api/v2/filters.py\n+++ b/website/members/api/v2/filters.py\n@@ -1,4 +1,9 @@\n-from rest_framework import filters\n+from datetime import datetime\n+\n+from django.db.models import Q\n+from django.utils import timezone\n+\n+from rest_framework import filters, serializers\n \n from members.models import Membership\n \n@@ -28,6 +33,62 @@\n ]\n \n \n+class FormerMemberFilter(filters.BaseFilterBackend):\n+ def filter_queryset(self, request, queryset, view):\n+ former = request.query_params.get(\"former\", \"false\")\n+\n+ if former == \"false\":\n+ # Filter out former members\n+ return (\n+ queryset.exclude(membership=None)\n+ .filter(\n+ Q(membership__until__isnull=True)\n+ | Q(membership__until__gt=timezone.now().date())\n+ )\n+ .distinct()\n+ )\n+ elif former == \"true\":\n+ # Filter out current members\n+\n+ memberships_query = Q(until__gt=datetime.now()) | Q(until=None)\n+ members_query = ~Q(id=None)\n+\n+ # Filter out all current active memberships\n+ memberships_query &= Q(type=Membership.MEMBER) | Q(type=Membership.HONORARY)\n+ memberships = Membership.objects.filter(memberships_query)\n+ members_query &= ~Q(pk__in=memberships.values(\"user__pk\"))\n+\n+ memberships_query = Q(type=Membership.MEMBER) | Q(type=Membership.HONORARY)\n+ memberships = Membership.objects.filter(memberships_query)\n+ all_memberships = Membership.objects.all()\n+ # Only keep members that were once members, or are legacy users\n+ # that do not have any memberships at all\n+ members_query &= Q(pk__in=memberships.values(\"user__pk\")) | ~Q(\n+ pk__in=all_memberships.values(\"user__pk\")\n+ )\n+\n+ return queryset.filter(members_query)\n+ elif former == \"any\":\n+ # Include both former and current members\n+ return queryset\n+ else:\n+ raise serializers.ValidationError(\"invalid former parameter\")\n+\n+ def get_schema_operation_parameters(self, view):\n+ return [\n+ {\n+ \"name\": \"former\",\n+ \"required\": False,\n+ \"in\": \"query\",\n+ \"description\": \"Include former members or only former members\",\n+ \"schema\": {\n+ \"type\": \"string\",\n+ \"enum\": [\"true\", \"false\", \"any\"],\n+ },\n+ }\n+ ]\n+\n+\n class MembershipTypeFilter(filters.BaseFilterBackend):\n \"\"\"Allows you to filter by membership type.\"\"\"\n \ndiff --git a/website/members/api/v2/views.py b/website/members/api/v2/views.py\n--- a/website/members/api/v2/views.py\n+++ b/website/members/api/v2/views.py\n@@ -23,7 +23,7 @@\n \n serializer_class = MemberListSerializer\n queryset = (\n- Member.current_members.all()\n+ Member.objects.all()\n .select_related(\"profile\")\n .prefetch_related(\"membership_set\")\n )\n@@ -43,6 +43,7 @@\n framework_filters.SearchFilter,\n filters.MembershipTypeFilter,\n filters.StartingYearFilter,\n+ filters.FormerMemberFilter,\n )\n ordering_fields = (\"first_name\", \"last_name\", \"username\")\n search_fields = (\n@@ -58,7 +59,7 @@\n \"\"\"Returns details of a member.\"\"\"\n \n serializer_class = MemberSerializer\n- queryset = Member.current_members.all()\n+ queryset = Member.objects.all()\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n ]\n", "issue": "Members API doesn't list all members\n### Describe the bug\r\nLuko, pk=23 on staging, is listed as member on the website, but is not a member according to the API\r\n\r\n### How to reproduce\r\nNo idea.\r\ncheck https://staging.thalia.nu/members/profile/23 vs https://staging.thalia.nu/api/v2/members/23/\r\n\r\n### Expected 
behaviour\r\nThese urls should agree on the memberness of Luko\r\n\r\n\n", "before_files": [{"content": "\"\"\"API views of the activemembers app.\"\"\"\n\nfrom django.shortcuts import get_object_or_404\n\nfrom oauth2_provider.contrib.rest_framework import IsAuthenticatedOrTokenHasScope\nfrom rest_framework import filters as framework_filters\nfrom rest_framework.generics import ListAPIView, RetrieveAPIView, UpdateAPIView\n\nfrom members.api.v2 import filters\nfrom members.api.v2.serializers.member import (\n MemberCurrentSerializer,\n MemberListSerializer,\n MemberSerializer,\n)\nfrom members.models import Member\nfrom thaliawebsite.api.openapi import OAuthAutoSchema\nfrom thaliawebsite.api.v2.permissions import IsAuthenticatedOrTokenHasScopeForMethod\nfrom utils.media.services import fetch_thumbnails_db\n\n\nclass MemberListView(ListAPIView):\n \"\"\"Returns an overview of all members.\"\"\"\n\n serializer_class = MemberListSerializer\n queryset = (\n Member.current_members.all()\n .select_related(\"profile\")\n .prefetch_related(\"membership_set\")\n )\n\n def get_serializer(self, *args, **kwargs):\n if len(args) > 0:\n members = args[0]\n fetch_thumbnails_db([member.profile.photo for member in members])\n return super().get_serializer(*args, **kwargs)\n\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n ]\n required_scopes = [\"members:read\"]\n filter_backends = (\n framework_filters.OrderingFilter,\n framework_filters.SearchFilter,\n filters.MembershipTypeFilter,\n filters.StartingYearFilter,\n )\n ordering_fields = (\"first_name\", \"last_name\", \"username\")\n search_fields = (\n \"profile__nickname\",\n \"profile__starting_year\",\n \"first_name\",\n \"last_name\",\n \"username\",\n )\n\n\nclass MemberDetailView(RetrieveAPIView):\n \"\"\"Returns details of a member.\"\"\"\n\n serializer_class = MemberSerializer\n queryset = Member.current_members.all()\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n ]\n required_scopes = [\"members:read\"]\n\n\nclass MemberCurrentView(MemberDetailView, UpdateAPIView):\n \"\"\"Returns details of the authenticated member.\"\"\"\n\n serializer_class = MemberCurrentSerializer\n schema = OAuthAutoSchema(operation_id_base=\"CurrentMember\")\n permission_classes = [\n IsAuthenticatedOrTokenHasScopeForMethod,\n ]\n required_scopes_per_method = {\n \"GET\": [\"profile:read\"],\n \"PATCH\": [\"profile:write\"],\n \"PUT\": [\"profile:write\"],\n }\n\n def get_object(self):\n return get_object_or_404(Member, pk=self.request.user.pk)\n", "path": "website/members/api/v2/views.py"}, {"content": "from rest_framework import filters\n\nfrom members.models import Membership\n\n\nclass StartingYearFilter(filters.BaseFilterBackend):\n \"\"\"Allows you to filter by starting year.\"\"\"\n\n def filter_queryset(self, request, queryset, view):\n starting_year = request.query_params.get(\"starting_year\", None)\n\n if starting_year:\n queryset = queryset.filter(profile__starting_year=starting_year)\n\n return queryset\n\n def get_schema_operation_parameters(self, view):\n return [\n {\n \"name\": \"starting_year\",\n \"required\": False,\n \"in\": \"query\",\n \"description\": \"Filter by starting year\",\n \"schema\": {\n \"type\": \"number\",\n },\n }\n ]\n\n\nclass MembershipTypeFilter(filters.BaseFilterBackend):\n \"\"\"Allows you to filter by membership type.\"\"\"\n\n def filter_queryset(self, request, queryset, view):\n membership_type = request.query_params.get(\"membership_type\", None)\n\n if membership_type:\n memberships = 
Membership.objects.filter(type=membership_type)\n queryset = queryset.filter(pk__in=memberships.values(\"user__pk\"))\n\n return queryset\n\n def get_schema_operation_parameters(self, view):\n return [\n {\n \"name\": \"membership_type\",\n \"required\": False,\n \"in\": \"query\",\n \"description\": \"Filter by membership type\",\n \"schema\": {\n \"type\": \"string\",\n },\n }\n ]\n", "path": "website/members/api/v2/filters.py"}]} | 1,772 | 846 |
gh_patches_debug_11688 | rasdani/github-patches | git_diff | opensearch-project__opensearch-build-272 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add Core plugins zips to published artifacts.
The plugins built with the core repo should be bundled as zips and hosted at artifacts.opensearch.org.
This task involves updating Opensearch's build.sh script to build and include these artifacts with the component's artifact list.
Right now we also have brittle logic that assumes any component with "plugins" in their artifacts list is a plugin repository. This should be updated to identify the min bundle component in another way. Perhaps with a separate artifact folder for "min-bundle"
</issue>
<code>
[start of bundle-workflow/python/build_workflow/builder.py]
1 # Copyright OpenSearch Contributors.
2 # SPDX-License-Identifier: Apache-2.0
3
4 import os
5
6 '''
7 This class is responsible for executing the build for a component and passing the results to a build recorder.
8 It will notify the build recorder of build information such as repository and git ref, and any artifacts generated by the build.
9 Artifacts found in "<build root>/artifacts/<maven|plugins|libs|bundle>" will be recognized and recorded.
10 '''
11 class Builder:
12 def __init__(self, component_name, git_repo, script_finder, build_recorder):
13 '''
14 Construct a new Builder instance.
15 :param component_name: The name of the component to build.
16 :param git_repo: A GitRepository instance containing the checked-out code.
17 :param script_finder: The ScriptFinder to use for finding build.sh scripts.
18 :param build_recorder: The build recorder that will capture build information and artifacts.
19 '''
20
21 self.component_name = component_name
22 self.git_repo = git_repo
23 self.script_finder = script_finder
24 self.build_recorder = build_recorder
25 self.output_path = 'artifacts'
26
27 def build(self, version, arch, snapshot):
28 build_script = self.script_finder.find_build_script(self.component_name, self.git_repo.dir)
29 build_command = f'{build_script} -v {version} -a {arch} -s {str(snapshot).lower()} -o {self.output_path}'
30 self.git_repo.execute(build_command)
31 self.build_recorder.record_component(self.component_name, self.git_repo)
32
33 def export_artifacts(self):
34 artifacts_dir = os.path.realpath(os.path.join(self.git_repo.dir, self.output_path))
35 for artifact_type in ["maven", "bundle", "plugins", "libs"]:
36 for dir, dirs, files in os.walk(os.path.join(artifacts_dir, artifact_type)):
37 for file_name in files:
38 absolute_path = os.path.join(dir, file_name)
39 relative_path = os.path.relpath(absolute_path, artifacts_dir)
40 self.build_recorder.record_artifact(self.component_name, artifact_type, relative_path, absolute_path)
41
[end of bundle-workflow/python/build_workflow/builder.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/bundle-workflow/python/build_workflow/builder.py b/bundle-workflow/python/build_workflow/builder.py
--- a/bundle-workflow/python/build_workflow/builder.py
+++ b/bundle-workflow/python/build_workflow/builder.py
@@ -32,7 +32,7 @@
def export_artifacts(self):
artifacts_dir = os.path.realpath(os.path.join(self.git_repo.dir, self.output_path))
- for artifact_type in ["maven", "bundle", "plugins", "libs"]:
+ for artifact_type in ["maven", "bundle", "plugins", "libs", "core-plugins"]:
for dir, dirs, files in os.walk(os.path.join(artifacts_dir, artifact_type)):
for file_name in files:
absolute_path = os.path.join(dir, file_name)
| {"golden_diff": "diff --git a/bundle-workflow/python/build_workflow/builder.py b/bundle-workflow/python/build_workflow/builder.py\n--- a/bundle-workflow/python/build_workflow/builder.py\n+++ b/bundle-workflow/python/build_workflow/builder.py\n@@ -32,7 +32,7 @@\n \n def export_artifacts(self):\n artifacts_dir = os.path.realpath(os.path.join(self.git_repo.dir, self.output_path))\n- for artifact_type in [\"maven\", \"bundle\", \"plugins\", \"libs\"]:\n+ for artifact_type in [\"maven\", \"bundle\", \"plugins\", \"libs\", \"core-plugins\"]:\n for dir, dirs, files in os.walk(os.path.join(artifacts_dir, artifact_type)):\n for file_name in files:\n absolute_path = os.path.join(dir, file_name)\n", "issue": "Add Core plugins zips to published artifacts.\nThe plugins built with the core repo should be bundled as zips and hosted at artifacts.opensearch.org.\r\n\r\nThis task involves updating Opensearch's build.sh script to build and include these artifacts with the component's artifact list.\r\n\r\nRight now we also have brittle logic that assumes any component with \"plugins\" in their artifacts list is a plugin repository. This should be updated to identify the min bundle component in another way. Perhaps with a separate artifact folder for \"min-bundle\"\n", "before_files": [{"content": "# Copyright OpenSearch Contributors.\n# SPDX-License-Identifier: Apache-2.0\n\nimport os\n\n'''\nThis class is responsible for executing the build for a component and passing the results to a build recorder.\nIt will notify the build recorder of build information such as repository and git ref, and any artifacts generated by the build.\nArtifacts found in \"<build root>/artifacts/<maven|plugins|libs|bundle>\" will be recognized and recorded.\n'''\nclass Builder:\n def __init__(self, component_name, git_repo, script_finder, build_recorder):\n '''\n Construct a new Builder instance.\n :param component_name: The name of the component to build.\n :param git_repo: A GitRepository instance containing the checked-out code.\n :param script_finder: The ScriptFinder to use for finding build.sh scripts.\n :param build_recorder: The build recorder that will capture build information and artifacts.\n '''\n\n self.component_name = component_name\n self.git_repo = git_repo\n self.script_finder = script_finder\n self.build_recorder = build_recorder\n self.output_path = 'artifacts'\n\n def build(self, version, arch, snapshot):\n build_script = self.script_finder.find_build_script(self.component_name, self.git_repo.dir)\n build_command = f'{build_script} -v {version} -a {arch} -s {str(snapshot).lower()} -o {self.output_path}'\n self.git_repo.execute(build_command)\n self.build_recorder.record_component(self.component_name, self.git_repo)\n\n def export_artifacts(self):\n artifacts_dir = os.path.realpath(os.path.join(self.git_repo.dir, self.output_path))\n for artifact_type in [\"maven\", \"bundle\", \"plugins\", \"libs\"]:\n for dir, dirs, files in os.walk(os.path.join(artifacts_dir, artifact_type)):\n for file_name in files:\n absolute_path = os.path.join(dir, file_name)\n relative_path = os.path.relpath(absolute_path, artifacts_dir)\n self.build_recorder.record_artifact(self.component_name, artifact_type, relative_path, absolute_path)\n", "path": "bundle-workflow/python/build_workflow/builder.py"}]} | 1,170 | 174 |
gh_patches_debug_390 | rasdani/github-patches | git_diff | google__turbinia-616 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add retries to tox
Tox fails when trying to check links within our docs if the link is temporarily down/unresponsive. Adding retries to sphinx config should take care of that.
</issue>
<code>
[start of docs/conf.py]
1 # Configuration file for the Sphinx documentation builder.
2 #
3 # This file only contains a selection of the most common options. For a full
4 # list see the documentation:
5 # https://www.sphinx-doc.org/en/master/usage/configuration.html
6
7 # -- Path setup --------------------------------------------------------------
8
9 # If extensions (or modules to document with autodoc) are in another directory,
10 # add these directories to sys.path here. If the directory is relative to the
11 # documentation root, use os.path.abspath to make it absolute, like shown here.
12 #
13 # import os
14 # import sys
15 # sys.path.insert(0, os.path.abspath('.'))
16
17 from __future__ import unicode_literals
18 import re
19
20 from recommonmark.parser import CommonMarkParser
21 from recommonmark.transform import AutoStructify
22 from docutils import nodes, transforms
23
24 # -- Project information -----------------------------------------------------
25
26 project = 'Turbinia'
27 copyright = '2020, Google Inc'
28 author = 'Turbinia maintainers'
29
30 # -- General configuration ---------------------------------------------------
31
32 # Add any Sphinx extension module names here, as strings. They can be
33 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
34 # ones.
35 extensions = [
36 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage',
37 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', 'sphinx_markdown_tables',
38 'recommonmark'
39 ]
40
41 # Add any paths that contain templates here, relative to this directory.
42 templates_path = ['_templates']
43
44 # List of patterns, relative to source directory, that match files and
45 # directories to ignore when looking for source files.
46 # This pattern also affects html_static_path and html_extra_path.
47 exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'design/*']
48
49 # -- Options for HTML output -------------------------------------------------
50
51 # The theme to use for HTML and HTML Help pages. See the documentation for
52 # a list of builtin themes.
53 #
54 html_theme = 'sphinx_rtd_theme'
55
56 # The master toctree document.
57 master_doc = 'index'
58
59 # The name of the Pygments (syntax highlighting) style to use.
60 pygments_style = 'sphinx'
61
62 # Add any paths that contain custom static files (such as style sheets) here,
63 # relative to this directory. They are copied after the builtin static files,
64 # so a file named "default.css" will overwrite the builtin "default.css".
65 html_static_path = ['_static']
66
67 # The default sidebars (for documents that don't match any pattern) are
68 # defined by theme itself. Builtin themes are using these templates by
69 # default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
70 # 'searchbox.html']``.
71 #
72 html_sidebars = {
73 '**': [
74 'sidebar.html', 'localtoc.html', 'relations.html', 'sourcelink.html',
75 'searchbox.html'
76 ]
77 }
78
79
80 # Output file base name for HTML help builder.
81 htmlhelp_basename = 'turbiniadoc'
82
83 html_logo = "images/turbinia-logo.jpg"
84
85
86 class ProcessLink(transforms.Transform):
87 """Transform definition to parse .md references to internal pages."""
88
89 default_priority = 1000
90
91 def find_replace(self, node):
92 """Parses URIs containing .md and replaces them with their HTML page."""
93 if isinstance(node, nodes.reference) and 'refuri' in node:
94 r = node['refuri']
95 if r.endswith('.md'):
96 r = r[:-3] + '.html'
97 node['refuri'] = r
98
99 return node
100
101 def traverse(self, node):
102 """Traverse the document tree rooted at node.
103 node : docutil node
104 current root node to traverse
105 """
106 self.find_replace(node)
107
108 for c in node.children:
109 self.traverse(c)
110
111 # pylint: disable=arguments-differ,attribute-defined-outside-init
112 # this was taken from GRR's config file for documentation
113 def apply(self):
114 self.current_level = 0
115 self.traverse(self.document)
116
117
118 def setup(app):
119 """Add custom parsers to Sphinx generation."""
120 app.add_config_value(
121 'recommonmark_config', {
122 'enable_auto_doc_ref': False,
123 }, True)
124 app.add_transform(AutoStructify)
125 app.add_transform(ProcessLink)
126
[end of docs/conf.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -76,6 +76,8 @@
]
}
+# Adding retries to linkchecks before declaring a link broken
+linkcheck_retries = 3
# Output file base name for HTML help builder.
htmlhelp_basename = 'turbiniadoc'
| {"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -76,6 +76,8 @@\n ]\n }\n \n+# Adding retries to linkchecks before declaring a link broken\n+linkcheck_retries = 3\n \n # Output file base name for HTML help builder.\n htmlhelp_basename = 'turbiniadoc'\n", "issue": "Add retries to tox\nTox fails when trying to check links within our docs if the link is temporarily down/unresponsive. Adding retries to sphinx config should take care of that.\n", "before_files": [{"content": "# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\n# import os\n# import sys\n# sys.path.insert(0, os.path.abspath('.'))\n\nfrom __future__ import unicode_literals\nimport re\n\nfrom recommonmark.parser import CommonMarkParser\nfrom recommonmark.transform import AutoStructify\nfrom docutils import nodes, transforms\n\n# -- Project information -----------------------------------------------------\n\nproject = 'Turbinia'\ncopyright = '2020, Google Inc'\nauthor = 'Turbinia maintainers'\n\n# -- General configuration ---------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage',\n 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', 'sphinx_markdown_tables',\n 'recommonmark'\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'design/*']\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = 'sphinx_rtd_theme'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# The default sidebars (for documents that don't match any pattern) are\n# defined by theme itself. 
Builtin themes are using these templates by\n# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',\n# 'searchbox.html']``.\n#\nhtml_sidebars = {\n '**': [\n 'sidebar.html', 'localtoc.html', 'relations.html', 'sourcelink.html',\n 'searchbox.html'\n ]\n}\n\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'turbiniadoc'\n\nhtml_logo = \"images/turbinia-logo.jpg\"\n\n\nclass ProcessLink(transforms.Transform):\n \"\"\"Transform definition to parse .md references to internal pages.\"\"\"\n\n default_priority = 1000\n\n def find_replace(self, node):\n \"\"\"Parses URIs containing .md and replaces them with their HTML page.\"\"\"\n if isinstance(node, nodes.reference) and 'refuri' in node:\n r = node['refuri']\n if r.endswith('.md'):\n r = r[:-3] + '.html'\n node['refuri'] = r\n\n return node\n\n def traverse(self, node):\n \"\"\"Traverse the document tree rooted at node.\n node : docutil node\n current root node to traverse\n \"\"\"\n self.find_replace(node)\n\n for c in node.children:\n self.traverse(c)\n\n # pylint: disable=arguments-differ,attribute-defined-outside-init\n # this was taken from GRR's config file for documentation\n def apply(self):\n self.current_level = 0\n self.traverse(self.document)\n\n\ndef setup(app):\n \"\"\"Add custom parsers to Sphinx generation.\"\"\"\n app.add_config_value(\n 'recommonmark_config', {\n 'enable_auto_doc_ref': False,\n }, True)\n app.add_transform(AutoStructify)\n app.add_transform(ProcessLink)\n", "path": "docs/conf.py"}]} | 1,786 | 83 |
gh_patches_debug_34066 | rasdani/github-patches | git_diff | freedomofpress__securedrop-1309 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
NetworkManager hook notifications broken on Tails 2.x
The invocation of `notify-send` in `securedrop_init.py` does not show a notification in Tails 2.x like it did in Tails 1.x. This is due to dbus-related changes in Debian Jessie, and is a known issue as a quick [search](https://labs.riseup.net/code/projects/tails/search?utf8=%E2%9C%93&changesets=1&q=notify-send) of the Tails issue tracker demonstrates.
Furthermore, it looks like Tails has a special wrapper script, `tails-notify-user`, specifically meant for the use case of displaying notifications to the user from background scripts running as different users, so we should just use that instead.
</issue>
<code>
[start of tails_files/securedrop_init.py]
1 #!/usr/bin/env python
2
3 import os
4 import sys
5 import subprocess
6
7
8 if __name__ == '__main__':
9 # check for root
10 if os.geteuid() != 0:
11 sys.exit('You need to run this as root')
12
13 # paths
14 path_torrc_additions = '/home/amnesia/Persistent/.securedrop/torrc_additions'
15 path_torrc_backup = '/etc/tor/torrc.bak'
16 path_torrc = '/etc/tor/torrc'
17
18 # load torrc_additions
19 if os.path.isfile(path_torrc_additions):
20 torrc_additions = open(path_torrc_additions).read()
21 else:
22 sys.exit('Error opening {0} for reading'.format(path_torrc_additions))
23
24 # load torrc
25 if os.path.isfile(path_torrc_backup):
26 torrc = open(path_torrc_backup).read()
27 else:
28 if os.path.isfile(path_torrc):
29 torrc = open(path_torrc).read()
30 else:
31 sys.exit('Error opening {0} for reading'.format(path_torrc))
32
33 # save a backup
34 open(path_torrc_backup, 'w').write(torrc)
35
36 # append the additions
37 open(path_torrc, 'w').write(torrc + torrc_additions)
38
39 # reload tor
40 subprocess.call(['/usr/sbin/service', 'tor', 'reload'])
41
42 # success
43 subprocess.call(['/usr/bin/sudo', '-u', 'amnesia', '/usr/bin/notify-send', '-i', '/home/amnesia/Persistent/.securedrop/securedrop_icon.png',
44 'Updated torrc!', 'You can now connect to your SecureDrop\ndocument interface.'])
45
[end of tails_files/securedrop_init.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/tails_files/securedrop_init.py b/tails_files/securedrop_init.py
--- a/tails_files/securedrop_init.py
+++ b/tails_files/securedrop_init.py
@@ -1,44 +1,47 @@
-#!/usr/bin/env python
+#!/usr/bin/python
import os
import sys
import subprocess
-if __name__ == '__main__':
- # check for root
- if os.geteuid() != 0:
- sys.exit('You need to run this as root')
+# check for root
+if os.geteuid() != 0:
+ sys.exit('You need to run this as root')
- # paths
- path_torrc_additions = '/home/amnesia/Persistent/.securedrop/torrc_additions'
- path_torrc_backup = '/etc/tor/torrc.bak'
- path_torrc = '/etc/tor/torrc'
+# paths
+path_torrc_additions = '/home/amnesia/Persistent/.securedrop/torrc_additions'
+path_torrc_backup = '/etc/tor/torrc.bak'
+path_torrc = '/etc/tor/torrc'
- # load torrc_additions
- if os.path.isfile(path_torrc_additions):
- torrc_additions = open(path_torrc_additions).read()
- else:
- sys.exit('Error opening {0} for reading'.format(path_torrc_additions))
+# load torrc_additions
+if os.path.isfile(path_torrc_additions):
+ torrc_additions = open(path_torrc_additions).read()
+else:
+ sys.exit('Error opening {0} for reading'.format(path_torrc_additions))
- # load torrc
- if os.path.isfile(path_torrc_backup):
- torrc = open(path_torrc_backup).read()
+# load torrc
+if os.path.isfile(path_torrc_backup):
+ torrc = open(path_torrc_backup).read()
+else:
+ if os.path.isfile(path_torrc):
+ torrc = open(path_torrc).read()
else:
- if os.path.isfile(path_torrc):
- torrc = open(path_torrc).read()
- else:
- sys.exit('Error opening {0} for reading'.format(path_torrc))
+ sys.exit('Error opening {0} for reading'.format(path_torrc))
- # save a backup
- open(path_torrc_backup, 'w').write(torrc)
+ # save a backup
+ open(path_torrc_backup, 'w').write(torrc)
- # append the additions
- open(path_torrc, 'w').write(torrc + torrc_additions)
+# append the additions
+open(path_torrc, 'w').write(torrc + torrc_additions)
- # reload tor
- subprocess.call(['/usr/sbin/service', 'tor', 'reload'])
+# reload tor
+try:
+ subprocess.check_call(['systemctl', 'reload', '[email protected]'])
+except subprocess.CalledProcessError:
+ sys.exit('Error reloading Tor')
- # success
- subprocess.call(['/usr/bin/sudo', '-u', 'amnesia', '/usr/bin/notify-send', '-i', '/home/amnesia/Persistent/.securedrop/securedrop_icon.png',
- 'Updated torrc!', 'You can now connect to your SecureDrop\ndocument interface.'])
+# notify the user
+subprocess.call(['tails-notify-user',
+ 'SecureDrop successfully auto-configured!',
+ 'You can now access the Document Interface.\nIf you are an admin, you can now SSH to the servers.'])
| {"golden_diff": "diff --git a/tails_files/securedrop_init.py b/tails_files/securedrop_init.py\n--- a/tails_files/securedrop_init.py\n+++ b/tails_files/securedrop_init.py\n@@ -1,44 +1,47 @@\n-#!/usr/bin/env python\n+#!/usr/bin/python\n \n import os\n import sys\n import subprocess\n \n \n-if __name__ == '__main__':\n- # check for root\n- if os.geteuid() != 0:\n- sys.exit('You need to run this as root')\n+# check for root\n+if os.geteuid() != 0:\n+ sys.exit('You need to run this as root')\n \n- # paths\n- path_torrc_additions = '/home/amnesia/Persistent/.securedrop/torrc_additions'\n- path_torrc_backup = '/etc/tor/torrc.bak'\n- path_torrc = '/etc/tor/torrc'\n+# paths\n+path_torrc_additions = '/home/amnesia/Persistent/.securedrop/torrc_additions'\n+path_torrc_backup = '/etc/tor/torrc.bak'\n+path_torrc = '/etc/tor/torrc'\n \n- # load torrc_additions\n- if os.path.isfile(path_torrc_additions):\n- torrc_additions = open(path_torrc_additions).read()\n- else:\n- sys.exit('Error opening {0} for reading'.format(path_torrc_additions))\n+# load torrc_additions\n+if os.path.isfile(path_torrc_additions):\n+ torrc_additions = open(path_torrc_additions).read()\n+else:\n+ sys.exit('Error opening {0} for reading'.format(path_torrc_additions))\n \n- # load torrc\n- if os.path.isfile(path_torrc_backup):\n- torrc = open(path_torrc_backup).read()\n+# load torrc\n+if os.path.isfile(path_torrc_backup):\n+ torrc = open(path_torrc_backup).read()\n+else:\n+ if os.path.isfile(path_torrc):\n+ torrc = open(path_torrc).read()\n else:\n- if os.path.isfile(path_torrc):\n- torrc = open(path_torrc).read()\n- else:\n- sys.exit('Error opening {0} for reading'.format(path_torrc))\n+ sys.exit('Error opening {0} for reading'.format(path_torrc))\n \n- # save a backup\n- open(path_torrc_backup, 'w').write(torrc)\n+ # save a backup\n+ open(path_torrc_backup, 'w').write(torrc)\n \n- # append the additions\n- open(path_torrc, 'w').write(torrc + torrc_additions)\n+# append the additions\n+open(path_torrc, 'w').write(torrc + torrc_additions)\n \n- # reload tor\n- subprocess.call(['/usr/sbin/service', 'tor', 'reload'])\n+# reload tor\n+try:\n+ subprocess.check_call(['systemctl', 'reload', '[email protected]'])\n+except subprocess.CalledProcessError:\n+ sys.exit('Error reloading Tor')\n \n- # success\n- subprocess.call(['/usr/bin/sudo', '-u', 'amnesia', '/usr/bin/notify-send', '-i', '/home/amnesia/Persistent/.securedrop/securedrop_icon.png',\n- 'Updated torrc!', 'You can now connect to your SecureDrop\\ndocument interface.'])\n+# notify the user\n+subprocess.call(['tails-notify-user',\n+ 'SecureDrop successfully auto-configured!',\n+ 'You can now access the Document Interface.\\nIf you are an admin, you can now SSH to the servers.'])\n", "issue": "NetworkManager hook notifications broken on Tails 2.x\nThe invocation of `notify-send` in `securedrop_init.py` does not show a notification in Tails 2.x like it did in Tails 1.x. 
This is due to dbus-related changes in Debian Jessie, and is a known issue as a quick [search](https://labs.riseup.net/code/projects/tails/search?utf8=%E2%9C%93&changesets=1&q=notify-send) of the Tails issue tracker demonstrates.\n\nFurthermore, it looks like Tails has a special wrapper script, `tails-notify-user`, specifically meant for the use case of displaying notifications to the user from background scripts running as different users, so we should just use that instead.\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\nimport os\nimport sys\nimport subprocess\n\n\nif __name__ == '__main__':\n # check for root\n if os.geteuid() != 0:\n sys.exit('You need to run this as root')\n\n # paths\n path_torrc_additions = '/home/amnesia/Persistent/.securedrop/torrc_additions'\n path_torrc_backup = '/etc/tor/torrc.bak'\n path_torrc = '/etc/tor/torrc'\n\n # load torrc_additions\n if os.path.isfile(path_torrc_additions):\n torrc_additions = open(path_torrc_additions).read()\n else:\n sys.exit('Error opening {0} for reading'.format(path_torrc_additions))\n\n # load torrc\n if os.path.isfile(path_torrc_backup):\n torrc = open(path_torrc_backup).read()\n else:\n if os.path.isfile(path_torrc):\n torrc = open(path_torrc).read()\n else:\n sys.exit('Error opening {0} for reading'.format(path_torrc))\n\n # save a backup\n open(path_torrc_backup, 'w').write(torrc)\n\n # append the additions\n open(path_torrc, 'w').write(torrc + torrc_additions)\n\n # reload tor\n subprocess.call(['/usr/sbin/service', 'tor', 'reload'])\n\n # success\n subprocess.call(['/usr/bin/sudo', '-u', 'amnesia', '/usr/bin/notify-send', '-i', '/home/amnesia/Persistent/.securedrop/securedrop_icon.png',\n 'Updated torrc!', 'You can now connect to your SecureDrop\\ndocument interface.'])\n", "path": "tails_files/securedrop_init.py"}]} | 1,168 | 843 |
gh_patches_debug_22367 | rasdani/github-patches | git_diff | OpenNMT__OpenNMT-tf-515 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Subword tokenisation spacer can mark the beginning of word
Certain sequence noising operations need to retrieve a list of words from the raw list of subword tokens. For example:
* Decoding with word removal/reordering to produce noisy back-translations as in [Scaling BT paper](https://arxiv.org/abs/1808.09381)
* Word omission to support the new contrastive learning feature as in the [contrastive learning paper](https://www.aclweb.org/anthology/P19-1623.pdf)
* Presumably more features relying on word level noise might come up in the future
In these cases the user should specify some details for the sub-tokenisation process:
1. What subword token was used? (`decoding_subword_token`)
2. Was that token a joiner or a spacer? (`decoding_subword_token_is_spacer`)
When the user specifies (explicitly or implicitly) a spacer, the framework assumes that the spacer symbol appears at the beginning of each word, similar to what SentencePiece does. However, this does not have to be the case: the spacer could also appear at the end of each word - for example [this one does](https://github.com/kovalevfm/SubTokenizer). If that extra sub-tokenisation flexibility is desired, we can add this configuration parameter. A sample implementation could look like [this](https://github.com/steremma/OpenNMT-tf/commit/d109af49911431e424b28def575fb94f07bfec47).
I realise that most users rely on standard tools that are covered by the current implementation. If there is a user base for which the extra flexibility is desired, I can submit a PR that reads this option from the YAML.
</issue>
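A rough, pure-Python sketch of the word-grouping behaviour discussed in the issue (the real `tokens_to_words` operates on TensorFlow tensors; the helper name and examples below are only illustrative, and the spacer handling assumes the symbol may sit at either end of a word):

```
def group_tokens(tokens, subword_token="￭", is_spacer=False):
    # Group a flat list of subword tokens into words (illustration only).
    words = []
    for i, tok in enumerate(tokens):
        prev = tokens[i - 1] if i > 0 else ""
        if is_spacer:
            # A word starts on the first token, on a token beginning with the
            # spacer, or right after a token ending with the spacer.
            new_word = i == 0 or tok.startswith(subword_token) or prev.endswith(subword_token)
        else:
            # With a joiner, a token continues the previous word when either
            # side of the boundary carries the joiner symbol.
            new_word = not (tok.startswith(subword_token) or prev.endswith(subword_token))
        if new_word:
            words.append([tok])
        else:
            words[-1].append(tok)
    return words

print(group_tokens(["He￭", "llo", "W￭", "orld", "￭!"]))
# [['He￭', 'llo'], ['W￭', 'orld', '￭!']]
print(group_tokens(["▁Hello", "▁Wor", "ld"], subword_token="▁", is_spacer=True))
# [['▁Hello'], ['▁Wor', 'ld']]
print(group_tokens(["Hello▁", "Wor", "ld▁"], subword_token="▁", is_spacer=True))
# [['Hello▁'], ['Wor', 'ld▁']]  (spacer at the end of words also groups cleanly)
```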
<code>
[start of opennmt/data/text.py]
1 # -*- coding: utf-8 -*-
2
3 """Text manipulation."""
4
5 import tensorflow as tf
6
7
8 def tokens_to_chars(tokens):
9 """Splits tokens into unicode characters.
10
11 Args:
12 tokens: A string ``tf.Tensor`` of shape :math:`[T]`.
13
14 Returns:
15 The characters as a 2D string ``tf.RaggedTensor``.
16 """
17 return tf.strings.unicode_split(tokens, "UTF-8")
18
19 def tokens_to_words(tokens, subword_token="￭", is_spacer=None):
20 """Converts a sequence of tokens to a sequence of words.
21
22 For example, if a BPE tokenization produces this sequence:
23
24 ["He@@", "llo", "W@@", "orld", "@@!"]
25
26 this function will return the tensor:
27
28 [["He@@", "llo", ""], ["W@@", "orld", "@@!"]]
29
30 Args:
31 tokens: A 1D string ``tf.Tensor``.
32 subword_token: The special token used by the subword tokenizer.
33 is_spacer: Whether :obj:`subword_token` is used as a spacer (as in
34 SentencePiece) or a joiner (as in BPE). If ``None``, will infer
35 directly from :obj:`subword_token`.
36
37 Returns:
38 The words as a 2D string ``tf.RaggedTensor``.
39 """
40 if is_spacer is None:
41 is_spacer = subword_token == "▁"
42 if is_spacer:
43 subword = tf.strings.regex_full_match(tokens, "[^%s].*" % subword_token)
44 else:
45 right = tf.strings.regex_full_match(tokens, ".*%s" % subword_token)
46 left = tf.strings.regex_full_match(tokens, "%s.*" % subword_token)
47 subword = tf.logical_or(tf.roll(right, shift=1, axis=0), left)
48 start = tf.logical_not(subword)
49 start_indices = tf.squeeze(tf.where(start), -1)
50 return tf.RaggedTensor.from_row_starts(tokens, start_indices)
51
52 def alignment_matrix_from_pharaoh(alignment_line,
53 source_length,
54 target_length,
55 dtype=tf.float32):
56 """Parse Pharaoh alignments into an alignment matrix.
57
58 Args:
59 alignment_line: A string ``tf.Tensor`` in the Pharaoh format.
60 source_length: The length of the source sentence, without special symbols.
61 target_length: The length of the target sentence, without special symbols.
62 dtype: The output matrix dtype. Defaults to ``tf.float32`` for convenience
63 when computing the guided alignment loss.
64
65 Returns:
66 The alignment matrix as a 2-D ``tf.Tensor`` of type :obj:`dtype` and shape
67 ``[target_length, source_length]``, where ``[i, j] = 1`` if the ``i`` th
68 target word is aligned with the ``j`` th source word.
69 """
70 align_pairs_str = tf.strings.split([alignment_line]).values
71 align_pairs_flat_str = tf.strings.split(align_pairs_str, sep="-").values
72 align_pairs_flat = tf.strings.to_number(align_pairs_flat_str, out_type=tf.int64)
73 sparse_indices = tf.reshape(align_pairs_flat, [-1, 2])
74 sparse_values = tf.ones([tf.shape(sparse_indices)[0]], dtype=dtype)
75 source_length = tf.cast(source_length, tf.int64)
76 target_length = tf.cast(target_length, tf.int64)
77 alignment_matrix_sparse = tf.sparse.SparseTensor(
78 sparse_indices, sparse_values, [source_length, target_length])
79 alignment_matrix = tf.sparse.to_dense(alignment_matrix_sparse, validate_indices=False)
80 return tf.transpose(alignment_matrix)
81
[end of opennmt/data/text.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/opennmt/data/text.py b/opennmt/data/text.py
--- a/opennmt/data/text.py
+++ b/opennmt/data/text.py
@@ -40,13 +40,18 @@
if is_spacer is None:
is_spacer = subword_token == "▁"
if is_spacer:
- subword = tf.strings.regex_full_match(tokens, "[^%s].*" % subword_token)
+ # First token implicitly starts with a spacer.
+ left_and_single = tf.logical_or(
+ tf.strings.regex_full_match(tokens, "%s.*" % subword_token),
+ tf.one_hot(0, tf.shape(tokens)[0], on_value=True, off_value=False))
+ right = tf.strings.regex_full_match(tokens, ".+%s" % subword_token)
+ word_start = tf.logical_or(tf.roll(right, shift=1, axis=0), left_and_single)
else:
right = tf.strings.regex_full_match(tokens, ".*%s" % subword_token)
left = tf.strings.regex_full_match(tokens, "%s.*" % subword_token)
subword = tf.logical_or(tf.roll(right, shift=1, axis=0), left)
- start = tf.logical_not(subword)
- start_indices = tf.squeeze(tf.where(start), -1)
+ word_start = tf.logical_not(subword)
+ start_indices = tf.squeeze(tf.where(word_start), -1)
return tf.RaggedTensor.from_row_starts(tokens, start_indices)
def alignment_matrix_from_pharaoh(alignment_line,
| {"golden_diff": "diff --git a/opennmt/data/text.py b/opennmt/data/text.py\n--- a/opennmt/data/text.py\n+++ b/opennmt/data/text.py\n@@ -40,13 +40,18 @@\n if is_spacer is None:\n is_spacer = subword_token == \"\u2581\"\n if is_spacer:\n- subword = tf.strings.regex_full_match(tokens, \"[^%s].*\" % subword_token)\n+ # First token implicitly starts with a spacer.\n+ left_and_single = tf.logical_or(\n+ tf.strings.regex_full_match(tokens, \"%s.*\" % subword_token),\n+ tf.one_hot(0, tf.shape(tokens)[0], on_value=True, off_value=False))\n+ right = tf.strings.regex_full_match(tokens, \".+%s\" % subword_token)\n+ word_start = tf.logical_or(tf.roll(right, shift=1, axis=0), left_and_single)\n else:\n right = tf.strings.regex_full_match(tokens, \".*%s\" % subword_token)\n left = tf.strings.regex_full_match(tokens, \"%s.*\" % subword_token)\n subword = tf.logical_or(tf.roll(right, shift=1, axis=0), left)\n- start = tf.logical_not(subword)\n- start_indices = tf.squeeze(tf.where(start), -1)\n+ word_start = tf.logical_not(subword)\n+ start_indices = tf.squeeze(tf.where(word_start), -1)\n return tf.RaggedTensor.from_row_starts(tokens, start_indices)\n \n def alignment_matrix_from_pharaoh(alignment_line,\n", "issue": "Subword tokenisation spacer can mark the beginning of word\nCertain sequence noising operations need to retrieve a list of words from the raw list of subword tokens. For example:\r\n\r\n* Decoding with word removal/reordering to produce noisy back-translations as in [Scaling BT paper](https://arxiv.org/abs/1808.09381)\r\n\r\n* Word omission to support the new contrastive learning feature as in the [contrastive learning paper](https://www.aclweb.org/anthology/P19-1623.pdf)\r\n\r\n* Presumably more features relying on word level noise might come up in the future\r\n\r\nIn these cases the user should specify some details for the sub-tokenisation process: \r\n1. What subword tokens was used? (`decoding_subword_token`)\r\n2. Was that token a joiner or a spacer? (`decoding_subword_token_is_spacer`)\r\n\r\nWhen the user specifies (explicitly or implicitly) a spacer, the framework assumes that the spacer symbol appears at the beginning of each word, similar to what SentencePiece does. However this does not have to be the case, the spacer could also appear at the end of each word - for example [this one does](https://github.com/kovalevfm/SubTokenizer). If that extra sub-tokenisation flexibility is desired, we can add this configuration parameter. A sample implementation could look like [this](https://github.com/steremma/OpenNMT-tf/commit/d109af49911431e424b28def575fb94f07bfec47).\r\n\r\nI realise that most user's rely on standard tools that are covered by the current implementation. 
If there is a user base for which the extra flexibility is desired, I can submit a PR that reads this option from the YAML.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Text manipulation.\"\"\"\n\nimport tensorflow as tf\n\n\ndef tokens_to_chars(tokens):\n \"\"\"Splits tokens into unicode characters.\n\n Args:\n tokens: A string ``tf.Tensor`` of shape :math:`[T]`.\n\n Returns:\n The characters as a 2D string ``tf.RaggedTensor``.\n \"\"\"\n return tf.strings.unicode_split(tokens, \"UTF-8\")\n\ndef tokens_to_words(tokens, subword_token=\"\uffed\", is_spacer=None):\n \"\"\"Converts a sequence of tokens to a sequence of words.\n\n For example, if a BPE tokenization produces this sequence:\n\n [\"He@@\", \"llo\", \"W@@\", \"orld\", \"@@!\"]\n\n this function will return the tensor:\n\n [[\"He@@\", \"llo\", \"\"], [\"W@@\", \"orld\", \"@@!\"]]\n\n Args:\n tokens: A 1D string ``tf.Tensor``.\n subword_token: The special token used by the subword tokenizer.\n is_spacer: Whether :obj:`subword_token` is used as a spacer (as in\n SentencePiece) or a joiner (as in BPE). If ``None``, will infer\n directly from :obj:`subword_token`.\n\n Returns:\n The words as a 2D string ``tf.RaggedTensor``.\n \"\"\"\n if is_spacer is None:\n is_spacer = subword_token == \"\u2581\"\n if is_spacer:\n subword = tf.strings.regex_full_match(tokens, \"[^%s].*\" % subword_token)\n else:\n right = tf.strings.regex_full_match(tokens, \".*%s\" % subword_token)\n left = tf.strings.regex_full_match(tokens, \"%s.*\" % subword_token)\n subword = tf.logical_or(tf.roll(right, shift=1, axis=0), left)\n start = tf.logical_not(subword)\n start_indices = tf.squeeze(tf.where(start), -1)\n return tf.RaggedTensor.from_row_starts(tokens, start_indices)\n\ndef alignment_matrix_from_pharaoh(alignment_line,\n source_length,\n target_length,\n dtype=tf.float32):\n \"\"\"Parse Pharaoh alignments into an alignment matrix.\n\n Args:\n alignment_line: A string ``tf.Tensor`` in the Pharaoh format.\n source_length: The length of the source sentence, without special symbols.\n target_length: The length of the target sentence, without special symbols.\n dtype: The output matrix dtype. Defaults to ``tf.float32`` for convenience\n when computing the guided alignment loss.\n\n Returns:\n The alignment matrix as a 2-D ``tf.Tensor`` of type :obj:`dtype` and shape\n ``[target_length, source_length]``, where ``[i, j] = 1`` if the ``i`` th\n target word is aligned with the ``j`` th source word.\n \"\"\"\n align_pairs_str = tf.strings.split([alignment_line]).values\n align_pairs_flat_str = tf.strings.split(align_pairs_str, sep=\"-\").values\n align_pairs_flat = tf.strings.to_number(align_pairs_flat_str, out_type=tf.int64)\n sparse_indices = tf.reshape(align_pairs_flat, [-1, 2])\n sparse_values = tf.ones([tf.shape(sparse_indices)[0]], dtype=dtype)\n source_length = tf.cast(source_length, tf.int64)\n target_length = tf.cast(target_length, tf.int64)\n alignment_matrix_sparse = tf.sparse.SparseTensor(\n sparse_indices, sparse_values, [source_length, target_length])\n alignment_matrix = tf.sparse.to_dense(alignment_matrix_sparse, validate_indices=False)\n return tf.transpose(alignment_matrix)\n", "path": "opennmt/data/text.py"}]} | 1,881 | 344 |
gh_patches_debug_29109 | rasdani/github-patches | git_diff | saleor__saleor-8874 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
management/commands errors
There are some queries that reference deleted fields in this module (examples below). I was wondering whether it matters if this module is updated, because it seems like this file hasn't been updated in a while, or are there other reasons that these queries still exist? Thanks.
https://github.com/mirumee/saleor/blob/master/saleor/core/management/commands/change_currency.py#L45
https://github.com/mirumee/saleor/blob/master/saleor/core/management/commands/change_currency.py#L51
https://github.com/mirumee/saleor/blob/master/saleor/core/management/commands/change_currency.py#L52
https://github.com/mirumee/saleor/blob/master/saleor/core/management/commands/change_currency.py#L53
</issue>
<code>
[start of saleor/core/management/commands/change_currency.py]
1 from babel.numbers import UnknownCurrencyError, validate_currency
2 from django.core.management.base import BaseCommand, CommandError
3
4 from ....checkout.models import Checkout
5 from ....discount.models import Voucher
6 from ....giftcard.models import GiftCard
7 from ....order.models import Order, OrderLine
8 from ....payment.models import Payment, Transaction
9 from ....product.models import Product, ProductVariant
10 from ....shipping.models import ShippingMethod
11
12
13 class Command(BaseCommand):
14 help = (
15 "Change currency in all models in the database. "
16 "Note, that this command only changes currency code "
17 "without doing any conversion. "
18 "Currency set by this command must match "
19 "with the value set in DEFAULT_CURRENCY environment variable."
20 )
21
22 def add_arguments(self, parser):
23 parser.add_argument("currency", type=str)
24
25 parser.add_argument(
26 "--force",
27 action="store_true",
28 help="Allows running command without validation.",
29 )
30
31 def handle(self, **options):
32 force = options.get("force", False)
33 currency = options["currency"]
34
35 if not force:
36 try:
37 validate_currency(currency)
38 except UnknownCurrencyError:
39 raise CommandError(
40 "Unknown currency. "
41 "Use `--force` flag to force migration currencies."
42 )
43
44 Checkout.objects.update(currency=currency)
45 Voucher.objects.update(currency=currency)
46 GiftCard.objects.update(currency=currency)
47 Order.objects.update(currency=currency)
48 OrderLine.objects.update(currency=currency)
49 Payment.objects.update(currency=currency)
50 Transaction.objects.update(currency=currency)
51 Product.objects.update(currency=currency)
52 ProductVariant.objects.update(currency=currency)
53 ShippingMethod.objects.update(currency=currency)
54
[end of saleor/core/management/commands/change_currency.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/saleor/core/management/commands/change_currency.py b/saleor/core/management/commands/change_currency.py
deleted file mode 100644
--- a/saleor/core/management/commands/change_currency.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from babel.numbers import UnknownCurrencyError, validate_currency
-from django.core.management.base import BaseCommand, CommandError
-
-from ....checkout.models import Checkout
-from ....discount.models import Voucher
-from ....giftcard.models import GiftCard
-from ....order.models import Order, OrderLine
-from ....payment.models import Payment, Transaction
-from ....product.models import Product, ProductVariant
-from ....shipping.models import ShippingMethod
-
-
-class Command(BaseCommand):
- help = (
- "Change currency in all models in the database. "
- "Note, that this command only changes currency code "
- "without doing any conversion. "
- "Currency set by this command must match "
- "with the value set in DEFAULT_CURRENCY environment variable."
- )
-
- def add_arguments(self, parser):
- parser.add_argument("currency", type=str)
-
- parser.add_argument(
- "--force",
- action="store_true",
- help="Allows running command without validation.",
- )
-
- def handle(self, **options):
- force = options.get("force", False)
- currency = options["currency"]
-
- if not force:
- try:
- validate_currency(currency)
- except UnknownCurrencyError:
- raise CommandError(
- "Unknown currency. "
- "Use `--force` flag to force migration currencies."
- )
-
- Checkout.objects.update(currency=currency)
- Voucher.objects.update(currency=currency)
- GiftCard.objects.update(currency=currency)
- Order.objects.update(currency=currency)
- OrderLine.objects.update(currency=currency)
- Payment.objects.update(currency=currency)
- Transaction.objects.update(currency=currency)
- Product.objects.update(currency=currency)
- ProductVariant.objects.update(currency=currency)
- ShippingMethod.objects.update(currency=currency)
| {"golden_diff": "diff --git a/saleor/core/management/commands/change_currency.py b/saleor/core/management/commands/change_currency.py\ndeleted file mode 100644\n--- a/saleor/core/management/commands/change_currency.py\n+++ /dev/null\n@@ -1,53 +0,0 @@\n-from babel.numbers import UnknownCurrencyError, validate_currency\n-from django.core.management.base import BaseCommand, CommandError\n-\n-from ....checkout.models import Checkout\n-from ....discount.models import Voucher\n-from ....giftcard.models import GiftCard\n-from ....order.models import Order, OrderLine\n-from ....payment.models import Payment, Transaction\n-from ....product.models import Product, ProductVariant\n-from ....shipping.models import ShippingMethod\n-\n-\n-class Command(BaseCommand):\n- help = (\n- \"Change currency in all models in the database. \"\n- \"Note, that this command only changes currency code \"\n- \"without doing any conversion. \"\n- \"Currency set by this command must match \"\n- \"with the value set in DEFAULT_CURRENCY environment variable.\"\n- )\n-\n- def add_arguments(self, parser):\n- parser.add_argument(\"currency\", type=str)\n-\n- parser.add_argument(\n- \"--force\",\n- action=\"store_true\",\n- help=\"Allows running command without validation.\",\n- )\n-\n- def handle(self, **options):\n- force = options.get(\"force\", False)\n- currency = options[\"currency\"]\n-\n- if not force:\n- try:\n- validate_currency(currency)\n- except UnknownCurrencyError:\n- raise CommandError(\n- \"Unknown currency. \"\n- \"Use `--force` flag to force migration currencies.\"\n- )\n-\n- Checkout.objects.update(currency=currency)\n- Voucher.objects.update(currency=currency)\n- GiftCard.objects.update(currency=currency)\n- Order.objects.update(currency=currency)\n- OrderLine.objects.update(currency=currency)\n- Payment.objects.update(currency=currency)\n- Transaction.objects.update(currency=currency)\n- Product.objects.update(currency=currency)\n- ProductVariant.objects.update(currency=currency)\n- ShippingMethod.objects.update(currency=currency)\n", "issue": "management/commands errors\nThere are some queries that reference deleted fields in this module (examples below). I was wondering does it matter if this module is updated because it seems like this file hasn't been updated in a while, or are there other reasons that these queries still exist? Thanks.\r\n\r\nhttps://github.com/mirumee/saleor/blob/master/saleor/core/management/commands/change_currency.py#L45\r\nhttps://github.com/mirumee/saleor/blob/master/saleor/core/management/commands/change_currency.py#L51\r\nhttps://github.com/mirumee/saleor/blob/master/saleor/core/management/commands/change_currency.py#L52\r\nhttps://github.com/mirumee/saleor/blob/master/saleor/core/management/commands/change_currency.py#L53\n", "before_files": [{"content": "from babel.numbers import UnknownCurrencyError, validate_currency\nfrom django.core.management.base import BaseCommand, CommandError\n\nfrom ....checkout.models import Checkout\nfrom ....discount.models import Voucher\nfrom ....giftcard.models import GiftCard\nfrom ....order.models import Order, OrderLine\nfrom ....payment.models import Payment, Transaction\nfrom ....product.models import Product, ProductVariant\nfrom ....shipping.models import ShippingMethod\n\n\nclass Command(BaseCommand):\n help = (\n \"Change currency in all models in the database. \"\n \"Note, that this command only changes currency code \"\n \"without doing any conversion. 
\"\n \"Currency set by this command must match \"\n \"with the value set in DEFAULT_CURRENCY environment variable.\"\n )\n\n def add_arguments(self, parser):\n parser.add_argument(\"currency\", type=str)\n\n parser.add_argument(\n \"--force\",\n action=\"store_true\",\n help=\"Allows running command without validation.\",\n )\n\n def handle(self, **options):\n force = options.get(\"force\", False)\n currency = options[\"currency\"]\n\n if not force:\n try:\n validate_currency(currency)\n except UnknownCurrencyError:\n raise CommandError(\n \"Unknown currency. \"\n \"Use `--force` flag to force migration currencies.\"\n )\n\n Checkout.objects.update(currency=currency)\n Voucher.objects.update(currency=currency)\n GiftCard.objects.update(currency=currency)\n Order.objects.update(currency=currency)\n OrderLine.objects.update(currency=currency)\n Payment.objects.update(currency=currency)\n Transaction.objects.update(currency=currency)\n Product.objects.update(currency=currency)\n ProductVariant.objects.update(currency=currency)\n ShippingMethod.objects.update(currency=currency)\n", "path": "saleor/core/management/commands/change_currency.py"}]} | 1,177 | 463 |
gh_patches_debug_11801 | rasdani/github-patches | git_diff | getmoto__moto-399 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Python 2.6 issues with wheels and dependencies
My Travis tests failed on Python 2.6: piskvorky/smart_open#15 .
After some digging around it appears this is because of `moto`. Moto apparently depends on some `ordereddict` package, but that package is not installed (nor mentioned anywhere in the docs, AFAICS).
Do you think you could make `ordereddict` a dependency for moto, when installing on Python 2.6?
In other words, after I successfully run `pip install moto`, I'd expect moto to work, even on Python 2.6.
And thanks for the great package!
</issue>
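A minimal sketch of how such a conditional dependency can be declared so that pip resolves it at install time; the package name and the `requests` entry below are placeholders, and this uses a PEP 508 environment marker instead of checking `sys.version_info` inside `setup.py`:

```
from setuptools import setup

setup(
    name="example-package",   # placeholder
    version="0.1",
    install_requires=[
        "requests",
        # Only installed on interpreters older than 2.7:
        'ordereddict; python_version < "2.7"',
    ],
)
```

Older setuptools releases expressed the same idea with a conditional `extras_require` key instead.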
<code>
[start of setup.py]
1 #!/usr/bin/env python
2 from __future__ import unicode_literals
3 from setuptools import setup, find_packages
4
5 install_requires = [
6 "Jinja2",
7 "boto>=2.20.0",
8 "flask",
9 "httpretty>=0.6.1",
10 "requests",
11 "xmltodict",
12 "six",
13 "werkzeug",
14 ]
15
16 import sys
17
18 if sys.version_info < (2, 7):
19 # No buildint OrderedDict before 2.7
20 install_requires.append('ordereddict')
21
22 setup(
23 name='moto',
24 version='0.4.10',
25 description='A library that allows your python tests to easily'
26 ' mock out the boto library',
27 author='Steve Pulec',
28 author_email='spulec@gmail',
29 url='https://github.com/spulec/moto',
30 entry_points={
31 'console_scripts': [
32 'moto_server = moto.server:main',
33 ],
34 },
35 packages=find_packages(exclude=("tests", "tests.*")),
36 install_requires=install_requires,
37 license="Apache",
38 test_suite="tests",
39 classifiers=[
40 "Programming Language :: Python :: 2",
41 "Programming Language :: Python :: 2.6",
42 "Programming Language :: Python :: 2.7",
43 "Programming Language :: Python :: 3",
44 "Programming Language :: Python :: 3.3",
45 "License :: OSI Approved :: Apache Software License",
46 "Topic :: Software Development :: Testing",
47 ],
48 )
49
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -13,11 +13,10 @@
"werkzeug",
]
-import sys
-
-if sys.version_info < (2, 7):
- # No buildint OrderedDict before 2.7
- install_requires.append('ordereddict')
+extras_require = {
+ # No builtin OrderedDict before 2.7
+ ':python_version=="2.6"': ['ordereddict'],
+}
setup(
name='moto',
@@ -34,6 +33,7 @@
},
packages=find_packages(exclude=("tests", "tests.*")),
install_requires=install_requires,
+ extras_require=extras_require,
license="Apache",
test_suite="tests",
classifiers=[
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -13,11 +13,10 @@\n \"werkzeug\",\n ]\n \n-import sys\n-\n-if sys.version_info < (2, 7):\n- # No buildint OrderedDict before 2.7\n- install_requires.append('ordereddict')\n+extras_require = {\n+ # No builtin OrderedDict before 2.7\n+ ':python_version==\"2.6\"': ['ordereddict'],\n+}\n \n setup(\n name='moto',\n@@ -34,6 +33,7 @@\n },\n packages=find_packages(exclude=(\"tests\", \"tests.*\")),\n install_requires=install_requires,\n+ extras_require=extras_require,\n license=\"Apache\",\n test_suite=\"tests\",\n classifiers=[\n", "issue": "Python 2.6 issues with wheels and dependencies\nMy Travis tests failed on Python 2.6: piskvorky/smart_open#15 .\n\nAfter some digging around it appears this is because of `moto`. Moto apparently depends on some `ordereddict` package, but that package is not installed (nor mentioned anywhere in the docs, AFAICS).\n\nDo you think you could make `ordereddict` a dependency for moto, when installing on Python 2.6?\n\nIn other words, after I successfully run `pip install moto`, I'd expect moto to work, even on Python 2.6.\n\nAnd thanks for the great package!\n\n", "before_files": [{"content": "#!/usr/bin/env python\nfrom __future__ import unicode_literals\nfrom setuptools import setup, find_packages\n\ninstall_requires = [\n \"Jinja2\",\n \"boto>=2.20.0\",\n \"flask\",\n \"httpretty>=0.6.1\",\n \"requests\",\n \"xmltodict\",\n \"six\",\n \"werkzeug\",\n]\n\nimport sys\n\nif sys.version_info < (2, 7):\n # No buildint OrderedDict before 2.7\n install_requires.append('ordereddict')\n\nsetup(\n name='moto',\n version='0.4.10',\n description='A library that allows your python tests to easily'\n ' mock out the boto library',\n author='Steve Pulec',\n author_email='spulec@gmail',\n url='https://github.com/spulec/moto',\n entry_points={\n 'console_scripts': [\n 'moto_server = moto.server:main',\n ],\n },\n packages=find_packages(exclude=(\"tests\", \"tests.*\")),\n install_requires=install_requires,\n license=\"Apache\",\n test_suite=\"tests\",\n classifiers=[\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.3\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Topic :: Software Development :: Testing\",\n ],\n)\n", "path": "setup.py"}]} | 1,081 | 178 |
gh_patches_debug_2230 | rasdani/github-patches | git_diff | getsentry__sentry-18644 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
BufferError: Local: Queue full
I am receiving this error once every 2-4 days and I need to restart Sentry to fix it. This started after moving to the Docker version of Sentry.
I never noticed this being an issue on 9.1.2, which also had ClickHouse and Snuba running, but without Kafka.
> https://observ.app/share/issue/4e4f208a500d48cc898770930706959a/
I am not sure where to look / poke / monitor to see this queue that is being spoken of and how I can flush it / enlarge it if needed.
`sentry queues list` showed all 0's so it's not looking like there is a massive backlog of events.
Any help is appreciated!
</issue>
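For background, a small illustrative producer loop with `confluent_kafka` (broker address, topic name, and payloads are placeholders): `produce()` only appends to a bounded local queue, and that queue is drained as `poll()` or `flush()` serve delivery callbacks, so a producer that never polls will eventually raise `BufferError (Local: Queue full)`:

```
from confluent_kafka import Producer

producer = Producer({"bootstrap.servers": "localhost:9092"})

def on_delivery(err, msg):
    if err is not None:
        print("delivery failed: {}".format(err))

for i in range(100000):
    producer.produce("events", value=("payload-%d" % i).encode(), callback=on_delivery)
    # Serve delivery callbacks so the internal queue keeps draining.
    producer.poll(0)

# Block until every queued message has been delivered or has failed.
producer.flush()
```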
<code>
[start of src/sentry/utils/pubsub.py]
1 from __future__ import absolute_import
2
3 import redis
4 import logging
5
6 from threading import Thread
7 from six.moves.queue import Queue, Full
8
9
10 class QueuedPublisherService(object):
11 """
12 A publisher that queues items locally and publishes them to a
13 remote pubsub service on a background thread.
14
15 Maintains a lossy internal queue for posting, will discard the
16 value if the queue is full or not immediately available. Will also
17 drop items if the publish operation to the remote service fails.
18 """
19
20 def __init__(self, publisher):
21 self._started = False
22 self.publisher = publisher
23
24 def _start(self):
25 if self._started:
26 return True
27
28 self.q = q = Queue(maxsize=100)
29
30 def worker():
31 while True:
32 (channel, key, value) = q.get()
33 try:
34 self.publisher.publish(channel, key=key, value=value)
35 except Exception as e:
36 logger = logging.getLogger("sentry.errors")
37 logger.debug("could not submit event to pubsub: %s" % e)
38 finally:
39 q.task_done()
40
41 t = Thread(target=worker)
42 t.setDaemon(True)
43 t.start()
44
45 self._started = True
46 return True
47
48 def publish(self, channel, value, key=None):
49 if not self._start():
50 return
51
52 try:
53 self.q.put((channel, key, value), block=False)
54 except Full:
55 return
56
57
58 class RedisPublisher(object):
59 def __init__(self, connection):
60 self.rds = None if connection is None else redis.StrictRedis(**connection)
61
62 def publish(self, channel, value, key=None):
63 if self.rds is not None:
64 self.rds.publish(channel, value)
65
66
67 class KafkaPublisher(object):
68 def __init__(self, connection, asynchronous=True):
69 from confluent_kafka import Producer
70
71 self.producer = Producer(connection or {})
72 self.asynchronous = asynchronous
73
74 def publish(self, channel, value, key=None):
75 self.producer.produce(topic=channel, value=value, key=key)
76 if not self.asynchronous:
77 self.producer.flush()
78
[end of src/sentry/utils/pubsub.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/sentry/utils/pubsub.py b/src/sentry/utils/pubsub.py
--- a/src/sentry/utils/pubsub.py
+++ b/src/sentry/utils/pubsub.py
@@ -73,5 +73,7 @@
def publish(self, channel, value, key=None):
self.producer.produce(topic=channel, value=value, key=key)
- if not self.asynchronous:
+ if self.asynchronous:
+ self.producer.poll(0)
+ else:
self.producer.flush()
| {"golden_diff": "diff --git a/src/sentry/utils/pubsub.py b/src/sentry/utils/pubsub.py\n--- a/src/sentry/utils/pubsub.py\n+++ b/src/sentry/utils/pubsub.py\n@@ -73,5 +73,7 @@\n \n def publish(self, channel, value, key=None):\n self.producer.produce(topic=channel, value=value, key=key)\n- if not self.asynchronous:\n+ if self.asynchronous:\n+ self.producer.poll(0)\n+ else:\n self.producer.flush()\n", "issue": "BufferError: Local: Queue full\nI am receiving this error once every 2-4 days and I need to restart Sentry to fix it. This started after moving to the Docker version of Sentry.\r\n\r\nI never noticed this being an issue on 9.1.2 also with Clickhouse and Snuba running, but without Kafka.\r\n\r\n> https://observ.app/share/issue/4e4f208a500d48cc898770930706959a/\r\n\r\nI am not sure where to look / poke / monitor to see this queue that is being spoken of and how I can flush it / enlarge it if needed.\r\n\r\n`sentry queues list` showed all 0's so it's not looking like there is a massive backlog of events.\r\n\r\nAny help is appreciated!\n", "before_files": [{"content": "from __future__ import absolute_import\n\nimport redis\nimport logging\n\nfrom threading import Thread\nfrom six.moves.queue import Queue, Full\n\n\nclass QueuedPublisherService(object):\n \"\"\"\n A publisher that queues items locally and publishes them to a\n remote pubsub service on a background thread.\n\n Maintains a lossy internal queue for posting, will discard the\n value if the queue is full or not immediately available. Will also\n drop items if the publish operation to the remote service fails.\n \"\"\"\n\n def __init__(self, publisher):\n self._started = False\n self.publisher = publisher\n\n def _start(self):\n if self._started:\n return True\n\n self.q = q = Queue(maxsize=100)\n\n def worker():\n while True:\n (channel, key, value) = q.get()\n try:\n self.publisher.publish(channel, key=key, value=value)\n except Exception as e:\n logger = logging.getLogger(\"sentry.errors\")\n logger.debug(\"could not submit event to pubsub: %s\" % e)\n finally:\n q.task_done()\n\n t = Thread(target=worker)\n t.setDaemon(True)\n t.start()\n\n self._started = True\n return True\n\n def publish(self, channel, value, key=None):\n if not self._start():\n return\n\n try:\n self.q.put((channel, key, value), block=False)\n except Full:\n return\n\n\nclass RedisPublisher(object):\n def __init__(self, connection):\n self.rds = None if connection is None else redis.StrictRedis(**connection)\n\n def publish(self, channel, value, key=None):\n if self.rds is not None:\n self.rds.publish(channel, value)\n\n\nclass KafkaPublisher(object):\n def __init__(self, connection, asynchronous=True):\n from confluent_kafka import Producer\n\n self.producer = Producer(connection or {})\n self.asynchronous = asynchronous\n\n def publish(self, channel, value, key=None):\n self.producer.produce(topic=channel, value=value, key=key)\n if not self.asynchronous:\n self.producer.flush()\n", "path": "src/sentry/utils/pubsub.py"}]} | 1,330 | 116 |
gh_patches_debug_28661 | rasdani/github-patches | git_diff | Kinto__kinto-696 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Inconsistency with 404 response on empty collections
- Set `read_only` to true
- Give readonly access to the postgresql user
- Give `read` permission to everyone on a bucket `foo`
- Going to `/buckets/foo/collections/unknown` gives 404
- Going to `/buckets/foo/collections/unknown/records` gives 503
Listing the records of an unknown collection should definitely give 404, except with the `default` bucket plugin.
Very related to https://github.com/Kinto/kinto/issues/558
</issue>
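An illustrative check of the expected behaviour against a running instance (server URL and credentials below are placeholders, not taken from the report):

```
import requests

base = "http://localhost:8888/v1"
auth = ("token", "secret")  # placeholder credentials

# An unknown collection should return 404 ...
r = requests.get(base + "/buckets/foo/collections/unknown", auth=auth)
print(r.status_code)  # expected: 404

# ... and so should listing its records, rather than a 503.
r = requests.get(base + "/buckets/foo/collections/unknown/records", auth=auth)
print(r.status_code)  # expected: 404
```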
<code>
[start of kinto/views/records.py]
1 import copy
2
3 import jsonschema
4 from kinto.core import resource
5 from kinto.core.errors import raise_invalid
6 from jsonschema import exceptions as jsonschema_exceptions
7 from pyramid.security import Authenticated
8 from pyramid.settings import asbool
9
10 from kinto.views import RelaxedUUID, object_exists_or_404
11
12
13 class RecordSchema(resource.ResourceSchema):
14 class Options:
15 preserve_unknown = True
16
17
18 _parent_path = '/buckets/{{bucket_id}}/collections/{{collection_id}}'
19
20
21 @resource.register(name='record',
22 collection_path=_parent_path + '/records',
23 record_path=_parent_path + '/records/{{id}}')
24 class Record(resource.ShareableResource):
25
26 mapping = RecordSchema()
27 schema_field = 'schema'
28
29 def __init__(self, *args, **kwargs):
30 super(Record, self).__init__(*args, **kwargs)
31
32 self.model.id_generator = RelaxedUUID()
33
34 # Check if already fetched before (in batch).
35 collections = self.request.bound_data.setdefault('collections', {})
36 collection_uri = self.get_parent_id(self.request)
37 if collection_uri not in collections:
38 # Unknown yet, fetch from storage.
39 collection_parent_id = '/buckets/%s' % self.bucket_id
40 collection = object_exists_or_404(self.request,
41 collection_id='collection',
42 parent_id=collection_parent_id,
43 object_id=self.collection_id)
44 collections[collection_uri] = collection
45
46 self._collection = collections[collection_uri]
47
48 def get_parent_id(self, request):
49 self.bucket_id = request.matchdict['bucket_id']
50 self.collection_id = request.matchdict['collection_id']
51 return '/buckets/%s/collections/%s' % (self.bucket_id,
52 self.collection_id)
53
54 def is_known_field(self, field_name):
55 """Without schema, any field is considered as known."""
56 return True
57
58 def process_record(self, new, old=None):
59 """Validate records against collection schema, if any."""
60 new = super(Record, self).process_record(new, old)
61
62 schema = self._collection.get('schema')
63 settings = self.request.registry.settings
64 schema_validation = 'experimental_collection_schema_validation'
65 if not schema or not asbool(settings.get(schema_validation)):
66 return new
67
68 collection_timestamp = self._collection[self.model.modified_field]
69
70 try:
71 stripped = copy.deepcopy(new)
72 stripped.pop(self.model.id_field, None)
73 stripped.pop(self.model.modified_field, None)
74 stripped.pop(self.model.permissions_field, None)
75 stripped.pop(self.schema_field, None)
76 jsonschema.validate(stripped, schema)
77 except jsonschema_exceptions.ValidationError as e:
78 try:
79 field = e.path.pop() if e.path else e.validator_value.pop()
80 except AttributeError:
81 field = None
82 raise_invalid(self.request, name=field, description=e.message)
83
84 new[self.schema_field] = collection_timestamp
85 return new
86
87 def collection_get(self):
88 result = super(Record, self).collection_get()
89 self._handle_cache_expires(self.request.response)
90 return result
91
92 def get(self):
93 result = super(Record, self).get()
94 self._handle_cache_expires(self.request.response)
95 return result
96
97 def _handle_cache_expires(self, response):
98 """If the parent collection defines a ``cache_expires`` attribute,
99 then cache-control response headers are sent.
100
101 .. note::
102
103 Those headers are also sent if the
104 ``kinto.record_cache_expires_seconds`` setting is defined.
105 """
106 is_anonymous = Authenticated not in self.request.effective_principals
107 if not is_anonymous:
108 return
109
110 cache_expires = self._collection.get('cache_expires')
111 if cache_expires is None:
112 by_bucket = '%s_record_cache_expires_seconds' % (self.bucket_id)
113 by_collection = '%s_%s_record_cache_expires_seconds' % (
114 self.bucket_id, self.collection_id)
115 settings = self.request.registry.settings
116 cache_expires = settings.get(by_collection,
117 settings.get(by_bucket))
118
119 if cache_expires is not None:
120 response.cache_expires(seconds=int(cache_expires))
121
[end of kinto/views/records.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kinto/views/records.py b/kinto/views/records.py
--- a/kinto/views/records.py
+++ b/kinto/views/records.py
@@ -26,23 +26,22 @@
mapping = RecordSchema()
schema_field = 'schema'
- def __init__(self, *args, **kwargs):
- super(Record, self).__init__(*args, **kwargs)
-
- self.model.id_generator = RelaxedUUID()
-
+ def __init__(self, request, **kwargs):
+ # Before all, first check that the parent collection exists.
# Check if already fetched before (in batch).
- collections = self.request.bound_data.setdefault('collections', {})
- collection_uri = self.get_parent_id(self.request)
+ collections = request.bound_data.setdefault('collections', {})
+ collection_uri = self.get_parent_id(request)
if collection_uri not in collections:
# Unknown yet, fetch from storage.
collection_parent_id = '/buckets/%s' % self.bucket_id
- collection = object_exists_or_404(self.request,
+ collection = object_exists_or_404(request,
collection_id='collection',
parent_id=collection_parent_id,
object_id=self.collection_id)
collections[collection_uri] = collection
+ super(Record, self).__init__(request, **kwargs)
+ self.model.id_generator = RelaxedUUID()
self._collection = collections[collection_uri]
def get_parent_id(self, request):
| {"golden_diff": "diff --git a/kinto/views/records.py b/kinto/views/records.py\n--- a/kinto/views/records.py\n+++ b/kinto/views/records.py\n@@ -26,23 +26,22 @@\n mapping = RecordSchema()\n schema_field = 'schema'\n \n- def __init__(self, *args, **kwargs):\n- super(Record, self).__init__(*args, **kwargs)\n-\n- self.model.id_generator = RelaxedUUID()\n-\n+ def __init__(self, request, **kwargs):\n+ # Before all, first check that the parent collection exists.\n # Check if already fetched before (in batch).\n- collections = self.request.bound_data.setdefault('collections', {})\n- collection_uri = self.get_parent_id(self.request)\n+ collections = request.bound_data.setdefault('collections', {})\n+ collection_uri = self.get_parent_id(request)\n if collection_uri not in collections:\n # Unknown yet, fetch from storage.\n collection_parent_id = '/buckets/%s' % self.bucket_id\n- collection = object_exists_or_404(self.request,\n+ collection = object_exists_or_404(request,\n collection_id='collection',\n parent_id=collection_parent_id,\n object_id=self.collection_id)\n collections[collection_uri] = collection\n \n+ super(Record, self).__init__(request, **kwargs)\n+ self.model.id_generator = RelaxedUUID()\n self._collection = collections[collection_uri]\n \n def get_parent_id(self, request):\n", "issue": "Inconsistency with 404 response on empty collections\n- Set `read_only` to true\n- Give readonly access to the postgresql user\n- Give `read` permission to everyone on a bucket `foo`\n- Going to `/buckets/foo/collections/unknown` gives 404\n- Going to `/buckets/foo/collections/unknown/records` gives 503\n\nListing the records of an unknown collection should definitely give 404, except with the `default` bucket plugin.\n\nVery related to https://github.com/Kinto/kinto/issues/558\n\n", "before_files": [{"content": "import copy\n\nimport jsonschema\nfrom kinto.core import resource\nfrom kinto.core.errors import raise_invalid\nfrom jsonschema import exceptions as jsonschema_exceptions\nfrom pyramid.security import Authenticated\nfrom pyramid.settings import asbool\n\nfrom kinto.views import RelaxedUUID, object_exists_or_404\n\n\nclass RecordSchema(resource.ResourceSchema):\n class Options:\n preserve_unknown = True\n\n\n_parent_path = '/buckets/{{bucket_id}}/collections/{{collection_id}}'\n\n\[email protected](name='record',\n collection_path=_parent_path + '/records',\n record_path=_parent_path + '/records/{{id}}')\nclass Record(resource.ShareableResource):\n\n mapping = RecordSchema()\n schema_field = 'schema'\n\n def __init__(self, *args, **kwargs):\n super(Record, self).__init__(*args, **kwargs)\n\n self.model.id_generator = RelaxedUUID()\n\n # Check if already fetched before (in batch).\n collections = self.request.bound_data.setdefault('collections', {})\n collection_uri = self.get_parent_id(self.request)\n if collection_uri not in collections:\n # Unknown yet, fetch from storage.\n collection_parent_id = '/buckets/%s' % self.bucket_id\n collection = object_exists_or_404(self.request,\n collection_id='collection',\n parent_id=collection_parent_id,\n object_id=self.collection_id)\n collections[collection_uri] = collection\n\n self._collection = collections[collection_uri]\n\n def get_parent_id(self, request):\n self.bucket_id = request.matchdict['bucket_id']\n self.collection_id = request.matchdict['collection_id']\n return '/buckets/%s/collections/%s' % (self.bucket_id,\n self.collection_id)\n\n def is_known_field(self, field_name):\n \"\"\"Without schema, any field is considered as known.\"\"\"\n 
return True\n\n def process_record(self, new, old=None):\n \"\"\"Validate records against collection schema, if any.\"\"\"\n new = super(Record, self).process_record(new, old)\n\n schema = self._collection.get('schema')\n settings = self.request.registry.settings\n schema_validation = 'experimental_collection_schema_validation'\n if not schema or not asbool(settings.get(schema_validation)):\n return new\n\n collection_timestamp = self._collection[self.model.modified_field]\n\n try:\n stripped = copy.deepcopy(new)\n stripped.pop(self.model.id_field, None)\n stripped.pop(self.model.modified_field, None)\n stripped.pop(self.model.permissions_field, None)\n stripped.pop(self.schema_field, None)\n jsonschema.validate(stripped, schema)\n except jsonschema_exceptions.ValidationError as e:\n try:\n field = e.path.pop() if e.path else e.validator_value.pop()\n except AttributeError:\n field = None\n raise_invalid(self.request, name=field, description=e.message)\n\n new[self.schema_field] = collection_timestamp\n return new\n\n def collection_get(self):\n result = super(Record, self).collection_get()\n self._handle_cache_expires(self.request.response)\n return result\n\n def get(self):\n result = super(Record, self).get()\n self._handle_cache_expires(self.request.response)\n return result\n\n def _handle_cache_expires(self, response):\n \"\"\"If the parent collection defines a ``cache_expires`` attribute,\n then cache-control response headers are sent.\n\n .. note::\n\n Those headers are also sent if the\n ``kinto.record_cache_expires_seconds`` setting is defined.\n \"\"\"\n is_anonymous = Authenticated not in self.request.effective_principals\n if not is_anonymous:\n return\n\n cache_expires = self._collection.get('cache_expires')\n if cache_expires is None:\n by_bucket = '%s_record_cache_expires_seconds' % (self.bucket_id)\n by_collection = '%s_%s_record_cache_expires_seconds' % (\n self.bucket_id, self.collection_id)\n settings = self.request.registry.settings\n cache_expires = settings.get(by_collection,\n settings.get(by_bucket))\n\n if cache_expires is not None:\n response.cache_expires(seconds=int(cache_expires))\n", "path": "kinto/views/records.py"}]} | 1,812 | 330 |
gh_patches_debug_50453 | rasdani/github-patches | git_diff | jupyterhub__jupyterhub-3837 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Maybe a bug about module checking
### Bug description
<!-- Use this section to clearly and concisely describe the bug. -->
If I use conda to install only jupyterhub and python (conda install -c conda-forge python=3.9 jupyterhub), the following message showed when someone tried to log in:
```
Failed to set groups [Errno 1] Operation not permitted
Traceback (most recent call last):
File "/home/someone/bin/anaconda3/envs/py39jupyterhub222/bin/jupyterhub-singleuser", line 7, in <module>
from jupyterhub.singleuser import main
File "/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/jupyterhub/singleuser/__init__.py", line 5, in <module>
from .app import main
File "/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/jupyterhub/singleuser/app.py", line 38, in <module>
raise _import_error
TypeError: exceptions must derive from BaseException
```
I think the problem is in lines 32 to 36 of jupyterhub/singleuser/app.py:
```
except ImportError as e:
continue
if _import_error is None:
_import_error = e
else:
break
```
I changed that to:
```
except ImportError as e:
if _import_error is None:
_import_error = e
else:
break
continue
```
then a better message showed:
```
Failed to set groups [Errno 1] Operation not permitted
Traceback (most recent call last):
File "/home/someone/bin/anaconda3/envs/py39jupyterhub222/bin/jupyterhub-singleuser", line 7, in <module>
from jupyterhub.singleuser import main
File "/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/jupyterhub/singleuser/__init__.py", line 5, in <module>
from .app import main
File "/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/jupyterhub/singleuser/app.py", line 38, in <module>
raise _import_error
File "/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/jupyterhub/singleuser/app.py", line 30, in <module>
App = import_item(JUPYTERHUB_SINGLEUSER_APP)
File "/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/traitlets/utils/importstring.py", line 30, in import_item
module = __import__(package, fromlist=[obj])
ModuleNotFoundError: No module named 'jupyter_server'
```
The above message let me know that I have to install jupyter_server.
This issue can be closed anytime.
Any suggestion is welcome.
</issue>
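The fallback-import pattern being described (remember the first `ImportError`, keep trying the remaining candidates, and re-raise that first error only if every candidate fails) can be sketched on its own; the module names here are just examples:

```
import importlib

App = None
first_error = None
for name in ("jupyter_server.serverapp", "notebook.notebookapp"):
    try:
        App = importlib.import_module(name)
    except ImportError as e:
        # Record the first failure before moving on to the next candidate.
        if first_error is None:
            first_error = e
        continue
    else:
        break
if App is None:
    raise first_error
```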
<code>
[start of jupyterhub/singleuser/app.py]
1 """Make a single-user app based on the environment:
2
3 - $JUPYTERHUB_SINGLEUSER_APP, the base Application class, to be wrapped in JupyterHub authentication.
4 default: jupyter_server.serverapp.ServerApp
5
6 .. versionchanged:: 2.0
7
8 Default app changed to launch `jupyter labhub`.
9 Use JUPYTERHUB_SINGLEUSER_APP=notebook.notebookapp.NotebookApp for the legacy 'classic' notebook server.
10 """
11 import os
12
13 from traitlets import import_item
14
15 from .mixins import make_singleuser_app
16
17 JUPYTERHUB_SINGLEUSER_APP = os.environ.get("JUPYTERHUB_SINGLEUSER_APP")
18
19
20 if JUPYTERHUB_SINGLEUSER_APP:
21 App = import_item(JUPYTERHUB_SINGLEUSER_APP)
22 else:
23 App = None
24 _import_error = None
25 for JUPYTERHUB_SINGLEUSER_APP in (
26 "jupyter_server.serverapp.ServerApp",
27 "notebook.notebookapp.NotebookApp",
28 ):
29 try:
30 App = import_item(JUPYTERHUB_SINGLEUSER_APP)
31 except ImportError as e:
32 continue
33 if _import_error is None:
34 _import_error = e
35 else:
36 break
37 if App is None:
38 raise _import_error
39
40
41 SingleUserNotebookApp = make_singleuser_app(App)
42
43
44 def main():
45 """Launch a jupyterhub single-user server"""
46 if not os.environ.get("JUPYTERHUB_SINGLEUSER_APP"):
47 # app not specified, launch jupyter-labhub by default,
48 # if jupyterlab is recent enough (3.1).
49 # This is a minimally extended ServerApp that does:
50 # 1. ensure lab extension is enabled, and
51 # 2. set default URL to `/lab`
52 import re
53
54 _version_pat = re.compile(r"(\d+)\.(\d+)")
55 try:
56 import jupyterlab
57 from jupyterlab.labhubapp import SingleUserLabApp
58
59 m = _version_pat.match(jupyterlab.__version__)
60 except Exception:
61 m = None
62
63 if m is not None:
64 version_tuple = tuple(int(v) for v in m.groups())
65 if version_tuple >= (3, 1):
66 return SingleUserLabApp.launch_instance()
67
68 return SingleUserNotebookApp.launch_instance()
69
[end of jupyterhub/singleuser/app.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/jupyterhub/singleuser/app.py b/jupyterhub/singleuser/app.py
--- a/jupyterhub/singleuser/app.py
+++ b/jupyterhub/singleuser/app.py
@@ -29,9 +29,9 @@
try:
App = import_item(JUPYTERHUB_SINGLEUSER_APP)
except ImportError as e:
- continue
if _import_error is None:
_import_error = e
+ continue
else:
break
if App is None:
| {"golden_diff": "diff --git a/jupyterhub/singleuser/app.py b/jupyterhub/singleuser/app.py\n--- a/jupyterhub/singleuser/app.py\n+++ b/jupyterhub/singleuser/app.py\n@@ -29,9 +29,9 @@\n try:\n App = import_item(JUPYTERHUB_SINGLEUSER_APP)\n except ImportError as e:\n- continue\n if _import_error is None:\n _import_error = e\n+ continue\n else:\n break\n if App is None:\n", "issue": "Maybe a bug about module checking\n### Bug description\r\n<!-- Use this section to clearly and concisely describe the bug. -->\r\nIf I use conda to install only jupyterhub and python (conda install -c conda-forge python=3.9 jupyterhub), the following message showed as someone try to login:\r\n\r\n```\r\nFailed to set groups [Errno 1] Operation not permitted\r\nTraceback (most recent call last):\r\n File \"/home/someone/bin/anaconda3/envs/py39jupyterhub222/bin/jupyterhub-singleuser\", line 7, in <module>\r\n from jupyterhub.singleuser import main\r\n File \"/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/jupyterhub/singleuser/__init__.py\", line 5, in <module>\r\n from .app import main\r\n File \"/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/jupyterhub/singleuser/app.py\", line 38, in <module>\r\n raise _import_error\r\nTypeError: exceptions must derive from BaseException\r\n```\r\nI think the problem is the lines from 32 to 36 in jupyterhub/singleuser/app.py\r\n```\r\n except ImportError as e:\r\n continue\r\n if _import_error is None:\r\n _import_error = e\r\n else:\r\n break\r\n```\r\n\r\nI changed that with:\r\n```\r\n except ImportError as e:\r\n if _import_error is None:\r\n _import_error = e\r\n else:\r\n break\r\n continue\r\n```\r\nthen the better message showed:\r\n```\r\nFailed to set groups [Errno 1] Operation not permitted\r\nTraceback (most recent call last):\r\n File \"/home/someone/bin/anaconda3/envs/py39jupyterhub222/bin/jupyterhub-singleuser\", line 7, in <module>\r\n from jupyterhub.singleuser import main\r\n File \"/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/jupyterhub/singleuser/__init__.py\", line 5, in <module>\r\n from .app import main\r\n File \"/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/jupyterhub/singleuser/app.py\", line 38, in <module>\r\n raise _import_error\r\n File \"/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/jupyterhub/singleuser/app.py\", line 30, in <module>\r\n App = import_item(JUPYTERHUB_SINGLEUSER_APP)\r\n File \"/home/someone/bin/anaconda3/envs/py39jupyterhub222/lib/python3.9/site-packages/traitlets/utils/importstring.py\", line 30, in import_item\r\n module = __import__(package, fromlist=[obj])\r\nModuleNotFoundError: No module named 'jupyter_server'\r\n```\r\nThe above message let me know that I have to install jupyter_server.\r\nThis issue can be closed anytime.\r\nAny suggestion is welcome.\r\n\n", "before_files": [{"content": "\"\"\"Make a single-user app based on the environment:\n\n- $JUPYTERHUB_SINGLEUSER_APP, the base Application class, to be wrapped in JupyterHub authentication.\n default: jupyter_server.serverapp.ServerApp\n\n.. 
versionchanged:: 2.0\n\n Default app changed to launch `jupyter labhub`.\n Use JUPYTERHUB_SINGLEUSER_APP=notebook.notebookapp.NotebookApp for the legacy 'classic' notebook server.\n\"\"\"\nimport os\n\nfrom traitlets import import_item\n\nfrom .mixins import make_singleuser_app\n\nJUPYTERHUB_SINGLEUSER_APP = os.environ.get(\"JUPYTERHUB_SINGLEUSER_APP\")\n\n\nif JUPYTERHUB_SINGLEUSER_APP:\n App = import_item(JUPYTERHUB_SINGLEUSER_APP)\nelse:\n App = None\n _import_error = None\n for JUPYTERHUB_SINGLEUSER_APP in (\n \"jupyter_server.serverapp.ServerApp\",\n \"notebook.notebookapp.NotebookApp\",\n ):\n try:\n App = import_item(JUPYTERHUB_SINGLEUSER_APP)\n except ImportError as e:\n continue\n if _import_error is None:\n _import_error = e\n else:\n break\n if App is None:\n raise _import_error\n\n\nSingleUserNotebookApp = make_singleuser_app(App)\n\n\ndef main():\n \"\"\"Launch a jupyterhub single-user server\"\"\"\n if not os.environ.get(\"JUPYTERHUB_SINGLEUSER_APP\"):\n # app not specified, launch jupyter-labhub by default,\n # if jupyterlab is recent enough (3.1).\n # This is a minimally extended ServerApp that does:\n # 1. ensure lab extension is enabled, and\n # 2. set default URL to `/lab`\n import re\n\n _version_pat = re.compile(r\"(\\d+)\\.(\\d+)\")\n try:\n import jupyterlab\n from jupyterlab.labhubapp import SingleUserLabApp\n\n m = _version_pat.match(jupyterlab.__version__)\n except Exception:\n m = None\n\n if m is not None:\n version_tuple = tuple(int(v) for v in m.groups())\n if version_tuple >= (3, 1):\n return SingleUserLabApp.launch_instance()\n\n return SingleUserNotebookApp.launch_instance()\n", "path": "jupyterhub/singleuser/app.py"}]} | 1,898 | 112 |
gh_patches_debug_39273 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-3126 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Spider jimmy-johns is broken
During the global build at 2021-09-29-14-42-48, spider **jimmy-johns** failed with **0 features** and **1544 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-09-29-14-42-48/logs/jimmy-johns.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-09-29-14-42-48/output/jimmy-johns.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-09-29-14-42-48/output/jimmy-johns.geojson))
</issue>
<code>
[start of locations/spiders/jimmy_johns.py]
1 # -*- coding: utf-8 -*-
2 import scrapy
3 import json
4
5 from locations.items import GeojsonPointItem
6
7 STATES = ["AL", "AK", "AZ", "AR", "CA", "CO", "CT", "DC", "DE", "FL", "GA",
8 "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD",
9 "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ",
10 "NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "RI", "SC",
11 "SD", "TN", "TX", "UT", "VT", "VA", "WA", "WV", "WI", "WY"]
12 HEADERS = { 'Content-Type': 'application/json' }
13 JJBASE = 'https://www.jimmyjohns.com/webservices/Location/LocationServiceHandler.asmx/{}'
14 CITIES = JJBASE.format('GetCitiesByStateNameAbbreviation')
15 STORES = JJBASE.format('GetStoreAddressesByCityAndState')
16
17 class JimmyJohnsSpider(scrapy.Spider):
18 name = "jimmy-johns"
19 item_attributes = { 'brand': "Jimmy John's", 'brand_wikidata': "Q1689380" }
20 allowed_domains = ["www.jimmyjohns.com"]
21 download_delay = 0.2
22
23 def start_requests(self):
24 for state in STATES:
25 current_state = json.dumps({ 'state': state })
26 request = scrapy.Request(
27 CITIES,
28 method='POST',
29 body=current_state,
30 headers=HEADERS,
31 callback=self.parse_cities
32 )
33 request.meta['state'] = state
34 yield request
35
36 def parse_cities(self, response):
37 cities = json.loads(response.body)
38 for city in cities['d']:
39 current_city = json.dumps({ 'state': response.meta['state'], 'city': city })
40 request = scrapy.Request(
41 STORES,
42 method='POST',
43 body=current_city,
44 headers=HEADERS,
45 callback=self.parse
46 )
47 yield request
48
49 def parse(self, response):
50 stores = json.loads(response.body)
51 for store in stores['d']:
52 full = '{}, {}, {} {}'.format(store['address'], store['city'], store['state'], store['postalcode'])
53 yield GeojsonPointItem(
54 name=store['storename'],
55 addr_full=full,
56 opening_hours=store['hours'],
57 phone=store['telephone'],
58 ref=store['storeid'],
59 lon=float(store['lng']),
60 lat=float(store['lat']),
61 )
62
[end of locations/spiders/jimmy_johns.py]
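For orientation, the listing above drives everything off two JSON POST endpoints. A rough `requests`-based sketch of that two-hop flow follows (illustrative only — the endpoint names, headers and payload shapes are taken from the listing, and since the spider now reports 0 features the service may no longer answer this way):

```python
import json
import requests  # the spider itself uses scrapy; requests is only for this sketch

JJBASE = "https://www.jimmyjohns.com/webservices/Location/LocationServiceHandler.asmx/{}"
HEADERS = {"Content-Type": "application/json"}

def cities_for_state(state: str) -> list:
    # First hop: one POST per state abbreviation, payload like {"state": "IL"}.
    resp = requests.post(
        JJBASE.format("GetCitiesByStateNameAbbreviation"),
        data=json.dumps({"state": state}),
        headers=HEADERS,
    )
    return resp.json().get("d", [])

def stores_for_city(state: str, city: str) -> list:
    # Second hop: one POST per (state, city) pair returned by the first call.
    resp = requests.post(
        JJBASE.format("GetStoreAddressesByCityAndState"),
        data=json.dumps({"state": state, "city": city}),
        headers=HEADERS,
    )
    return resp.json().get("d", [])
```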
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/locations/spiders/jimmy_johns.py b/locations/spiders/jimmy_johns.py
--- a/locations/spiders/jimmy_johns.py
+++ b/locations/spiders/jimmy_johns.py
@@ -1,61 +1,36 @@
# -*- coding: utf-8 -*-
+from os import stat
import scrapy
+from urllib import parse
import json
-
from locations.items import GeojsonPointItem
-STATES = ["AL", "AK", "AZ", "AR", "CA", "CO", "CT", "DC", "DE", "FL", "GA",
- "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD",
- "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ",
- "NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "RI", "SC",
- "SD", "TN", "TX", "UT", "VT", "VA", "WA", "WV", "WI", "WY"]
-HEADERS = { 'Content-Type': 'application/json' }
-JJBASE = 'https://www.jimmyjohns.com/webservices/Location/LocationServiceHandler.asmx/{}'
-CITIES = JJBASE.format('GetCitiesByStateNameAbbreviation')
-STORES = JJBASE.format('GetStoreAddressesByCityAndState')
+class TemplateSpider(scrapy.Spider):
+ name = "jimmy_johns"
+ allowed_domains = ["locations.jimmyjohns.com"]
+ start_urls = (
+ 'https://locations.jimmyjohns.com/sitemap.xml',
+ )
-class JimmyJohnsSpider(scrapy.Spider):
- name = "jimmy-johns"
- item_attributes = { 'brand': "Jimmy John's", 'brand_wikidata': "Q1689380" }
- allowed_domains = ["www.jimmyjohns.com"]
- download_delay = 0.2
+ def parse(self, response):
+ stores = response.xpath('//url/loc[contains(text(),"sandwiches")]/text()').extract()
+ for store in stores:
+ yield scrapy.Request(response.urljoin(store), callback=self.parse_store)
- def start_requests(self):
- for state in STATES:
- current_state = json.dumps({ 'state': state })
- request = scrapy.Request(
- CITIES,
- method='POST',
- body=current_state,
- headers=HEADERS,
- callback=self.parse_cities
- )
- request.meta['state'] = state
- yield request
+ def parse_store(self, response):
+ data = json.loads(response.xpath('//script[@type="application/ld+json"]//text()').extract_first())
- def parse_cities(self, response):
- cities = json.loads(response.body)
- for city in cities['d']:
- current_city = json.dumps({ 'state': response.meta['state'], 'city': city })
- request = scrapy.Request(
- STORES,
- method='POST',
- body=current_city,
- headers=HEADERS,
- callback=self.parse
- )
- yield request
+ properties = {
+ 'ref': data[0]['url'],
+ 'addr_full': data[0]['address']['streetAddress'],
+ 'city': data[0]['address']['addressLocality'],
+ 'state': data[0]['address']['addressRegion'],
+ 'postcode': data[0]['address']['postalCode'],
+ 'website': response.url,
+ 'lat': data[0]['geo']['latitude'],
+ 'lon': data[0]['geo']['longitude'],
+ }
+ if data[0]['address']['telephone']:
+ properties['phone'] = data[0]['address']['telephone']
- def parse(self, response):
- stores = json.loads(response.body)
- for store in stores['d']:
- full = '{}, {}, {} {}'.format(store['address'], store['city'], store['state'], store['postalcode'])
- yield GeojsonPointItem(
- name=store['storename'],
- addr_full=full,
- opening_hours=store['hours'],
- phone=store['telephone'],
- ref=store['storeid'],
- lon=float(store['lng']),
- lat=float(store['lat']),
- )
+ yield GeojsonPointItem(**properties)
| {"golden_diff": "diff --git a/locations/spiders/jimmy_johns.py b/locations/spiders/jimmy_johns.py\n--- a/locations/spiders/jimmy_johns.py\n+++ b/locations/spiders/jimmy_johns.py\n@@ -1,61 +1,36 @@\n # -*- coding: utf-8 -*-\n+from os import stat\n import scrapy\n+from urllib import parse\n import json\n-\n from locations.items import GeojsonPointItem\n \n-STATES = [\"AL\", \"AK\", \"AZ\", \"AR\", \"CA\", \"CO\", \"CT\", \"DC\", \"DE\", \"FL\", \"GA\",\n- \"HI\", \"ID\", \"IL\", \"IN\", \"IA\", \"KS\", \"KY\", \"LA\", \"ME\", \"MD\",\n- \"MA\", \"MI\", \"MN\", \"MS\", \"MO\", \"MT\", \"NE\", \"NV\", \"NH\", \"NJ\",\n- \"NM\", \"NY\", \"NC\", \"ND\", \"OH\", \"OK\", \"OR\", \"PA\", \"RI\", \"SC\",\n- \"SD\", \"TN\", \"TX\", \"UT\", \"VT\", \"VA\", \"WA\", \"WV\", \"WI\", \"WY\"]\n-HEADERS = { 'Content-Type': 'application/json' }\n-JJBASE = 'https://www.jimmyjohns.com/webservices/Location/LocationServiceHandler.asmx/{}'\n-CITIES = JJBASE.format('GetCitiesByStateNameAbbreviation')\n-STORES = JJBASE.format('GetStoreAddressesByCityAndState')\n+class TemplateSpider(scrapy.Spider):\n+ name = \"jimmy_johns\"\n+ allowed_domains = [\"locations.jimmyjohns.com\"]\n+ start_urls = (\n+ 'https://locations.jimmyjohns.com/sitemap.xml',\n+ )\n \n-class JimmyJohnsSpider(scrapy.Spider):\n- name = \"jimmy-johns\"\n- item_attributes = { 'brand': \"Jimmy John's\", 'brand_wikidata': \"Q1689380\" }\n- allowed_domains = [\"www.jimmyjohns.com\"]\n- download_delay = 0.2\n+ def parse(self, response):\n+ stores = response.xpath('//url/loc[contains(text(),\"sandwiches\")]/text()').extract()\n+ for store in stores:\n+ yield scrapy.Request(response.urljoin(store), callback=self.parse_store)\n \n- def start_requests(self):\n- for state in STATES:\n- current_state = json.dumps({ 'state': state })\n- request = scrapy.Request(\n- CITIES,\n- method='POST',\n- body=current_state,\n- headers=HEADERS,\n- callback=self.parse_cities\n- )\n- request.meta['state'] = state\n- yield request\n+ def parse_store(self, response):\n+ data = json.loads(response.xpath('//script[@type=\"application/ld+json\"]//text()').extract_first())\n \n- def parse_cities(self, response):\n- cities = json.loads(response.body)\n- for city in cities['d']:\n- current_city = json.dumps({ 'state': response.meta['state'], 'city': city })\n- request = scrapy.Request(\n- STORES,\n- method='POST',\n- body=current_city,\n- headers=HEADERS,\n- callback=self.parse\n- )\n- yield request\n+ properties = {\n+ 'ref': data[0]['url'],\n+ 'addr_full': data[0]['address']['streetAddress'],\n+ 'city': data[0]['address']['addressLocality'],\n+ 'state': data[0]['address']['addressRegion'],\n+ 'postcode': data[0]['address']['postalCode'],\n+ 'website': response.url,\n+ 'lat': data[0]['geo']['latitude'],\n+ 'lon': data[0]['geo']['longitude'],\n+ }\n+ if data[0]['address']['telephone']:\n+ properties['phone'] = data[0]['address']['telephone']\n \n- def parse(self, response):\n- stores = json.loads(response.body)\n- for store in stores['d']:\n- full = '{}, {}, {} {}'.format(store['address'], store['city'], store['state'], store['postalcode'])\n- yield GeojsonPointItem(\n- name=store['storename'],\n- addr_full=full,\n- opening_hours=store['hours'],\n- phone=store['telephone'],\n- ref=store['storeid'],\n- lon=float(store['lng']),\n- lat=float(store['lat']),\n- )\n+ yield GeojsonPointItem(**properties)\n", "issue": "Spider jimmy-johns is broken\nDuring the global build at 2021-09-29-14-42-48, spider **jimmy-johns** failed with **0 features** and **1544 errors**.\n\nHere's [the 
log](https://data.alltheplaces.xyz/runs/2021-09-29-14-42-48/logs/jimmy-johns.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-09-29-14-42-48/output/jimmy-johns.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-09-29-14-42-48/output/jimmy-johns.geojson))\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport scrapy\nimport json\n\nfrom locations.items import GeojsonPointItem\n\nSTATES = [\"AL\", \"AK\", \"AZ\", \"AR\", \"CA\", \"CO\", \"CT\", \"DC\", \"DE\", \"FL\", \"GA\",\n \"HI\", \"ID\", \"IL\", \"IN\", \"IA\", \"KS\", \"KY\", \"LA\", \"ME\", \"MD\",\n \"MA\", \"MI\", \"MN\", \"MS\", \"MO\", \"MT\", \"NE\", \"NV\", \"NH\", \"NJ\",\n \"NM\", \"NY\", \"NC\", \"ND\", \"OH\", \"OK\", \"OR\", \"PA\", \"RI\", \"SC\",\n \"SD\", \"TN\", \"TX\", \"UT\", \"VT\", \"VA\", \"WA\", \"WV\", \"WI\", \"WY\"]\nHEADERS = { 'Content-Type': 'application/json' }\nJJBASE = 'https://www.jimmyjohns.com/webservices/Location/LocationServiceHandler.asmx/{}'\nCITIES = JJBASE.format('GetCitiesByStateNameAbbreviation')\nSTORES = JJBASE.format('GetStoreAddressesByCityAndState')\n\nclass JimmyJohnsSpider(scrapy.Spider):\n name = \"jimmy-johns\"\n item_attributes = { 'brand': \"Jimmy John's\", 'brand_wikidata': \"Q1689380\" }\n allowed_domains = [\"www.jimmyjohns.com\"]\n download_delay = 0.2\n\n def start_requests(self):\n for state in STATES:\n current_state = json.dumps({ 'state': state })\n request = scrapy.Request(\n CITIES,\n method='POST',\n body=current_state,\n headers=HEADERS,\n callback=self.parse_cities\n )\n request.meta['state'] = state\n yield request\n\n def parse_cities(self, response):\n cities = json.loads(response.body)\n for city in cities['d']:\n current_city = json.dumps({ 'state': response.meta['state'], 'city': city })\n request = scrapy.Request(\n STORES,\n method='POST',\n body=current_city,\n headers=HEADERS,\n callback=self.parse\n )\n yield request\n\n def parse(self, response):\n stores = json.loads(response.body)\n for store in stores['d']:\n full = '{}, {}, {} {}'.format(store['address'], store['city'], store['state'], store['postalcode'])\n yield GeojsonPointItem(\n name=store['storename'],\n addr_full=full,\n opening_hours=store['hours'],\n phone=store['telephone'],\n ref=store['storeid'],\n lon=float(store['lng']),\n lat=float(store['lat']),\n )\n", "path": "locations/spiders/jimmy_johns.py"}]} | 1,459 | 1,021 |
gh_patches_debug_4071 | rasdani/github-patches | git_diff | mlcommons__GaNDLF-173 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Bug during saving probabilities
**Describe the bug**
A small bug occurs when saving probabilities in classification tasks. It is caused by the file existence check: the code should check whether the file exists instead of checking whether the directory exists.
</issue>
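The report above comes down to the `os.path.isdir` / `os.path.isfile` distinction; a tiny self-contained illustration (hypothetical temporary paths, not code from the repository):

```python
import os
import tempfile

with tempfile.TemporaryDirectory() as fold_dir:
    logits_path = os.path.join(fold_dir, "logits.csv")
    open(logits_path, "w").close()  # simulate a written per-fold logits file

    print(os.path.isdir(logits_path))   # False - logits.csv is a file, so this check never passes
    print(os.path.isfile(logits_path))  # True  - the check the issue asks for
```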
<code>
[start of GANDLF/inference_manager.py]
1 from GANDLF.inference_loop import inference_loop
2 import os
3 import numpy as np
4 import torch
5 import torch.nn.functional as F
6
7
8 def InferenceManager(dataframe, outputDir, parameters, device):
9 """
10 This function takes in a dataframe, with some other parameters and performs the inference
11 """
12 # get the indeces for kfold splitting
13 inferenceData_full = dataframe
14
15 # # initialize parameters for inference
16 if not ("weights" in parameters):
17 parameters["weights"] = None # no need for loss weights for inference
18 if not ("class_weights" in parameters):
19 parameters["class_weights"] = None # no need for class weights for inference
20
21 n_folds = parameters["nested_training"]["validation"]
22
23 fold_dirs = []
24 if n_folds > 1:
25 directories = sorted(os.listdir(outputDir))
26 for d in directories:
27 if d.isdigit():
28 fold_dirs.append(os.path.join(outputDir, d, ""))
29 else:
30 fold_dirs = [outputDir]
31
32 probs_list = []
33
34 is_classification = parameters["problem_type"] == "classification"
35
36 for fold_dir in fold_dirs:
37 parameters["current_fold_dir"] = fold_dir
38 inference_loop(
39 inferenceDataFromPickle=inferenceData_full,
40 outputDir=fold_dir,
41 device=device,
42 parameters=parameters,
43 )
44
45 logits_dir = os.path.join(fold_dir, "logits.csv")
46 is_logits_dir_exist = os.path.isdir(logits_dir)
47
48 if is_classification and is_logits_dir_exist:
49 fold_logits = np.genfromtxt(logits_dir, delimiter=",")
50 fold_logits = torch.from_numpy(fold_logits)
51 fold_probs = F.softmax(fold_logits, dim=1)
52 probs_list.append(fold_probs)
53
54 if probs_list and is_classification:
55 probs_list = torch.stack(probs_list)
56 averaged_probs = torch.mean(probs_list, 0).numpy()
57 np.savetxt(
58 os.path.join(outputDir, "averaged_probabilities.csv"),
59 averaged_probs,
60 delimiter=",",
61 )
62
63
[end of GANDLF/inference_manager.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/GANDLF/inference_manager.py b/GANDLF/inference_manager.py
--- a/GANDLF/inference_manager.py
+++ b/GANDLF/inference_manager.py
@@ -43,7 +43,7 @@
)
logits_dir = os.path.join(fold_dir, "logits.csv")
- is_logits_dir_exist = os.path.isdir(logits_dir)
+ is_logits_dir_exist = os.path.isfile(logits_dir)
if is_classification and is_logits_dir_exist:
fold_logits = np.genfromtxt(logits_dir, delimiter=",")
| {"golden_diff": "diff --git a/GANDLF/inference_manager.py b/GANDLF/inference_manager.py\n--- a/GANDLF/inference_manager.py\n+++ b/GANDLF/inference_manager.py\n@@ -43,7 +43,7 @@\n )\n \n logits_dir = os.path.join(fold_dir, \"logits.csv\")\n- is_logits_dir_exist = os.path.isdir(logits_dir)\n+ is_logits_dir_exist = os.path.isfile(logits_dir)\n \n if is_classification and is_logits_dir_exist:\n fold_logits = np.genfromtxt(logits_dir, delimiter=\",\")\n", "issue": "Bug during saving probabilities\n**Describe the bug**\r\nSmall bug is occurring during saving probabilities in classification tasks. This is due to file existence check. It should check if the file exists instead of checking if the directory exists.\r\n\n", "before_files": [{"content": "from GANDLF.inference_loop import inference_loop\nimport os\nimport numpy as np\nimport torch\nimport torch.nn.functional as F\n\n\ndef InferenceManager(dataframe, outputDir, parameters, device):\n \"\"\"\n This function takes in a dataframe, with some other parameters and performs the inference\n \"\"\"\n # get the indeces for kfold splitting\n inferenceData_full = dataframe\n\n # # initialize parameters for inference\n if not (\"weights\" in parameters):\n parameters[\"weights\"] = None # no need for loss weights for inference\n if not (\"class_weights\" in parameters):\n parameters[\"class_weights\"] = None # no need for class weights for inference\n\n n_folds = parameters[\"nested_training\"][\"validation\"]\n\n fold_dirs = []\n if n_folds > 1:\n directories = sorted(os.listdir(outputDir))\n for d in directories:\n if d.isdigit():\n fold_dirs.append(os.path.join(outputDir, d, \"\"))\n else:\n fold_dirs = [outputDir]\n\n probs_list = []\n\n is_classification = parameters[\"problem_type\"] == \"classification\"\n\n for fold_dir in fold_dirs:\n parameters[\"current_fold_dir\"] = fold_dir\n inference_loop(\n inferenceDataFromPickle=inferenceData_full,\n outputDir=fold_dir,\n device=device,\n parameters=parameters,\n )\n\n logits_dir = os.path.join(fold_dir, \"logits.csv\")\n is_logits_dir_exist = os.path.isdir(logits_dir)\n\n if is_classification and is_logits_dir_exist:\n fold_logits = np.genfromtxt(logits_dir, delimiter=\",\")\n fold_logits = torch.from_numpy(fold_logits)\n fold_probs = F.softmax(fold_logits, dim=1)\n probs_list.append(fold_probs)\n\n if probs_list and is_classification:\n probs_list = torch.stack(probs_list)\n averaged_probs = torch.mean(probs_list, 0).numpy()\n np.savetxt(\n os.path.join(outputDir, \"averaged_probabilities.csv\"),\n averaged_probs,\n delimiter=\",\",\n )\n\n", "path": "GANDLF/inference_manager.py"}]} | 1,145 | 123 |
gh_patches_debug_31575 | rasdani/github-patches | git_diff | python-discord__bot-475 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Tags can't be edited due to using the POST method with the API
Currently, the `!tag edit` subcommand is just an alias of `!tag set`. This means that if we try to edit an existing tag, the bot will use the POST http method to communicate with the API. Since we're not posting a new tag, but editing an existing entry, the API will reject this request.
Instead of using POST, we should be using PATCH, since we're only partially updating the entry in the database.
</issue>
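In HTTP terms the complaint is create-versus-update: POST makes a new resource, while PATCH partially updates an existing one. A hedged sketch of what the two calls look like against the site API wrapper used in the listing below (method names mirror the usual HTTP verbs; treat the exact signatures as assumed):

```python
# Sketch only - api_client stands in for the bot's internal site API wrapper.
async def create_tag(api_client, name: str, content: str) -> None:
    body = {"title": name, "embed": {"title": name, "description": content}}
    await api_client.post("bot/tags", json=body)            # creates a new database row

async def edit_tag(api_client, name: str, content: str) -> None:
    body = {"embed": {"title": name, "description": content}}
    await api_client.patch(f"bot/tags/{name}", json=body)   # partial update of an existing row
```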
<code>
[start of bot/cogs/tags.py]
1 import logging
2 import time
3
4 from discord import Colour, Embed
5 from discord.ext.commands import Bot, Cog, Context, group
6
7 from bot.constants import Channels, Cooldowns, MODERATION_ROLES, Roles
8 from bot.converters import TagContentConverter, TagNameConverter
9 from bot.decorators import with_role
10 from bot.pagination import LinePaginator
11
12
13 log = logging.getLogger(__name__)
14
15 TEST_CHANNELS = (
16 Channels.devtest,
17 Channels.bot,
18 Channels.helpers
19 )
20
21
22 class Tags(Cog):
23 """Save new tags and fetch existing tags."""
24
25 def __init__(self, bot: Bot):
26 self.bot = bot
27 self.tag_cooldowns = {}
28
29 @group(name='tags', aliases=('tag', 't'), invoke_without_command=True)
30 async def tags_group(self, ctx: Context, *, tag_name: TagNameConverter = None) -> None:
31 """Show all known tags, a single tag, or run a subcommand."""
32 await ctx.invoke(self.get_command, tag_name=tag_name)
33
34 @tags_group.command(name='get', aliases=('show', 'g'))
35 async def get_command(self, ctx: Context, *, tag_name: TagNameConverter = None) -> None:
36 """Get a specified tag, or a list of all tags if no tag is specified."""
37 def _command_on_cooldown(tag_name: str) -> bool:
38 """
39 Check if the command is currently on cooldown, on a per-tag, per-channel basis.
40
41 The cooldown duration is set in constants.py.
42 """
43 now = time.time()
44
45 cooldown_conditions = (
46 tag_name
47 and tag_name in self.tag_cooldowns
48 and (now - self.tag_cooldowns[tag_name]["time"]) < Cooldowns.tags
49 and self.tag_cooldowns[tag_name]["channel"] == ctx.channel.id
50 )
51
52 if cooldown_conditions:
53 return True
54 return False
55
56 if _command_on_cooldown(tag_name):
57 time_left = Cooldowns.tags - (time.time() - self.tag_cooldowns[tag_name]["time"])
58 log.warning(f"{ctx.author} tried to get the '{tag_name}' tag, but the tag is on cooldown. "
59 f"Cooldown ends in {time_left:.1f} seconds.")
60 return
61
62 if tag_name is not None:
63 tag = await self.bot.api_client.get(f'bot/tags/{tag_name}')
64 if ctx.channel.id not in TEST_CHANNELS:
65 self.tag_cooldowns[tag_name] = {
66 "time": time.time(),
67 "channel": ctx.channel.id
68 }
69 await ctx.send(embed=Embed.from_dict(tag['embed']))
70
71 else:
72 tags = await self.bot.api_client.get('bot/tags')
73 if not tags:
74 await ctx.send(embed=Embed(
75 description="**There are no tags in the database!**",
76 colour=Colour.red()
77 ))
78 else:
79 embed: Embed = Embed(title="**Current tags**")
80 await LinePaginator.paginate(
81 sorted(f"**»** {tag['title']}" for tag in tags),
82 ctx,
83 embed,
84 footer_text="To show a tag, type !tags <tagname>.",
85 empty=False,
86 max_lines=15
87 )
88
89 @tags_group.command(name='set', aliases=('add', 'edit', 's'))
90 @with_role(*MODERATION_ROLES)
91 async def set_command(
92 self,
93 ctx: Context,
94 tag_name: TagNameConverter,
95 *,
96 tag_content: TagContentConverter,
97 ) -> None:
98 """Create a new tag or update an existing one."""
99 body = {
100 'title': tag_name.lower().strip(),
101 'embed': {
102 'title': tag_name,
103 'description': tag_content
104 }
105 }
106
107 await self.bot.api_client.post('bot/tags', json=body)
108
109 log.debug(f"{ctx.author} successfully added the following tag to our database: \n"
110 f"tag_name: {tag_name}\n"
111 f"tag_content: '{tag_content}'\n")
112
113 await ctx.send(embed=Embed(
114 title="Tag successfully added",
115 description=f"**{tag_name}** added to tag database.",
116 colour=Colour.blurple()
117 ))
118
119 @tags_group.command(name='delete', aliases=('remove', 'rm', 'd'))
120 @with_role(Roles.admin, Roles.owner)
121 async def delete_command(self, ctx: Context, *, tag_name: TagNameConverter) -> None:
122 """Remove a tag from the database."""
123 await self.bot.api_client.delete(f'bot/tags/{tag_name}')
124
125 log.debug(f"{ctx.author} successfully deleted the tag called '{tag_name}'")
126 await ctx.send(embed=Embed(
127 title=tag_name,
128 description=f"Tag successfully removed: {tag_name}.",
129 colour=Colour.blurple()
130 ))
131
132
133 def setup(bot: Bot) -> None:
134 """Tags cog load."""
135 bot.add_cog(Tags(bot))
136 log.info("Cog loaded: Tags")
137
[end of bot/cogs/tags.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/bot/cogs/tags.py b/bot/cogs/tags.py
--- a/bot/cogs/tags.py
+++ b/bot/cogs/tags.py
@@ -86,7 +86,7 @@
max_lines=15
)
- @tags_group.command(name='set', aliases=('add', 'edit', 's'))
+ @tags_group.command(name='set', aliases=('add', 's'))
@with_role(*MODERATION_ROLES)
async def set_command(
self,
@@ -95,7 +95,7 @@
*,
tag_content: TagContentConverter,
) -> None:
- """Create a new tag or update an existing one."""
+ """Create a new tag."""
body = {
'title': tag_name.lower().strip(),
'embed': {
@@ -116,6 +116,35 @@
colour=Colour.blurple()
))
+ @tags_group.command(name='edit', aliases=('e', ))
+ @with_role(*MODERATION_ROLES)
+ async def edit_command(
+ self,
+ ctx: Context,
+ tag_name: TagNameConverter,
+ *,
+ tag_content: TagContentConverter,
+ ) -> None:
+ """Edit an existing tag."""
+ body = {
+ 'embed': {
+ 'title': tag_name,
+ 'description': tag_content
+ }
+ }
+
+ await self.bot.api_client.patch(f'bot/tags/{tag_name}', json=body)
+
+ log.debug(f"{ctx.author} successfully edited the following tag in our database: \n"
+ f"tag_name: {tag_name}\n"
+ f"tag_content: '{tag_content}'\n")
+
+ await ctx.send(embed=Embed(
+ title="Tag successfully edited",
+ description=f"**{tag_name}** edited in the database.",
+ colour=Colour.blurple()
+ ))
+
@tags_group.command(name='delete', aliases=('remove', 'rm', 'd'))
@with_role(Roles.admin, Roles.owner)
async def delete_command(self, ctx: Context, *, tag_name: TagNameConverter) -> None:
| {"golden_diff": "diff --git a/bot/cogs/tags.py b/bot/cogs/tags.py\n--- a/bot/cogs/tags.py\n+++ b/bot/cogs/tags.py\n@@ -86,7 +86,7 @@\n max_lines=15\n )\n \n- @tags_group.command(name='set', aliases=('add', 'edit', 's'))\n+ @tags_group.command(name='set', aliases=('add', 's'))\n @with_role(*MODERATION_ROLES)\n async def set_command(\n self,\n@@ -95,7 +95,7 @@\n *,\n tag_content: TagContentConverter,\n ) -> None:\n- \"\"\"Create a new tag or update an existing one.\"\"\"\n+ \"\"\"Create a new tag.\"\"\"\n body = {\n 'title': tag_name.lower().strip(),\n 'embed': {\n@@ -116,6 +116,35 @@\n colour=Colour.blurple()\n ))\n \n+ @tags_group.command(name='edit', aliases=('e', ))\n+ @with_role(*MODERATION_ROLES)\n+ async def edit_command(\n+ self,\n+ ctx: Context,\n+ tag_name: TagNameConverter,\n+ *,\n+ tag_content: TagContentConverter,\n+ ) -> None:\n+ \"\"\"Edit an existing tag.\"\"\"\n+ body = {\n+ 'embed': {\n+ 'title': tag_name,\n+ 'description': tag_content\n+ }\n+ }\n+\n+ await self.bot.api_client.patch(f'bot/tags/{tag_name}', json=body)\n+\n+ log.debug(f\"{ctx.author} successfully edited the following tag in our database: \\n\"\n+ f\"tag_name: {tag_name}\\n\"\n+ f\"tag_content: '{tag_content}'\\n\")\n+\n+ await ctx.send(embed=Embed(\n+ title=\"Tag successfully edited\",\n+ description=f\"**{tag_name}** edited in the database.\",\n+ colour=Colour.blurple()\n+ ))\n+\n @tags_group.command(name='delete', aliases=('remove', 'rm', 'd'))\n @with_role(Roles.admin, Roles.owner)\n async def delete_command(self, ctx: Context, *, tag_name: TagNameConverter) -> None:\n", "issue": "Tags can't be edited due to using the POST method with the API\nCurrently, the `!tag edit` subcommand is just an alias of `!tag set`. This means that if we try to edit an existing tag, the bot will use the POST http method to communicate with the API. Since we're not posting a new tag, but editing an existing entry, the API will reject this request. 
\r\n\r\nInstead of using POST, we should be using PATCH, since we're only partially updating the entry in the database.\n", "before_files": [{"content": "import logging\nimport time\n\nfrom discord import Colour, Embed\nfrom discord.ext.commands import Bot, Cog, Context, group\n\nfrom bot.constants import Channels, Cooldowns, MODERATION_ROLES, Roles\nfrom bot.converters import TagContentConverter, TagNameConverter\nfrom bot.decorators import with_role\nfrom bot.pagination import LinePaginator\n\n\nlog = logging.getLogger(__name__)\n\nTEST_CHANNELS = (\n Channels.devtest,\n Channels.bot,\n Channels.helpers\n)\n\n\nclass Tags(Cog):\n \"\"\"Save new tags and fetch existing tags.\"\"\"\n\n def __init__(self, bot: Bot):\n self.bot = bot\n self.tag_cooldowns = {}\n\n @group(name='tags', aliases=('tag', 't'), invoke_without_command=True)\n async def tags_group(self, ctx: Context, *, tag_name: TagNameConverter = None) -> None:\n \"\"\"Show all known tags, a single tag, or run a subcommand.\"\"\"\n await ctx.invoke(self.get_command, tag_name=tag_name)\n\n @tags_group.command(name='get', aliases=('show', 'g'))\n async def get_command(self, ctx: Context, *, tag_name: TagNameConverter = None) -> None:\n \"\"\"Get a specified tag, or a list of all tags if no tag is specified.\"\"\"\n def _command_on_cooldown(tag_name: str) -> bool:\n \"\"\"\n Check if the command is currently on cooldown, on a per-tag, per-channel basis.\n\n The cooldown duration is set in constants.py.\n \"\"\"\n now = time.time()\n\n cooldown_conditions = (\n tag_name\n and tag_name in self.tag_cooldowns\n and (now - self.tag_cooldowns[tag_name][\"time\"]) < Cooldowns.tags\n and self.tag_cooldowns[tag_name][\"channel\"] == ctx.channel.id\n )\n\n if cooldown_conditions:\n return True\n return False\n\n if _command_on_cooldown(tag_name):\n time_left = Cooldowns.tags - (time.time() - self.tag_cooldowns[tag_name][\"time\"])\n log.warning(f\"{ctx.author} tried to get the '{tag_name}' tag, but the tag is on cooldown. 
\"\n f\"Cooldown ends in {time_left:.1f} seconds.\")\n return\n\n if tag_name is not None:\n tag = await self.bot.api_client.get(f'bot/tags/{tag_name}')\n if ctx.channel.id not in TEST_CHANNELS:\n self.tag_cooldowns[tag_name] = {\n \"time\": time.time(),\n \"channel\": ctx.channel.id\n }\n await ctx.send(embed=Embed.from_dict(tag['embed']))\n\n else:\n tags = await self.bot.api_client.get('bot/tags')\n if not tags:\n await ctx.send(embed=Embed(\n description=\"**There are no tags in the database!**\",\n colour=Colour.red()\n ))\n else:\n embed: Embed = Embed(title=\"**Current tags**\")\n await LinePaginator.paginate(\n sorted(f\"**\u00bb** {tag['title']}\" for tag in tags),\n ctx,\n embed,\n footer_text=\"To show a tag, type !tags <tagname>.\",\n empty=False,\n max_lines=15\n )\n\n @tags_group.command(name='set', aliases=('add', 'edit', 's'))\n @with_role(*MODERATION_ROLES)\n async def set_command(\n self,\n ctx: Context,\n tag_name: TagNameConverter,\n *,\n tag_content: TagContentConverter,\n ) -> None:\n \"\"\"Create a new tag or update an existing one.\"\"\"\n body = {\n 'title': tag_name.lower().strip(),\n 'embed': {\n 'title': tag_name,\n 'description': tag_content\n }\n }\n\n await self.bot.api_client.post('bot/tags', json=body)\n\n log.debug(f\"{ctx.author} successfully added the following tag to our database: \\n\"\n f\"tag_name: {tag_name}\\n\"\n f\"tag_content: '{tag_content}'\\n\")\n\n await ctx.send(embed=Embed(\n title=\"Tag successfully added\",\n description=f\"**{tag_name}** added to tag database.\",\n colour=Colour.blurple()\n ))\n\n @tags_group.command(name='delete', aliases=('remove', 'rm', 'd'))\n @with_role(Roles.admin, Roles.owner)\n async def delete_command(self, ctx: Context, *, tag_name: TagNameConverter) -> None:\n \"\"\"Remove a tag from the database.\"\"\"\n await self.bot.api_client.delete(f'bot/tags/{tag_name}')\n\n log.debug(f\"{ctx.author} successfully deleted the tag called '{tag_name}'\")\n await ctx.send(embed=Embed(\n title=tag_name,\n description=f\"Tag successfully removed: {tag_name}.\",\n colour=Colour.blurple()\n ))\n\n\ndef setup(bot: Bot) -> None:\n \"\"\"Tags cog load.\"\"\"\n bot.add_cog(Tags(bot))\n log.info(\"Cog loaded: Tags\")\n", "path": "bot/cogs/tags.py"}]} | 2,027 | 487 |
gh_patches_debug_1588 | rasdani/github-patches | git_diff | microsoft__botbuilder-python-1804 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Django Component Governance vulnerability
Django 1.11 before 1.11.28, 2.2 before 2.2.10, and 3.0 before 3.0.3 allows SQL Injection if untrusted data is used as a StringAgg delimiter (e.g., in Django applications that offer downloads of data as a series of rows with a user-specified column delimiter). By passing a suitably crafted delimiter to a contrib.postgres.aggregates.StringAgg instance, it was possible to break escaping and inject malicious SQL.
https://dev.azure.com/FuseLabs/SDK_v4/_componentGovernance/112465/alert/2370216?typeId=4354877
</issue>
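A quick, assumption-light way to check whether a given environment is on a patched release (the advisory above covers 1.11 before 1.11.28, 2.2 before 2.2.10, and 3.0 before 3.0.3):

```python
import django

print(django.get_version())  # e.g. "2.2.6" falls inside the affected 2.2 range
```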
<code>
[start of libraries/botbuilder-applicationinsights/setup.py]
1 # Copyright (c) Microsoft Corporation. All rights reserved.
2 # Licensed under the MIT License.
3
4 import os
5 from setuptools import setup
6
7 REQUIRES = [
8 "applicationinsights==0.11.9",
9 "botbuilder-schema==4.15.0",
10 "botframework-connector==4.15.0",
11 "botbuilder-core==4.15.0",
12 ]
13 TESTS_REQUIRES = [
14 "aiounittest==1.3.0",
15 "django==2.2.6", # For samples
16 "djangorestframework==3.10.3", # For samples
17 "flask==1.1.1", # For samples
18 ]
19
20 root = os.path.abspath(os.path.dirname(__file__))
21
22 with open(os.path.join(root, "botbuilder", "applicationinsights", "about.py")) as f:
23 package_info = {}
24 info = f.read()
25 exec(info, package_info)
26
27 with open(os.path.join(root, "README.rst"), encoding="utf-8") as f:
28 long_description = f.read()
29
30 setup(
31 name=package_info["__title__"],
32 version=package_info["__version__"],
33 url=package_info["__uri__"],
34 author=package_info["__author__"],
35 description=package_info["__description__"],
36 keywords=[
37 "BotBuilderApplicationInsights",
38 "bots",
39 "ai",
40 "botframework",
41 "botbuilder",
42 ],
43 long_description=long_description,
44 long_description_content_type="text/x-rst",
45 license=package_info["__license__"],
46 packages=[
47 "botbuilder.applicationinsights",
48 "botbuilder.applicationinsights.django",
49 "botbuilder.applicationinsights.flask",
50 "botbuilder.applicationinsights.processor",
51 ],
52 install_requires=REQUIRES + TESTS_REQUIRES,
53 tests_require=TESTS_REQUIRES,
54 include_package_data=True,
55 classifiers=[
56 "Programming Language :: Python :: 3.7",
57 "Intended Audience :: Developers",
58 "License :: OSI Approved :: MIT License",
59 "Operating System :: OS Independent",
60 "Development Status :: 5 - Production/Stable",
61 "Topic :: Scientific/Engineering :: Artificial Intelligence",
62 ],
63 )
64
[end of libraries/botbuilder-applicationinsights/setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/libraries/botbuilder-applicationinsights/setup.py b/libraries/botbuilder-applicationinsights/setup.py
--- a/libraries/botbuilder-applicationinsights/setup.py
+++ b/libraries/botbuilder-applicationinsights/setup.py
@@ -12,7 +12,7 @@
]
TESTS_REQUIRES = [
"aiounittest==1.3.0",
- "django==2.2.6", # For samples
+ "django==2.2.10", # For samples
"djangorestframework==3.10.3", # For samples
"flask==1.1.1", # For samples
]
| {"golden_diff": "diff --git a/libraries/botbuilder-applicationinsights/setup.py b/libraries/botbuilder-applicationinsights/setup.py\n--- a/libraries/botbuilder-applicationinsights/setup.py\n+++ b/libraries/botbuilder-applicationinsights/setup.py\n@@ -12,7 +12,7 @@\n ]\n TESTS_REQUIRES = [\n \"aiounittest==1.3.0\",\n- \"django==2.2.6\", # For samples\n+ \"django==2.2.10\", # For samples\n \"djangorestframework==3.10.3\", # For samples\n \"flask==1.1.1\", # For samples\n ]\n", "issue": "Django Component Governance vulnerability\nDjango 1.11 before 1.11.28, 2.2 before 2.2.10, and 3.0 before 3.0.3 allows SQL Injection if untrusted data is used as a StringAgg delimiter (e.g., in Django applications that offer downloads of data as a series of rows with a user-specified column delimiter). By passing a suitably crafted delimiter to a contrib.postgres.aggregates.StringAgg instance, it was possible to break escaping and inject malicious SQL.\r\n\r\nhttps://dev.azure.com/FuseLabs/SDK_v4/_componentGovernance/112465/alert/2370216?typeId=4354877\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\n\nimport os\nfrom setuptools import setup\n\nREQUIRES = [\n \"applicationinsights==0.11.9\",\n \"botbuilder-schema==4.15.0\",\n \"botframework-connector==4.15.0\",\n \"botbuilder-core==4.15.0\",\n]\nTESTS_REQUIRES = [\n \"aiounittest==1.3.0\",\n \"django==2.2.6\", # For samples\n \"djangorestframework==3.10.3\", # For samples\n \"flask==1.1.1\", # For samples\n]\n\nroot = os.path.abspath(os.path.dirname(__file__))\n\nwith open(os.path.join(root, \"botbuilder\", \"applicationinsights\", \"about.py\")) as f:\n package_info = {}\n info = f.read()\n exec(info, package_info)\n\nwith open(os.path.join(root, \"README.rst\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nsetup(\n name=package_info[\"__title__\"],\n version=package_info[\"__version__\"],\n url=package_info[\"__uri__\"],\n author=package_info[\"__author__\"],\n description=package_info[\"__description__\"],\n keywords=[\n \"BotBuilderApplicationInsights\",\n \"bots\",\n \"ai\",\n \"botframework\",\n \"botbuilder\",\n ],\n long_description=long_description,\n long_description_content_type=\"text/x-rst\",\n license=package_info[\"__license__\"],\n packages=[\n \"botbuilder.applicationinsights\",\n \"botbuilder.applicationinsights.django\",\n \"botbuilder.applicationinsights.flask\",\n \"botbuilder.applicationinsights.processor\",\n ],\n install_requires=REQUIRES + TESTS_REQUIRES,\n tests_require=TESTS_REQUIRES,\n include_package_data=True,\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 5 - Production/Stable\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n)\n", "path": "libraries/botbuilder-applicationinsights/setup.py"}]} | 1,313 | 153 |
gh_patches_debug_36006 | rasdani/github-patches | git_diff | mampfes__hacs_waste_collection_schedule-1366 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Can't configure source without arguments
Hello Team,
I'm trying to configure the Mamirolle info source that I added into Home Assistant, but I'm running into argument issues.
`args` is marked `required`, so if none is passed, the configuration is invalid.
```
Invalid config for [waste_collection_schedule]: required key not provided @ data['waste_collection_schedule']['sources'][0]['args']. Got None. (See /config/configuration.yaml, line 27).
```
If a dummy argument is passed, the configuration is valid but the source setup fails.
```
Error during setup of component waste_collection_schedule
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/setup.py", line 288, in _async_setup_component
result = await task
^^^^^^^^^^
File "/config/custom_components/waste_collection_schedule/__init__.py", line 109, in async_setup
api.add_source_shell(
File "/config/custom_components/waste_collection_schedule/__init__.py", line 202, in add_source_shell
SourceShell.create(
File "/config/custom_components/waste_collection_schedule/waste_collection_schedule/source_shell.py", line 196, in create
source = source_module.Source(**source_args) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: Source() takes no arguments
```
I understand that we want the configuration to fail early but the real error will still be seen only when the source is actually instantiated. Because of that I think the arguments shouldn't be required.
What do you think about this?
</issue>
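The shape of the eventual fix (it appears in the diff further down this row) is simply a constructor that tolerates a throw-away argument; a minimal sketch:

```python
class Source:
    def __init__(self, _=None):
        # Accept and ignore one argument so a dummy `args` entry in
        # configuration.yaml no longer crashes instantiation.
        pass

    def fetch(self):
        return []

Source()               # works with no arguments
Source("placeholder")  # works with the dummy argument from the report
```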
<code>
[start of custom_components/waste_collection_schedule/waste_collection_schedule/source/mamirolle_info.py]
1 import datetime
2
3 import requests
4 from bs4 import BeautifulSoup
5 from waste_collection_schedule import Collection
6
7 TITLE = "Mairie de Mamirolle"
8 DESCRIPTION = "Source script for mamirolle.info"
9 COUNTRY = "fr"
10 URL = "http://mamirolle.info/"
11
12 TEST_CASES = {"TestSource": {}}
13
14 ICON_MAP = {
15 "Poubelle grise": "mdi:trash-can",
16 "Poubelle jaune": "mdi:recycle",
17 }
18
19 MONTH_NAMES = [
20 "janvier",
21 "février",
22 "mars",
23 "avril",
24 "mai",
25 "juin",
26 "juillet",
27 "août",
28 "septembre",
29 "octobre",
30 "novembre",
31 "décembre",
32 ]
33
34
35 class Source:
36 def fetch(self):
37 now = datetime.datetime.now()
38 # get list of regions and weblinks
39 page = requests.get(URL)
40 # A lenient HTML parser is need
41 soup = BeautifulSoup(page.text.replace("<![endif]", ""), "html.parser")
42 trash_domestic = soup.find("i", class_="poubelle-grise")
43 _, day, month = trash_domestic.next_sibling.string.split()
44 date_domestic = now.replace(month=MONTH_NAMES.index(month), day=int(day)).date()
45 if date_domestic < now.date():
46 date_domestic = date_domestic.replace(year=date_domestic.year + 1)
47
48 trash_recycle = soup.find("i", class_="poubelle-jaune")
49 _, day, month = trash_recycle.next_sibling.string.split()
50 date_recycle = now.replace(month=MONTH_NAMES.index(month), day=int(day)).date()
51 if date_recycle < now.date():
52 date_recycle = date_recycle.replace(year=date_recycle.year + 1)
53
54 entries = [
55 Collection(
56 date=date_domestic,
57 t="Poubelle grise",
58 icon=ICON_MAP.get("Poubelle grise"),
59 ),
60 Collection(
61 date=date_recycle,
62 t="Poubelle jaune",
63 icon=ICON_MAP.get("Poubelle jaune"),
64 ),
65 ] # List that holds collection schedule
66
67 return entries
68
[end of custom_components/waste_collection_schedule/waste_collection_schedule/source/mamirolle_info.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/custom_components/waste_collection_schedule/waste_collection_schedule/source/mamirolle_info.py b/custom_components/waste_collection_schedule/waste_collection_schedule/source/mamirolle_info.py
--- a/custom_components/waste_collection_schedule/waste_collection_schedule/source/mamirolle_info.py
+++ b/custom_components/waste_collection_schedule/waste_collection_schedule/source/mamirolle_info.py
@@ -9,7 +9,12 @@
COUNTRY = "fr"
URL = "http://mamirolle.info/"
-TEST_CASES = {"TestSource": {}}
+TEST_CASES = {
+ "TestSource": {},
+ "IgnoredArgument": {
+ "_": ""
+ }
+}
ICON_MAP = {
"Poubelle grise": "mdi:trash-can",
@@ -33,6 +38,9 @@
class Source:
+ def __init__(self, _=None):
+ pass
+
def fetch(self):
now = datetime.datetime.now()
# get list of regions and weblinks
@@ -40,28 +48,19 @@
# A lenient HTML parser is need
soup = BeautifulSoup(page.text.replace("<![endif]", ""), "html.parser")
trash_domestic = soup.find("i", class_="poubelle-grise")
- _, day, month = trash_domestic.next_sibling.string.split()
- date_domestic = now.replace(month=MONTH_NAMES.index(month), day=int(day)).date()
- if date_domestic < now.date():
- date_domestic = date_domestic.replace(year=date_domestic.year + 1)
-
trash_recycle = soup.find("i", class_="poubelle-jaune")
- _, day, month = trash_recycle.next_sibling.string.split()
- date_recycle = now.replace(month=MONTH_NAMES.index(month), day=int(day)).date()
- if date_recycle < now.date():
- date_recycle = date_recycle.replace(year=date_recycle.year + 1)
- entries = [
- Collection(
- date=date_domestic,
- t="Poubelle grise",
- icon=ICON_MAP.get("Poubelle grise"),
- ),
- Collection(
- date=date_recycle,
- t="Poubelle jaune",
- icon=ICON_MAP.get("Poubelle jaune"),
- ),
- ] # List that holds collection schedule
+ entries = [] # List that holds collection schedule
+ for trash, label in [(trash_domestic, "Poubelle grise"), (trash_recycle, "Poubelle jaune")]:
+ _, day, month = trash.next_sibling.string.split()
+ date = now.replace(month=MONTH_NAMES.index(month) + 1, day=int(day)).date()
+ if date < now.date():
+ date = date.replace(year=date.year + 1)
+
+ entries.append(Collection(
+ date=date,
+ t=label,
+ icon=ICON_MAP.get(label),
+ ))
return entries
| {"golden_diff": "diff --git a/custom_components/waste_collection_schedule/waste_collection_schedule/source/mamirolle_info.py b/custom_components/waste_collection_schedule/waste_collection_schedule/source/mamirolle_info.py\n--- a/custom_components/waste_collection_schedule/waste_collection_schedule/source/mamirolle_info.py\n+++ b/custom_components/waste_collection_schedule/waste_collection_schedule/source/mamirolle_info.py\n@@ -9,7 +9,12 @@\n COUNTRY = \"fr\"\n URL = \"http://mamirolle.info/\"\n \n-TEST_CASES = {\"TestSource\": {}}\n+TEST_CASES = {\n+ \"TestSource\": {},\n+ \"IgnoredArgument\": {\n+ \"_\": \"\"\n+ }\n+}\n \n ICON_MAP = {\n \"Poubelle grise\": \"mdi:trash-can\",\n@@ -33,6 +38,9 @@\n \n \n class Source:\n+ def __init__(self, _=None):\n+ pass\n+\n def fetch(self):\n now = datetime.datetime.now()\n # get list of regions and weblinks\n@@ -40,28 +48,19 @@\n # A lenient HTML parser is need\n soup = BeautifulSoup(page.text.replace(\"<![endif]\", \"\"), \"html.parser\")\n trash_domestic = soup.find(\"i\", class_=\"poubelle-grise\")\n- _, day, month = trash_domestic.next_sibling.string.split()\n- date_domestic = now.replace(month=MONTH_NAMES.index(month), day=int(day)).date()\n- if date_domestic < now.date():\n- date_domestic = date_domestic.replace(year=date_domestic.year + 1)\n-\n trash_recycle = soup.find(\"i\", class_=\"poubelle-jaune\")\n- _, day, month = trash_recycle.next_sibling.string.split()\n- date_recycle = now.replace(month=MONTH_NAMES.index(month), day=int(day)).date()\n- if date_recycle < now.date():\n- date_recycle = date_recycle.replace(year=date_recycle.year + 1)\n \n- entries = [\n- Collection(\n- date=date_domestic,\n- t=\"Poubelle grise\",\n- icon=ICON_MAP.get(\"Poubelle grise\"),\n- ),\n- Collection(\n- date=date_recycle,\n- t=\"Poubelle jaune\",\n- icon=ICON_MAP.get(\"Poubelle jaune\"),\n- ),\n- ] # List that holds collection schedule\n+ entries = [] # List that holds collection schedule\n+ for trash, label in [(trash_domestic, \"Poubelle grise\"), (trash_recycle, \"Poubelle jaune\")]:\n+ _, day, month = trash.next_sibling.string.split()\n+ date = now.replace(month=MONTH_NAMES.index(month) + 1, day=int(day)).date()\n+ if date < now.date():\n+ date = date.replace(year=date.year + 1)\n+\n+ entries.append(Collection(\n+ date=date,\n+ t=label,\n+ icon=ICON_MAP.get(label),\n+ ))\n \n return entries\n", "issue": "Can't configure source without arguments\nHello Team,\r\nI'm trying to configure into HomeAssistant the source that I added Mamirolle info, but I have argument issues.\r\n\r\n`args` is marked `required`, so if none is passed, the configuration is invalid.\r\n\r\n```\r\nInvalid config for [waste_collection_schedule]: required key not provided @ data['waste_collection_schedule']['sources'][0]['args']. Got None. (See /config/configuration.yaml, line 27). \r\n```\r\n\r\nIf a dummy argument is passed. 
The configuration is valid but the source setup fails.\r\n```\r\nError during setup of component waste_collection_schedule\r\n\r\nTraceback (most recent call last):\r\n File \"/usr/src/homeassistant/homeassistant/setup.py\", line 288, in _async_setup_component\r\n result = await task\r\n ^^^^^^^^^^\r\n File \"/config/custom_components/waste_collection_schedule/__init__.py\", line 109, in async_setup\r\n api.add_source_shell(\r\n File \"/config/custom_components/waste_collection_schedule/__init__.py\", line 202, in add_source_shell\r\n SourceShell.create(\r\n File \"/config/custom_components/waste_collection_schedule/waste_collection_schedule/source_shell.py\", line 196, in create\r\n source = source_module.Source(**source_args) # type: ignore\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nTypeError: Source() takes no arguments\r\n```\r\nI understand that we want the configuration to fail early but the real error will still be seen only when the source is actually instantiated. Because of that I think the arguments shouldn't be required.\r\n\r\nWhat do you think about this?\n", "before_files": [{"content": "import datetime\n\nimport requests\nfrom bs4 import BeautifulSoup\nfrom waste_collection_schedule import Collection\n\nTITLE = \"Mairie de Mamirolle\"\nDESCRIPTION = \"Source script for mamirolle.info\"\nCOUNTRY = \"fr\"\nURL = \"http://mamirolle.info/\"\n\nTEST_CASES = {\"TestSource\": {}}\n\nICON_MAP = {\n \"Poubelle grise\": \"mdi:trash-can\",\n \"Poubelle jaune\": \"mdi:recycle\",\n}\n\nMONTH_NAMES = [\n \"janvier\",\n \"f\u00e9vrier\",\n \"mars\",\n \"avril\",\n \"mai\",\n \"juin\",\n \"juillet\",\n \"ao\u00fbt\",\n \"septembre\",\n \"octobre\",\n \"novembre\",\n \"d\u00e9cembre\",\n]\n\n\nclass Source:\n def fetch(self):\n now = datetime.datetime.now()\n # get list of regions and weblinks\n page = requests.get(URL)\n # A lenient HTML parser is need\n soup = BeautifulSoup(page.text.replace(\"<![endif]\", \"\"), \"html.parser\")\n trash_domestic = soup.find(\"i\", class_=\"poubelle-grise\")\n _, day, month = trash_domestic.next_sibling.string.split()\n date_domestic = now.replace(month=MONTH_NAMES.index(month), day=int(day)).date()\n if date_domestic < now.date():\n date_domestic = date_domestic.replace(year=date_domestic.year + 1)\n\n trash_recycle = soup.find(\"i\", class_=\"poubelle-jaune\")\n _, day, month = trash_recycle.next_sibling.string.split()\n date_recycle = now.replace(month=MONTH_NAMES.index(month), day=int(day)).date()\n if date_recycle < now.date():\n date_recycle = date_recycle.replace(year=date_recycle.year + 1)\n\n entries = [\n Collection(\n date=date_domestic,\n t=\"Poubelle grise\",\n icon=ICON_MAP.get(\"Poubelle grise\"),\n ),\n Collection(\n date=date_recycle,\n t=\"Poubelle jaune\",\n icon=ICON_MAP.get(\"Poubelle jaune\"),\n ),\n ] # List that holds collection schedule\n\n return entries\n", "path": "custom_components/waste_collection_schedule/waste_collection_schedule/source/mamirolle_info.py"}]} | 1,513 | 666 |
gh_patches_debug_5170 | rasdani/github-patches | git_diff | mitmproxy__mitmproxy-2781 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Traceback appears in Status Bar, when trying to replay live flow
##### Steps to reproduce the problem:
1. Run **pathod** : `pathod -a "/=200:p0,10"`
2. Run mitmproxy.
3. Send _get request_ to pathod through mitmproxy using **pathoc**:
`pathoc -c localhost:9999 localhost:8080 'get:/'`
4. Try to replay the corresponding live flow in mitmproxy by pressing `r`.
I am seeing:

##### Any other comments? What have you tried so far?
This issue is relevant for situations where the server hasn't had time to send a response yet, but a user tries to replay the corresponding flow.
I also faced this issue, when trying to replay `mitm.it` flow from onboardingapp.
##### System information
Mitmproxy: 3.0.0.dev1101 (commit d9d4d15) binary
Python: 3.5.2
OpenSSL: OpenSSL 1.1.0g 2 Nov 2017
Platform: Linux-4.4.0-104-generic-x86_64-with-debian-stretch-sid
</issue>
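The guard that eventually lands (see the diff later in this row) simply refuses to replay flows that are still live, i.e. flows whose response hasn't arrived yet; sketched in isolation:

```python
from mitmproxy import exceptions

def start_replay_checked(self, flows):
    for f in flows:
        if f.live:
            # A live flow has no response yet; replaying it would race the
            # request still in flight, so fail with a readable error instead
            # of a traceback in the status bar.
            raise exceptions.CommandError("Can't replay live flow.")
    self.flows = list(flows)
```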
<code>
[start of mitmproxy/addons/clientplayback.py]
1 from mitmproxy import exceptions
2 from mitmproxy import ctx
3 from mitmproxy import io
4 from mitmproxy import flow
5 from mitmproxy import command
6 import mitmproxy.types
7
8 import typing
9
10
11 class ClientPlayback:
12 def __init__(self):
13 self.flows = [] # type: typing.List[flow.Flow]
14 self.current_thread = None
15 self.configured = False
16
17 def count(self) -> int:
18 if self.current_thread:
19 current = 1
20 else:
21 current = 0
22 return current + len(self.flows)
23
24 @command.command("replay.client.stop")
25 def stop_replay(self) -> None:
26 """
27 Stop client replay.
28 """
29 self.flows = []
30 ctx.log.alert("Client replay stopped.")
31 ctx.master.addons.trigger("update", [])
32
33 @command.command("replay.client")
34 def start_replay(self, flows: typing.Sequence[flow.Flow]) -> None:
35 """
36 Replay requests from flows.
37 """
38 self.flows = list(flows)
39 ctx.log.alert("Replaying %s flows." % len(self.flows))
40 ctx.master.addons.trigger("update", [])
41
42 @command.command("replay.client.file")
43 def load_file(self, path: mitmproxy.types.Path) -> None:
44 try:
45 flows = io.read_flows_from_paths([path])
46 except exceptions.FlowReadException as e:
47 raise exceptions.CommandError(str(e))
48 ctx.log.alert("Replaying %s flows." % len(self.flows))
49 self.flows = flows
50 ctx.master.addons.trigger("update", [])
51
52 def configure(self, updated):
53 if not self.configured and ctx.options.client_replay:
54 self.configured = True
55 ctx.log.info("Client Replay: {}".format(ctx.options.client_replay))
56 try:
57 flows = io.read_flows_from_paths(ctx.options.client_replay)
58 except exceptions.FlowReadException as e:
59 raise exceptions.OptionsError(str(e))
60 self.start_replay(flows)
61
62 def tick(self):
63 current_is_done = self.current_thread and not self.current_thread.is_alive()
64 can_start_new = not self.current_thread or current_is_done
65 will_start_new = can_start_new and self.flows
66
67 if current_is_done:
68 self.current_thread = None
69 ctx.master.addons.trigger("update", [])
70 if will_start_new:
71 f = self.flows.pop(0)
72 self.current_thread = ctx.master.replay_request(f)
73 ctx.master.addons.trigger("update", [f])
74 if current_is_done and not will_start_new:
75 ctx.master.addons.trigger("processing_complete")
76
[end of mitmproxy/addons/clientplayback.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mitmproxy/addons/clientplayback.py b/mitmproxy/addons/clientplayback.py
--- a/mitmproxy/addons/clientplayback.py
+++ b/mitmproxy/addons/clientplayback.py
@@ -35,6 +35,9 @@
"""
Replay requests from flows.
"""
+ for f in flows:
+ if f.live:
+ raise exceptions.CommandError("Can't replay live flow.")
self.flows = list(flows)
ctx.log.alert("Replaying %s flows." % len(self.flows))
ctx.master.addons.trigger("update", [])
| {"golden_diff": "diff --git a/mitmproxy/addons/clientplayback.py b/mitmproxy/addons/clientplayback.py\n--- a/mitmproxy/addons/clientplayback.py\n+++ b/mitmproxy/addons/clientplayback.py\n@@ -35,6 +35,9 @@\n \"\"\"\n Replay requests from flows.\n \"\"\"\n+ for f in flows:\n+ if f.live:\n+ raise exceptions.CommandError(\"Can't replay live flow.\")\n self.flows = list(flows)\n ctx.log.alert(\"Replaying %s flows.\" % len(self.flows))\n ctx.master.addons.trigger(\"update\", [])\n", "issue": "Traceback appears in Status Bar, when trying to replay live flow\n##### Steps to reproduce the problem:\r\n\r\n1. Run **pathod** : `pathod -a \"/=200:p0,10\"`\r\n2. Run mitmproxy.\r\n3. Send _get request_ to pathod through mitmproxy using **pathoc**: \r\n`pathoc -c localhost:9999 localhost:8080 'get:/'`\r\n4. Try to replay the corresponding live flow in mitmproxy by pressing `r`.\r\n\r\nI am seeing:\r\n\r\n\r\n\r\n##### Any other comments? What have you tried so far?\r\nThis issue is relevant for the situations, when server didn't have time to send a response yet, but a user tries to replay the corresponding flow.\r\nI also faced this issue, when trying to replay `mitm.it` flow from onboardingapp.\r\n\r\n\r\n##### System information\r\n\r\nMitmproxy: 3.0.0.dev1101 (commit d9d4d15) binary\r\nPython: 3.5.2\r\nOpenSSL: OpenSSL 1.1.0g 2 Nov 2017\r\nPlatform: Linux-4.4.0-104-generic-x86_64-with-debian-stretch-sid\r\n\r\n \n", "before_files": [{"content": "from mitmproxy import exceptions\nfrom mitmproxy import ctx\nfrom mitmproxy import io\nfrom mitmproxy import flow\nfrom mitmproxy import command\nimport mitmproxy.types\n\nimport typing\n\n\nclass ClientPlayback:\n def __init__(self):\n self.flows = [] # type: typing.List[flow.Flow]\n self.current_thread = None\n self.configured = False\n\n def count(self) -> int:\n if self.current_thread:\n current = 1\n else:\n current = 0\n return current + len(self.flows)\n\n @command.command(\"replay.client.stop\")\n def stop_replay(self) -> None:\n \"\"\"\n Stop client replay.\n \"\"\"\n self.flows = []\n ctx.log.alert(\"Client replay stopped.\")\n ctx.master.addons.trigger(\"update\", [])\n\n @command.command(\"replay.client\")\n def start_replay(self, flows: typing.Sequence[flow.Flow]) -> None:\n \"\"\"\n Replay requests from flows.\n \"\"\"\n self.flows = list(flows)\n ctx.log.alert(\"Replaying %s flows.\" % len(self.flows))\n ctx.master.addons.trigger(\"update\", [])\n\n @command.command(\"replay.client.file\")\n def load_file(self, path: mitmproxy.types.Path) -> None:\n try:\n flows = io.read_flows_from_paths([path])\n except exceptions.FlowReadException as e:\n raise exceptions.CommandError(str(e))\n ctx.log.alert(\"Replaying %s flows.\" % len(self.flows))\n self.flows = flows\n ctx.master.addons.trigger(\"update\", [])\n\n def configure(self, updated):\n if not self.configured and ctx.options.client_replay:\n self.configured = True\n ctx.log.info(\"Client Replay: {}\".format(ctx.options.client_replay))\n try:\n flows = io.read_flows_from_paths(ctx.options.client_replay)\n except exceptions.FlowReadException as e:\n raise exceptions.OptionsError(str(e))\n self.start_replay(flows)\n\n def tick(self):\n current_is_done = self.current_thread and not self.current_thread.is_alive()\n can_start_new = not self.current_thread or current_is_done\n will_start_new = can_start_new and self.flows\n\n if current_is_done:\n self.current_thread = None\n ctx.master.addons.trigger(\"update\", [])\n if will_start_new:\n f = self.flows.pop(0)\n self.current_thread = 
ctx.master.replay_request(f)\n ctx.master.addons.trigger(\"update\", [f])\n if current_is_done and not will_start_new:\n ctx.master.addons.trigger(\"processing_complete\")\n", "path": "mitmproxy/addons/clientplayback.py"}]} | 1,614 | 133 |
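The golden diff above guards `start_replay` so that flows which are still live (no response received yet) are rejected before a replay thread is started. The function below is a condensed, stand-alone sketch of that same check; it is not the full addon, and the helper name `reject_live_flows` is an invention for illustration.

```python
from mitmproxy import exceptions


def reject_live_flows(flows):
    """Refuse to queue flows that are still waiting for a response."""
    for f in flows:
        if f.live:  # the exact check the patch adds
            raise exceptions.CommandError("Can't replay live flow.")
    return list(flows)
```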
gh_patches_debug_9395 | rasdani/github-patches | git_diff | microsoft__botbuilder-python-741 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Fix versioning on dependencies
Fix dependency package versions to be consistent with the rest of the libraries
</issue>
<code>
[start of libraries/botbuilder-integration-applicationinsights-aiohttp/setup.py]
1 # Copyright (c) Microsoft Corporation. All rights reserved.
2 # Licensed under the MIT License.
3
4 import os
5 from setuptools import setup
6
7 REQUIRES = [
8 "applicationinsights>=0.11.9",
9 "botbuilder-schema>=4.4.0b1",
10 "botframework-connector>=4.4.0b1",
11 "botbuilder-core>=4.4.0b1",
12 "botbuilder-applicationinsights>=4.4.0b1",
13 ]
14 TESTS_REQUIRES = [
15 "aiounittest==1.3.0",
16 "aiohttp==3.5.4",
17 ]
18
19 root = os.path.abspath(os.path.dirname(__file__))
20
21 with open(
22 os.path.join(
23 root, "botbuilder", "integration", "applicationinsights", "aiohttp", "about.py"
24 )
25 ) as f:
26 package_info = {}
27 info = f.read()
28 exec(info, package_info)
29
30 with open(os.path.join(root, "README.rst"), encoding="utf-8") as f:
31 long_description = f.read()
32
33 setup(
34 name=package_info["__title__"],
35 version=package_info["__version__"],
36 url=package_info["__uri__"],
37 author=package_info["__author__"],
38 description=package_info["__description__"],
39 keywords=[
40 "BotBuilderApplicationInsights",
41 "bots",
42 "ai",
43 "botframework",
44 "botbuilder",
45 "aiohttp",
46 ],
47 long_description=long_description,
48 long_description_content_type="text/x-rst",
49 license=package_info["__license__"],
50 packages=["botbuilder.integration.applicationinsights.aiohttp"],
51 install_requires=REQUIRES + TESTS_REQUIRES,
52 tests_require=TESTS_REQUIRES,
53 include_package_data=True,
54 classifiers=[
55 "Programming Language :: Python :: 3.7",
56 "Intended Audience :: Developers",
57 "License :: OSI Approved :: MIT License",
58 "Operating System :: OS Independent",
59 "Development Status :: 5 - Production/Stable",
60 "Topic :: Scientific/Engineering :: Artificial Intelligence",
61 ],
62 )
63
[end of libraries/botbuilder-integration-applicationinsights-aiohttp/setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/libraries/botbuilder-integration-applicationinsights-aiohttp/setup.py b/libraries/botbuilder-integration-applicationinsights-aiohttp/setup.py
--- a/libraries/botbuilder-integration-applicationinsights-aiohttp/setup.py
+++ b/libraries/botbuilder-integration-applicationinsights-aiohttp/setup.py
@@ -6,14 +6,14 @@
REQUIRES = [
"applicationinsights>=0.11.9",
- "botbuilder-schema>=4.4.0b1",
- "botframework-connector>=4.4.0b1",
- "botbuilder-core>=4.4.0b1",
- "botbuilder-applicationinsights>=4.4.0b1",
+ "aiohttp==3.6.2",
+ "botbuilder-schema>=4.7.1",
+ "botframework-connector>=4.7.1",
+ "botbuilder-core>=4.7.1",
+ "botbuilder-applicationinsights>=4.7.1",
]
TESTS_REQUIRES = [
"aiounittest==1.3.0",
- "aiohttp==3.5.4",
]
root = os.path.abspath(os.path.dirname(__file__))
| {"golden_diff": "diff --git a/libraries/botbuilder-integration-applicationinsights-aiohttp/setup.py b/libraries/botbuilder-integration-applicationinsights-aiohttp/setup.py\n--- a/libraries/botbuilder-integration-applicationinsights-aiohttp/setup.py\n+++ b/libraries/botbuilder-integration-applicationinsights-aiohttp/setup.py\n@@ -6,14 +6,14 @@\n \n REQUIRES = [\n \"applicationinsights>=0.11.9\",\n- \"botbuilder-schema>=4.4.0b1\",\n- \"botframework-connector>=4.4.0b1\",\n- \"botbuilder-core>=4.4.0b1\",\n- \"botbuilder-applicationinsights>=4.4.0b1\",\n+ \"aiohttp==3.6.2\",\n+ \"botbuilder-schema>=4.7.1\",\n+ \"botframework-connector>=4.7.1\",\n+ \"botbuilder-core>=4.7.1\",\n+ \"botbuilder-applicationinsights>=4.7.1\",\n ]\n TESTS_REQUIRES = [\n \"aiounittest==1.3.0\",\n- \"aiohttp==3.5.4\",\n ]\n \n root = os.path.abspath(os.path.dirname(__file__))\n", "issue": "Fix versioning on dependencies\nFix dependency package versions to be consistent with the rest of the libraries\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\n\nimport os\nfrom setuptools import setup\n\nREQUIRES = [\n \"applicationinsights>=0.11.9\",\n \"botbuilder-schema>=4.4.0b1\",\n \"botframework-connector>=4.4.0b1\",\n \"botbuilder-core>=4.4.0b1\",\n \"botbuilder-applicationinsights>=4.4.0b1\",\n]\nTESTS_REQUIRES = [\n \"aiounittest==1.3.0\",\n \"aiohttp==3.5.4\",\n]\n\nroot = os.path.abspath(os.path.dirname(__file__))\n\nwith open(\n os.path.join(\n root, \"botbuilder\", \"integration\", \"applicationinsights\", \"aiohttp\", \"about.py\"\n )\n) as f:\n package_info = {}\n info = f.read()\n exec(info, package_info)\n\nwith open(os.path.join(root, \"README.rst\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nsetup(\n name=package_info[\"__title__\"],\n version=package_info[\"__version__\"],\n url=package_info[\"__uri__\"],\n author=package_info[\"__author__\"],\n description=package_info[\"__description__\"],\n keywords=[\n \"BotBuilderApplicationInsights\",\n \"bots\",\n \"ai\",\n \"botframework\",\n \"botbuilder\",\n \"aiohttp\",\n ],\n long_description=long_description,\n long_description_content_type=\"text/x-rst\",\n license=package_info[\"__license__\"],\n packages=[\"botbuilder.integration.applicationinsights.aiohttp\"],\n install_requires=REQUIRES + TESTS_REQUIRES,\n tests_require=TESTS_REQUIRES,\n include_package_data=True,\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 5 - Production/Stable\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n)\n", "path": "libraries/botbuilder-integration-applicationinsights-aiohttp/setup.py"}]} | 1,151 | 279 |
gh_patches_debug_726 | rasdani/github-patches | git_diff | dotkom__onlineweb4-425 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
"Startet studie" in Profile -> Medlemskap requires defined format without specifying it
"Started studie" is a datefield. The problem is that most browsers (like FF, Chrome) don't render these fields with any additional tools which makes filling them out a pain in the ass (Safari@iOS has that fancy datepicker-shit).
The field requires the format 'yyyy-mm-dd', but does not specify this anywhere. This should be fixed somehow.
</issue>
<code>
[start of apps/profiles/forms.py]
1 # -*- coding: utf-8 -*-
2
3 from django import forms
4 from django.utils.translation import ugettext as _
5
6 from apps.profiles.models import Privacy
7 from apps.authentication.models import OnlineUser, FIELD_OF_STUDY_CHOICES
8
9 class ProfileForm(forms.ModelForm):
10
11 class Meta:
12 model = OnlineUser
13
14 fields = ['nickname', 'website', 'phone_number', 'address', 'zip_code', 'allergies', 'mark_rules', ]
15 widgets = {
16 'allergies' : forms.Textarea(attrs={'id' : 'allergies'}),
17 }
18
19 def clean(self):
20 super(ProfileForm, self).clean()
21
22 cleaned_data = self.cleaned_data
23
24 # ZIP code digits only
25 zip_code = cleaned_data['zip_code']
26 if len(zip_code) != 0 and (len(zip_code) != 4 or not zip_code.isdigit()):
27 self._errors['zip_code'] = self.error_class([_(u"Postnummer må bestå av fire siffer.")])
28
29 return cleaned_data
30
31 class ImageForm(forms.ModelForm):
32
33 class Meta:
34 model = OnlineUser
35
36 fields = ['image']
37 widgets = {
38 'image': forms.FileInput(attrs={'class' : 'hidden-input', 'id' : 'image'}),
39 }
40
41 class PrivacyForm(forms.ModelForm):
42
43 class Meta:
44 model = Privacy
45 exclude = ['user']
46
47
48 class MailSettingsForm(forms.ModelForm):
49
50 class Meta:
51 model = OnlineUser
52 fields = ['infomail', ]
53
54
55 class MembershipSettingsForm(forms.ModelForm):
56
57 def __init__(self, *args, **kwargs):
58 super(MembershipSettingsForm, self).__init__(*args, **kwargs)
59 self.fields['started_date'].widget.attrs['class'] = 'hasDatePicker'
60
61 class Meta:
62 model = OnlineUser
63 fields = ['field_of_study', 'started_date', ]
64
[end of apps/profiles/forms.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/apps/profiles/forms.py b/apps/profiles/forms.py
--- a/apps/profiles/forms.py
+++ b/apps/profiles/forms.py
@@ -61,3 +61,7 @@
class Meta:
model = OnlineUser
fields = ['field_of_study', 'started_date', ]
+
+ widgets = {
+ 'started_date' : forms.TextInput(attrs={'placeholder' : 'YYYY-MM-DD'}),
+ }
| {"golden_diff": "diff --git a/apps/profiles/forms.py b/apps/profiles/forms.py\n--- a/apps/profiles/forms.py\n+++ b/apps/profiles/forms.py\n@@ -61,3 +61,7 @@\n class Meta:\n model = OnlineUser\n fields = ['field_of_study', 'started_date', ]\n+\n+ widgets = {\n+ 'started_date' : forms.TextInput(attrs={'placeholder' : 'YYYY-MM-DD'}),\n+ }\n", "issue": "\"Startet studie\" in Profile -> Medlemskap requires defined format without specifying it\n\"Started studie\" is a datefield. The problem is that most browsers (like FF, Chrome) don't render these fields with any additional tools which makes filling them out a pain in the ass (Safari@iOS has that fancy datepicker-shit).\n\nThe field requires the format 'yyyy-mm-dd', but does not specify this anywhere. This should be fixed somehow.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nfrom django import forms\nfrom django.utils.translation import ugettext as _\n\nfrom apps.profiles.models import Privacy\nfrom apps.authentication.models import OnlineUser, FIELD_OF_STUDY_CHOICES\n\nclass ProfileForm(forms.ModelForm):\n\n class Meta:\n model = OnlineUser\n\n fields = ['nickname', 'website', 'phone_number', 'address', 'zip_code', 'allergies', 'mark_rules', ]\n widgets = {\n 'allergies' : forms.Textarea(attrs={'id' : 'allergies'}),\n }\n\n def clean(self):\n super(ProfileForm, self).clean()\n\n cleaned_data = self.cleaned_data\n\n # ZIP code digits only\n zip_code = cleaned_data['zip_code']\n if len(zip_code) != 0 and (len(zip_code) != 4 or not zip_code.isdigit()):\n self._errors['zip_code'] = self.error_class([_(u\"Postnummer m\u00e5 best\u00e5 av fire siffer.\")])\n\n return cleaned_data\n\nclass ImageForm(forms.ModelForm):\n\n class Meta:\n model = OnlineUser\n\n fields = ['image']\n widgets = {\n 'image': forms.FileInput(attrs={'class' : 'hidden-input', 'id' : 'image'}),\n }\n\nclass PrivacyForm(forms.ModelForm):\n\n class Meta:\n model = Privacy\n exclude = ['user']\n\n\nclass MailSettingsForm(forms.ModelForm):\n\n class Meta:\n model = OnlineUser\n fields = ['infomail', ]\n\n\nclass MembershipSettingsForm(forms.ModelForm):\n\n def __init__(self, *args, **kwargs):\n super(MembershipSettingsForm, self).__init__(*args, **kwargs)\n self.fields['started_date'].widget.attrs['class'] = 'hasDatePicker'\n\n class Meta:\n model = OnlineUser\n fields = ['field_of_study', 'started_date', ]\n", "path": "apps/profiles/forms.py"}]} | 1,159 | 96 |
gh_patches_debug_35056 | rasdani/github-patches | git_diff | opsdroid__opsdroid-142 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Make crontab parser timezone aware
The crontab matcher should take a timezone as a kwarg. It should also be possible to set a global timezone in the config. Default should be UTC.
</issue>
<code>
[start of opsdroid/matchers.py]
1 """Decorator functions to use when creating skill modules."""
2
3 import logging
4
5 from opsdroid.helper import get_opsdroid
6 from opsdroid.web import Web
7
8
9 _LOGGER = logging.getLogger(__name__)
10
11
12 def match_regex(regex):
13 """Return regex match decorator."""
14 def matcher(func):
15 """Add decorated function to skills list for regex matching."""
16 opsdroid = get_opsdroid()
17 opsdroid.skills.append({"regex": regex, "skill": func,
18 "config":
19 opsdroid.loader.current_import_config})
20 return func
21 return matcher
22
23
24 def match_apiai_action(action):
25 """Return apiai action match decorator."""
26 def matcher(func):
27 """Add decorated function to skills list for apiai matching."""
28 opsdroid = get_opsdroid()
29 opsdroid.skills.append({"apiai_action": action, "skill": func,
30 "config":
31 opsdroid.loader.current_import_config})
32 return func
33 return matcher
34
35
36 def match_apiai_intent(intent):
37 """Return apiai intent match decorator."""
38 def matcher(func):
39 """Add decorated function to skills list for apiai matching."""
40 opsdroid = get_opsdroid()
41 opsdroid.skills.append({"apiai_intent": intent, "skill": func,
42 "config":
43 opsdroid.loader.current_import_config})
44 return func
45 return matcher
46
47
48 def match_crontab(crontab):
49 """Return crontab match decorator."""
50 def matcher(func):
51 """Add decorated function to skills list for crontab matching."""
52 opsdroid = get_opsdroid()
53 opsdroid.skills.append({"crontab": crontab, "skill": func,
54 "config":
55 opsdroid.loader.current_import_config})
56 return func
57 return matcher
58
59
60 def match_webhook(webhook):
61 """Return webhook match decorator."""
62 def matcher(func):
63 """Add decorated function to skills list for webhook matching."""
64 opsdroid = get_opsdroid()
65 config = opsdroid.loader.current_import_config
66 opsdroid.skills.append({"webhook": webhook, "skill": func,
67 "config": config})
68
69 async def wrapper(req, opsdroid=opsdroid, config=config):
70 """Wrap up the aiohttp handler."""
71 _LOGGER.info("Running skill %s via webhook", webhook)
72 opsdroid.stats["webhooks_called"] = \
73 opsdroid.stats["webhooks_called"] + 1
74 await func(opsdroid, config, req)
75 return Web.build_response(200, {"called_skill": webhook})
76
77 opsdroid.web_server.web_app.router.add_post(
78 "/skill/{}/{}".format(config["name"], webhook), wrapper)
79 opsdroid.web_server.web_app.router.add_post(
80 "/skill/{}/{}/".format(config["name"], webhook), wrapper)
81
82 return func
83 return matcher
84
[end of opsdroid/matchers.py]
[start of opsdroid/parsers/crontab.py]
1 """A helper function for parsing and executing crontab skills."""
2
3 import logging
4 import asyncio
5 from datetime import datetime
6
7 import pycron
8
9
10 _LOGGER = logging.getLogger(__name__)
11
12
13 async def parse_crontab(opsdroid):
14 """Parse all crontab skills against the current time."""
15 # pylint: disable=broad-except
16 # We want to catch all exceptions coming from a skill module and not
17 # halt the application. If a skill throws an exception it just doesn't
18 # give a response to the user, so an error response should be given.
19 while opsdroid.eventloop.is_running():
20 await asyncio.sleep(60 - datetime.now().time().second)
21 _LOGGER.debug("Running crontab skills")
22 for skill in opsdroid.skills:
23 if "crontab" in skill and pycron.is_now(skill["crontab"]):
24 try:
25 await skill["skill"](opsdroid, skill["config"], None)
26 except Exception:
27 _LOGGER.exception("Exception when executing cron skill.")
28
[end of opsdroid/parsers/crontab.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/opsdroid/matchers.py b/opsdroid/matchers.py
--- a/opsdroid/matchers.py
+++ b/opsdroid/matchers.py
@@ -45,14 +45,14 @@
return matcher
-def match_crontab(crontab):
+def match_crontab(crontab, timezone=None):
"""Return crontab match decorator."""
def matcher(func):
"""Add decorated function to skills list for crontab matching."""
opsdroid = get_opsdroid()
+ config = opsdroid.loader.current_import_config
opsdroid.skills.append({"crontab": crontab, "skill": func,
- "config":
- opsdroid.loader.current_import_config})
+ "config": config, "timezone": timezone})
return func
return matcher
diff --git a/opsdroid/parsers/crontab.py b/opsdroid/parsers/crontab.py
--- a/opsdroid/parsers/crontab.py
+++ b/opsdroid/parsers/crontab.py
@@ -1,9 +1,9 @@
"""A helper function for parsing and executing crontab skills."""
-import logging
import asyncio
-from datetime import datetime
+import logging
+import arrow
import pycron
@@ -17,11 +17,17 @@
# halt the application. If a skill throws an exception it just doesn't
# give a response to the user, so an error response should be given.
while opsdroid.eventloop.is_running():
- await asyncio.sleep(60 - datetime.now().time().second)
+ await asyncio.sleep(60 - arrow.now().time().second)
_LOGGER.debug("Running crontab skills")
for skill in opsdroid.skills:
- if "crontab" in skill and pycron.is_now(skill["crontab"]):
- try:
- await skill["skill"](opsdroid, skill["config"], None)
- except Exception:
- _LOGGER.exception("Exception when executing cron skill.")
+ if "crontab" in skill:
+ if skill["timezone"] is not None:
+ timezone = skill["timezone"]
+ else:
+ timezone = opsdroid.config.get("timezone", "UTC")
+ if pycron.is_now(skill["crontab"], arrow.now(tz=timezone)):
+ try:
+ await skill["skill"](opsdroid, skill["config"], None)
+ except Exception:
+ _LOGGER.exception(
+ "Exception when executing cron skill.")
| {"golden_diff": "diff --git a/opsdroid/matchers.py b/opsdroid/matchers.py\n--- a/opsdroid/matchers.py\n+++ b/opsdroid/matchers.py\n@@ -45,14 +45,14 @@\n return matcher\n \n \n-def match_crontab(crontab):\n+def match_crontab(crontab, timezone=None):\n \"\"\"Return crontab match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for crontab matching.\"\"\"\n opsdroid = get_opsdroid()\n+ config = opsdroid.loader.current_import_config\n opsdroid.skills.append({\"crontab\": crontab, \"skill\": func,\n- \"config\":\n- opsdroid.loader.current_import_config})\n+ \"config\": config, \"timezone\": timezone})\n return func\n return matcher\n \ndiff --git a/opsdroid/parsers/crontab.py b/opsdroid/parsers/crontab.py\n--- a/opsdroid/parsers/crontab.py\n+++ b/opsdroid/parsers/crontab.py\n@@ -1,9 +1,9 @@\n \"\"\"A helper function for parsing and executing crontab skills.\"\"\"\n \n-import logging\n import asyncio\n-from datetime import datetime\n+import logging\n \n+import arrow\n import pycron\n \n \n@@ -17,11 +17,17 @@\n # halt the application. If a skill throws an exception it just doesn't\n # give a response to the user, so an error response should be given.\n while opsdroid.eventloop.is_running():\n- await asyncio.sleep(60 - datetime.now().time().second)\n+ await asyncio.sleep(60 - arrow.now().time().second)\n _LOGGER.debug(\"Running crontab skills\")\n for skill in opsdroid.skills:\n- if \"crontab\" in skill and pycron.is_now(skill[\"crontab\"]):\n- try:\n- await skill[\"skill\"](opsdroid, skill[\"config\"], None)\n- except Exception:\n- _LOGGER.exception(\"Exception when executing cron skill.\")\n+ if \"crontab\" in skill:\n+ if skill[\"timezone\"] is not None:\n+ timezone = skill[\"timezone\"]\n+ else:\n+ timezone = opsdroid.config.get(\"timezone\", \"UTC\")\n+ if pycron.is_now(skill[\"crontab\"], arrow.now(tz=timezone)):\n+ try:\n+ await skill[\"skill\"](opsdroid, skill[\"config\"], None)\n+ except Exception:\n+ _LOGGER.exception(\n+ \"Exception when executing cron skill.\")\n", "issue": "Make crontab parser timezone aware\nThe crontab matcher should take a timezone as a kwarg. It should also be possible to set a global timezone in the config. 
Default should be UTC.\n", "before_files": [{"content": "\"\"\"Decorator functions to use when creating skill modules.\"\"\"\n\nimport logging\n\nfrom opsdroid.helper import get_opsdroid\nfrom opsdroid.web import Web\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\ndef match_regex(regex):\n \"\"\"Return regex match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for regex matching.\"\"\"\n opsdroid = get_opsdroid()\n opsdroid.skills.append({\"regex\": regex, \"skill\": func,\n \"config\":\n opsdroid.loader.current_import_config})\n return func\n return matcher\n\n\ndef match_apiai_action(action):\n \"\"\"Return apiai action match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for apiai matching.\"\"\"\n opsdroid = get_opsdroid()\n opsdroid.skills.append({\"apiai_action\": action, \"skill\": func,\n \"config\":\n opsdroid.loader.current_import_config})\n return func\n return matcher\n\n\ndef match_apiai_intent(intent):\n \"\"\"Return apiai intent match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for apiai matching.\"\"\"\n opsdroid = get_opsdroid()\n opsdroid.skills.append({\"apiai_intent\": intent, \"skill\": func,\n \"config\":\n opsdroid.loader.current_import_config})\n return func\n return matcher\n\n\ndef match_crontab(crontab):\n \"\"\"Return crontab match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for crontab matching.\"\"\"\n opsdroid = get_opsdroid()\n opsdroid.skills.append({\"crontab\": crontab, \"skill\": func,\n \"config\":\n opsdroid.loader.current_import_config})\n return func\n return matcher\n\n\ndef match_webhook(webhook):\n \"\"\"Return webhook match decorator.\"\"\"\n def matcher(func):\n \"\"\"Add decorated function to skills list for webhook matching.\"\"\"\n opsdroid = get_opsdroid()\n config = opsdroid.loader.current_import_config\n opsdroid.skills.append({\"webhook\": webhook, \"skill\": func,\n \"config\": config})\n\n async def wrapper(req, opsdroid=opsdroid, config=config):\n \"\"\"Wrap up the aiohttp handler.\"\"\"\n _LOGGER.info(\"Running skill %s via webhook\", webhook)\n opsdroid.stats[\"webhooks_called\"] = \\\n opsdroid.stats[\"webhooks_called\"] + 1\n await func(opsdroid, config, req)\n return Web.build_response(200, {\"called_skill\": webhook})\n\n opsdroid.web_server.web_app.router.add_post(\n \"/skill/{}/{}\".format(config[\"name\"], webhook), wrapper)\n opsdroid.web_server.web_app.router.add_post(\n \"/skill/{}/{}/\".format(config[\"name\"], webhook), wrapper)\n\n return func\n return matcher\n", "path": "opsdroid/matchers.py"}, {"content": "\"\"\"A helper function for parsing and executing crontab skills.\"\"\"\n\nimport logging\nimport asyncio\nfrom datetime import datetime\n\nimport pycron\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\nasync def parse_crontab(opsdroid):\n \"\"\"Parse all crontab skills against the current time.\"\"\"\n # pylint: disable=broad-except\n # We want to catch all exceptions coming from a skill module and not\n # halt the application. 
If a skill throws an exception it just doesn't\n # give a response to the user, so an error response should be given.\n while opsdroid.eventloop.is_running():\n await asyncio.sleep(60 - datetime.now().time().second)\n _LOGGER.debug(\"Running crontab skills\")\n for skill in opsdroid.skills:\n if \"crontab\" in skill and pycron.is_now(skill[\"crontab\"]):\n try:\n await skill[\"skill\"](opsdroid, skill[\"config\"], None)\n except Exception:\n _LOGGER.exception(\"Exception when executing cron skill.\")\n", "path": "opsdroid/parsers/crontab.py"}]} | 1,664 | 574 |
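The patch above threads an optional `timezone` kwarg from `match_crontab` into the stored skill and, at evaluation time, falls back to a global `timezone` config entry with `UTC` as the last resort before asking pycron whether the schedule matches the current time. The helper below condenses that selection logic into one testable function; the name `crontab_is_due` is illustrative only.

```python
import arrow
import pycron


def crontab_is_due(skill, config):
    """Return True if the skill's crontab matches now in the chosen timezone."""
    timezone = skill.get("timezone") or config.get("timezone", "UTC")
    return pycron.is_now(skill["crontab"], arrow.now(tz=timezone))
```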
gh_patches_debug_37008 | rasdani/github-patches | git_diff | great-expectations__great_expectations-2966 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Use cleaner solution for non-truncating division in python 2
Prefer `from __future__ import division` to `1.*x/y`
</issue>
<code>
[start of great_expectations/rule_based_profiler/profiler.py]
1 import uuid
2 from typing import Dict, List, Optional, Union
3
4 import great_expectations.exceptions as ge_exceptions
5 from great_expectations import DataContext
6 from great_expectations.core import ExpectationConfiguration, ExpectationSuite
7 from great_expectations.data_context.util import instantiate_class_from_config
8 from great_expectations.rule_based_profiler.domain_builder.domain_builder import (
9 DomainBuilder,
10 )
11 from great_expectations.rule_based_profiler.expectation_configuration_builder.expectation_configuration_builder import (
12 ExpectationConfigurationBuilder,
13 )
14 from great_expectations.rule_based_profiler.parameter_builder.parameter_builder import (
15 ParameterBuilder,
16 )
17 from great_expectations.rule_based_profiler.parameter_builder.parameter_container import (
18 ParameterContainer,
19 build_parameter_container_for_variables,
20 )
21 from great_expectations.rule_based_profiler.rule.rule import Rule
22
23
24 class Profiler:
25 """
26 Profiler object serves to profile, or automatically evaluate a set of rules, upon a given
27 batch / multiple batches of data.
28 """
29
30 def __init__(
31 self,
32 *,
33 profiler_config: Optional[Dict[str, Dict[str, Dict]]] = None,
34 data_context: Optional[DataContext] = None,
35 ):
36 """
37 Create a new Profiler using configured rules.
38 For a rule or an item in a rule configuration, instantiates the following if
39 available: a domain builder, a parameter builder, and a configuration builder.
40 These will be used to define profiler computation patterns.
41
42 Args:
43 profiler_config: Variables and Rules configuration as a dictionary
44 data_context: DataContext object that defines a full runtime environment (data access, etc.)
45 """
46 self._data_context = data_context
47 self._rules = []
48
49 rules_configs: Dict[str, Dict] = profiler_config.get("rules", {})
50 rule_name: str
51 rule_config: dict
52
53 for rule_name, rule_config in rules_configs.items():
54 domain_builder_config: dict = rule_config.get("domain_builder")
55
56 if domain_builder_config is None:
57 raise ge_exceptions.ProfilerConfigurationError(
58 message=f'Invalid rule "{rule_name}": no domain_builder found.'
59 )
60
61 domain_builder: DomainBuilder = instantiate_class_from_config(
62 config=domain_builder_config,
63 runtime_environment={"data_context": data_context},
64 config_defaults={
65 "module_name": "great_expectations.rule_based_profiler.domain_builder"
66 },
67 )
68
69 parameter_builders: List[ParameterBuilder] = []
70
71 parameter_builder_configs: dict = rule_config.get("parameter_builders")
72
73 if parameter_builder_configs:
74 parameter_builder_config: dict
75 for parameter_builder_config in parameter_builder_configs:
76 parameter_builders.append(
77 instantiate_class_from_config(
78 config=parameter_builder_config,
79 runtime_environment={"data_context": data_context},
80 config_defaults={
81 "module_name": "great_expectations.rule_based_profiler.parameter_builder"
82 },
83 )
84 )
85
86 expectation_configuration_builders: List[
87 ExpectationConfigurationBuilder
88 ] = []
89
90 expectation_configuration_builder_configs: dict = rule_config.get(
91 "expectation_configuration_builders"
92 )
93
94 if expectation_configuration_builder_configs:
95 expectation_configuration_builder_config: dict
96 for (
97 expectation_configuration_builder_config
98 ) in expectation_configuration_builder_configs:
99 expectation_configuration_builders.append(
100 instantiate_class_from_config(
101 config=expectation_configuration_builder_config,
102 runtime_environment={},
103 config_defaults={
104 "class_name": "DefaultExpectationConfigurationBuilder",
105 "module_name": "great_expectations.rule_based_profiler.expectation_configuration_builder",
106 },
107 )
108 )
109
110 variables_configs: Dict[str, Dict] = profiler_config.get("variables", {})
111 variables: Optional[ParameterContainer] = None
112
113 if variables_configs:
114 variables = build_parameter_container_for_variables(
115 variables_configs=variables_configs
116 )
117
118 self._rules.append(
119 Rule(
120 name=rule_name,
121 domain_builder=domain_builder,
122 parameter_builders=parameter_builders,
123 expectation_configuration_builders=expectation_configuration_builders,
124 variables=variables,
125 )
126 )
127
128 def profile(
129 self,
130 *,
131 expectation_suite_name: Optional[str] = None,
132 ) -> ExpectationSuite:
133 """
134 Args:
135 :param expectation_suite_name: A name for returned Expectation suite.
136 :return: Set of rule evaluation results in the form of an ExpectationSuite
137 """
138 if expectation_suite_name is None:
139 expectation_suite_name = (
140 f"tmp.profiler_{self.__class__.__name__}_suite_{str(uuid.uuid4())[:8]}"
141 )
142
143 expectation_suite: ExpectationSuite = ExpectationSuite(
144 expectation_suite_name=expectation_suite_name
145 )
146
147 rule: Rule
148 for rule in self._rules:
149 expectation_configurations: List[ExpectationConfiguration] = rule.generate()
150 expectation_configuration: ExpectationConfiguration
151 for expectation_configuration in expectation_configurations:
152 expectation_suite.add_expectation(
153 expectation_configuration=expectation_configuration
154 )
155
156 return expectation_suite
157
[end of great_expectations/rule_based_profiler/profiler.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/great_expectations/rule_based_profiler/profiler.py b/great_expectations/rule_based_profiler/profiler.py
--- a/great_expectations/rule_based_profiler/profiler.py
+++ b/great_expectations/rule_based_profiler/profiler.py
@@ -43,10 +43,11 @@
profiler_config: Variables and Rules configuration as a dictionary
data_context: DataContext object that defines a full runtime environment (data access, etc.)
"""
+ self._profiler_config = profiler_config
self._data_context = data_context
self._rules = []
- rules_configs: Dict[str, Dict] = profiler_config.get("rules", {})
+ rules_configs: Dict[str, Dict] = self._profiler_config.get("rules", {})
rule_name: str
rule_config: dict
@@ -107,7 +108,9 @@
)
)
- variables_configs: Dict[str, Dict] = profiler_config.get("variables", {})
+ variables_configs: Dict[str, Dict] = self._profiler_config.get(
+ "variables", {}
+ )
variables: Optional[ParameterContainer] = None
if variables_configs:
@@ -129,10 +132,12 @@
self,
*,
expectation_suite_name: Optional[str] = None,
+ include_citation: bool = True,
) -> ExpectationSuite:
"""
Args:
:param expectation_suite_name: A name for returned Expectation suite.
+ :param include_citation: Whether or not to include the Profiler config in the metadata for the ExpectationSuite produced by the Profiler
:return: Set of rule evaluation results in the form of an ExpectationSuite
"""
if expectation_suite_name is None:
@@ -144,6 +149,12 @@
expectation_suite_name=expectation_suite_name
)
+ if include_citation:
+ expectation_suite.add_citation(
+ comment="Suite created by Rule-Based Profiler with the following config",
+ profiler_config=self._profiler_config,
+ )
+
rule: Rule
for rule in self._rules:
expectation_configurations: List[ExpectationConfiguration] = rule.generate()
| {"golden_diff": "diff --git a/great_expectations/rule_based_profiler/profiler.py b/great_expectations/rule_based_profiler/profiler.py\n--- a/great_expectations/rule_based_profiler/profiler.py\n+++ b/great_expectations/rule_based_profiler/profiler.py\n@@ -43,10 +43,11 @@\n profiler_config: Variables and Rules configuration as a dictionary\n data_context: DataContext object that defines a full runtime environment (data access, etc.)\n \"\"\"\n+ self._profiler_config = profiler_config\n self._data_context = data_context\n self._rules = []\n \n- rules_configs: Dict[str, Dict] = profiler_config.get(\"rules\", {})\n+ rules_configs: Dict[str, Dict] = self._profiler_config.get(\"rules\", {})\n rule_name: str\n rule_config: dict\n \n@@ -107,7 +108,9 @@\n )\n )\n \n- variables_configs: Dict[str, Dict] = profiler_config.get(\"variables\", {})\n+ variables_configs: Dict[str, Dict] = self._profiler_config.get(\n+ \"variables\", {}\n+ )\n variables: Optional[ParameterContainer] = None\n \n if variables_configs:\n@@ -129,10 +132,12 @@\n self,\n *,\n expectation_suite_name: Optional[str] = None,\n+ include_citation: bool = True,\n ) -> ExpectationSuite:\n \"\"\"\n Args:\n :param expectation_suite_name: A name for returned Expectation suite.\n+ :param include_citation: Whether or not to include the Profiler config in the metadata for the ExpectationSuite produced by the Profiler\n :return: Set of rule evaluation results in the form of an ExpectationSuite\n \"\"\"\n if expectation_suite_name is None:\n@@ -144,6 +149,12 @@\n expectation_suite_name=expectation_suite_name\n )\n \n+ if include_citation:\n+ expectation_suite.add_citation(\n+ comment=\"Suite created by Rule-Based Profiler with the following config\",\n+ profiler_config=self._profiler_config,\n+ )\n+\n rule: Rule\n for rule in self._rules:\n expectation_configurations: List[ExpectationConfiguration] = rule.generate()\n", "issue": "Use cleaner solution for non-truncating division in python 2\nPrefer `from __future__ import division` to `1.*x/y`\n", "before_files": [{"content": "import uuid\nfrom typing import Dict, List, Optional, Union\n\nimport great_expectations.exceptions as ge_exceptions\nfrom great_expectations import DataContext\nfrom great_expectations.core import ExpectationConfiguration, ExpectationSuite\nfrom great_expectations.data_context.util import instantiate_class_from_config\nfrom great_expectations.rule_based_profiler.domain_builder.domain_builder import (\n DomainBuilder,\n)\nfrom great_expectations.rule_based_profiler.expectation_configuration_builder.expectation_configuration_builder import (\n ExpectationConfigurationBuilder,\n)\nfrom great_expectations.rule_based_profiler.parameter_builder.parameter_builder import (\n ParameterBuilder,\n)\nfrom great_expectations.rule_based_profiler.parameter_builder.parameter_container import (\n ParameterContainer,\n build_parameter_container_for_variables,\n)\nfrom great_expectations.rule_based_profiler.rule.rule import Rule\n\n\nclass Profiler:\n \"\"\"\n Profiler object serves to profile, or automatically evaluate a set of rules, upon a given\n batch / multiple batches of data.\n \"\"\"\n\n def __init__(\n self,\n *,\n profiler_config: Optional[Dict[str, Dict[str, Dict]]] = None,\n data_context: Optional[DataContext] = None,\n ):\n \"\"\"\n Create a new Profiler using configured rules.\n For a rule or an item in a rule configuration, instantiates the following if\n available: a domain builder, a parameter builder, and a configuration builder.\n These will be used to define profiler 
computation patterns.\n\n Args:\n profiler_config: Variables and Rules configuration as a dictionary\n data_context: DataContext object that defines a full runtime environment (data access, etc.)\n \"\"\"\n self._data_context = data_context\n self._rules = []\n\n rules_configs: Dict[str, Dict] = profiler_config.get(\"rules\", {})\n rule_name: str\n rule_config: dict\n\n for rule_name, rule_config in rules_configs.items():\n domain_builder_config: dict = rule_config.get(\"domain_builder\")\n\n if domain_builder_config is None:\n raise ge_exceptions.ProfilerConfigurationError(\n message=f'Invalid rule \"{rule_name}\": no domain_builder found.'\n )\n\n domain_builder: DomainBuilder = instantiate_class_from_config(\n config=domain_builder_config,\n runtime_environment={\"data_context\": data_context},\n config_defaults={\n \"module_name\": \"great_expectations.rule_based_profiler.domain_builder\"\n },\n )\n\n parameter_builders: List[ParameterBuilder] = []\n\n parameter_builder_configs: dict = rule_config.get(\"parameter_builders\")\n\n if parameter_builder_configs:\n parameter_builder_config: dict\n for parameter_builder_config in parameter_builder_configs:\n parameter_builders.append(\n instantiate_class_from_config(\n config=parameter_builder_config,\n runtime_environment={\"data_context\": data_context},\n config_defaults={\n \"module_name\": \"great_expectations.rule_based_profiler.parameter_builder\"\n },\n )\n )\n\n expectation_configuration_builders: List[\n ExpectationConfigurationBuilder\n ] = []\n\n expectation_configuration_builder_configs: dict = rule_config.get(\n \"expectation_configuration_builders\"\n )\n\n if expectation_configuration_builder_configs:\n expectation_configuration_builder_config: dict\n for (\n expectation_configuration_builder_config\n ) in expectation_configuration_builder_configs:\n expectation_configuration_builders.append(\n instantiate_class_from_config(\n config=expectation_configuration_builder_config,\n runtime_environment={},\n config_defaults={\n \"class_name\": \"DefaultExpectationConfigurationBuilder\",\n \"module_name\": \"great_expectations.rule_based_profiler.expectation_configuration_builder\",\n },\n )\n )\n\n variables_configs: Dict[str, Dict] = profiler_config.get(\"variables\", {})\n variables: Optional[ParameterContainer] = None\n\n if variables_configs:\n variables = build_parameter_container_for_variables(\n variables_configs=variables_configs\n )\n\n self._rules.append(\n Rule(\n name=rule_name,\n domain_builder=domain_builder,\n parameter_builders=parameter_builders,\n expectation_configuration_builders=expectation_configuration_builders,\n variables=variables,\n )\n )\n\n def profile(\n self,\n *,\n expectation_suite_name: Optional[str] = None,\n ) -> ExpectationSuite:\n \"\"\"\n Args:\n :param expectation_suite_name: A name for returned Expectation suite.\n :return: Set of rule evaluation results in the form of an ExpectationSuite\n \"\"\"\n if expectation_suite_name is None:\n expectation_suite_name = (\n f\"tmp.profiler_{self.__class__.__name__}_suite_{str(uuid.uuid4())[:8]}\"\n )\n\n expectation_suite: ExpectationSuite = ExpectationSuite(\n expectation_suite_name=expectation_suite_name\n )\n\n rule: Rule\n for rule in self._rules:\n expectation_configurations: List[ExpectationConfiguration] = rule.generate()\n expectation_configuration: ExpectationConfiguration\n for expectation_configuration in expectation_configurations:\n expectation_suite.add_expectation(\n expectation_configuration=expectation_configuration\n )\n\n return 
expectation_suite\n", "path": "great_expectations/rule_based_profiler/profiler.py"}]} | 1,995 | 494 |
gh_patches_debug_36047 | rasdani/github-patches | git_diff | ivy-llc__ivy-15973 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add Sparse Array Functions to Paddle Backend
Add [Sparse Array Functions](https://www.paddlepaddle.org.cn/documentation/docs/en/api/index\_en.html) to Paddle backend
\_
>Please keep in mind that the proper way to link an issue to this list is to comment "- [ ] #issue\_number" while the issue's title only includes the name of the function you've chosen.
\_
## Experimental
- [x] is\_native\_sparse\_array
- [x] native\_sparse\_array
- [x] native\_sparse\_array\_to\_indices\_values\_and\_shape
</issue>
<code>
[start of ivy/functional/backends/paddle/experimental/sparse_array.py]
1 from ivy.utils.exceptions import IvyNotImplementedException
2 import paddle
3
4
5 def is_native_sparse_array(x: paddle.Tensor) -> bool:
6 return x.is_sparse_coo() or x.is_sparse_csr()
7
8
9 def native_sparse_array(
10 data=None,
11 *,
12 coo_indices=None,
13 crow_indices=None,
14 col_indices=None,
15 ccol_indices=None,
16 row_indices=None,
17 values=None,
18 dense_shape=None,
19 format="coo",
20 ):
21 raise IvyNotImplementedException()
22
23
24 def native_sparse_array_to_indices_values_and_shape(x):
25 raise IvyNotImplementedException()
26
[end of ivy/functional/backends/paddle/experimental/sparse_array.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/ivy/functional/backends/paddle/experimental/sparse_array.py b/ivy/functional/backends/paddle/experimental/sparse_array.py
--- a/ivy/functional/backends/paddle/experimental/sparse_array.py
+++ b/ivy/functional/backends/paddle/experimental/sparse_array.py
@@ -1,11 +1,26 @@
+import ivy
+from ivy.functional.ivy.experimental.sparse_array import (
+ _verify_coo_components,
+ _verify_csr_components,
+ _is_data_not_indices_values_and_shape,
+)
+from ivy.func_wrapper import (
+ with_unsupported_device_and_dtypes,
+)
from ivy.utils.exceptions import IvyNotImplementedException
import paddle
+# local
+from .. import backend_version
+
def is_native_sparse_array(x: paddle.Tensor) -> bool:
return x.is_sparse_coo() or x.is_sparse_csr()
+@with_unsupported_device_and_dtypes(
+ {"2.4.2 and below": {"cpu": ("int8",)}}, backend_version
+)
def native_sparse_array(
data=None,
*,
@@ -17,9 +32,67 @@
values=None,
dense_shape=None,
format="coo",
-):
- raise IvyNotImplementedException()
+) -> paddle.Tensor:
+ format = format.lower()
+
+ if format not in ["coo", "csr"]:
+ raise IvyNotImplementedException(
+ "paddle only supports 'coo' and 'csr' sparse formats."
+ )
+
+ if _is_data_not_indices_values_and_shape(
+ data,
+ coo_indices,
+ crow_indices,
+ col_indices,
+ ccol_indices,
+ row_indices,
+ values,
+ dense_shape,
+ ):
+ ivy.utils.assertions.check_true(
+ ivy.is_native_sparse_array(data), message="not a sparse array"
+ )
+ return data
+
+ if format == "coo":
+ _verify_coo_components(
+ indices=coo_indices, values=values, dense_shape=dense_shape
+ )
+ return paddle.sparse.sparse_coo_tensor(
+ indices=coo_indices,
+ values=values,
+ shape=dense_shape,
+ dtype=dtype,
+ place=device,
+ stop_gradient=not requires_grad,
+ )
+ else:
+ _verify_csr_components(
+ crow_indices=crow_indices,
+ col_indices=col_indices,
+ values=values,
+ dense_shape=dense_shape,
+ )
+ return paddle.sparse.sparse_csr_tensor(
+ crows=crow_indices,
+ cols=col_indices,
+ values=values,
+ shape=dense_shape,
+ dtype=dtype,
+ place=device,
+ stop_gradient=not requires_grad,
+ )
def native_sparse_array_to_indices_values_and_shape(x):
- raise IvyNotImplementedException()
+ if not is_native_sparse_array(x):
+ raise ivy.utils.exceptions.IvyException("not a Paddle Sparse Array")
+ if x.is_sparse_coo():
+ return {"coo_indices": x.indices()}, x.values(), x.shape
+ else:
+ return (
+ {"crow_indices": x.crows(), "col_indices": x.cols()},
+ x.values(),
+ x.shape,
+ )
| {"golden_diff": "diff --git a/ivy/functional/backends/paddle/experimental/sparse_array.py b/ivy/functional/backends/paddle/experimental/sparse_array.py\n--- a/ivy/functional/backends/paddle/experimental/sparse_array.py\n+++ b/ivy/functional/backends/paddle/experimental/sparse_array.py\n@@ -1,11 +1,26 @@\n+import ivy\n+from ivy.functional.ivy.experimental.sparse_array import (\n+ _verify_coo_components,\n+ _verify_csr_components,\n+ _is_data_not_indices_values_and_shape,\n+)\n+from ivy.func_wrapper import (\n+ with_unsupported_device_and_dtypes,\n+)\n from ivy.utils.exceptions import IvyNotImplementedException\n import paddle\n \n+# local\n+from .. import backend_version\n+\n \n def is_native_sparse_array(x: paddle.Tensor) -> bool:\n return x.is_sparse_coo() or x.is_sparse_csr()\n \n \n+@with_unsupported_device_and_dtypes(\n+ {\"2.4.2 and below\": {\"cpu\": (\"int8\",)}}, backend_version\n+)\n def native_sparse_array(\n data=None,\n *,\n@@ -17,9 +32,67 @@\n values=None,\n dense_shape=None,\n format=\"coo\",\n-):\n- raise IvyNotImplementedException()\n+) -> paddle.Tensor:\n+ format = format.lower()\n+\n+ if format not in [\"coo\", \"csr\"]:\n+ raise IvyNotImplementedException(\n+ \"paddle only supports 'coo' and 'csr' sparse formats.\"\n+ )\n+\n+ if _is_data_not_indices_values_and_shape(\n+ data,\n+ coo_indices,\n+ crow_indices,\n+ col_indices,\n+ ccol_indices,\n+ row_indices,\n+ values,\n+ dense_shape,\n+ ):\n+ ivy.utils.assertions.check_true(\n+ ivy.is_native_sparse_array(data), message=\"not a sparse array\"\n+ )\n+ return data\n+\n+ if format == \"coo\":\n+ _verify_coo_components(\n+ indices=coo_indices, values=values, dense_shape=dense_shape\n+ )\n+ return paddle.sparse.sparse_coo_tensor(\n+ indices=coo_indices,\n+ values=values,\n+ shape=dense_shape,\n+ dtype=dtype,\n+ place=device,\n+ stop_gradient=not requires_grad,\n+ )\n+ else:\n+ _verify_csr_components(\n+ crow_indices=crow_indices,\n+ col_indices=col_indices,\n+ values=values,\n+ dense_shape=dense_shape,\n+ )\n+ return paddle.sparse.sparse_csr_tensor(\n+ crows=crow_indices,\n+ cols=col_indices,\n+ values=values,\n+ shape=dense_shape,\n+ dtype=dtype,\n+ place=device,\n+ stop_gradient=not requires_grad,\n+ )\n \n \n def native_sparse_array_to_indices_values_and_shape(x):\n- raise IvyNotImplementedException()\n+ if not is_native_sparse_array(x):\n+ raise ivy.utils.exceptions.IvyException(\"not a Paddle Sparse Array\")\n+ if x.is_sparse_coo():\n+ return {\"coo_indices\": x.indices()}, x.values(), x.shape\n+ else:\n+ return (\n+ {\"crow_indices\": x.crows(), \"col_indices\": x.cols()},\n+ x.values(),\n+ x.shape,\n+ )\n", "issue": "Add Sparse Array Functions to Paddle Backend\nAdd [Sparse Array Functions](https://www.paddlepaddle.org.cn/documentation/docs/en/api/index\\_en.html) to Paddle backend\r\n\r\n\\_\r\n\r\n>Please keep in mind that the proper way to link an issue to this list is to comment \"- [ ] #issue\\_number\" while the issue's title only includes the name of the function you've chosen.\r\n\r\n\\_\r\n\r\n## Experimental\r\n\r\n- [x] is\\_native\\_sparse\\_array\r\n- [x] native\\_sparse\\_array\r\n- [x] native\\_sparse\\_array\\_to\\_indices\\_values\\_and\\_shape\n", "before_files": [{"content": "from ivy.utils.exceptions import IvyNotImplementedException\nimport paddle\n\n\ndef is_native_sparse_array(x: paddle.Tensor) -> bool:\n return x.is_sparse_coo() or x.is_sparse_csr()\n\n\ndef native_sparse_array(\n data=None,\n *,\n coo_indices=None,\n crow_indices=None,\n col_indices=None,\n ccol_indices=None,\n 
row_indices=None,\n values=None,\n dense_shape=None,\n format=\"coo\",\n):\n raise IvyNotImplementedException()\n\n\ndef native_sparse_array_to_indices_values_and_shape(x):\n raise IvyNotImplementedException()\n", "path": "ivy/functional/backends/paddle/experimental/sparse_array.py"}]} | 853 | 743 |
gh_patches_debug_30271 | rasdani/github-patches | git_diff | rasterio__rasterio-886 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
rio overview --ls should not modify file
Currently running `rio overview --ls` to inspect the overviews modifies the file. We could detect the `--ls` option and open in read-only mode.
</issue>
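The fix proposed above amounts to choosing the dataset mode from the flags: open with `'r'` (read-only) when only listing, and keep `'r+'` for the paths that actually build overviews. A minimal sketch of the read-only listing pattern, reusing the same dataset methods the command already calls (`indexes`, `overviews`) — illustrative only, not part of the repository:

```python
import rasterio


def list_overviews(path):
    # Read-only open: merely inspecting overviews must never modify the file.
    with rasterio.open(path, "r") as src:
        return {idx: src.overviews(idx) for idx in src.indexes}
```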
<code>
[start of rasterio/rio/overview.py]
1 # coding: utf-8
2 """Manage overviews of a dataset."""
3
4 from functools import reduce
5 import logging
6 import operator
7
8 import click
9
10 from . import options
11 import rasterio
12 from rasterio.enums import Resampling
13
14
15 def build_handler(ctx, param, value):
16 if value:
17 try:
18 if '^' in value:
19 base, exp_range = value.split('^')
20 exp_min, exp_max = (int(v) for v in exp_range.split('..'))
21 value = [pow(int(base), k) for k in range(exp_min, exp_max + 1)]
22 else:
23 value = [int(v) for v in value.split(',')]
24 except Exception:
25 raise click.BadParameter(u"must match 'n,n,n,…' or 'n^n..n'.")
26 return value
27
28
29 @click.command('overview', short_help="Construct overviews in an existing dataset.")
30 @options.file_in_arg
31 @click.option('--build', callback=build_handler, metavar=u"f1,f2,…|b^min..max",
32 help="A sequence of decimation factors specied as "
33 "comma-separated list of numbers or a base and range of "
34 "exponents.")
35 @click.option('--ls', help="Print the overviews for each band.",
36 is_flag=True, default=False)
37 @click.option('--rebuild', help="Reconstruct existing overviews.",
38 is_flag=True, default=False)
39 @click.option('--resampling', help="Resampling algorithm.",
40 type=click.Choice(
41 [it.name for it in Resampling if it.value in [0, 2, 5, 6, 7]]),
42 default='nearest', show_default=True)
43 @click.pass_context
44 def overview(ctx, input, build, ls, rebuild, resampling):
45 """Construct overviews in an existing dataset.
46
47 A pyramid of overviews computed once and stored in the dataset can
48 improve performance in some applications.
49
50 The decimation levels at which to build overviews can be specified as
51 a comma separated list
52
53 rio overview --build 2,4,8,16
54
55 or a base and range of exponents.
56
57 rio overview --build 2^1..4
58
59 Note that overviews can not currently be removed and are not
60 automatically updated when the dataset's primary bands are
61 modified.
62
63 Information about existing overviews can be printed using the --ls
64 option.
65
66 rio overview --ls
67
68 """
69 with ctx.obj['env']:
70 with rasterio.open(input, 'r+') as dst:
71
72 if ls:
73 resampling_method = dst.tags(
74 ns='rio_overview').get('resampling') or 'unknown'
75
76 click.echo("Overview factors:")
77 for idx in dst.indexes:
78 click.echo(" Band %d: %s (method: '%s')" % (
79 idx, dst.overviews(idx) or 'None', resampling_method))
80
81 elif rebuild:
82 # Build the same overviews for all bands.
83 factors = reduce(
84 operator.or_,
85 [set(dst.overviews(i)) for i in dst.indexes])
86
87 # Attempt to recover the resampling method from dataset tags.
88 resampling_method = dst.tags(
89 ns='rio_overview').get('resampling') or resampling
90
91 dst.build_overviews(
92 list(factors), Resampling[resampling_method])
93
94 elif build:
95 dst.build_overviews(build, Resampling[resampling])
96
97 # Save the resampling method to a tag.
98 dst.update_tags(ns='rio_overview', resampling=resampling)
99
[end of rasterio/rio/overview.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/rasterio/rio/overview.py b/rasterio/rio/overview.py
--- a/rasterio/rio/overview.py
+++ b/rasterio/rio/overview.py
@@ -67,9 +67,8 @@
"""
with ctx.obj['env']:
- with rasterio.open(input, 'r+') as dst:
-
- if ls:
+ if ls:
+ with rasterio.open(input, 'r') as dst:
resampling_method = dst.tags(
ns='rio_overview').get('resampling') or 'unknown'
@@ -77,8 +76,8 @@
for idx in dst.indexes:
click.echo(" Band %d: %s (method: '%s')" % (
idx, dst.overviews(idx) or 'None', resampling_method))
-
- elif rebuild:
+ elif rebuild:
+ with rasterio.open(input, 'r+') as dst:
# Build the same overviews for all bands.
factors = reduce(
operator.or_,
@@ -91,8 +90,13 @@
dst.build_overviews(
list(factors), Resampling[resampling_method])
- elif build:
+ elif build:
+ with rasterio.open(input, 'r+') as dst:
dst.build_overviews(build, Resampling[resampling])
# Save the resampling method to a tag.
dst.update_tags(ns='rio_overview', resampling=resampling)
+
+ else:
+ raise click.UsageError(
+ "Please specify --ls, --rebuild, or --build ...")
| {"golden_diff": "diff --git a/rasterio/rio/overview.py b/rasterio/rio/overview.py\n--- a/rasterio/rio/overview.py\n+++ b/rasterio/rio/overview.py\n@@ -67,9 +67,8 @@\n \n \"\"\"\n with ctx.obj['env']:\n- with rasterio.open(input, 'r+') as dst:\n-\n- if ls:\n+ if ls:\n+ with rasterio.open(input, 'r') as dst:\n resampling_method = dst.tags(\n ns='rio_overview').get('resampling') or 'unknown'\n \n@@ -77,8 +76,8 @@\n for idx in dst.indexes:\n click.echo(\" Band %d: %s (method: '%s')\" % (\n idx, dst.overviews(idx) or 'None', resampling_method))\n-\n- elif rebuild:\n+ elif rebuild:\n+ with rasterio.open(input, 'r+') as dst:\n # Build the same overviews for all bands.\n factors = reduce(\n operator.or_,\n@@ -91,8 +90,13 @@\n dst.build_overviews(\n list(factors), Resampling[resampling_method])\n \n- elif build:\n+ elif build:\n+ with rasterio.open(input, 'r+') as dst:\n dst.build_overviews(build, Resampling[resampling])\n \n # Save the resampling method to a tag.\n dst.update_tags(ns='rio_overview', resampling=resampling)\n+\n+ else:\n+ raise click.UsageError(\n+ \"Please specify --ls, --rebuild, or --build ...\")\n", "issue": "rio overview --ls should not modify file\nCurrently running `rio overview --ls` to inspect the overviews modifies the file. We could detect the `--ls` option and open in read-only mode. \n\n", "before_files": [{"content": "# coding: utf-8\n\"\"\"Manage overviews of a dataset.\"\"\"\n\nfrom functools import reduce\nimport logging\nimport operator\n\nimport click\n\nfrom . import options\nimport rasterio\nfrom rasterio.enums import Resampling\n\n\ndef build_handler(ctx, param, value):\n if value:\n try:\n if '^' in value:\n base, exp_range = value.split('^')\n exp_min, exp_max = (int(v) for v in exp_range.split('..'))\n value = [pow(int(base), k) for k in range(exp_min, exp_max + 1)]\n else:\n value = [int(v) for v in value.split(',')]\n except Exception:\n raise click.BadParameter(u\"must match 'n,n,n,\u2026' or 'n^n..n'.\")\n return value\n\n\[email protected]('overview', short_help=\"Construct overviews in an existing dataset.\")\[email protected]_in_arg\[email protected]('--build', callback=build_handler, metavar=u\"f1,f2,\u2026|b^min..max\",\n help=\"A sequence of decimation factors specied as \"\n \"comma-separated list of numbers or a base and range of \"\n \"exponents.\")\[email protected]('--ls', help=\"Print the overviews for each band.\",\n is_flag=True, default=False)\[email protected]('--rebuild', help=\"Reconstruct existing overviews.\",\n is_flag=True, default=False)\[email protected]('--resampling', help=\"Resampling algorithm.\",\n type=click.Choice(\n [it.name for it in Resampling if it.value in [0, 2, 5, 6, 7]]),\n default='nearest', show_default=True)\[email protected]_context\ndef overview(ctx, input, build, ls, rebuild, resampling):\n \"\"\"Construct overviews in an existing dataset.\n\n A pyramid of overviews computed once and stored in the dataset can\n improve performance in some applications.\n\n The decimation levels at which to build overviews can be specified as\n a comma separated list\n\n rio overview --build 2,4,8,16\n\n or a base and range of exponents.\n\n rio overview --build 2^1..4\n\n Note that overviews can not currently be removed and are not\n automatically updated when the dataset's primary bands are\n modified.\n\n Information about existing overviews can be printed using the --ls\n option.\n\n rio overview --ls\n\n \"\"\"\n with ctx.obj['env']:\n with rasterio.open(input, 'r+') as dst:\n\n if ls:\n resampling_method = dst.tags(\n 
ns='rio_overview').get('resampling') or 'unknown'\n\n click.echo(\"Overview factors:\")\n for idx in dst.indexes:\n click.echo(\" Band %d: %s (method: '%s')\" % (\n idx, dst.overviews(idx) or 'None', resampling_method))\n\n elif rebuild:\n # Build the same overviews for all bands.\n factors = reduce(\n operator.or_,\n [set(dst.overviews(i)) for i in dst.indexes])\n\n # Attempt to recover the resampling method from dataset tags.\n resampling_method = dst.tags(\n ns='rio_overview').get('resampling') or resampling\n\n dst.build_overviews(\n list(factors), Resampling[resampling_method])\n\n elif build:\n dst.build_overviews(build, Resampling[resampling])\n\n # Save the resampling method to a tag.\n dst.update_tags(ns='rio_overview', resampling=resampling)\n", "path": "rasterio/rio/overview.py"}]} | 1,547 | 354 |
gh_patches_debug_20748 | rasdani/github-patches | git_diff | WordPress__openverse-api-318 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add audio to the ingestion server tests
Audio is currently not included in the ingestion server integration or unit tests. We should update these tests to include support for audio. Separate PRs for unit and integration tests would be best. Below is some information on these tests and how to work with them.
## Running the tests
To run the tests and get a sense of what they do, do the following steps:
```bash
cd ingestion_server
pipenv install
pipenv run python3 test/integration_tests.py
```
This is currently blocked by #143. I would've liked to run the tests to learn a bit more about how they work but this isn't yet possible.
</issue>
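One generic way to extend the existing image-only checks to audio is to parameterise them over media types; the sketch below only shows the `subTest` pattern — the module layout and assertions of the real ingestion-server tests are assumed, not taken from the repository:

```python
import unittest

MEDIA_TYPES = ("image", "audio")


class TestIngestion(unittest.TestCase):
    def test_covers_all_media_types(self):
        for media in MEDIA_TYPES:
            with self.subTest(media=media):
                # Placeholder assertion: the real tests would exercise the
                # ingestion/indexing path for this media type.
                self.assertIn(media, MEDIA_TYPES)


if __name__ == "__main__":
    unittest.main()
```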
<code>
[start of sample_data/make_sample_pop.py]
1 import csv
2 import random
3
4
5 in_tsv = open("sample_data.csv", "r")
6 out_tsv = open("sample_popularity_data.csv", "w+")
7 output_fields = ["identifier", "normalized_popularity"]
8 reader = csv.DictReader(in_tsv, delimiter=",")
9 writer = csv.DictWriter(out_tsv, delimiter=",", fieldnames=output_fields)
10 writer.writeheader()
11 for row in reader:
12 pop = random.uniform(0, 100)
13 out_row = {"identifier": row["identifier"], "normalized_popularity": pop}
14 writer.writerow(out_row)
15
[end of sample_data/make_sample_pop.py]
[start of ingestion_server/ingestion_server/tasks.py]
1 """
2 Simple in-memory tracking of executed tasks.
3 """
4
5 import datetime as dt
6 import logging
7 from enum import Enum
8 from multiprocessing import Process
9
10 import requests
11
12 from ingestion_server.indexer import TableIndexer, elasticsearch_connect
13 from ingestion_server.ingest import reload_upstream
14
15
16 class TaskTypes(Enum):
17 # Completely reindex all data for a given model.
18 REINDEX = 0
19 # Reindex updates to a model from the database since a certain date.
20 UPDATE_INDEX = 1
21 # Download the latest copy of the data from the upstream database, then
22 # completely reindex the newly imported data.
23 INGEST_UPSTREAM = 2
24 # Create indices in Elasticsearch for QA tests.
25 # This is not intended for production use, but can be safely executed in a
26 # production environment without consequence.
27 LOAD_TEST_DATA = 3
28
29
30 class TaskTracker:
31 def __init__(self):
32 self.id_task = {}
33 self.id_action = {}
34 self.id_progress = {}
35 self.id_start_time = {}
36 self.id_finish_time = {}
37
38 def add_task(self, task, task_id, action, progress, finish_time):
39 self._prune_old_tasks()
40 self.id_task[task_id] = task
41 self.id_action[task_id] = action
42 self.id_progress[task_id] = progress
43 self.id_start_time[task_id] = dt.datetime.utcnow().timestamp()
44 self.id_finish_time[task_id] = finish_time
45 return task_id
46
47 def _prune_old_tasks(self):
48 pass
49
50 def list_task_statuses(self):
51 self._prune_old_tasks()
52 results = []
53 for _id, task in self.id_task.items():
54 percent_completed = self.id_progress[_id].value
55 active = task.is_alive()
56 start_time = self.id_start_time[_id]
57 finish_time = self.id_finish_time[_id].value
58 results.append(
59 {
60 "task_id": _id,
61 "active": active,
62 "action": self.id_action[_id],
63 "progress": percent_completed,
64 "error": percent_completed < 100 and not active,
65 "start_time": start_time,
66 "finish_time": finish_time,
67 }
68 )
69 sorted_results = sorted(results, key=lambda x: x["finish_time"])
70
71 to_utc = dt.datetime.utcfromtimestamp
72
73 def render_date(x):
74 return to_utc(x) if x != 0.0 else None
75
76 # Convert date to a readable format
77 for idx, task in enumerate(sorted_results):
78 start_time = task["start_time"]
79 finish_time = task["finish_time"]
80 sorted_results[idx]["start_time"] = str(render_date(start_time))
81 sorted_results[idx]["finish_time"] = str(render_date(finish_time))
82
83 return sorted_results
84
85
86 class Task(Process):
87 def __init__(
88 self, model, task_type, since_date, progress, task_id, finish_time, callback_url
89 ):
90 Process.__init__(self)
91 self.model = model
92 self.task_type = task_type
93 self.since_date = since_date
94 self.progress = progress
95 self.task_id = task_id
96 self.finish_time = finish_time
97 self.callback_url = callback_url
98
99 def run(self):
100 # Map task types to actions.
101 elasticsearch = elasticsearch_connect()
102 indexer = TableIndexer(
103 elasticsearch, self.model, self.progress, self.finish_time
104 )
105 if self.task_type == TaskTypes.REINDEX:
106 indexer.reindex(self.model)
107 elif self.task_type == TaskTypes.UPDATE_INDEX:
108 indexer.update(self.model, self.since_date)
109 elif self.task_type == TaskTypes.INGEST_UPSTREAM:
110 reload_upstream(self.model)
111 if self.model == "audio":
112 reload_upstream("audioset", approach="basic")
113 indexer.reindex(self.model)
114 elif self.task_type == TaskTypes.LOAD_TEST_DATA:
115 indexer.load_test_data(self.model)
116 logging.info(f"Task {self.task_id} exited.")
117 if self.callback_url:
118 try:
119 requests.post(self.callback_url)
120 except requests.exceptions.RequestException as e:
121 logging.error("Failed to send callback!")
122 logging.error(e)
123
[end of ingestion_server/ingestion_server/tasks.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/ingestion_server/ingestion_server/tasks.py b/ingestion_server/ingestion_server/tasks.py
--- a/ingestion_server/ingestion_server/tasks.py
+++ b/ingestion_server/ingestion_server/tasks.py
@@ -116,7 +116,9 @@
logging.info(f"Task {self.task_id} exited.")
if self.callback_url:
try:
- requests.post(self.callback_url)
+ logging.info("Sending callback request")
+ res = requests.post(self.callback_url)
+ logging.info(f"Response: {res.text}")
except requests.exceptions.RequestException as e:
logging.error("Failed to send callback!")
logging.error(e)
diff --git a/sample_data/make_sample_pop.py b/sample_data/make_sample_pop.py
deleted file mode 100644
--- a/sample_data/make_sample_pop.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import csv
-import random
-
-
-in_tsv = open("sample_data.csv", "r")
-out_tsv = open("sample_popularity_data.csv", "w+")
-output_fields = ["identifier", "normalized_popularity"]
-reader = csv.DictReader(in_tsv, delimiter=",")
-writer = csv.DictWriter(out_tsv, delimiter=",", fieldnames=output_fields)
-writer.writeheader()
-for row in reader:
- pop = random.uniform(0, 100)
- out_row = {"identifier": row["identifier"], "normalized_popularity": pop}
- writer.writerow(out_row)
| {"golden_diff": "diff --git a/ingestion_server/ingestion_server/tasks.py b/ingestion_server/ingestion_server/tasks.py\n--- a/ingestion_server/ingestion_server/tasks.py\n+++ b/ingestion_server/ingestion_server/tasks.py\n@@ -116,7 +116,9 @@\n logging.info(f\"Task {self.task_id} exited.\")\n if self.callback_url:\n try:\n- requests.post(self.callback_url)\n+ logging.info(\"Sending callback request\")\n+ res = requests.post(self.callback_url)\n+ logging.info(f\"Response: {res.text}\")\n except requests.exceptions.RequestException as e:\n logging.error(\"Failed to send callback!\")\n logging.error(e)\ndiff --git a/sample_data/make_sample_pop.py b/sample_data/make_sample_pop.py\ndeleted file mode 100644\n--- a/sample_data/make_sample_pop.py\n+++ /dev/null\n@@ -1,14 +0,0 @@\n-import csv\n-import random\n-\n-\n-in_tsv = open(\"sample_data.csv\", \"r\")\n-out_tsv = open(\"sample_popularity_data.csv\", \"w+\")\n-output_fields = [\"identifier\", \"normalized_popularity\"]\n-reader = csv.DictReader(in_tsv, delimiter=\",\")\n-writer = csv.DictWriter(out_tsv, delimiter=\",\", fieldnames=output_fields)\n-writer.writeheader()\n-for row in reader:\n- pop = random.uniform(0, 100)\n- out_row = {\"identifier\": row[\"identifier\"], \"normalized_popularity\": pop}\n- writer.writerow(out_row)\n", "issue": "Add audio to the ingestion server tests\nAudio is currently not included in the ingestion server integration or unit tests. We should update these tests to include support for audio. Separate PRs for unit and integration tests would be best. Below is some information on these tests and how to work with them.\r\n\r\n## Running the tests \r\n\r\nTo run the tests and get a sense of what they do, do the following steps:\r\n\r\n```bash\r\ncd ingestion_server\r\npipenv install\r\npipenv run python3 test/integration_tests.py\r\n```\r\n\r\nThis is currently blocked by #143. 
I would've liked to run the tests to learn a bit more about how they work but this isn't yet possible.\n", "before_files": [{"content": "import csv\nimport random\n\n\nin_tsv = open(\"sample_data.csv\", \"r\")\nout_tsv = open(\"sample_popularity_data.csv\", \"w+\")\noutput_fields = [\"identifier\", \"normalized_popularity\"]\nreader = csv.DictReader(in_tsv, delimiter=\",\")\nwriter = csv.DictWriter(out_tsv, delimiter=\",\", fieldnames=output_fields)\nwriter.writeheader()\nfor row in reader:\n pop = random.uniform(0, 100)\n out_row = {\"identifier\": row[\"identifier\"], \"normalized_popularity\": pop}\n writer.writerow(out_row)\n", "path": "sample_data/make_sample_pop.py"}, {"content": "\"\"\"\nSimple in-memory tracking of executed tasks.\n\"\"\"\n\nimport datetime as dt\nimport logging\nfrom enum import Enum\nfrom multiprocessing import Process\n\nimport requests\n\nfrom ingestion_server.indexer import TableIndexer, elasticsearch_connect\nfrom ingestion_server.ingest import reload_upstream\n\n\nclass TaskTypes(Enum):\n # Completely reindex all data for a given model.\n REINDEX = 0\n # Reindex updates to a model from the database since a certain date.\n UPDATE_INDEX = 1\n # Download the latest copy of the data from the upstream database, then\n # completely reindex the newly imported data.\n INGEST_UPSTREAM = 2\n # Create indices in Elasticsearch for QA tests.\n # This is not intended for production use, but can be safely executed in a\n # production environment without consequence.\n LOAD_TEST_DATA = 3\n\n\nclass TaskTracker:\n def __init__(self):\n self.id_task = {}\n self.id_action = {}\n self.id_progress = {}\n self.id_start_time = {}\n self.id_finish_time = {}\n\n def add_task(self, task, task_id, action, progress, finish_time):\n self._prune_old_tasks()\n self.id_task[task_id] = task\n self.id_action[task_id] = action\n self.id_progress[task_id] = progress\n self.id_start_time[task_id] = dt.datetime.utcnow().timestamp()\n self.id_finish_time[task_id] = finish_time\n return task_id\n\n def _prune_old_tasks(self):\n pass\n\n def list_task_statuses(self):\n self._prune_old_tasks()\n results = []\n for _id, task in self.id_task.items():\n percent_completed = self.id_progress[_id].value\n active = task.is_alive()\n start_time = self.id_start_time[_id]\n finish_time = self.id_finish_time[_id].value\n results.append(\n {\n \"task_id\": _id,\n \"active\": active,\n \"action\": self.id_action[_id],\n \"progress\": percent_completed,\n \"error\": percent_completed < 100 and not active,\n \"start_time\": start_time,\n \"finish_time\": finish_time,\n }\n )\n sorted_results = sorted(results, key=lambda x: x[\"finish_time\"])\n\n to_utc = dt.datetime.utcfromtimestamp\n\n def render_date(x):\n return to_utc(x) if x != 0.0 else None\n\n # Convert date to a readable format\n for idx, task in enumerate(sorted_results):\n start_time = task[\"start_time\"]\n finish_time = task[\"finish_time\"]\n sorted_results[idx][\"start_time\"] = str(render_date(start_time))\n sorted_results[idx][\"finish_time\"] = str(render_date(finish_time))\n\n return sorted_results\n\n\nclass Task(Process):\n def __init__(\n self, model, task_type, since_date, progress, task_id, finish_time, callback_url\n ):\n Process.__init__(self)\n self.model = model\n self.task_type = task_type\n self.since_date = since_date\n self.progress = progress\n self.task_id = task_id\n self.finish_time = finish_time\n self.callback_url = callback_url\n\n def run(self):\n # Map task types to actions.\n elasticsearch = elasticsearch_connect()\n 
indexer = TableIndexer(\n elasticsearch, self.model, self.progress, self.finish_time\n )\n if self.task_type == TaskTypes.REINDEX:\n indexer.reindex(self.model)\n elif self.task_type == TaskTypes.UPDATE_INDEX:\n indexer.update(self.model, self.since_date)\n elif self.task_type == TaskTypes.INGEST_UPSTREAM:\n reload_upstream(self.model)\n if self.model == \"audio\":\n reload_upstream(\"audioset\", approach=\"basic\")\n indexer.reindex(self.model)\n elif self.task_type == TaskTypes.LOAD_TEST_DATA:\n indexer.load_test_data(self.model)\n logging.info(f\"Task {self.task_id} exited.\")\n if self.callback_url:\n try:\n requests.post(self.callback_url)\n except requests.exceptions.RequestException as e:\n logging.error(\"Failed to send callback!\")\n logging.error(e)\n", "path": "ingestion_server/ingestion_server/tasks.py"}]} | 2,020 | 336 |
gh_patches_debug_41905 | rasdani/github-patches | git_diff | pytorch__ignite-478 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Improve create_supervised_trainer with optional output_transform
Following [the discussion](https://github.com/pytorch/ignite/pull/476#discussion_r272108999), the idea is to give more flexibility to users of `create_supervised_trainer`:
```python
def default_output_transform(x, y, y_pred, loss):
    return loss.item()


def create_supervised_trainer(model, optimizer, loss_fn,
                              device=None, non_blocking=False, prepare_batch=_prepare_batch,
                              output_transform=default_output_transform):
    if device:
        model.to(device)

    def _update(engine, batch):
        model.train()
        optimizer.zero_grad()
        x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)
        y_pred = model(x)
        loss = loss_fn(y_pred, y)
        loss.backward()
        optimizer.step()
        return output_transform(x, y, y_pred, loss)

    return Engine(_update)
```
cc @IlyaOvodov
</issue>
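A sketch of how a caller could use the proposed hook — the `output_transform(x, y, y_pred, loss)` signature comes from the issue above; the body and the commented call are illustrative:

```python
def output_transform(x, y, y_pred, loss):
    # Keep (detached) predictions and targets so handlers reading
    # engine.state.output can compute more than just the loss value.
    return {"loss": loss.item(), "y_pred": y_pred.detach(), "y": y.detach()}


# trainer = create_supervised_trainer(model, optimizer, loss_fn,
#                                     output_transform=output_transform)
```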
<code>
[start of ignite/engine/__init__.py]
1 import torch
2
3 from ignite.engine.engine import Engine, State, Events
4 from ignite.utils import convert_tensor
5
6
7 def _prepare_batch(batch, device=None, non_blocking=False):
8 """Prepare batch for training: pass to a device with options.
9
10 """
11 x, y = batch
12 return (convert_tensor(x, device=device, non_blocking=non_blocking),
13 convert_tensor(y, device=device, non_blocking=non_blocking))
14
15
16 def create_supervised_trainer(model, optimizer, loss_fn,
17 device=None, non_blocking=False,
18 prepare_batch=_prepare_batch):
19 """
20 Factory function for creating a trainer for supervised models.
21
22 Args:
23 model (`torch.nn.Module`): the model to train.
24 optimizer (`torch.optim.Optimizer`): the optimizer to use.
25 loss_fn (torch.nn loss function): the loss function to use.
26 device (str, optional): device type specification (default: None).
27 Applies to both model and batches.
28 non_blocking (bool, optional): if True and this copy is between CPU and GPU, the copy may occur asynchronously
29 with respect to the host. For other cases, this argument has no effect.
30 prepare_batch (callable, optional): function that receives `batch`, `device`, `non_blocking` and outputs
31 tuple of tensors `(batch_x, batch_y)`.
32
33 Note: `engine.state.output` for this engine is the loss of the processed batch.
34
35 Returns:
36 Engine: a trainer engine with supervised update function.
37 """
38 if device:
39 model.to(device)
40
41 def _update(engine, batch):
42 model.train()
43 optimizer.zero_grad()
44 x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)
45 y_pred = model(x)
46 loss = loss_fn(y_pred, y)
47 loss.backward()
48 optimizer.step()
49 return loss.item()
50
51 return Engine(_update)
52
53
54 def create_supervised_evaluator(model, metrics={},
55 device=None, non_blocking=False,
56 prepare_batch=_prepare_batch):
57 """
58 Factory function for creating an evaluator for supervised models.
59
60 Args:
61 model (`torch.nn.Module`): the model to train.
62 metrics (dict of str - :class:`~ignite.metrics.Metric`): a map of metric names to Metrics.
63 device (str, optional): device type specification (default: None).
64 Applies to both model and batches.
65 non_blocking (bool, optional): if True and this copy is between CPU and GPU, the copy may occur asynchronously
66 with respect to the host. For other cases, this argument has no effect.
67 prepare_batch (callable, optional): function that receives `batch`, `device`, `non_blocking` and outputs
68 tuple of tensors `(batch_x, batch_y)`.
69
70 Note: `engine.state.output` for this engine is a tuple of `(batch_pred, batch_y)`.
71
72 Returns:
73 Engine: an evaluator engine with supervised inference function.
74 """
75 if device:
76 model.to(device)
77
78 def _inference(engine, batch):
79 model.eval()
80 with torch.no_grad():
81 x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)
82 y_pred = model(x)
83 return y_pred, y
84
85 engine = Engine(_inference)
86
87 for name, metric in metrics.items():
88 metric.attach(engine, name)
89
90 return engine
91
[end of ignite/engine/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/ignite/engine/__init__.py b/ignite/engine/__init__.py
--- a/ignite/engine/__init__.py
+++ b/ignite/engine/__init__.py
@@ -15,7 +15,8 @@
def create_supervised_trainer(model, optimizer, loss_fn,
device=None, non_blocking=False,
- prepare_batch=_prepare_batch):
+ prepare_batch=_prepare_batch,
+ output_transform=lambda x, y, y_pred, loss: loss.item()):
"""
Factory function for creating a trainer for supervised models.
@@ -29,8 +30,11 @@
with respect to the host. For other cases, this argument has no effect.
prepare_batch (callable, optional): function that receives `batch`, `device`, `non_blocking` and outputs
tuple of tensors `(batch_x, batch_y)`.
+ output_transform (callable, optional): function that receives 'x', 'y', 'y_pred', 'loss' and returns value
+ to be assigned to engine's state.output after each iteration. Default is returning `loss.item()`.
- Note: `engine.state.output` for this engine is the loss of the processed batch.
+ Note: `engine.state.output` for this engine is defind by `output_transform` parameter and is the loss
+ of the processed batch by default.
Returns:
Engine: a trainer engine with supervised update function.
@@ -46,14 +50,15 @@
loss = loss_fn(y_pred, y)
loss.backward()
optimizer.step()
- return loss.item()
+ return output_transform(x, y, y_pred, loss)
return Engine(_update)
def create_supervised_evaluator(model, metrics={},
device=None, non_blocking=False,
- prepare_batch=_prepare_batch):
+ prepare_batch=_prepare_batch,
+ output_transform=lambda x, y, y_pred: (y_pred, y,)):
"""
Factory function for creating an evaluator for supervised models.
@@ -66,8 +71,12 @@
with respect to the host. For other cases, this argument has no effect.
prepare_batch (callable, optional): function that receives `batch`, `device`, `non_blocking` and outputs
tuple of tensors `(batch_x, batch_y)`.
+ output_transform (callable, optional): function that receives 'x', 'y', 'y_pred' and returns value
+ to be assigned to engine's state.output after each iteration. Default is returning `(y_pred, y,)` which fits
+ output expected by metrics. If you change it you should use `output_transform` in metrics.
- Note: `engine.state.output` for this engine is a tuple of `(batch_pred, batch_y)`.
+ Note: `engine.state.output` for this engine is defind by `output_transform` parameter and is
+ a tuple of `(batch_pred, batch_y)` by default.
Returns:
Engine: an evaluator engine with supervised inference function.
@@ -80,7 +89,7 @@
with torch.no_grad():
x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)
y_pred = model(x)
- return y_pred, y
+ return output_transform(x, y, y_pred)
engine = Engine(_inference)
| {"golden_diff": "diff --git a/ignite/engine/__init__.py b/ignite/engine/__init__.py\n--- a/ignite/engine/__init__.py\n+++ b/ignite/engine/__init__.py\n@@ -15,7 +15,8 @@\n \n def create_supervised_trainer(model, optimizer, loss_fn,\n device=None, non_blocking=False,\n- prepare_batch=_prepare_batch):\n+ prepare_batch=_prepare_batch,\n+ output_transform=lambda x, y, y_pred, loss: loss.item()):\n \"\"\"\n Factory function for creating a trainer for supervised models.\n \n@@ -29,8 +30,11 @@\n with respect to the host. For other cases, this argument has no effect.\n prepare_batch (callable, optional): function that receives `batch`, `device`, `non_blocking` and outputs\n tuple of tensors `(batch_x, batch_y)`.\n+ output_transform (callable, optional): function that receives 'x', 'y', 'y_pred', 'loss' and returns value\n+ to be assigned to engine's state.output after each iteration. Default is returning `loss.item()`.\n \n- Note: `engine.state.output` for this engine is the loss of the processed batch.\n+ Note: `engine.state.output` for this engine is defind by `output_transform` parameter and is the loss\n+ of the processed batch by default.\n \n Returns:\n Engine: a trainer engine with supervised update function.\n@@ -46,14 +50,15 @@\n loss = loss_fn(y_pred, y)\n loss.backward()\n optimizer.step()\n- return loss.item()\n+ return output_transform(x, y, y_pred, loss)\n \n return Engine(_update)\n \n \n def create_supervised_evaluator(model, metrics={},\n device=None, non_blocking=False,\n- prepare_batch=_prepare_batch):\n+ prepare_batch=_prepare_batch,\n+ output_transform=lambda x, y, y_pred: (y_pred, y,)):\n \"\"\"\n Factory function for creating an evaluator for supervised models.\n \n@@ -66,8 +71,12 @@\n with respect to the host. For other cases, this argument has no effect.\n prepare_batch (callable, optional): function that receives `batch`, `device`, `non_blocking` and outputs\n tuple of tensors `(batch_x, batch_y)`.\n+ output_transform (callable, optional): function that receives 'x', 'y', 'y_pred' and returns value\n+ to be assigned to engine's state.output after each iteration. Default is returning `(y_pred, y,)` which fits\n+ output expected by metrics. 
If you change it you should use `output_transform` in metrics.\n \n- Note: `engine.state.output` for this engine is a tuple of `(batch_pred, batch_y)`.\n+ Note: `engine.state.output` for this engine is defind by `output_transform` parameter and is\n+ a tuple of `(batch_pred, batch_y)` by default.\n \n Returns:\n Engine: an evaluator engine with supervised inference function.\n@@ -80,7 +89,7 @@\n with torch.no_grad():\n x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)\n y_pred = model(x)\n- return y_pred, y\n+ return output_transform(x, y, y_pred)\n \n engine = Engine(_inference)\n", "issue": "Improve create_supervised_trainer with optional output_transform\nFollowing [the discussion](https://github.com/pytorch/ignite/pull/476#discussion_r272108999), idea is to give more flexibility to users who are using `create_supervised_trainer`:\r\n```python\r\ndef default_output_transform(x, y, y_pred, loss):\r\n return loss.item() \r\n\r\n\r\ndef create_supervised_trainer(model, optimizer, loss_fn,\r\n device=None, non_blocking=False, prepare_batch=_prepare_batch, \r\n output_transform=default_output_transform):\r\n if device:\r\n model.to(device)\r\n\r\n def _update(engine, batch):\r\n model.train()\r\n optimizer.zero_grad()\r\n x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)\r\n y_pred = model(x)\r\n loss = loss_fn(y_pred, y)\r\n loss.backward()\r\n optimizer.step()\r\n return output_transform(x, y, y_pred, loss)\r\n\r\n return Engine(_update) \r\n```\r\n\r\ncc @IlyaOvodov\nImprove create_supervised_trainer with optional output_transform\nFollowing [the discussion](https://github.com/pytorch/ignite/pull/476#discussion_r272108999), idea is to give more flexibility to users who are using `create_supervised_trainer`:\r\n```python\r\ndef default_output_transform(x, y, y_pred, loss):\r\n return loss.item() \r\n\r\n\r\ndef create_supervised_trainer(model, optimizer, loss_fn,\r\n device=None, non_blocking=False, prepare_batch=_prepare_batch, \r\n output_transform=default_output_transform):\r\n if device:\r\n model.to(device)\r\n\r\n def _update(engine, batch):\r\n model.train()\r\n optimizer.zero_grad()\r\n x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)\r\n y_pred = model(x)\r\n loss = loss_fn(y_pred, y)\r\n loss.backward()\r\n optimizer.step()\r\n return output_transform(x, y, y_pred, loss)\r\n\r\n return Engine(_update) \r\n```\r\n\r\ncc @IlyaOvodov\n", "before_files": [{"content": "import torch\n\nfrom ignite.engine.engine import Engine, State, Events\nfrom ignite.utils import convert_tensor\n\n\ndef _prepare_batch(batch, device=None, non_blocking=False):\n \"\"\"Prepare batch for training: pass to a device with options.\n\n \"\"\"\n x, y = batch\n return (convert_tensor(x, device=device, non_blocking=non_blocking),\n convert_tensor(y, device=device, non_blocking=non_blocking))\n\n\ndef create_supervised_trainer(model, optimizer, loss_fn,\n device=None, non_blocking=False,\n prepare_batch=_prepare_batch):\n \"\"\"\n Factory function for creating a trainer for supervised models.\n\n Args:\n model (`torch.nn.Module`): the model to train.\n optimizer (`torch.optim.Optimizer`): the optimizer to use.\n loss_fn (torch.nn loss function): the loss function to use.\n device (str, optional): device type specification (default: None).\n Applies to both model and batches.\n non_blocking (bool, optional): if True and this copy is between CPU and GPU, the copy may occur asynchronously\n with respect to the host. 
For other cases, this argument has no effect.\n prepare_batch (callable, optional): function that receives `batch`, `device`, `non_blocking` and outputs\n tuple of tensors `(batch_x, batch_y)`.\n\n Note: `engine.state.output` for this engine is the loss of the processed batch.\n\n Returns:\n Engine: a trainer engine with supervised update function.\n \"\"\"\n if device:\n model.to(device)\n\n def _update(engine, batch):\n model.train()\n optimizer.zero_grad()\n x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)\n y_pred = model(x)\n loss = loss_fn(y_pred, y)\n loss.backward()\n optimizer.step()\n return loss.item()\n\n return Engine(_update)\n\n\ndef create_supervised_evaluator(model, metrics={},\n device=None, non_blocking=False,\n prepare_batch=_prepare_batch):\n \"\"\"\n Factory function for creating an evaluator for supervised models.\n\n Args:\n model (`torch.nn.Module`): the model to train.\n metrics (dict of str - :class:`~ignite.metrics.Metric`): a map of metric names to Metrics.\n device (str, optional): device type specification (default: None).\n Applies to both model and batches.\n non_blocking (bool, optional): if True and this copy is between CPU and GPU, the copy may occur asynchronously\n with respect to the host. For other cases, this argument has no effect.\n prepare_batch (callable, optional): function that receives `batch`, `device`, `non_blocking` and outputs\n tuple of tensors `(batch_x, batch_y)`.\n\n Note: `engine.state.output` for this engine is a tuple of `(batch_pred, batch_y)`.\n\n Returns:\n Engine: an evaluator engine with supervised inference function.\n \"\"\"\n if device:\n model.to(device)\n\n def _inference(engine, batch):\n model.eval()\n with torch.no_grad():\n x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)\n y_pred = model(x)\n return y_pred, y\n\n engine = Engine(_inference)\n\n for name, metric in metrics.items():\n metric.attach(engine, name)\n\n return engine\n", "path": "ignite/engine/__init__.py"}]} | 1,868 | 729 |
gh_patches_debug_3543 | rasdani/github-patches | git_diff | beeware__toga-1634 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Source installs no longer working
#1614 made some changes to the packaging of modules to support the release package workflow.
The wheels generated from this process appear to work fine; however, source installs don't appear to be working. I've had problems on both macOS and Android.
**To Reproduce**
Steps to reproduce the behavior:
1. `briefcase run` or `briefcase run android` on Tutorial 0.
**Expected behavior**
App should start.
**Environment:**
- Operating System: macOS
- Python version: 3.10
- Software versions:
- Briefcase: 0.3.11
- Toga: 96881f093
</issue>
<code>
[start of src/web/setup.py]
1 #!/usr/bin/env python
2 import re
3
4 from setuptools import setup
5
6 # Version handline needs to be programatic because
7 # we can't import toga_web to compute the version;
8 # and to support versioned subpackage dependencies
9 with open('src/toga_web/__init__.py', encoding='utf8') as version_file:
10 version_match = re.search(
11 r"^__version__ = ['\"]([^'\"]*)['\"]",
12 version_file.read(),
13 re.M
14 )
15 if version_match:
16 version = version_match.group(1)
17 else:
18 raise RuntimeError("Unable to find version string.")
19
20 setup(
21 version=version,
22 install_requires=[
23 # TODO: Due to https://github.com/pyodide/pyodide/issues/2408, the name
24 # toga-core is ambigous when on the package hasn't been published to
25 # PyPI. As a workaround, don't specify the dependency, and manually
26 # ensure that toga-core is installed.
27 # 'toga-core==%s' % version,
28 ],
29 )
30
[end of src/web/setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/web/setup.py b/src/web/setup.py
--- a/src/web/setup.py
+++ b/src/web/setup.py
@@ -20,10 +20,6 @@
setup(
version=version,
install_requires=[
- # TODO: Due to https://github.com/pyodide/pyodide/issues/2408, the name
- # toga-core is ambigous when on the package hasn't been published to
- # PyPI. As a workaround, don't specify the dependency, and manually
- # ensure that toga-core is installed.
- # 'toga-core==%s' % version,
+ 'toga-core==%s' % version,
],
)
| {"golden_diff": "diff --git a/src/web/setup.py b/src/web/setup.py\n--- a/src/web/setup.py\n+++ b/src/web/setup.py\n@@ -20,10 +20,6 @@\n setup(\n version=version,\n install_requires=[\n- # TODO: Due to https://github.com/pyodide/pyodide/issues/2408, the name\n- # toga-core is ambigous when on the package hasn't been published to\n- # PyPI. As a workaround, don't specify the dependency, and manually\n- # ensure that toga-core is installed.\n- # 'toga-core==%s' % version,\n+ 'toga-core==%s' % version,\n ],\n )\n", "issue": "Source installs no longer working\n#1614 made some changes to the packaging of modules to support the release package workflow.\r\n\r\nThe wheels generated from this process appear to work fine; however, source installs don't appear to be working. I've had problems on both macOS and Android.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. `briefcase run` or `briefcase run android` on Tutorial 0.\r\n\r\n**Expected behavior**\r\n\r\nApp should start.\r\n\r\n**Environment:**\r\n - Operating System: macOS\r\n - Python version: 3.10\r\n - Software versions:\r\n - Briefcase: 0.3.11\r\n - Toga: 96881f093\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\nimport re\n\nfrom setuptools import setup\n\n# Version handline needs to be programatic because\n# we can't import toga_web to compute the version;\n# and to support versioned subpackage dependencies\nwith open('src/toga_web/__init__.py', encoding='utf8') as version_file:\n version_match = re.search(\n r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file.read(),\n re.M\n )\n if version_match:\n version = version_match.group(1)\n else:\n raise RuntimeError(\"Unable to find version string.\")\n\nsetup(\n version=version,\n install_requires=[\n # TODO: Due to https://github.com/pyodide/pyodide/issues/2408, the name\n # toga-core is ambigous when on the package hasn't been published to\n # PyPI. As a workaround, don't specify the dependency, and manually\n # ensure that toga-core is installed.\n # 'toga-core==%s' % version,\n ],\n)\n", "path": "src/web/setup.py"}]} | 970 | 160 |
gh_patches_debug_2990 | rasdani/github-patches | git_diff | aio-libs-abandoned__aioredis-py-535 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add a BUSYGROUP reply error
The XGROUP CREATE command can return a BUSYGROUP error when a group already exists: https://redis.io/commands/xgroup
I think the `ReplyError` subclass for matching it would look like this:
```py
class BusyGroupError(ReplyError):
    MATCH_REPLY = "BUSYGROUP Consumer Group name already exists"
```
</issue>
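Because `ReplyError.__new__` (see `aioredis/errors.py` below) dispatches on `MATCH_REPLY`, adding the proposed subclass is enough for the server reply to surface as the specific exception. A small sketch of the resulting behaviour, assuming the subclass has been added:

```python
from aioredis.errors import ReplyError, BusyGroupError

err = ReplyError("BUSYGROUP Consumer Group name already exists")
assert isinstance(err, BusyGroupError)  # __new__ picked the matching subclass
```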
<code>
[start of aioredis/errors.py]
1 __all__ = [
2 'RedisError',
3 'ProtocolError',
4 'ReplyError',
5 'MaxClientsError',
6 'AuthError',
7 'PipelineError',
8 'MultiExecError',
9 'WatchVariableError',
10 'ChannelClosedError',
11 'ConnectionClosedError',
12 'ConnectionForcedCloseError',
13 'PoolClosedError',
14 'MasterNotFoundError',
15 'SlaveNotFoundError',
16 'ReadOnlyError',
17 ]
18
19
20 class RedisError(Exception):
21 """Base exception class for aioredis exceptions."""
22
23
24 class ProtocolError(RedisError):
25 """Raised when protocol error occurs."""
26
27
28 class ReplyError(RedisError):
29 """Raised for redis error replies (-ERR)."""
30
31 MATCH_REPLY = None
32
33 def __new__(cls, msg, *args):
34 for klass in cls.__subclasses__():
35 if msg and klass.MATCH_REPLY and msg.startswith(klass.MATCH_REPLY):
36 return klass(msg, *args)
37 return super().__new__(cls, msg, *args)
38
39
40 class MaxClientsError(ReplyError):
41 """Raised for redis server when the maximum number of client has been
42 reached."""
43
44 MATCH_REPLY = "ERR max number of clients reached"
45
46
47 class AuthError(ReplyError):
48 """Raised when authentication errors occurs."""
49
50 MATCH_REPLY = ("NOAUTH ", "ERR invalid password")
51
52
53 class PipelineError(RedisError):
54 """Raised if command within pipeline raised error."""
55
56 def __init__(self, errors):
57 super().__init__('{} errors:'.format(self.__class__.__name__), errors)
58
59
60 class MultiExecError(PipelineError):
61 """Raised if command within MULTI/EXEC block caused error."""
62
63
64 class WatchVariableError(MultiExecError):
65 """Raised if watched variable changed (EXEC returns None)."""
66
67
68 class ChannelClosedError(RedisError):
69 """Raised when Pub/Sub channel is unsubscribed and messages queue is empty.
70 """
71
72
73 class ReadOnlyError(RedisError):
74 """Raised from slave when read-only mode is enabled"""
75
76
77 class MasterNotFoundError(RedisError):
78 """Raised for sentinel master not found error."""
79
80
81 class SlaveNotFoundError(RedisError):
82 """Raised for sentinel slave not found error."""
83
84
85 class MasterReplyError(RedisError):
86 """Raised by sentinel client for master error replies."""
87
88
89 class SlaveReplyError(RedisError):
90 """Raised by sentinel client for slave error replies."""
91
92
93 class ConnectionClosedError(RedisError):
94 """Raised if connection to server was closed."""
95
96
97 class ConnectionForcedCloseError(ConnectionClosedError):
98 """Raised if connection was closed with .close() method."""
99
100
101 class PoolClosedError(RedisError):
102 """Raised if pool is closed."""
103
[end of aioredis/errors.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/aioredis/errors.py b/aioredis/errors.py
--- a/aioredis/errors.py
+++ b/aioredis/errors.py
@@ -50,6 +50,12 @@
MATCH_REPLY = ("NOAUTH ", "ERR invalid password")
+class BusyGroupError(ReplyError):
+ """Raised if Consumer Group name already exists."""
+
+ MATCH_REPLY = "BUSYGROUP Consumer Group name already exists"
+
+
class PipelineError(RedisError):
"""Raised if command within pipeline raised error."""
| {"golden_diff": "diff --git a/aioredis/errors.py b/aioredis/errors.py\n--- a/aioredis/errors.py\n+++ b/aioredis/errors.py\n@@ -50,6 +50,12 @@\n MATCH_REPLY = (\"NOAUTH \", \"ERR invalid password\")\n \n \n+class BusyGroupError(ReplyError):\n+ \"\"\"Raised if Consumer Group name already exists.\"\"\"\n+\n+ MATCH_REPLY = \"BUSYGROUP Consumer Group name already exists\"\n+\n+\n class PipelineError(RedisError):\n \"\"\"Raised if command within pipeline raised error.\"\"\"\n", "issue": "Add a BUSYGROUP reply error\nThe XGROUP CREATE command can return a BUSYGROUP error when a group already exists: https://redis.io/commands/xgroup\r\n\r\nI think the `ReplyError` subclass for matching it would look like this:\r\n\r\n```py\r\nclass BusyGroupError(ReplyError):\r\n MATCH_REPLY = \"BUSYGROUP Consumer Group name already exists\"\r\n```\n", "before_files": [{"content": "__all__ = [\n 'RedisError',\n 'ProtocolError',\n 'ReplyError',\n 'MaxClientsError',\n 'AuthError',\n 'PipelineError',\n 'MultiExecError',\n 'WatchVariableError',\n 'ChannelClosedError',\n 'ConnectionClosedError',\n 'ConnectionForcedCloseError',\n 'PoolClosedError',\n 'MasterNotFoundError',\n 'SlaveNotFoundError',\n 'ReadOnlyError',\n ]\n\n\nclass RedisError(Exception):\n \"\"\"Base exception class for aioredis exceptions.\"\"\"\n\n\nclass ProtocolError(RedisError):\n \"\"\"Raised when protocol error occurs.\"\"\"\n\n\nclass ReplyError(RedisError):\n \"\"\"Raised for redis error replies (-ERR).\"\"\"\n\n MATCH_REPLY = None\n\n def __new__(cls, msg, *args):\n for klass in cls.__subclasses__():\n if msg and klass.MATCH_REPLY and msg.startswith(klass.MATCH_REPLY):\n return klass(msg, *args)\n return super().__new__(cls, msg, *args)\n\n\nclass MaxClientsError(ReplyError):\n \"\"\"Raised for redis server when the maximum number of client has been\n reached.\"\"\"\n\n MATCH_REPLY = \"ERR max number of clients reached\"\n\n\nclass AuthError(ReplyError):\n \"\"\"Raised when authentication errors occurs.\"\"\"\n\n MATCH_REPLY = (\"NOAUTH \", \"ERR invalid password\")\n\n\nclass PipelineError(RedisError):\n \"\"\"Raised if command within pipeline raised error.\"\"\"\n\n def __init__(self, errors):\n super().__init__('{} errors:'.format(self.__class__.__name__), errors)\n\n\nclass MultiExecError(PipelineError):\n \"\"\"Raised if command within MULTI/EXEC block caused error.\"\"\"\n\n\nclass WatchVariableError(MultiExecError):\n \"\"\"Raised if watched variable changed (EXEC returns None).\"\"\"\n\n\nclass ChannelClosedError(RedisError):\n \"\"\"Raised when Pub/Sub channel is unsubscribed and messages queue is empty.\n \"\"\"\n\n\nclass ReadOnlyError(RedisError):\n \"\"\"Raised from slave when read-only mode is enabled\"\"\"\n\n\nclass MasterNotFoundError(RedisError):\n \"\"\"Raised for sentinel master not found error.\"\"\"\n\n\nclass SlaveNotFoundError(RedisError):\n \"\"\"Raised for sentinel slave not found error.\"\"\"\n\n\nclass MasterReplyError(RedisError):\n \"\"\"Raised by sentinel client for master error replies.\"\"\"\n\n\nclass SlaveReplyError(RedisError):\n \"\"\"Raised by sentinel client for slave error replies.\"\"\"\n\n\nclass ConnectionClosedError(RedisError):\n \"\"\"Raised if connection to server was closed.\"\"\"\n\n\nclass ConnectionForcedCloseError(ConnectionClosedError):\n \"\"\"Raised if connection was closed with .close() method.\"\"\"\n\n\nclass PoolClosedError(RedisError):\n \"\"\"Raised if pool is closed.\"\"\"\n", "path": "aioredis/errors.py"}]} | 1,384 | 119 |
gh_patches_debug_1898 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-1813 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
OpenTelemetry distro as a default distro for OpenTelemetry Instrumentation
The `opentelemetry-instrumentation` auto-instrumentation doesn't work without installing `opentelemetry-distro`, because the component initialisation is done in the distro package. How is a regular user supposed to know this, and shouldn't `opentelemetry-distro` be the default, with an option to let users choose a different distro?
</issue>
<code>
[start of docs/getting_started/otlpcollector_example.py]
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 # otcollector.py
16 import time
17
18 from opentelemetry import trace
19 from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
20 OTLPSpanExporter,
21 )
22 from opentelemetry.sdk.trace import TracerProvider
23 from opentelemetry.sdk.trace.export import BatchSpanProcessor
24
25 span_exporter = OTLPSpanExporter(
26 # optional
27 # endpoint:="myCollectorURL:4317",
28 # credentials=ChannelCredentials(credentials),
29 # headers=(("metadata", "metadata")),
30 )
31 tracer_provider = TracerProvider()
32 trace.set_tracer_provider(tracer_provider)
33 span_processor = BatchSpanProcessor(span_exporter)
34 tracer_provider.add_span_processor(span_processor)
35
36 # Configure the tracer to use the collector exporter
37 tracer = trace.get_tracer_provider().get_tracer(__name__)
38
39 with tracer.start_as_current_span("foo"):
40 print("Hello world!")
41
[end of docs/getting_started/otlpcollector_example.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/docs/getting_started/otlpcollector_example.py b/docs/getting_started/otlpcollector_example.py
--- a/docs/getting_started/otlpcollector_example.py
+++ b/docs/getting_started/otlpcollector_example.py
@@ -24,7 +24,7 @@
span_exporter = OTLPSpanExporter(
# optional
- # endpoint:="myCollectorURL:4317",
+ # endpoint="myCollectorURL:4317",
# credentials=ChannelCredentials(credentials),
# headers=(("metadata", "metadata")),
)
| {"golden_diff": "diff --git a/docs/getting_started/otlpcollector_example.py b/docs/getting_started/otlpcollector_example.py\n--- a/docs/getting_started/otlpcollector_example.py\n+++ b/docs/getting_started/otlpcollector_example.py\n@@ -24,7 +24,7 @@\n \n span_exporter = OTLPSpanExporter(\n # optional\n- # endpoint:=\"myCollectorURL:4317\",\n+ # endpoint=\"myCollectorURL:4317\",\n # credentials=ChannelCredentials(credentials),\n # headers=((\"metadata\", \"metadata\")),\n )\n", "issue": "OpenTelemetry distro as a default distro for OpenTelemetry Instrumentation\nThe `opentelemetry-instrumentation` auto instrumentation doesn't work without installing `opentelemetry-distro` as the components initialisation is done in distro package. How does a regular user know about this and shouldn't openetemetry distro be the default and can give an option to let user use others? \n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# otcollector.py\nimport time\n\nfrom opentelemetry import trace\nfrom opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (\n OTLPSpanExporter,\n)\nfrom opentelemetry.sdk.trace import TracerProvider\nfrom opentelemetry.sdk.trace.export import BatchSpanProcessor\n\nspan_exporter = OTLPSpanExporter(\n # optional\n # endpoint:=\"myCollectorURL:4317\",\n # credentials=ChannelCredentials(credentials),\n # headers=((\"metadata\", \"metadata\")),\n)\ntracer_provider = TracerProvider()\ntrace.set_tracer_provider(tracer_provider)\nspan_processor = BatchSpanProcessor(span_exporter)\ntracer_provider.add_span_processor(span_processor)\n\n# Configure the tracer to use the collector exporter\ntracer = trace.get_tracer_provider().get_tracer(__name__)\n\nwith tracer.start_as_current_span(\"foo\"):\n print(\"Hello world!\")\n", "path": "docs/getting_started/otlpcollector_example.py"}]} | 1,017 | 128 |
gh_patches_debug_18421 | rasdani/github-patches | git_diff | akvo__akvo-rsr-2512 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
More 504s: on the results framework page
@Geerts reports on Skype: 504 Gateway timeout hunter strikes again: http://rsr.test.akvo.org/rest/v1/indicator_period_data_framework/?format=json&period__indicator__result__project=2780
Via: http://rsr.test.akvo.org/en/myrsr/results/2780/#results,13323,5679
</issue>
<code>
[start of akvo/rest/views/indicator_period_data.py]
1 # -*- coding: utf-8 -*-
2
3 # Akvo RSR is covered by the GNU Affero General Public License.
4 # See more details in the license.txt file located at the root folder of the Akvo RSR module.
5 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
6
7
8 from akvo.rsr.models import IndicatorPeriodData, IndicatorPeriodDataComment
9
10 from ..serializers import (IndicatorPeriodDataSerializer, IndicatorPeriodDataFrameworkSerializer,
11 IndicatorPeriodDataCommentSerializer)
12 from ..viewsets import PublicProjectViewSet
13
14 from django.http import HttpResponseForbidden
15
16 from rest_framework import status
17 from rest_framework.decorators import api_view, permission_classes
18 from rest_framework.response import Response
19
20
21 class IndicatorPeriodDataViewSet(PublicProjectViewSet):
22 """
23 """
24 queryset = IndicatorPeriodData.objects.all()
25 serializer_class = IndicatorPeriodDataSerializer
26
27 project_relation = 'period__indicator__result__project__'
28
29
30 class IndicatorPeriodDataFrameworkViewSet(PublicProjectViewSet):
31 """
32 """
33 queryset = IndicatorPeriodData.objects.all()
34 serializer_class = IndicatorPeriodDataFrameworkSerializer
35 project_relation = 'period__indicator__result__project__'
36
37
38 class IndicatorPeriodDataCommentViewSet(PublicProjectViewSet):
39 """
40 """
41 queryset = IndicatorPeriodDataComment.objects.all()
42 serializer_class = IndicatorPeriodDataCommentSerializer
43 project_relation = 'data__period__indicator__result__project__'
44
45
46 @api_view(['POST'])
47 def indicator_upload_file(request, pk=None):
48 """
49 Special API call for directly uploading a file.
50
51 :param request; A Django request object.
52 :param pk; The primary key of an IndicatorPeriodData instance.
53 """
54 update = IndicatorPeriodData.objects.get(pk=pk)
55 upload_file = request.data['file']
56
57 # Permissions
58 user = getattr(request, 'user', None)
59 if not user:
60 return Response({'error': 'User is not logged in'}, status=status.HTTP_403_FORBIDDEN)
61
62 # TODO: Check if user is allowed to upload a file
63 # if not user.has_perm('rsr.change_project', update.period.indicator.result.project):
64 # return Response({'error': 'User has no permission to place an update'},
65 # status=status.HTTP_403_FORBIDDEN)
66
67 try:
68 file_type = request.POST.copy()['type']
69 if file_type == 'photo':
70 update.photo = upload_file
71 update.save(update_fields=['photo'])
72 return Response({'file': update.photo.url})
73 elif file_type == 'file':
74 update.file = upload_file
75 update.save(update_fields=['file'])
76 return Response({'file': update.file.url})
77 except Exception as e:
78 return Response({'error': str(e)}, status=status.HTTP_400_BAD_REQUEST)
79
[end of akvo/rest/views/indicator_period_data.py]
[start of akvo/rest/views/partnership.py]
1 # -*- coding: utf-8 -*-
2
3 # Akvo RSR is covered by the GNU Affero General Public License.
4 # See more details in the license.txt file located at the root folder of the Akvo RSR module.
5 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
6
7
8 from akvo.rsr.models import Partnership
9
10 from ..serializers import PartnershipSerializer, PartnershipBasicSerializer
11 from ..viewsets import PublicProjectViewSet
12
13
14 class PartnershipViewSet(PublicProjectViewSet):
15 """
16 """
17 queryset = Partnership.objects.all()
18 serializer_class = PartnershipSerializer
19
20 def get_queryset(self):
21 """Allow filtering on partner_type."""
22 partner_type = self.request.query_params.get('partner_type', None)
23 if partner_type and partner_type in Partnership.PARTNER_TYPES_TO_ROLES_MAP.keys():
24 self.queryset = self.queryset.filter(
25 iati_organisation_role=Partnership.PARTNER_TYPES_TO_ROLES_MAP[partner_type]
26 ).distinct()
27 return super(PartnershipViewSet, self).get_queryset()
28
29
30 class PartnershipMoreLinkViewSet(PublicProjectViewSet):
31 """
32 Specific endpoint for the '+X partners' links in RSR. Contains the name, long name and logo of
33 an organisation and the partnership role.
34 """
35 queryset = Partnership.objects.all()
36 serializer_class = PartnershipBasicSerializer
37
[end of akvo/rest/views/partnership.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/akvo/rest/views/indicator_period_data.py b/akvo/rest/views/indicator_period_data.py
--- a/akvo/rest/views/indicator_period_data.py
+++ b/akvo/rest/views/indicator_period_data.py
@@ -30,7 +30,13 @@
class IndicatorPeriodDataFrameworkViewSet(PublicProjectViewSet):
"""
"""
- queryset = IndicatorPeriodData.objects.all()
+ queryset = IndicatorPeriodData.objects.select_related(
+ 'period',
+ 'user'
+ ).prefetch_related(
+ 'comments',
+ 'comments__user'
+ ).all()
serializer_class = IndicatorPeriodDataFrameworkSerializer
project_relation = 'period__indicator__result__project__'
diff --git a/akvo/rest/views/partnership.py b/akvo/rest/views/partnership.py
--- a/akvo/rest/views/partnership.py
+++ b/akvo/rest/views/partnership.py
@@ -14,7 +14,7 @@
class PartnershipViewSet(PublicProjectViewSet):
"""
"""
- queryset = Partnership.objects.all()
+ queryset = Partnership.objects.select_related('organisation', 'project').all()
serializer_class = PartnershipSerializer
def get_queryset(self):
| {"golden_diff": "diff --git a/akvo/rest/views/indicator_period_data.py b/akvo/rest/views/indicator_period_data.py\n--- a/akvo/rest/views/indicator_period_data.py\n+++ b/akvo/rest/views/indicator_period_data.py\n@@ -30,7 +30,13 @@\n class IndicatorPeriodDataFrameworkViewSet(PublicProjectViewSet):\n \"\"\"\n \"\"\"\n- queryset = IndicatorPeriodData.objects.all()\n+ queryset = IndicatorPeriodData.objects.select_related(\n+ 'period',\n+ 'user'\n+ ).prefetch_related(\n+ 'comments',\n+ 'comments__user'\n+ ).all()\n serializer_class = IndicatorPeriodDataFrameworkSerializer\n project_relation = 'period__indicator__result__project__'\n \ndiff --git a/akvo/rest/views/partnership.py b/akvo/rest/views/partnership.py\n--- a/akvo/rest/views/partnership.py\n+++ b/akvo/rest/views/partnership.py\n@@ -14,7 +14,7 @@\n class PartnershipViewSet(PublicProjectViewSet):\n \"\"\"\n \"\"\"\n- queryset = Partnership.objects.all()\n+ queryset = Partnership.objects.select_related('organisation', 'project').all()\n serializer_class = PartnershipSerializer\n \n def get_queryset(self):\n", "issue": "More 504s: on the results framework page\n@Geerts reports on Skype: 504 Gateway timeout hunter strikes again: http://rsr.test.akvo.org/rest/v1/indicator_period_data_framework/?format=json&period__indicator__result__project=2780\r\n\r\nVia: http://rsr.test.akvo.org/en/myrsr/results/2780/#results,13323,5679\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\n\nfrom akvo.rsr.models import IndicatorPeriodData, IndicatorPeriodDataComment\n\nfrom ..serializers import (IndicatorPeriodDataSerializer, IndicatorPeriodDataFrameworkSerializer,\n IndicatorPeriodDataCommentSerializer)\nfrom ..viewsets import PublicProjectViewSet\n\nfrom django.http import HttpResponseForbidden\n\nfrom rest_framework import status\nfrom rest_framework.decorators import api_view, permission_classes\nfrom rest_framework.response import Response\n\n\nclass IndicatorPeriodDataViewSet(PublicProjectViewSet):\n \"\"\"\n \"\"\"\n queryset = IndicatorPeriodData.objects.all()\n serializer_class = IndicatorPeriodDataSerializer\n\n project_relation = 'period__indicator__result__project__'\n\n\nclass IndicatorPeriodDataFrameworkViewSet(PublicProjectViewSet):\n \"\"\"\n \"\"\"\n queryset = IndicatorPeriodData.objects.all()\n serializer_class = IndicatorPeriodDataFrameworkSerializer\n project_relation = 'period__indicator__result__project__'\n\n\nclass IndicatorPeriodDataCommentViewSet(PublicProjectViewSet):\n \"\"\"\n \"\"\"\n queryset = IndicatorPeriodDataComment.objects.all()\n serializer_class = IndicatorPeriodDataCommentSerializer\n project_relation = 'data__period__indicator__result__project__'\n\n\n@api_view(['POST'])\ndef indicator_upload_file(request, pk=None):\n \"\"\"\n Special API call for directly uploading a file.\n\n :param request; A Django request object.\n :param pk; The primary key of an IndicatorPeriodData instance.\n \"\"\"\n update = IndicatorPeriodData.objects.get(pk=pk)\n upload_file = request.data['file']\n\n # Permissions\n user = getattr(request, 'user', None)\n if not user:\n return Response({'error': 'User is not logged in'}, status=status.HTTP_403_FORBIDDEN)\n\n # TODO: Check if user is allowed to upload a file\n # if not user.has_perm('rsr.change_project', 
update.period.indicator.result.project):\n # return Response({'error': 'User has no permission to place an update'},\n # status=status.HTTP_403_FORBIDDEN)\n\n try:\n file_type = request.POST.copy()['type']\n if file_type == 'photo':\n update.photo = upload_file\n update.save(update_fields=['photo'])\n return Response({'file': update.photo.url})\n elif file_type == 'file':\n update.file = upload_file\n update.save(update_fields=['file'])\n return Response({'file': update.file.url})\n except Exception as e:\n return Response({'error': str(e)}, status=status.HTTP_400_BAD_REQUEST)\n", "path": "akvo/rest/views/indicator_period_data.py"}, {"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\n\nfrom akvo.rsr.models import Partnership\n\nfrom ..serializers import PartnershipSerializer, PartnershipBasicSerializer\nfrom ..viewsets import PublicProjectViewSet\n\n\nclass PartnershipViewSet(PublicProjectViewSet):\n \"\"\"\n \"\"\"\n queryset = Partnership.objects.all()\n serializer_class = PartnershipSerializer\n\n def get_queryset(self):\n \"\"\"Allow filtering on partner_type.\"\"\"\n partner_type = self.request.query_params.get('partner_type', None)\n if partner_type and partner_type in Partnership.PARTNER_TYPES_TO_ROLES_MAP.keys():\n self.queryset = self.queryset.filter(\n iati_organisation_role=Partnership.PARTNER_TYPES_TO_ROLES_MAP[partner_type]\n ).distinct()\n return super(PartnershipViewSet, self).get_queryset()\n\n\nclass PartnershipMoreLinkViewSet(PublicProjectViewSet):\n \"\"\"\n Specific endpoint for the '+X partners' links in RSR. Contains the name, long name and logo of\n an organisation and the partnership role.\n \"\"\"\n queryset = Partnership.objects.all()\n serializer_class = PartnershipBasicSerializer\n", "path": "akvo/rest/views/partnership.py"}]} | 1,768 | 270 |
gh_patches_debug_12568 | rasdani/github-patches | git_diff | Kinto__kinto-474 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Do not require cliquet master branch in dev
As discussed with @Natim @almet
</issue>
<code>
[start of kinto/config/__init__.py]
1 import os
2 import binascii
3 import codecs
4 from kinto import logger
5
6 HERE = os.path.abspath(os.path.dirname(__file__))
7
8
9 def render_template(template, destination, **kwargs):
10 template = os.path.join(HERE, template)
11 folder = os.path.dirname(destination)
12
13 if not os.path.exists(folder):
14 os.makedirs(folder)
15
16 logger.info("Created config {}".format(os.path.abspath(destination)))
17
18 with codecs.open(template, 'r', encoding='utf-8') as f:
19 raw_template = f.read()
20 rendered = raw_template.format(**kwargs)
21 with codecs.open(destination, 'w+', encoding='utf-8') as output:
22 output.write(rendered)
23
24
25 def init(config_file, backend):
26 values = {}
27 values['secret'] = binascii.b2a_hex(os.urandom(32))
28
29 values['storage_backend'] = "cliquet.storage.%s" % backend
30 values['cache_backend'] = "cliquet.cache.%s" % backend
31 values['permission_backend'] = "cliquet.permission.%s" % backend
32
33 if backend == 'postgresql':
34 postgresql_url = "postgres://postgres:postgres@localhost/postgres"
35 values['storage_url'] = postgresql_url
36 values['cache_url'] = postgresql_url
37 values['permission_url'] = postgresql_url
38
39 elif backend == 'redis':
40 redis_url = "redis://localhost:6379"
41 values['storage_url'] = redis_url + "/1"
42 values['cache_url'] = redis_url + "/2"
43 values['permission_url'] = redis_url + "/3"
44
45 else:
46 values['storage_url'] = ''
47 values['cache_url'] = ''
48 values['permission_url'] = ''
49
50 render_template("kinto.tpl", config_file, **values)
51
[end of kinto/config/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kinto/config/__init__.py b/kinto/config/__init__.py
--- a/kinto/config/__init__.py
+++ b/kinto/config/__init__.py
@@ -1,6 +1,8 @@
import os
-import binascii
import codecs
+
+from cliquet import utils as cliquet_utils
+
from kinto import logger
HERE = os.path.abspath(os.path.dirname(__file__))
@@ -24,7 +26,8 @@
def init(config_file, backend):
values = {}
- values['secret'] = binascii.b2a_hex(os.urandom(32))
+
+ values['secret'] = cliquet_utils.random_bytes_hex(32)
values['storage_backend'] = "cliquet.storage.%s" % backend
values['cache_backend'] = "cliquet.cache.%s" % backend
| {"golden_diff": "diff --git a/kinto/config/__init__.py b/kinto/config/__init__.py\n--- a/kinto/config/__init__.py\n+++ b/kinto/config/__init__.py\n@@ -1,6 +1,8 @@\n import os\n-import binascii\n import codecs\n+\n+from cliquet import utils as cliquet_utils\n+\n from kinto import logger\n \n HERE = os.path.abspath(os.path.dirname(__file__))\n@@ -24,7 +26,8 @@\n \n def init(config_file, backend):\n values = {}\n- values['secret'] = binascii.b2a_hex(os.urandom(32))\n+\n+ values['secret'] = cliquet_utils.random_bytes_hex(32)\n \n values['storage_backend'] = \"cliquet.storage.%s\" % backend\n values['cache_backend'] = \"cliquet.cache.%s\" % backend\n", "issue": "Do not require cliquet master branch in dev\nAs discussed with @Natim @almet \n\n", "before_files": [{"content": "import os\nimport binascii\nimport codecs\nfrom kinto import logger\n\nHERE = os.path.abspath(os.path.dirname(__file__))\n\n\ndef render_template(template, destination, **kwargs):\n template = os.path.join(HERE, template)\n folder = os.path.dirname(destination)\n\n if not os.path.exists(folder):\n os.makedirs(folder)\n\n logger.info(\"Created config {}\".format(os.path.abspath(destination)))\n\n with codecs.open(template, 'r', encoding='utf-8') as f:\n raw_template = f.read()\n rendered = raw_template.format(**kwargs)\n with codecs.open(destination, 'w+', encoding='utf-8') as output:\n output.write(rendered)\n\n\ndef init(config_file, backend):\n values = {}\n values['secret'] = binascii.b2a_hex(os.urandom(32))\n\n values['storage_backend'] = \"cliquet.storage.%s\" % backend\n values['cache_backend'] = \"cliquet.cache.%s\" % backend\n values['permission_backend'] = \"cliquet.permission.%s\" % backend\n\n if backend == 'postgresql':\n postgresql_url = \"postgres://postgres:postgres@localhost/postgres\"\n values['storage_url'] = postgresql_url\n values['cache_url'] = postgresql_url\n values['permission_url'] = postgresql_url\n\n elif backend == 'redis':\n redis_url = \"redis://localhost:6379\"\n values['storage_url'] = redis_url + \"/1\"\n values['cache_url'] = redis_url + \"/2\"\n values['permission_url'] = redis_url + \"/3\"\n\n else:\n values['storage_url'] = ''\n values['cache_url'] = ''\n values['permission_url'] = ''\n\n render_template(\"kinto.tpl\", config_file, **values)\n", "path": "kinto/config/__init__.py"}]} | 1,047 | 190 |
gh_patches_debug_37959 | rasdani/github-patches | git_diff | openvinotoolkit__datumaro-371 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Import ImageNet dataset
### Steps to reproduce problem
1. Download and extract ImageNet dataset for image classification: [link](https://www.kaggle.com/c/imagenet-object-localization-challenge/data);
2. Add the loaded dataset into a Datumaro project;
3. Run `datum info`.
### Current behaviour
ImageNet dataset has ~1.2m images, but in the `info` output we can see that imported dataset has only 69647, and also these images have wrong labels.
### Expected behaviour
Correct import.
### Environment
`git log -1`: 7e35c8
</issue>
<code>
[start of datumaro/plugins/imagenet_format.py]
1 # Copyright (C) 2020 Intel Corporation
2 #
3 # SPDX-License-Identifier: MIT
4
5 import logging as log
6 import os
7 import os.path as osp
8
9 from datumaro.components.converter import Converter
10 from datumaro.components.extractor import (
11 AnnotationType, DatasetItem, Importer, Label, LabelCategories,
12 SourceExtractor,
13 )
14 from datumaro.util.image import find_images
15
16
17 class ImagenetPath:
18 IMAGE_DIR_NO_LABEL = 'no_label'
19
20
21 class ImagenetExtractor(SourceExtractor):
22 def __init__(self, path, subset=None):
23 assert osp.isdir(path), path
24 super().__init__(subset=subset)
25
26 self._categories = self._load_categories(path)
27 self._items = list(self._load_items(path).values())
28
29 def _load_categories(self, path):
30 label_cat = LabelCategories()
31 for dirname in sorted(os.listdir(path)):
32 if dirname != ImagenetPath.IMAGE_DIR_NO_LABEL:
33 label_cat.add(dirname)
34 return { AnnotationType.label: label_cat }
35
36 def _load_items(self, path):
37 items = {}
38
39 for image_path in find_images(path, recursive=True, max_depth=1):
40 label = osp.basename(osp.dirname(image_path))
41 image_name = osp.splitext(osp.basename(image_path))[0]
42 if image_name.startswith(label + '_'):
43 image_name = image_name[len(label) + 1:]
44
45 item = items.get(image_name)
46 if item is None:
47 item = DatasetItem(id=image_name, subset=self._subset,
48 image=image_path)
49 items[image_name] = item
50 annotations = item.annotations
51
52 if label != ImagenetPath.IMAGE_DIR_NO_LABEL:
53 label = self._categories[AnnotationType.label].find(label)[0]
54 annotations.append(Label(label=label))
55
56 return items
57
58
59 class ImagenetImporter(Importer):
60 @classmethod
61 def find_sources(cls, path):
62 if not osp.isdir(path):
63 return []
64 return [{ 'url': path, 'format': 'imagenet' }]
65
66
67 class ImagenetConverter(Converter):
68 DEFAULT_IMAGE_EXT = '.jpg'
69
70 def apply(self):
71 if 1 < len(self._extractor.subsets()):
72 log.warning("ImageNet format only supports exporting a single "
73 "subset, subset information will not be used.")
74
75 subset_dir = self._save_dir
76 extractor = self._extractor
77 labels = {}
78 for item in self._extractor:
79 labels = set(p.label for p in item.annotations
80 if p.type == AnnotationType.label)
81
82 for label in labels:
83 label_name = extractor.categories()[AnnotationType.label][label].name
84 self._save_image(item, osp.join(subset_dir, label_name,
85 '%s_%s' % (label_name, self._make_image_filename(item))))
86
87 if not labels:
88 self._save_image(item, osp.join(subset_dir,
89 ImagenetPath.IMAGE_DIR_NO_LABEL,
90 ImagenetPath.IMAGE_DIR_NO_LABEL + '_' + \
91 self._make_image_filename(item)))
92
[end of datumaro/plugins/imagenet_format.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/datumaro/plugins/imagenet_format.py b/datumaro/plugins/imagenet_format.py
--- a/datumaro/plugins/imagenet_format.py
+++ b/datumaro/plugins/imagenet_format.py
@@ -39,14 +39,13 @@
for image_path in find_images(path, recursive=True, max_depth=1):
label = osp.basename(osp.dirname(image_path))
image_name = osp.splitext(osp.basename(image_path))[0]
- if image_name.startswith(label + '_'):
- image_name = image_name[len(label) + 1:]
- item = items.get(image_name)
+ item_id = osp.join(label, image_name)
+ item = items.get(item_id)
if item is None:
- item = DatasetItem(id=image_name, subset=self._subset,
+ item = DatasetItem(id=item_id, subset=self._subset,
image=image_path)
- items[image_name] = item
+ items[item_id] = item
annotations = item.annotations
if label != ImagenetPath.IMAGE_DIR_NO_LABEL:
@@ -68,6 +67,13 @@
DEFAULT_IMAGE_EXT = '.jpg'
def apply(self):
+
+ def _get_dir_name(id_parts, label_name):
+ if 1 < len(id_parts) and id_parts[0] == label_name:
+ return ''
+ else:
+ return label_name
+
if 1 < len(self._extractor.subsets()):
log.warning("ImageNet format only supports exporting a single "
"subset, subset information will not be used.")
@@ -76,16 +82,15 @@
extractor = self._extractor
labels = {}
for item in self._extractor:
+ id_parts = item.id.split('/')
labels = set(p.label for p in item.annotations
if p.type == AnnotationType.label)
for label in labels:
label_name = extractor.categories()[AnnotationType.label][label].name
- self._save_image(item, osp.join(subset_dir, label_name,
- '%s_%s' % (label_name, self._make_image_filename(item))))
+ self._save_image(item, subdir=osp.join(subset_dir,
+ _get_dir_name(id_parts, label_name)))
if not labels:
- self._save_image(item, osp.join(subset_dir,
- ImagenetPath.IMAGE_DIR_NO_LABEL,
- ImagenetPath.IMAGE_DIR_NO_LABEL + '_' + \
- self._make_image_filename(item)))
+ self._save_image(item, subdir=osp.join(subset_dir,
+ _get_dir_name(id_parts, ImagenetPath.IMAGE_DIR_NO_LABEL)))
| {"golden_diff": "diff --git a/datumaro/plugins/imagenet_format.py b/datumaro/plugins/imagenet_format.py\n--- a/datumaro/plugins/imagenet_format.py\n+++ b/datumaro/plugins/imagenet_format.py\n@@ -39,14 +39,13 @@\n for image_path in find_images(path, recursive=True, max_depth=1):\n label = osp.basename(osp.dirname(image_path))\n image_name = osp.splitext(osp.basename(image_path))[0]\n- if image_name.startswith(label + '_'):\n- image_name = image_name[len(label) + 1:]\n \n- item = items.get(image_name)\n+ item_id = osp.join(label, image_name)\n+ item = items.get(item_id)\n if item is None:\n- item = DatasetItem(id=image_name, subset=self._subset,\n+ item = DatasetItem(id=item_id, subset=self._subset,\n image=image_path)\n- items[image_name] = item\n+ items[item_id] = item\n annotations = item.annotations\n \n if label != ImagenetPath.IMAGE_DIR_NO_LABEL:\n@@ -68,6 +67,13 @@\n DEFAULT_IMAGE_EXT = '.jpg'\n \n def apply(self):\n+\n+ def _get_dir_name(id_parts, label_name):\n+ if 1 < len(id_parts) and id_parts[0] == label_name:\n+ return ''\n+ else:\n+ return label_name\n+\n if 1 < len(self._extractor.subsets()):\n log.warning(\"ImageNet format only supports exporting a single \"\n \"subset, subset information will not be used.\")\n@@ -76,16 +82,15 @@\n extractor = self._extractor\n labels = {}\n for item in self._extractor:\n+ id_parts = item.id.split('/')\n labels = set(p.label for p in item.annotations\n if p.type == AnnotationType.label)\n \n for label in labels:\n label_name = extractor.categories()[AnnotationType.label][label].name\n- self._save_image(item, osp.join(subset_dir, label_name,\n- '%s_%s' % (label_name, self._make_image_filename(item))))\n+ self._save_image(item, subdir=osp.join(subset_dir,\n+ _get_dir_name(id_parts, label_name)))\n \n if not labels:\n- self._save_image(item, osp.join(subset_dir,\n- ImagenetPath.IMAGE_DIR_NO_LABEL,\n- ImagenetPath.IMAGE_DIR_NO_LABEL + '_' + \\\n- self._make_image_filename(item)))\n+ self._save_image(item, subdir=osp.join(subset_dir,\n+ _get_dir_name(id_parts, ImagenetPath.IMAGE_DIR_NO_LABEL)))\n", "issue": "Import ImageNet dataset\n### Steps to reproduce problem\r\n1. Download and extract ImageNet dataset for image classification: [link](https://www.kaggle.com/c/imagenet-object-localization-challenge/data);\r\n2. Add the loaded dataset into a Datumaro project;\r\n3. 
Run `datum info`.\r\n\r\n### Current behaviour\r\nImageNet dataset has ~1.2m images, but in the `info` output we can see that imported dataset has only 69647, and also these images have wrong labels.\r\n\r\n### Expected behaviour\r\nCorrect import.\r\n\r\n### Environment\r\n`git log -1`: 7e35c8\n", "before_files": [{"content": "# Copyright (C) 2020 Intel Corporation\n#\n# SPDX-License-Identifier: MIT\n\nimport logging as log\nimport os\nimport os.path as osp\n\nfrom datumaro.components.converter import Converter\nfrom datumaro.components.extractor import (\n AnnotationType, DatasetItem, Importer, Label, LabelCategories,\n SourceExtractor,\n)\nfrom datumaro.util.image import find_images\n\n\nclass ImagenetPath:\n IMAGE_DIR_NO_LABEL = 'no_label'\n\n\nclass ImagenetExtractor(SourceExtractor):\n def __init__(self, path, subset=None):\n assert osp.isdir(path), path\n super().__init__(subset=subset)\n\n self._categories = self._load_categories(path)\n self._items = list(self._load_items(path).values())\n\n def _load_categories(self, path):\n label_cat = LabelCategories()\n for dirname in sorted(os.listdir(path)):\n if dirname != ImagenetPath.IMAGE_DIR_NO_LABEL:\n label_cat.add(dirname)\n return { AnnotationType.label: label_cat }\n\n def _load_items(self, path):\n items = {}\n\n for image_path in find_images(path, recursive=True, max_depth=1):\n label = osp.basename(osp.dirname(image_path))\n image_name = osp.splitext(osp.basename(image_path))[0]\n if image_name.startswith(label + '_'):\n image_name = image_name[len(label) + 1:]\n\n item = items.get(image_name)\n if item is None:\n item = DatasetItem(id=image_name, subset=self._subset,\n image=image_path)\n items[image_name] = item\n annotations = item.annotations\n\n if label != ImagenetPath.IMAGE_DIR_NO_LABEL:\n label = self._categories[AnnotationType.label].find(label)[0]\n annotations.append(Label(label=label))\n\n return items\n\n\nclass ImagenetImporter(Importer):\n @classmethod\n def find_sources(cls, path):\n if not osp.isdir(path):\n return []\n return [{ 'url': path, 'format': 'imagenet' }]\n\n\nclass ImagenetConverter(Converter):\n DEFAULT_IMAGE_EXT = '.jpg'\n\n def apply(self):\n if 1 < len(self._extractor.subsets()):\n log.warning(\"ImageNet format only supports exporting a single \"\n \"subset, subset information will not be used.\")\n\n subset_dir = self._save_dir\n extractor = self._extractor\n labels = {}\n for item in self._extractor:\n labels = set(p.label for p in item.annotations\n if p.type == AnnotationType.label)\n\n for label in labels:\n label_name = extractor.categories()[AnnotationType.label][label].name\n self._save_image(item, osp.join(subset_dir, label_name,\n '%s_%s' % (label_name, self._make_image_filename(item))))\n\n if not labels:\n self._save_image(item, osp.join(subset_dir,\n ImagenetPath.IMAGE_DIR_NO_LABEL,\n ImagenetPath.IMAGE_DIR_NO_LABEL + '_' + \\\n self._make_image_filename(item)))\n", "path": "datumaro/plugins/imagenet_format.py"}]} | 1,507 | 591 |
gh_patches_debug_12128 | rasdani/github-patches | git_diff | nf-core__tools-1357 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Linting should fail if pipeline module file is edited
In https://github.com/ewels/nf-core-methylseq/pull/2 I have edited a module file that was pulled from nf-core/modules. I shouldn't be allowed to do this.
The linting warns me that something is amiss:
```
│ bismark/align │ modules/nf-cor… │ Local copy of │
│ │ │ module outdated │
```
But it should be checking the git sha in `modules.json` and recognising that it doesn't match what it expects. Then throwing a hard error.
Possible that the code for this is lurking in a PR that's waiting to be merged..
</issue>
<code>
[start of nf_core/modules/lint/module_changes.py]
1 """
2 Check whether the content of a module has changed compared to the original repository
3 """
4 import os
5 import requests
6 import rich
7 from nf_core.modules.lint import LintResult
8
9
10 def module_changes(module_lint_object, module):
11 """
12 Checks whether installed nf-core modules have changed compared to the
13 original repository
14 Downloads the 'main.nf' and 'meta.yml' files for every module
15 and compares them to the local copies
16
17 If the module has a 'git_sha', the file content is checked against this sha
18 """
19 files_to_check = ["main.nf", "meta.yml"]
20
21 # Loop over nf-core modules
22 module_base_url = f"https://raw.githubusercontent.com/{module_lint_object.modules_repo.name}/{module_lint_object.modules_repo.branch}/modules/{module.module_name}/"
23
24 # If module.git_sha specified, check specific commit version for changes
25 if module.git_sha:
26 module_base_url = f"https://raw.githubusercontent.com/{module_lint_object.modules_repo.name}/{module.git_sha}/modules/{module.module_name}/"
27
28 for f in files_to_check:
29 # open local copy, continue if file not found (a failed message has already been issued in this case)
30 try:
31 local_copy = open(os.path.join(module.module_dir, f), "r").read()
32 except FileNotFoundError as e:
33 continue
34
35 # Download remote copy and compare
36 url = module_base_url + f
37 r = requests.get(url=url)
38
39 if r.status_code != 200:
40 module.warned.append(
41 (
42 "check_local_copy",
43 f"Could not fetch remote copy, skipping comparison.",
44 f"{os.path.join(module.module_dir, f)}",
45 )
46 )
47 else:
48 try:
49 remote_copy = r.content.decode("utf-8")
50
51 if local_copy != remote_copy:
52 module.warned.append(
53 (
54 "check_local_copy",
55 "Local copy of module outdated",
56 f"{os.path.join(module.module_dir, f)}",
57 )
58 )
59 else:
60 module.passed.append(
61 (
62 "check_local_copy",
63 "Local copy of module up to date",
64 f"{os.path.join(module.module_dir, f)}",
65 )
66 )
67 except UnicodeDecodeError as e:
68 module.warned.append(
69 (
70 "check_local_copy",
71 f"Could not decode file from {url}. Skipping comparison ({e})",
72 f"{os.path.join(module.module_dir, f)}",
73 )
74 )
75
[end of nf_core/modules/lint/module_changes.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py
--- a/nf_core/modules/lint/module_changes.py
+++ b/nf_core/modules/lint/module_changes.py
@@ -49,10 +49,10 @@
remote_copy = r.content.decode("utf-8")
if local_copy != remote_copy:
- module.warned.append(
+ module.failed.append(
(
"check_local_copy",
- "Local copy of module outdated",
+ "Local copy of module does not match remote",
f"{os.path.join(module.module_dir, f)}",
)
)
| {"golden_diff": "diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py\n--- a/nf_core/modules/lint/module_changes.py\n+++ b/nf_core/modules/lint/module_changes.py\n@@ -49,10 +49,10 @@\n remote_copy = r.content.decode(\"utf-8\")\n \n if local_copy != remote_copy:\n- module.warned.append(\n+ module.failed.append(\n (\n \"check_local_copy\",\n- \"Local copy of module outdated\",\n+ \"Local copy of module does not match remote\",\n f\"{os.path.join(module.module_dir, f)}\",\n )\n )\n", "issue": "Linting should fail if pipeline module file is edited\nIn https://github.com/ewels/nf-core-methylseq/pull/2 I have edited a module file that was pulled from nf-core/modules. I shouldn't be allowed to do this.\r\n\r\nThe linting warns me that something is amiss:\r\n\r\n```\r\n\u2502 bismark/align \u2502 modules/nf-cor\u2026 \u2502 Local copy of \u2502\r\n\u2502 \u2502 \u2502 module outdated \u2502\r\n```\r\n\r\nBut it should be checking the git sha in `modules.json` and recognising that it doesn't match what it expects. Then throwing a hard error.\r\n\r\nPossible that the code for this is lurking in a PR that's waiting to be merged..\n", "before_files": [{"content": "\"\"\"\nCheck whether the content of a module has changed compared to the original repository\n\"\"\"\nimport os\nimport requests\nimport rich\nfrom nf_core.modules.lint import LintResult\n\n\ndef module_changes(module_lint_object, module):\n \"\"\"\n Checks whether installed nf-core modules have changed compared to the\n original repository\n Downloads the 'main.nf' and 'meta.yml' files for every module\n and compares them to the local copies\n\n If the module has a 'git_sha', the file content is checked against this sha\n \"\"\"\n files_to_check = [\"main.nf\", \"meta.yml\"]\n\n # Loop over nf-core modules\n module_base_url = f\"https://raw.githubusercontent.com/{module_lint_object.modules_repo.name}/{module_lint_object.modules_repo.branch}/modules/{module.module_name}/\"\n\n # If module.git_sha specified, check specific commit version for changes\n if module.git_sha:\n module_base_url = f\"https://raw.githubusercontent.com/{module_lint_object.modules_repo.name}/{module.git_sha}/modules/{module.module_name}/\"\n\n for f in files_to_check:\n # open local copy, continue if file not found (a failed message has already been issued in this case)\n try:\n local_copy = open(os.path.join(module.module_dir, f), \"r\").read()\n except FileNotFoundError as e:\n continue\n\n # Download remote copy and compare\n url = module_base_url + f\n r = requests.get(url=url)\n\n if r.status_code != 200:\n module.warned.append(\n (\n \"check_local_copy\",\n f\"Could not fetch remote copy, skipping comparison.\",\n f\"{os.path.join(module.module_dir, f)}\",\n )\n )\n else:\n try:\n remote_copy = r.content.decode(\"utf-8\")\n\n if local_copy != remote_copy:\n module.warned.append(\n (\n \"check_local_copy\",\n \"Local copy of module outdated\",\n f\"{os.path.join(module.module_dir, f)}\",\n )\n )\n else:\n module.passed.append(\n (\n \"check_local_copy\",\n \"Local copy of module up to date\",\n f\"{os.path.join(module.module_dir, f)}\",\n )\n )\n except UnicodeDecodeError as e:\n module.warned.append(\n (\n \"check_local_copy\",\n f\"Could not decode file from {url}. Skipping comparison ({e})\",\n f\"{os.path.join(module.module_dir, f)}\",\n )\n )\n", "path": "nf_core/modules/lint/module_changes.py"}]} | 1,366 | 143 |
gh_patches_debug_6286 | rasdani/github-patches | git_diff | digitalfabrik__integreat-cms-284 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Include tailwind.css via npm instead of static file
At the moment, we include tailwind.css as a static file in our repo.
Instead, we should use [the recommended installation via npm](https://tailwindcss.com/docs/installation/) where we can configure exactly which parts we need and compile a minified css file in our deployment chain.
</issue>
<code>
[start of backend/cms/views/media/media_list_view.py]
1 from django.contrib.auth.decorators import login_required
2 from django.shortcuts import render
3 from django.utils.decorators import method_decorator
4 from django.views.generic import TemplateView
5
6 from ...decorators import region_permission_required
7 from ...models import Document
8
9
10 @method_decorator(login_required, name='dispatch')
11 @method_decorator(region_permission_required, name='dispatch')
12 class MediaListView(TemplateView):
13 template_name = 'media/list.html'
14 base_context = {'current_menu_item': 'media'}
15
16 def get(self, request, *args, **kwargs):
17 documents = Document.objects.all()
18
19 return render(
20 request,
21 self.template_name,
22 {
23 **self.base_context,
24 'documents': documents
25 }
26 )
27
[end of backend/cms/views/media/media_list_view.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/backend/cms/views/media/media_list_view.py b/backend/cms/views/media/media_list_view.py
--- a/backend/cms/views/media/media_list_view.py
+++ b/backend/cms/views/media/media_list_view.py
@@ -10,7 +10,7 @@
@method_decorator(login_required, name='dispatch')
@method_decorator(region_permission_required, name='dispatch')
class MediaListView(TemplateView):
- template_name = 'media/list.html'
+ template_name = 'media/media_list.html'
base_context = {'current_menu_item': 'media'}
def get(self, request, *args, **kwargs):
| {"golden_diff": "diff --git a/backend/cms/views/media/media_list_view.py b/backend/cms/views/media/media_list_view.py\n--- a/backend/cms/views/media/media_list_view.py\n+++ b/backend/cms/views/media/media_list_view.py\n@@ -10,7 +10,7 @@\n @method_decorator(login_required, name='dispatch')\n @method_decorator(region_permission_required, name='dispatch')\n class MediaListView(TemplateView):\n- template_name = 'media/list.html'\n+ template_name = 'media/media_list.html'\n base_context = {'current_menu_item': 'media'}\n \n def get(self, request, *args, **kwargs):\n", "issue": "Include tailwind.css via npm instead of static file\nAt the moment, we include tailwind.css as a static file in our repo.\r\nInstead, we should use [the recommended installation via npm](https://tailwindcss.com/docs/installation/) where we can configure exactly which parts we need and compile a minified css file in our deployment chain.\n", "before_files": [{"content": "from django.contrib.auth.decorators import login_required\nfrom django.shortcuts import render\nfrom django.utils.decorators import method_decorator\nfrom django.views.generic import TemplateView\n\nfrom ...decorators import region_permission_required\nfrom ...models import Document\n\n\n@method_decorator(login_required, name='dispatch')\n@method_decorator(region_permission_required, name='dispatch')\nclass MediaListView(TemplateView):\n template_name = 'media/list.html'\n base_context = {'current_menu_item': 'media'}\n\n def get(self, request, *args, **kwargs):\n documents = Document.objects.all()\n\n return render(\n request,\n self.template_name,\n {\n **self.base_context,\n 'documents': documents\n }\n )\n", "path": "backend/cms/views/media/media_list_view.py"}]} | 807 | 132 |
gh_patches_debug_378 | rasdani/github-patches | git_diff | encode__uvicorn-1099 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
PackageInfo: Invalid constraint (click (>=7.*)) found in uvicorn-0.14.0 dependencies, skipping
### Checklist
<!-- Please make sure you check all these items before submitting your bug report. -->
- [X] The bug is reproducible against the latest release and/or `master`.
- [X] There are no similar issues or pull requests to fix it yet.
### Describe the bug
When adding uvicorn package with poetry the following warning is raised:
PackageInfo: Invalid constraint (click (>=7.*)) found in uvicorn-0.14.0 dependencies, skipping
because the constraint `>=7.*` violates PEP440 syntax. It should either be `>=7.0` or `=7.*`.
Because of this, the `click` dependency is not installed and uvicorn may not work.
### To reproduce
Just execute `poetry add uvicorn` in any environment.
### Expected behavior
To install `uvicorn` correctly together with all its dependencies.
### Actual behavior
The `click` dependency is not installed and uvicorn may not work.
</issue>
<code>
[start of setup.py]
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3
4 import os
5 import re
6
7 from setuptools import setup
8
9
10 def get_version(package):
11 """
12 Return package version as listed in `__version__` in `init.py`.
13 """
14 path = os.path.join(package, "__init__.py")
15 init_py = open(path, "r", encoding="utf8").read()
16 return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
17
18
19 def get_long_description():
20 """
21 Return the README.
22 """
23 return open("README.md", "r", encoding="utf8").read()
24
25
26 def get_packages(package):
27 """
28 Return root package and all sub-packages.
29 """
30 return [
31 dirpath
32 for dirpath, dirnames, filenames in os.walk(package)
33 if os.path.exists(os.path.join(dirpath, "__init__.py"))
34 ]
35
36
37 env_marker_cpython = (
38 "sys_platform != 'win32'"
39 " and (sys_platform != 'cygwin'"
40 " and platform_python_implementation != 'PyPy')"
41 )
42
43 env_marker_win = "sys_platform == 'win32'"
44 env_marker_below_38 = "python_version < '3.8'"
45
46 minimal_requirements = [
47 "asgiref>=3.4.0",
48 "click>=7.*",
49 "h11>=0.8",
50 "typing-extensions;" + env_marker_below_38,
51 ]
52
53
54 extra_requirements = [
55 "websockets>=9.1",
56 "httptools==0.2.*",
57 "uvloop>=0.14.0,!=0.15.0,!=0.15.1; " + env_marker_cpython,
58 "colorama>=0.4;" + env_marker_win,
59 "watchgod>=0.6",
60 "python-dotenv>=0.13",
61 "PyYAML>=5.1",
62 ]
63
64
65 setup(
66 name="uvicorn",
67 version=get_version("uvicorn"),
68 url="https://github.com/encode/uvicorn",
69 license="BSD",
70 description="The lightning-fast ASGI server.",
71 long_description=get_long_description(),
72 long_description_content_type="text/markdown",
73 author="Tom Christie",
74 author_email="[email protected]",
75 packages=get_packages("uvicorn"),
76 install_requires=minimal_requirements,
77 extras_require={"standard": extra_requirements},
78 include_package_data=True,
79 classifiers=[
80 "Development Status :: 4 - Beta",
81 "Environment :: Web Environment",
82 "Intended Audience :: Developers",
83 "License :: OSI Approved :: BSD License",
84 "Operating System :: OS Independent",
85 "Topic :: Internet :: WWW/HTTP",
86 "Programming Language :: Python :: 3",
87 "Programming Language :: Python :: 3.6",
88 "Programming Language :: Python :: 3.7",
89 "Programming Language :: Python :: 3.8",
90 "Programming Language :: Python :: 3.9",
91 "Programming Language :: Python :: Implementation :: CPython",
92 "Programming Language :: Python :: Implementation :: PyPy",
93 ],
94 entry_points="""
95 [console_scripts]
96 uvicorn=uvicorn.main:main
97 """,
98 )
99
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -45,7 +45,7 @@
minimal_requirements = [
"asgiref>=3.4.0",
- "click>=7.*",
+ "click>=7.0",
"h11>=0.8",
"typing-extensions;" + env_marker_below_38,
]
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -45,7 +45,7 @@\n \n minimal_requirements = [\n \"asgiref>=3.4.0\",\n- \"click>=7.*\",\n+ \"click>=7.0\",\n \"h11>=0.8\",\n \"typing-extensions;\" + env_marker_below_38,\n ]\n", "issue": "PackageInfo: Invalid constraint (click (>=7.*)) found in uvicorn-0.14.0 dependencies, skipping\n### Checklist\r\n\r\n<!-- Please make sure you check all these items before submitting your bug report. -->\r\n\r\n- [X] The bug is reproducible against the latest release and/or `master`.\r\n- [X] There are no similar issues or pull requests to fix it yet.\r\n\r\n### Describe the bug\r\n\r\nWhen adding uvicorn package with poetry the following warning is raised:\r\n\r\n PackageInfo: Invalid constraint (click (>=7.*)) found in uvicorn-0.14.0 dependencies, skipping\r\n\r\nbecause the constraint `>=7.*` violates PEP440 syntax. It should either be `>=7.0` or `=7.*`.\r\n\r\nBecause of this, the `click` dependency is not installed and uvicorn may not work.\r\n\r\n### To reproduce\r\n\r\nJust execute `poetry add uvicorn` in any environment.\r\n\r\n### Expected behavior\r\n\r\nTo install `uvicorn` correctly together with all its dependencies.\r\n\r\n### Actual behavior\r\n\r\nThe `click` dependency is not installed and uvicorn may not work.\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport os\nimport re\n\nfrom setuptools import setup\n\n\ndef get_version(package):\n \"\"\"\n Return package version as listed in `__version__` in `init.py`.\n \"\"\"\n path = os.path.join(package, \"__init__.py\")\n init_py = open(path, \"r\", encoding=\"utf8\").read()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", init_py).group(1)\n\n\ndef get_long_description():\n \"\"\"\n Return the README.\n \"\"\"\n return open(\"README.md\", \"r\", encoding=\"utf8\").read()\n\n\ndef get_packages(package):\n \"\"\"\n Return root package and all sub-packages.\n \"\"\"\n return [\n dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, \"__init__.py\"))\n ]\n\n\nenv_marker_cpython = (\n \"sys_platform != 'win32'\"\n \" and (sys_platform != 'cygwin'\"\n \" and platform_python_implementation != 'PyPy')\"\n)\n\nenv_marker_win = \"sys_platform == 'win32'\"\nenv_marker_below_38 = \"python_version < '3.8'\"\n\nminimal_requirements = [\n \"asgiref>=3.4.0\",\n \"click>=7.*\",\n \"h11>=0.8\",\n \"typing-extensions;\" + env_marker_below_38,\n]\n\n\nextra_requirements = [\n \"websockets>=9.1\",\n \"httptools==0.2.*\",\n \"uvloop>=0.14.0,!=0.15.0,!=0.15.1; \" + env_marker_cpython,\n \"colorama>=0.4;\" + env_marker_win,\n \"watchgod>=0.6\",\n \"python-dotenv>=0.13\",\n \"PyYAML>=5.1\",\n]\n\n\nsetup(\n name=\"uvicorn\",\n version=get_version(\"uvicorn\"),\n url=\"https://github.com/encode/uvicorn\",\n license=\"BSD\",\n description=\"The lightning-fast ASGI server.\",\n long_description=get_long_description(),\n long_description_content_type=\"text/markdown\",\n author=\"Tom Christie\",\n author_email=\"[email protected]\",\n packages=get_packages(\"uvicorn\"),\n install_requires=minimal_requirements,\n extras_require={\"standard\": extra_requirements},\n include_package_data=True,\n classifiers=[\n \"Development Status :: 4 - Beta\",\n \"Environment :: Web Environment\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Programming Language :: Python :: 
3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n ],\n entry_points=\"\"\"\n [console_scripts]\n uvicorn=uvicorn.main:main\n \"\"\",\n)\n", "path": "setup.py"}]} | 1,657 | 93 |
gh_patches_debug_31527 | rasdani/github-patches | git_diff | iterative__dvc-4739 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
dvc dag --outs
In the `0.xx` version days, there was a `dvc pipeline show --outs` command that was able to show a DAG that included outputs.
I thought this was a really nice feature. For instance, I have a `train_test_split` stage that creates two outputs, `train` and `test`, and it would be nice to see them taking different paths in the DAG.
Can you maybe (re)implement this feature?
Thanks!
</issue>
<code>
[start of dvc/command/dag.py]
1 import argparse
2 import logging
3
4 from dvc.command.base import CmdBase, append_doc_link
5 from dvc.exceptions import DvcException
6
7 logger = logging.getLogger(__name__)
8
9
10 def _show_ascii(G):
11 from dvc.dagascii import draw
12 from dvc.repo.graph import get_pipelines
13
14 pipelines = get_pipelines(G)
15
16 ret = []
17 for pipeline in pipelines:
18 ret.append(draw(pipeline.nodes, pipeline.edges))
19
20 return "\n".join(ret)
21
22
23 def _show_dot(G):
24 import io
25
26 from networkx.drawing.nx_pydot import write_dot
27
28 dot_file = io.StringIO()
29 write_dot(G, dot_file)
30 return dot_file.getvalue()
31
32
33 def _build(G, target=None, full=False):
34 import networkx as nx
35
36 from dvc.repo.graph import get_pipeline, get_pipelines
37
38 if target:
39 H = get_pipeline(get_pipelines(G), target)
40 if not full:
41 descendants = nx.descendants(G, target)
42 descendants.add(target)
43 H.remove_nodes_from(set(G.nodes()) - descendants)
44 else:
45 H = G
46
47 def _relabel(stage):
48 return stage.addressing
49
50 return nx.relabel_nodes(H, _relabel, copy=False)
51
52
53 class CmdDAG(CmdBase):
54 def run(self):
55 try:
56 target = None
57 if self.args.target:
58 stages = self.repo.collect(self.args.target)
59 if len(stages) > 1:
60 logger.error(
61 f"'{self.args.target}' contains more than one stage "
62 "{stages}, please specify one stage"
63 )
64 return 1
65 target = stages[0]
66
67 G = _build(self.repo.graph, target=target, full=self.args.full,)
68
69 if self.args.dot:
70 logger.info(_show_dot(G))
71 else:
72 from dvc.utils.pager import pager
73
74 pager(_show_ascii(G))
75
76 return 0
77 except DvcException:
78 msg = "failed to show "
79 if self.args.target:
80 msg += f"a pipeline for '{target}'"
81 else:
82 msg += "pipelines"
83 logger.exception(msg)
84 return 1
85
86
87 def add_parser(subparsers, parent_parser):
88 DAG_HELP = "Visualize DVC project DAG."
89 dag_parser = subparsers.add_parser(
90 "dag",
91 parents=[parent_parser],
92 description=append_doc_link(DAG_HELP, "dag"),
93 help=DAG_HELP,
94 formatter_class=argparse.RawDescriptionHelpFormatter,
95 )
96 dag_parser.add_argument(
97 "--dot",
98 action="store_true",
99 default=False,
100 help="Print DAG with .dot format.",
101 )
102 dag_parser.add_argument(
103 "--full",
104 action="store_true",
105 default=False,
106 help=(
107 "Show full DAG that the target belongs too, instead of "
108 "showing DAG consisting only of ancestors."
109 ),
110 )
111 dag_parser.add_argument(
112 "target",
113 nargs="?",
114 help="Stage or output to show pipeline for (optional). "
115 "Finds all stages in the workspace by default.",
116 )
117 dag_parser.set_defaults(func=CmdDAG)
118
[end of dvc/command/dag.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/dvc/command/dag.py b/dvc/command/dag.py
--- a/dvc/command/dag.py
+++ b/dvc/command/dag.py
@@ -30,7 +30,7 @@
return dot_file.getvalue()
-def _build(G, target=None, full=False):
+def _build(G, target=None, full=False, outs=False):
import networkx as nx
from dvc.repo.graph import get_pipeline, get_pipelines
@@ -44,8 +44,25 @@
else:
H = G
- def _relabel(stage):
- return stage.addressing
+ if outs:
+ G = nx.DiGraph()
+ for stage in H.nodes:
+ G.add_nodes_from(stage.outs)
+
+ for from_stage, to_stage in nx.edge_dfs(H):
+ G.add_edges_from(
+ [
+ (from_out, to_out)
+ for from_out in from_stage.outs
+ for to_out in to_stage.outs
+ ]
+ )
+ H = G
+
+ def _relabel(node):
+ from dvc.stage import Stage
+
+ return node.addressing if isinstance(node, Stage) else str(node)
return nx.relabel_nodes(H, _relabel, copy=False)
@@ -64,7 +81,12 @@
return 1
target = stages[0]
- G = _build(self.repo.graph, target=target, full=self.args.full,)
+ G = _build(
+ self.repo.graph,
+ target=target,
+ full=self.args.full,
+ outs=self.args.outs,
+ )
if self.args.dot:
logger.info(_show_dot(G))
@@ -108,6 +130,13 @@
"showing DAG consisting only of ancestors."
),
)
+ dag_parser.add_argument(
+ "-o",
+ "--outs",
+ action="store_true",
+ default=False,
+ help="Print output files instead of stages.",
+ )
dag_parser.add_argument(
"target",
nargs="?",
| {"golden_diff": "diff --git a/dvc/command/dag.py b/dvc/command/dag.py\n--- a/dvc/command/dag.py\n+++ b/dvc/command/dag.py\n@@ -30,7 +30,7 @@\n return dot_file.getvalue()\n \n \n-def _build(G, target=None, full=False):\n+def _build(G, target=None, full=False, outs=False):\n import networkx as nx\n \n from dvc.repo.graph import get_pipeline, get_pipelines\n@@ -44,8 +44,25 @@\n else:\n H = G\n \n- def _relabel(stage):\n- return stage.addressing\n+ if outs:\n+ G = nx.DiGraph()\n+ for stage in H.nodes:\n+ G.add_nodes_from(stage.outs)\n+\n+ for from_stage, to_stage in nx.edge_dfs(H):\n+ G.add_edges_from(\n+ [\n+ (from_out, to_out)\n+ for from_out in from_stage.outs\n+ for to_out in to_stage.outs\n+ ]\n+ )\n+ H = G\n+\n+ def _relabel(node):\n+ from dvc.stage import Stage\n+\n+ return node.addressing if isinstance(node, Stage) else str(node)\n \n return nx.relabel_nodes(H, _relabel, copy=False)\n \n@@ -64,7 +81,12 @@\n return 1\n target = stages[0]\n \n- G = _build(self.repo.graph, target=target, full=self.args.full,)\n+ G = _build(\n+ self.repo.graph,\n+ target=target,\n+ full=self.args.full,\n+ outs=self.args.outs,\n+ )\n \n if self.args.dot:\n logger.info(_show_dot(G))\n@@ -108,6 +130,13 @@\n \"showing DAG consisting only of ancestors.\"\n ),\n )\n+ dag_parser.add_argument(\n+ \"-o\",\n+ \"--outs\",\n+ action=\"store_true\",\n+ default=False,\n+ help=\"Print output files instead of stages.\",\n+ )\n dag_parser.add_argument(\n \"target\",\n nargs=\"?\",\n", "issue": "dvc dag --outs\nIn the `0.xx` version days, there was a `dvc pipeline show --outs` command that was able to show a DAG that included outputs. \r\nI thought this was a really nice feature. For instance, I have a `train_test_split` stage that creates two outputs, `train` and `test`, and it would be nice to see them taking different paths in the DAG.\r\nCan you maybe (re)implement this feature?\r\nThanks!\ndvc dag --outs\nIn the `0.xx` version days, there was a `dvc pipeline show --outs` command that was able to show a DAG that included outputs. \r\nI thought this was a really nice feature. 
For instance, I have a `train_test_split` stage that creates two outputs, `train` and `test`, and it would be nice to see them taking different paths in the DAG.\r\nCan you maybe (re)implement this feature?\r\nThanks!\n", "before_files": [{"content": "import argparse\nimport logging\n\nfrom dvc.command.base import CmdBase, append_doc_link\nfrom dvc.exceptions import DvcException\n\nlogger = logging.getLogger(__name__)\n\n\ndef _show_ascii(G):\n from dvc.dagascii import draw\n from dvc.repo.graph import get_pipelines\n\n pipelines = get_pipelines(G)\n\n ret = []\n for pipeline in pipelines:\n ret.append(draw(pipeline.nodes, pipeline.edges))\n\n return \"\\n\".join(ret)\n\n\ndef _show_dot(G):\n import io\n\n from networkx.drawing.nx_pydot import write_dot\n\n dot_file = io.StringIO()\n write_dot(G, dot_file)\n return dot_file.getvalue()\n\n\ndef _build(G, target=None, full=False):\n import networkx as nx\n\n from dvc.repo.graph import get_pipeline, get_pipelines\n\n if target:\n H = get_pipeline(get_pipelines(G), target)\n if not full:\n descendants = nx.descendants(G, target)\n descendants.add(target)\n H.remove_nodes_from(set(G.nodes()) - descendants)\n else:\n H = G\n\n def _relabel(stage):\n return stage.addressing\n\n return nx.relabel_nodes(H, _relabel, copy=False)\n\n\nclass CmdDAG(CmdBase):\n def run(self):\n try:\n target = None\n if self.args.target:\n stages = self.repo.collect(self.args.target)\n if len(stages) > 1:\n logger.error(\n f\"'{self.args.target}' contains more than one stage \"\n \"{stages}, please specify one stage\"\n )\n return 1\n target = stages[0]\n\n G = _build(self.repo.graph, target=target, full=self.args.full,)\n\n if self.args.dot:\n logger.info(_show_dot(G))\n else:\n from dvc.utils.pager import pager\n\n pager(_show_ascii(G))\n\n return 0\n except DvcException:\n msg = \"failed to show \"\n if self.args.target:\n msg += f\"a pipeline for '{target}'\"\n else:\n msg += \"pipelines\"\n logger.exception(msg)\n return 1\n\n\ndef add_parser(subparsers, parent_parser):\n DAG_HELP = \"Visualize DVC project DAG.\"\n dag_parser = subparsers.add_parser(\n \"dag\",\n parents=[parent_parser],\n description=append_doc_link(DAG_HELP, \"dag\"),\n help=DAG_HELP,\n formatter_class=argparse.RawDescriptionHelpFormatter,\n )\n dag_parser.add_argument(\n \"--dot\",\n action=\"store_true\",\n default=False,\n help=\"Print DAG with .dot format.\",\n )\n dag_parser.add_argument(\n \"--full\",\n action=\"store_true\",\n default=False,\n help=(\n \"Show full DAG that the target belongs too, instead of \"\n \"showing DAG consisting only of ancestors.\"\n ),\n )\n dag_parser.add_argument(\n \"target\",\n nargs=\"?\",\n help=\"Stage or output to show pipeline for (optional). \"\n \"Finds all stages in the workspace by default.\",\n )\n dag_parser.set_defaults(func=CmdDAG)\n", "path": "dvc/command/dag.py"}]} | 1,672 | 472 |
gh_patches_debug_32659 | rasdani/github-patches | git_diff | pypi__warehouse-12343 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Python 3.1 classifier filtering is broken
When [filtering by the `Programming Language :: Python :: 3.1` classifier on pypi.org][1], the results include packages which don't have that classifier - any package with a classifier matching `Programming Language :: Python :: 3.1*` is included. That is, packages for 3.10, 3.11, 3.12, and so on are included in the results.
[1]: https://pypi.org/search/?q=&o=&c=Programming+Language+%3A%3A+Python+%3A%3A+3.1
</issue>
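The root cause is that a plain prefix query over the classifier string treats `3.1` as a prefix of `3.10`, `3.11`, and so on. Below is a sketch of a query that matches the classifier exactly or only its `::`-separated children, the same idea as the fix shown later in this record; it assumes the `elasticsearch_dsl` `Q` helper already used in this module.

```
from elasticsearch_dsl import Q

classifier = "Programming Language :: Python :: 3.1"

# Too broad: also matches "... :: 3.10", "... :: 3.11", ...
broad = Q("prefix", classifiers=classifier)

# Exact classifier, or children such as "... :: 3.1 :: Only",
# but never "... :: 3.10".
scoped = Q(
    "bool",
    should=[
        Q("term", classifiers=classifier),
        Q("prefix", classifiers=classifier + " :: "),
    ],
)
```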
<code>
[start of warehouse/search/queries.py]
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 # See the License for the specific language governing permissions and
11 # limitations under the License.
12
13 import re
14
15 from elasticsearch_dsl import Q
16
17 SEARCH_FIELDS = [
18 "author",
19 "author_email",
20 "description",
21 "download_url",
22 "home_page",
23 "keywords",
24 "license",
25 "maintainer",
26 "maintainer_email",
27 "normalized_name",
28 "platform",
29 "summary",
30 ]
31 SEARCH_BOOSTS = {
32 "name": 10,
33 "normalized_name": 10,
34 "description": 5,
35 "keywords": 5,
36 "summary": 5,
37 }
38 SEARCH_FILTER_ORDER = (
39 "Framework",
40 "Topic",
41 "Development Status",
42 "License",
43 "Programming Language",
44 "Operating System",
45 "Environment",
46 "Intended Audience",
47 "Natural Language",
48 )
49
50
51 def get_es_query(es, terms, order, classifiers):
52 """
53 Returns an Elasticsearch query from data from the request.
54 """
55 if not terms:
56 query = es.query()
57 else:
58 bool_query = gather_es_queries(terms)
59 query = es.query(bool_query)
60 query = query.suggest("name_suggestion", terms, term={"field": "name"})
61
62 # Require match to all specified classifiers
63 for classifier in classifiers:
64 query = query.query("prefix", classifiers=classifier)
65
66 query = query_for_order(query, order)
67 return query
68
69
70 def gather_es_queries(q):
71 quoted_string, unquoted_string = filter_query(q)
72 must = [form_query("phrase", i) for i in quoted_string] + [
73 form_query("best_fields", i) for i in unquoted_string
74 ]
75
76 bool_query = Q("bool", must=must)
77
78 # Allow to optionally match on prefix
79 # if ``q`` is longer than one character.
80 if len(q) > 1:
81 bool_query = bool_query | Q("prefix", normalized_name=q)
82 return bool_query
83
84
85 def filter_query(s):
86 """
87 Filters given query with the below regex
88 and returns lists of quoted and unquoted strings
89 """
90 matches = re.findall(r'(?:"([^"]*)")|([^"]*)', s)
91 result_quoted = [t[0].strip() for t in matches if t[0]]
92 result_unquoted = [t[1].strip() for t in matches if t[1]]
93 return result_quoted, result_unquoted
94
95
96 def form_query(query_type, query):
97 """
98 Returns a multi match query
99 """
100 fields = [
101 field + "^" + str(SEARCH_BOOSTS[field]) if field in SEARCH_BOOSTS else field
102 for field in SEARCH_FIELDS
103 ]
104 return Q("multi_match", fields=fields, query=query, type=query_type)
105
106
107 def query_for_order(query, order):
108 """
109 Applies transformations on the ES query based on the search order.
110
111 Order is assumed to be a string with the name of a field with an optional
112 hyphen to indicate descending sort order.
113 """
114 if order == "": # relevance should not sort
115 return query
116
117 field = order[order.find("-") + 1 :]
118 sort_info = {
119 field: {
120 "order": "desc" if order.startswith("-") else "asc",
121 "unmapped_type": "long",
122 }
123 }
124 query = query.sort(sort_info)
125 return query
126
[end of warehouse/search/queries.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/warehouse/search/queries.py b/warehouse/search/queries.py
--- a/warehouse/search/queries.py
+++ b/warehouse/search/queries.py
@@ -52,36 +52,45 @@
"""
Returns an Elasticsearch query from data from the request.
"""
+ classifier_q = Q(
+ "bool",
+ # Theh results must have all selected classifiers
+ must=[
+ Q(
+ "bool",
+ should=[
+ # Term search for the exact classifier
+ Q("term", classifiers=classifier),
+ # Prefix search for potential children classifiers
+ Q("prefix", classifiers=classifier + " :: "),
+ ],
+ )
+ for classifier in classifiers
+ ],
+ )
if not terms:
- query = es.query()
+ query = es.query(classifier_q) if classifiers else es.query()
else:
- bool_query = gather_es_queries(terms)
+ quoted_string, unquoted_string = filter_query(terms)
+ bool_query = Q(
+ "bool",
+ must=[form_query("phrase", i) for i in quoted_string]
+ + [form_query("best_fields", i) for i in unquoted_string]
+ + ([classifier_q] if classifiers else []),
+ )
+
+ # Allow to optionally match on prefix
+ # if ``q`` is longer than one character.
+ if len(terms) > 1:
+ bool_query = bool_query | Q("prefix", normalized_name=terms)
+
query = es.query(bool_query)
query = query.suggest("name_suggestion", terms, term={"field": "name"})
- # Require match to all specified classifiers
- for classifier in classifiers:
- query = query.query("prefix", classifiers=classifier)
-
query = query_for_order(query, order)
return query
-def gather_es_queries(q):
- quoted_string, unquoted_string = filter_query(q)
- must = [form_query("phrase", i) for i in quoted_string] + [
- form_query("best_fields", i) for i in unquoted_string
- ]
-
- bool_query = Q("bool", must=must)
-
- # Allow to optionally match on prefix
- # if ``q`` is longer than one character.
- if len(q) > 1:
- bool_query = bool_query | Q("prefix", normalized_name=q)
- return bool_query
-
-
def filter_query(s):
"""
Filters given query with the below regex
| {"golden_diff": "diff --git a/warehouse/search/queries.py b/warehouse/search/queries.py\n--- a/warehouse/search/queries.py\n+++ b/warehouse/search/queries.py\n@@ -52,36 +52,45 @@\n \"\"\"\n Returns an Elasticsearch query from data from the request.\n \"\"\"\n+ classifier_q = Q(\n+ \"bool\",\n+ # Theh results must have all selected classifiers\n+ must=[\n+ Q(\n+ \"bool\",\n+ should=[\n+ # Term search for the exact classifier\n+ Q(\"term\", classifiers=classifier),\n+ # Prefix search for potential children classifiers\n+ Q(\"prefix\", classifiers=classifier + \" :: \"),\n+ ],\n+ )\n+ for classifier in classifiers\n+ ],\n+ )\n if not terms:\n- query = es.query()\n+ query = es.query(classifier_q) if classifiers else es.query()\n else:\n- bool_query = gather_es_queries(terms)\n+ quoted_string, unquoted_string = filter_query(terms)\n+ bool_query = Q(\n+ \"bool\",\n+ must=[form_query(\"phrase\", i) for i in quoted_string]\n+ + [form_query(\"best_fields\", i) for i in unquoted_string]\n+ + ([classifier_q] if classifiers else []),\n+ )\n+\n+ # Allow to optionally match on prefix\n+ # if ``q`` is longer than one character.\n+ if len(terms) > 1:\n+ bool_query = bool_query | Q(\"prefix\", normalized_name=terms)\n+\n query = es.query(bool_query)\n query = query.suggest(\"name_suggestion\", terms, term={\"field\": \"name\"})\n \n- # Require match to all specified classifiers\n- for classifier in classifiers:\n- query = query.query(\"prefix\", classifiers=classifier)\n-\n query = query_for_order(query, order)\n return query\n \n \n-def gather_es_queries(q):\n- quoted_string, unquoted_string = filter_query(q)\n- must = [form_query(\"phrase\", i) for i in quoted_string] + [\n- form_query(\"best_fields\", i) for i in unquoted_string\n- ]\n-\n- bool_query = Q(\"bool\", must=must)\n-\n- # Allow to optionally match on prefix\n- # if ``q`` is longer than one character.\n- if len(q) > 1:\n- bool_query = bool_query | Q(\"prefix\", normalized_name=q)\n- return bool_query\n-\n-\n def filter_query(s):\n \"\"\"\n Filters given query with the below regex\n", "issue": "Python 3.1 classifier filtering is broken\nWhen [filtering by the `Programming Language :: Python :: 3.1` classifier on pypi.org][1], the results include packages which don't have that classifier - any package with a classifier matching `Programming Language :: Python :: 3.1*` is included. 
That is, packages for 3.10, 3.11, 3.12, and so on are included in the results.\r\n\r\n[1]: https://pypi.org/search/?q=&o=&c=Programming+Language+%3A%3A+Python+%3A%3A+3.1\r\n\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport re\n\nfrom elasticsearch_dsl import Q\n\nSEARCH_FIELDS = [\n \"author\",\n \"author_email\",\n \"description\",\n \"download_url\",\n \"home_page\",\n \"keywords\",\n \"license\",\n \"maintainer\",\n \"maintainer_email\",\n \"normalized_name\",\n \"platform\",\n \"summary\",\n]\nSEARCH_BOOSTS = {\n \"name\": 10,\n \"normalized_name\": 10,\n \"description\": 5,\n \"keywords\": 5,\n \"summary\": 5,\n}\nSEARCH_FILTER_ORDER = (\n \"Framework\",\n \"Topic\",\n \"Development Status\",\n \"License\",\n \"Programming Language\",\n \"Operating System\",\n \"Environment\",\n \"Intended Audience\",\n \"Natural Language\",\n)\n\n\ndef get_es_query(es, terms, order, classifiers):\n \"\"\"\n Returns an Elasticsearch query from data from the request.\n \"\"\"\n if not terms:\n query = es.query()\n else:\n bool_query = gather_es_queries(terms)\n query = es.query(bool_query)\n query = query.suggest(\"name_suggestion\", terms, term={\"field\": \"name\"})\n\n # Require match to all specified classifiers\n for classifier in classifiers:\n query = query.query(\"prefix\", classifiers=classifier)\n\n query = query_for_order(query, order)\n return query\n\n\ndef gather_es_queries(q):\n quoted_string, unquoted_string = filter_query(q)\n must = [form_query(\"phrase\", i) for i in quoted_string] + [\n form_query(\"best_fields\", i) for i in unquoted_string\n ]\n\n bool_query = Q(\"bool\", must=must)\n\n # Allow to optionally match on prefix\n # if ``q`` is longer than one character.\n if len(q) > 1:\n bool_query = bool_query | Q(\"prefix\", normalized_name=q)\n return bool_query\n\n\ndef filter_query(s):\n \"\"\"\n Filters given query with the below regex\n and returns lists of quoted and unquoted strings\n \"\"\"\n matches = re.findall(r'(?:\"([^\"]*)\")|([^\"]*)', s)\n result_quoted = [t[0].strip() for t in matches if t[0]]\n result_unquoted = [t[1].strip() for t in matches if t[1]]\n return result_quoted, result_unquoted\n\n\ndef form_query(query_type, query):\n \"\"\"\n Returns a multi match query\n \"\"\"\n fields = [\n field + \"^\" + str(SEARCH_BOOSTS[field]) if field in SEARCH_BOOSTS else field\n for field in SEARCH_FIELDS\n ]\n return Q(\"multi_match\", fields=fields, query=query, type=query_type)\n\n\ndef query_for_order(query, order):\n \"\"\"\n Applies transformations on the ES query based on the search order.\n\n Order is assumed to be a string with the name of a field with an optional\n hyphen to indicate descending sort order.\n \"\"\"\n if order == \"\": # relevance should not sort\n return query\n\n field = order[order.find(\"-\") + 1 :]\n sort_info = {\n field: {\n \"order\": \"desc\" if order.startswith(\"-\") else \"asc\",\n \"unmapped_type\": \"long\",\n }\n }\n query = query.sort(sort_info)\n return query\n", "path": 
"warehouse/search/queries.py"}]} | 1,789 | 562 |
gh_patches_debug_29422 | rasdani/github-patches | git_diff | freedomofpress__securedrop-7035 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
determine post-upgrade failure-mode for a SHA-1-signed submission key
## Description
After #6948 (for #6399), redwood will refuse to encrypt to a submission key with a SHA-1 signature.
After #6928, `securedrop-admin sdconfig` will reject a submission key with a SHA-1 signature. This check guarantees that new and reconfigured instances will comply with #6948.
What will happen to an instance with a SHA-1-signed submission key after upgrading to v2.7.0?
## Possible approaches
| Option | Documentation changes | Code changes | Implication |
| --- | --- | --- | --- |
| Fail open, but log | optional | ✓ | Admin must monitor logs and/or OSSEC alerts. |
| Fail open, but document | ✓ | ✗ | Admin must monitor release notes or check documentation. |
| Fail closed | optional | ✓[1] | Admin can contact us for help. |
**Notes:**
1. @legoktm observes that, without a code change to handle this case, Apache will come back up after reboot even if the `postinst` script fails under `unattended-upgrades`.
</issue>
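If the "fail closed" option is chosen, one minimal sketch is to validate the configured submission key at application startup and exit before serving requests, mirroring the fail-closed check in the fix further down. It assumes an `EncryptionManager` that can return the journalist public key and a `redwood.is_valid_public_key()` check as referenced in #6948; it is an illustration, not the project's final implementation.

```
import sys

import redwood  # assumed to expose is_valid_public_key()


def validate_journalist_key(encryption_mgr, logger):
    """Exit (fail closed) if the submission key cannot be used."""
    try:
        journalist_key = encryption_mgr.get_journalist_public_key()
        redwood.is_valid_public_key(journalist_key)
    except Exception as exc:  # unreadable key, SHA-1 signature, etc.
        logger.error(f"Journalist public key is not usable: {exc}")
        sys.exit(1)
```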
<code>
[start of securedrop/journalist.py]
1 from encryption import EncryptionManager, GpgKeyNotFoundError
2 from execution import asynchronous
3 from journalist_app import create_app
4 from models import Source
5 from sdconfig import SecureDropConfig
6
7 config = SecureDropConfig.get_current()
8 # app is imported by journalist.wsgi
9 app = create_app(config)
10
11
12 @asynchronous
13 def prime_keycache() -> None:
14 """Pre-load the source public keys into Redis."""
15 with app.app_context():
16 encryption_mgr = EncryptionManager.get_default()
17 for source in Source.query.filter_by(pending=False, deleted_at=None).all():
18 try:
19 encryption_mgr.get_source_public_key(source.filesystem_id)
20 except GpgKeyNotFoundError:
21 pass
22
23
24 prime_keycache()
25
26
27 if __name__ == "__main__": # pragma: no cover
28 debug = getattr(config, "env", "prod") != "prod"
29 # nosemgrep: python.flask.security.audit.app-run-param-config.avoid_app_run_with_bad_host
30 app.run(debug=debug, host="0.0.0.0", port=8081)
31
[end of securedrop/journalist.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -1,9 +1,13 @@
+import sys
+
from encryption import EncryptionManager, GpgKeyNotFoundError
from execution import asynchronous
from journalist_app import create_app
from models import Source
from sdconfig import SecureDropConfig
+import redwood
+
config = SecureDropConfig.get_current()
# app is imported by journalist.wsgi
app = create_app(config)
@@ -21,10 +25,28 @@
pass
-prime_keycache()
+def validate_journalist_key() -> None:
+ """Verify the journalist PGP key is valid"""
+ encryption_mgr = EncryptionManager.get_default()
+ # First check that we can read it
+ try:
+ journalist_key = encryption_mgr.get_journalist_public_key()
+ except Exception as e:
+ print(f"ERROR: Unable to read journalist public key: {e}", file=sys.stderr)
+ app.logger.error(f"ERROR: Unable to read journalist public key: {e}")
+ sys.exit(1)
+ # And then what we read is valid
+ try:
+ redwood.is_valid_public_key(journalist_key)
+ except redwood.RedwoodError as e:
+ print(f"ERROR: Journalist public key is not valid: {e}", file=sys.stderr)
+ app.logger.error(f"ERROR: Journalist public key is not valid: {e}")
+ sys.exit(1)
if __name__ == "__main__": # pragma: no cover
+ validate_journalist_key()
+ prime_keycache()
debug = getattr(config, "env", "prod") != "prod"
# nosemgrep: python.flask.security.audit.app-run-param-config.avoid_app_run_with_bad_host
app.run(debug=debug, host="0.0.0.0", port=8081)
| {"golden_diff": "diff --git a/securedrop/journalist.py b/securedrop/journalist.py\n--- a/securedrop/journalist.py\n+++ b/securedrop/journalist.py\n@@ -1,9 +1,13 @@\n+import sys\n+\n from encryption import EncryptionManager, GpgKeyNotFoundError\n from execution import asynchronous\n from journalist_app import create_app\n from models import Source\n from sdconfig import SecureDropConfig\n \n+import redwood\n+\n config = SecureDropConfig.get_current()\n # app is imported by journalist.wsgi\n app = create_app(config)\n@@ -21,10 +25,28 @@\n pass\n \n \n-prime_keycache()\n+def validate_journalist_key() -> None:\n+ \"\"\"Verify the journalist PGP key is valid\"\"\"\n+ encryption_mgr = EncryptionManager.get_default()\n+ # First check that we can read it\n+ try:\n+ journalist_key = encryption_mgr.get_journalist_public_key()\n+ except Exception as e:\n+ print(f\"ERROR: Unable to read journalist public key: {e}\", file=sys.stderr)\n+ app.logger.error(f\"ERROR: Unable to read journalist public key: {e}\")\n+ sys.exit(1)\n+ # And then what we read is valid\n+ try:\n+ redwood.is_valid_public_key(journalist_key)\n+ except redwood.RedwoodError as e:\n+ print(f\"ERROR: Journalist public key is not valid: {e}\", file=sys.stderr)\n+ app.logger.error(f\"ERROR: Journalist public key is not valid: {e}\")\n+ sys.exit(1)\n \n \n if __name__ == \"__main__\": # pragma: no cover\n+ validate_journalist_key()\n+ prime_keycache()\n debug = getattr(config, \"env\", \"prod\") != \"prod\"\n # nosemgrep: python.flask.security.audit.app-run-param-config.avoid_app_run_with_bad_host\n app.run(debug=debug, host=\"0.0.0.0\", port=8081)\n", "issue": "determine post-upgrade failure-mode for a SHA-1-signed submission key\n## Description\r\n\r\nAfter #6948 (for #6399), redwood will refuse to encrypt to a submission key with a SHA-1 signature.\r\n\r\nAfter #6928, `securedrop-admin sdconfig` will reject a submission key with a SHA-1 signature. This check guarantees that new and reconfigured instances will comply with #6948.\r\n\r\nWhat will happen to an instance with a SHA-1-signed signature after upgrading to v2.7.0?\r\n\r\n## Possible approaches\r\n\r\n| Option | Documentation changes | Code changes | Implication |\r\n| --- | --- | --- | --- |\r\n| Fail open, but log | optional | \u2713 | Admin must monitor logs and/or OSSEC alerts. |\r\n| Fail open, but document | \u2713 | \u2717 | Admin must monitor release notes or check documentation. |\r\n| Fail closed | optional | \u2713[1] | Admin can contact us for help. |\r\n\r\n**Notes:**\r\n1. 
@legoktm observes that, without a code change to handle this case, Apache will come back up after reboot even if the `postinst` script fails under `unattended-upgrades`.\n", "before_files": [{"content": "from encryption import EncryptionManager, GpgKeyNotFoundError\nfrom execution import asynchronous\nfrom journalist_app import create_app\nfrom models import Source\nfrom sdconfig import SecureDropConfig\n\nconfig = SecureDropConfig.get_current()\n# app is imported by journalist.wsgi\napp = create_app(config)\n\n\n@asynchronous\ndef prime_keycache() -> None:\n \"\"\"Pre-load the source public keys into Redis.\"\"\"\n with app.app_context():\n encryption_mgr = EncryptionManager.get_default()\n for source in Source.query.filter_by(pending=False, deleted_at=None).all():\n try:\n encryption_mgr.get_source_public_key(source.filesystem_id)\n except GpgKeyNotFoundError:\n pass\n\n\nprime_keycache()\n\n\nif __name__ == \"__main__\": # pragma: no cover\n debug = getattr(config, \"env\", \"prod\") != \"prod\"\n # nosemgrep: python.flask.security.audit.app-run-param-config.avoid_app_run_with_bad_host\n app.run(debug=debug, host=\"0.0.0.0\", port=8081)\n", "path": "securedrop/journalist.py"}]} | 1,075 | 441 |
gh_patches_debug_12398 | rasdani/github-patches | git_diff | pre-commit__pre-commit-1590 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
improve `healthy()` check for node
See pre-commit/actions#45
for `language_version: system` hooks this looks like:
```
eslint...................................................................Failed
- hook id: eslint
- exit code: 127
/home/runner/.cache/pre-commit/repoibq27hfw/node_env-system/bin/node: line 5: /opt/hostedtoolcache/node/14.8.0/x64/bin/node: No such file or directory
/home/runner/.cache/pre-commit/repoibq27hfw/node_env-system/bin/node: line 5: /opt/hostedtoolcache/node/14.8.0/x64/bin/node: No such file or directory
##[error]The process '/opt/hostedtoolcache/Python/3.8.5/x64/bin/pre-commit' failed with exit code 1
```
</issue>
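The failure shows that the cached node environment can point at a node binary that no longer exists, so a `healthy()` check based only on the environment directory passes while hooks then fail with exit code 127. A stronger check is to actually invoke `node --version` inside the environment, roughly as sketched below (reusing pre-commit's existing `in_env` and `cmd_output_b` helpers, as the eventual fix does).

```
def healthy(prefix: Prefix, language_version: str) -> bool:
    # Run node inside the cached environment; a stale symlink to a removed
    # toolcache node fails here instead of at hook time.
    with in_env(prefix, language_version):
        retcode, _, _ = cmd_output_b('node', '--version', retcode=None)
    return retcode == 0
```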
<code>
[start of pre_commit/languages/node.py]
1 import contextlib
2 import functools
3 import os
4 import sys
5 from typing import Generator
6 from typing import Sequence
7 from typing import Tuple
8
9 import pre_commit.constants as C
10 from pre_commit import parse_shebang
11 from pre_commit.envcontext import envcontext
12 from pre_commit.envcontext import PatchesT
13 from pre_commit.envcontext import UNSET
14 from pre_commit.envcontext import Var
15 from pre_commit.hook import Hook
16 from pre_commit.languages import helpers
17 from pre_commit.languages.python import bin_dir
18 from pre_commit.prefix import Prefix
19 from pre_commit.util import clean_path_on_failure
20 from pre_commit.util import cmd_output
21 from pre_commit.util import cmd_output_b
22
23 ENVIRONMENT_DIR = 'node_env'
24 healthy = helpers.basic_healthy
25
26
27 @functools.lru_cache(maxsize=1)
28 def get_default_version() -> str:
29 # nodeenv does not yet support `-n system` on windows
30 if sys.platform == 'win32':
31 return C.DEFAULT
32 # if node is already installed, we can save a bunch of setup time by
33 # using the installed version
34 elif all(parse_shebang.find_executable(exe) for exe in ('node', 'npm')):
35 return 'system'
36 else:
37 return C.DEFAULT
38
39
40 def _envdir(prefix: Prefix, version: str) -> str:
41 directory = helpers.environment_dir(ENVIRONMENT_DIR, version)
42 return prefix.path(directory)
43
44
45 def get_env_patch(venv: str) -> PatchesT:
46 if sys.platform == 'cygwin': # pragma: no cover
47 _, win_venv, _ = cmd_output('cygpath', '-w', venv)
48 install_prefix = fr'{win_venv.strip()}\bin'
49 lib_dir = 'lib'
50 elif sys.platform == 'win32': # pragma: no cover
51 install_prefix = bin_dir(venv)
52 lib_dir = 'Scripts'
53 else: # pragma: win32 no cover
54 install_prefix = venv
55 lib_dir = 'lib'
56 return (
57 ('NODE_VIRTUAL_ENV', venv),
58 ('NPM_CONFIG_PREFIX', install_prefix),
59 ('npm_config_prefix', install_prefix),
60 ('NPM_CONFIG_USERCONFIG', UNSET),
61 ('npm_config_userconfig', UNSET),
62 ('NODE_PATH', os.path.join(venv, lib_dir, 'node_modules')),
63 ('PATH', (bin_dir(venv), os.pathsep, Var('PATH'))),
64 )
65
66
67 @contextlib.contextmanager
68 def in_env(
69 prefix: Prefix,
70 language_version: str,
71 ) -> Generator[None, None, None]:
72 with envcontext(get_env_patch(_envdir(prefix, language_version))):
73 yield
74
75
76 def install_environment(
77 prefix: Prefix, version: str, additional_dependencies: Sequence[str],
78 ) -> None:
79 additional_dependencies = tuple(additional_dependencies)
80 assert prefix.exists('package.json')
81 envdir = _envdir(prefix, version)
82
83 # https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx?f=255&MSPPError=-2147217396#maxpath
84 if sys.platform == 'win32': # pragma: no cover
85 envdir = fr'\\?\{os.path.normpath(envdir)}'
86 with clean_path_on_failure(envdir):
87 cmd = [
88 sys.executable, '-mnodeenv', '--prebuilt', '--clean-src', envdir,
89 ]
90 if version != C.DEFAULT:
91 cmd.extend(['-n', version])
92 cmd_output_b(*cmd)
93
94 with in_env(prefix, version):
95 # https://npm.community/t/npm-install-g-git-vs-git-clone-cd-npm-install-g/5449
96 # install as if we installed from git
97 helpers.run_setup_cmd(prefix, ('npm', 'install'))
98 helpers.run_setup_cmd(
99 prefix,
100 ('npm', 'install', '-g', '.', *additional_dependencies),
101 )
102
103
104 def run_hook(
105 hook: Hook,
106 file_args: Sequence[str],
107 color: bool,
108 ) -> Tuple[int, bytes]:
109 with in_env(hook.prefix, hook.language_version):
110 return helpers.run_xargs(hook, hook.cmd, file_args, color=color)
111
[end of pre_commit/languages/node.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pre_commit/languages/node.py b/pre_commit/languages/node.py
--- a/pre_commit/languages/node.py
+++ b/pre_commit/languages/node.py
@@ -21,7 +21,6 @@
from pre_commit.util import cmd_output_b
ENVIRONMENT_DIR = 'node_env'
-healthy = helpers.basic_healthy
@functools.lru_cache(maxsize=1)
@@ -73,6 +72,12 @@
yield
+def healthy(prefix: Prefix, language_version: str) -> bool:
+ with in_env(prefix, language_version):
+ retcode, _, _ = cmd_output_b('node', '--version', retcode=None)
+ return retcode == 0
+
+
def install_environment(
prefix: Prefix, version: str, additional_dependencies: Sequence[str],
) -> None:
| {"golden_diff": "diff --git a/pre_commit/languages/node.py b/pre_commit/languages/node.py\n--- a/pre_commit/languages/node.py\n+++ b/pre_commit/languages/node.py\n@@ -21,7 +21,6 @@\n from pre_commit.util import cmd_output_b\n \n ENVIRONMENT_DIR = 'node_env'\n-healthy = helpers.basic_healthy\n \n \n @functools.lru_cache(maxsize=1)\n@@ -73,6 +72,12 @@\n yield\n \n \n+def healthy(prefix: Prefix, language_version: str) -> bool:\n+ with in_env(prefix, language_version):\n+ retcode, _, _ = cmd_output_b('node', '--version', retcode=None)\n+ return retcode == 0\n+\n+\n def install_environment(\n prefix: Prefix, version: str, additional_dependencies: Sequence[str],\n ) -> None:\n", "issue": "improve `healthy()` check for node\nSee pre-commit/actions#45\r\n\r\nfor `language_version: system` hooks this looks like:\r\n\r\n```\r\neslint...................................................................Failed\r\n- hook id: eslint\r\n- exit code: 127\r\n\r\n/home/runner/.cache/pre-commit/repoibq27hfw/node_env-system/bin/node: line 5: /opt/hostedtoolcache/node/14.8.0/x64/bin/node: No such file or directory\r\n/home/runner/.cache/pre-commit/repoibq27hfw/node_env-system/bin/node: line 5: /opt/hostedtoolcache/node/14.8.0/x64/bin/node: No such file or directory\r\n\r\n##[error]The process '/opt/hostedtoolcache/Python/3.8.5/x64/bin/pre-commit' failed with exit code 1\r\n```\r\n\r\n\n", "before_files": [{"content": "import contextlib\nimport functools\nimport os\nimport sys\nfrom typing import Generator\nfrom typing import Sequence\nfrom typing import Tuple\n\nimport pre_commit.constants as C\nfrom pre_commit import parse_shebang\nfrom pre_commit.envcontext import envcontext\nfrom pre_commit.envcontext import PatchesT\nfrom pre_commit.envcontext import UNSET\nfrom pre_commit.envcontext import Var\nfrom pre_commit.hook import Hook\nfrom pre_commit.languages import helpers\nfrom pre_commit.languages.python import bin_dir\nfrom pre_commit.prefix import Prefix\nfrom pre_commit.util import clean_path_on_failure\nfrom pre_commit.util import cmd_output\nfrom pre_commit.util import cmd_output_b\n\nENVIRONMENT_DIR = 'node_env'\nhealthy = helpers.basic_healthy\n\n\[email protected]_cache(maxsize=1)\ndef get_default_version() -> str:\n # nodeenv does not yet support `-n system` on windows\n if sys.platform == 'win32':\n return C.DEFAULT\n # if node is already installed, we can save a bunch of setup time by\n # using the installed version\n elif all(parse_shebang.find_executable(exe) for exe in ('node', 'npm')):\n return 'system'\n else:\n return C.DEFAULT\n\n\ndef _envdir(prefix: Prefix, version: str) -> str:\n directory = helpers.environment_dir(ENVIRONMENT_DIR, version)\n return prefix.path(directory)\n\n\ndef get_env_patch(venv: str) -> PatchesT:\n if sys.platform == 'cygwin': # pragma: no cover\n _, win_venv, _ = cmd_output('cygpath', '-w', venv)\n install_prefix = fr'{win_venv.strip()}\\bin'\n lib_dir = 'lib'\n elif sys.platform == 'win32': # pragma: no cover\n install_prefix = bin_dir(venv)\n lib_dir = 'Scripts'\n else: # pragma: win32 no cover\n install_prefix = venv\n lib_dir = 'lib'\n return (\n ('NODE_VIRTUAL_ENV', venv),\n ('NPM_CONFIG_PREFIX', install_prefix),\n ('npm_config_prefix', install_prefix),\n ('NPM_CONFIG_USERCONFIG', UNSET),\n ('npm_config_userconfig', UNSET),\n ('NODE_PATH', os.path.join(venv, lib_dir, 'node_modules')),\n ('PATH', (bin_dir(venv), os.pathsep, Var('PATH'))),\n )\n\n\[email protected]\ndef in_env(\n prefix: Prefix,\n language_version: str,\n) -> Generator[None, None, None]:\n with 
envcontext(get_env_patch(_envdir(prefix, language_version))):\n yield\n\n\ndef install_environment(\n prefix: Prefix, version: str, additional_dependencies: Sequence[str],\n) -> None:\n additional_dependencies = tuple(additional_dependencies)\n assert prefix.exists('package.json')\n envdir = _envdir(prefix, version)\n\n # https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx?f=255&MSPPError=-2147217396#maxpath\n if sys.platform == 'win32': # pragma: no cover\n envdir = fr'\\\\?\\{os.path.normpath(envdir)}'\n with clean_path_on_failure(envdir):\n cmd = [\n sys.executable, '-mnodeenv', '--prebuilt', '--clean-src', envdir,\n ]\n if version != C.DEFAULT:\n cmd.extend(['-n', version])\n cmd_output_b(*cmd)\n\n with in_env(prefix, version):\n # https://npm.community/t/npm-install-g-git-vs-git-clone-cd-npm-install-g/5449\n # install as if we installed from git\n helpers.run_setup_cmd(prefix, ('npm', 'install'))\n helpers.run_setup_cmd(\n prefix,\n ('npm', 'install', '-g', '.', *additional_dependencies),\n )\n\n\ndef run_hook(\n hook: Hook,\n file_args: Sequence[str],\n color: bool,\n) -> Tuple[int, bytes]:\n with in_env(hook.prefix, hook.language_version):\n return helpers.run_xargs(hook, hook.cmd, file_args, color=color)\n", "path": "pre_commit/languages/node.py"}]} | 1,886 | 184 |
gh_patches_debug_16984 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-1872 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Spider sallybeauty is broken
During the global build at 2021-05-26-14-42-23, spider **sallybeauty** failed with **2712 features** and **5 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/sallybeauty.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/sallybeauty.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/sallybeauty.geojson))
</issue>
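The linked log is not reproduced here, but the fix applied below guards against store records that lack a `storeHours` field, which would otherwise raise a `KeyError` inside `parse`. A defensive sketch of that change:

```
# Inside SallySpider.parse(), replacing the unconditional row["storeHours"] access:
store_hours = row.get("storeHours")
if store_hours:
    hours = self.parse_hours(store_hours)
    if hours:
        properties["opening_hours"] = hours
```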
<code>
[start of locations/spiders/sallybeauty.py]
1 # -*- coding: utf-8 -*-
2 import scrapy
3 from locations.items import GeojsonPointItem
4 from urllib.parse import urlencode
5 import json
6 import csv
7 from locations.hours import OpeningHours
8 from scrapy.selector import Selector
9
10
11 class SallySpider(scrapy.Spider):
12 name = "sallybeauty"
13 item_attributes = { 'brand': "Sally Beauty" }
14 allowed_domains = ["sallybeauty.com"]
15
16 def start_requests(self):
17 base_url = "https://www.sallybeauty.com/on/demandware.store/Sites-SA-Site/default/Stores-FindStores?"
18
19 point_files = [
20 './locations/searchable_points/us_centroids_100mile_radius.csv',
21 './locations/searchable_points/ca_centroids_100mile_radius.csv'
22 ]
23
24 params = {
25 "showmap": "true",
26 "radius": "100",
27 }
28
29 for point_file in point_files:
30 with open(point_file) as points:
31 next(points)
32 for point in points:
33 _, lat, lon = point.strip().split(',')
34 params.update({"lat": lat, "long": lon})
35 yield scrapy.Request(url=base_url + urlencode(params))
36
37 def parse_hours(self, hours):
38 hrs = Selector(text=hours)
39 days = hrs.xpath('//div[@class="store-hours-day"]/text()').extract()
40 hours = hrs.xpath('//div[@class="store-hours-day"]/span/text()').extract()
41
42 opening_hours = OpeningHours()
43
44 for d, h in zip(days, hours):
45 try:
46 day = d.strip(': ')
47 open_time, close_time = h.split(' - ')
48 open_time = open_time.lstrip('0')
49 opening_hours.add_range(day=day[:2],
50 open_time=open_time,
51 close_time=close_time,
52 time_format="%I:%M %p")
53 except:
54 continue
55
56 return opening_hours.as_opening_hours()
57
58 def parse(self, response):
59 jdata = json.loads(response.body_as_unicode())
60
61 for row in jdata.get('stores',[]):
62
63 properties = {
64 'ref': row["ID"],
65 'name': row["name"],
66 'addr_full': " ".join([row["address1"], row.get("address2", "") or ""]).strip(),
67 'city': row["city"],
68 'postcode': row["postalCode"],
69 'lat': row["latitude"],
70 'lon': row["longitude"],
71 'phone': row["phone"],
72 'state': row["stateCode"],
73 }
74
75 hours = self.parse_hours(row["storeHours"])
76 if hours:
77 properties['opening_hours'] = hours
78
79 yield GeojsonPointItem(**properties)
80
[end of locations/spiders/sallybeauty.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/locations/spiders/sallybeauty.py b/locations/spiders/sallybeauty.py
--- a/locations/spiders/sallybeauty.py
+++ b/locations/spiders/sallybeauty.py
@@ -58,7 +58,7 @@
def parse(self, response):
jdata = json.loads(response.body_as_unicode())
- for row in jdata.get('stores',[]):
+ for row in jdata.get('stores', []):
properties = {
'ref': row["ID"],
@@ -72,8 +72,11 @@
'state': row["stateCode"],
}
- hours = self.parse_hours(row["storeHours"])
- if hours:
- properties['opening_hours'] = hours
+ store_hours = row.get("storeHours")
+ if store_hours:
+ hours = self.parse_hours(store_hours)
+
+ if hours:
+ properties['opening_hours'] = hours
yield GeojsonPointItem(**properties)
| {"golden_diff": "diff --git a/locations/spiders/sallybeauty.py b/locations/spiders/sallybeauty.py\n--- a/locations/spiders/sallybeauty.py\n+++ b/locations/spiders/sallybeauty.py\n@@ -58,7 +58,7 @@\n def parse(self, response):\n jdata = json.loads(response.body_as_unicode())\n \n- for row in jdata.get('stores',[]):\n+ for row in jdata.get('stores', []):\n \n properties = {\n 'ref': row[\"ID\"],\n@@ -72,8 +72,11 @@\n 'state': row[\"stateCode\"],\n }\n \n- hours = self.parse_hours(row[\"storeHours\"])\n- if hours:\n- properties['opening_hours'] = hours\n+ store_hours = row.get(\"storeHours\")\n+ if store_hours:\n+ hours = self.parse_hours(store_hours)\n+\n+ if hours:\n+ properties['opening_hours'] = hours\n \n yield GeojsonPointItem(**properties)\n", "issue": "Spider sallybeauty is broken\nDuring the global build at 2021-05-26-14-42-23, spider **sallybeauty** failed with **2712 features** and **5 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/sallybeauty.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/sallybeauty.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/sallybeauty.geojson))\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom locations.items import GeojsonPointItem\nfrom urllib.parse import urlencode\nimport json\nimport csv\nfrom locations.hours import OpeningHours\nfrom scrapy.selector import Selector\n\n\nclass SallySpider(scrapy.Spider):\n name = \"sallybeauty\"\n item_attributes = { 'brand': \"Sally Beauty\" }\n allowed_domains = [\"sallybeauty.com\"]\n\n def start_requests(self):\n base_url = \"https://www.sallybeauty.com/on/demandware.store/Sites-SA-Site/default/Stores-FindStores?\"\n\n point_files = [\n './locations/searchable_points/us_centroids_100mile_radius.csv',\n './locations/searchable_points/ca_centroids_100mile_radius.csv'\n ]\n\n params = {\n \"showmap\": \"true\",\n \"radius\": \"100\",\n }\n\n for point_file in point_files:\n with open(point_file) as points:\n next(points)\n for point in points:\n _, lat, lon = point.strip().split(',')\n params.update({\"lat\": lat, \"long\": lon})\n yield scrapy.Request(url=base_url + urlencode(params))\n\n def parse_hours(self, hours):\n hrs = Selector(text=hours)\n days = hrs.xpath('//div[@class=\"store-hours-day\"]/text()').extract()\n hours = hrs.xpath('//div[@class=\"store-hours-day\"]/span/text()').extract()\n\n opening_hours = OpeningHours()\n\n for d, h in zip(days, hours):\n try:\n day = d.strip(': ')\n open_time, close_time = h.split(' - ')\n open_time = open_time.lstrip('0')\n opening_hours.add_range(day=day[:2],\n open_time=open_time,\n close_time=close_time,\n time_format=\"%I:%M %p\")\n except:\n continue\n\n return opening_hours.as_opening_hours()\n\n def parse(self, response):\n jdata = json.loads(response.body_as_unicode())\n\n for row in jdata.get('stores',[]):\n\n properties = {\n 'ref': row[\"ID\"],\n 'name': row[\"name\"],\n 'addr_full': \" \".join([row[\"address1\"], row.get(\"address2\", \"\") or \"\"]).strip(),\n 'city': row[\"city\"],\n 'postcode': row[\"postalCode\"],\n 'lat': row[\"latitude\"],\n 'lon': row[\"longitude\"],\n 'phone': row[\"phone\"],\n 'state': row[\"stateCode\"],\n }\n\n hours = self.parse_hours(row[\"storeHours\"])\n if hours:\n properties['opening_hours'] = hours\n\n yield GeojsonPointItem(**properties)\n", "path": "locations/spiders/sallybeauty.py"}]} | 1,467 | 223 |
gh_patches_debug_55584 | rasdani/github-patches | git_diff | wagtail__wagtail-1873 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Migrating to 1.1 Migration File Errors
I am attempting to migrate to 1.1 and I am getting an error involving the migration files.
```
Migration wagtailcore.0017_change_edit_page_permission_description dependencies reference nonexistent parent node (u'wagtailcore', u'0001_squashed_0016_change_page_url_path_to_text_field')
```
The last migration for wagtail core in my migrations table is 0015. Since 0017 refers to 0001_squashed_0016 as a dependency and since I have not applied that migration, it's turning up as an error.
I tried manually applying 0016, but the error is preventing that from happening.
I know the issue queue is not intended for support questions, but I was speaking in the #django irc channel and they told me to check and see if the migrations were autogenerated. They said that normally migrations refer to the one before it and not the squashed ones.
</issue>
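The usual remedy, and the one taken in the fix further down, is for `0017` to depend on the plain `0016` migration instead of the squashed node; Django resolves a dependency on a replaced migration through the squashed migration's `replaces` list, so fresh installs still work while partially-migrated databases can find the parent. A sketch of the corrected dependency:

```
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        # Depend on the plain 0016 migration rather than the squashed
        # 0001_squashed_0016 node, so databases that applied 0001-0015
        # individually can still resolve the graph.
        ('wagtailcore', '0016_change_page_url_path_to_text_field'),
    ]
```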
<code>
[start of wagtail/wagtailcore/migrations/0017_change_edit_page_permission_description.py]
1 # -*- coding: utf-8 -*-
2 from __future__ import unicode_literals
3
4 from django.db import models, migrations
5
6
7 class Migration(migrations.Migration):
8
9 dependencies = [
10 ('wagtailcore', '0001_squashed_0016_change_page_url_path_to_text_field'),
11 ]
12
13 operations = [
14 migrations.AlterField(
15 model_name='grouppagepermission',
16 name='permission_type',
17 field=models.CharField(choices=[('add', 'Add/edit pages you own'), ('edit', 'Edit any page'), ('publish', 'Publish any page'), ('lock', 'Lock/unlock any page')], max_length=20, verbose_name='Permission type'),
18 preserve_default=True,
19 ),
20 ]
21
[end of wagtail/wagtailcore/migrations/0017_change_edit_page_permission_description.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/wagtail/wagtailcore/migrations/0017_change_edit_page_permission_description.py b/wagtail/wagtailcore/migrations/0017_change_edit_page_permission_description.py
--- a/wagtail/wagtailcore/migrations/0017_change_edit_page_permission_description.py
+++ b/wagtail/wagtailcore/migrations/0017_change_edit_page_permission_description.py
@@ -7,7 +7,7 @@
class Migration(migrations.Migration):
dependencies = [
- ('wagtailcore', '0001_squashed_0016_change_page_url_path_to_text_field'),
+ ('wagtailcore', '0016_change_page_url_path_to_text_field'),
]
operations = [
| {"golden_diff": "diff --git a/wagtail/wagtailcore/migrations/0017_change_edit_page_permission_description.py b/wagtail/wagtailcore/migrations/0017_change_edit_page_permission_description.py\n--- a/wagtail/wagtailcore/migrations/0017_change_edit_page_permission_description.py\n+++ b/wagtail/wagtailcore/migrations/0017_change_edit_page_permission_description.py\n@@ -7,7 +7,7 @@\n class Migration(migrations.Migration):\n \n dependencies = [\n- ('wagtailcore', '0001_squashed_0016_change_page_url_path_to_text_field'),\n+ ('wagtailcore', '0016_change_page_url_path_to_text_field'),\n ]\n \n operations = [\n", "issue": "Migrating to 1.1 Migration File Errors\nI am attempting to migrate to 1.1 and I am getting an error involving the migration files.\n\n```\nMigration wagtailcore.0017_change_edit_page_permission_description dependencies reference nonexistent parent node (u'wagtailcore', u'0001_squashed_0016_change_page_url_path_to_text_field')\n```\n\nThe last migration for wagtail core in my migrations table is 0015. Since 0017 refers to 0001_squashed_0016 as a dependency and since I have not applied that migration, it's turn up as an error.\n\nI tried manually applying 0016, but the error is preventing that from happening.\n\nI know the issue queue is not intended for support questions, but I was speaking in the #django irc channel and they told me to check and see if the migrations were autogenerated. They said that normally migrations refer to the one before it and not the squashed ones.\n\nMigrating to 1.1 Migration File Errors\nI am attempting to migrate to 1.1 and I am getting an error involving the migration files.\n\n```\nMigration wagtailcore.0017_change_edit_page_permission_description dependencies reference nonexistent parent node (u'wagtailcore', u'0001_squashed_0016_change_page_url_path_to_text_field')\n```\n\nThe last migration for wagtail core in my migrations table is 0015. Since 0017 refers to 0001_squashed_0016 as a dependency and since I have not applied that migration, it's turn up as an error.\n\nI tried manually applying 0016, but the error is preventing that from happening.\n\nI know the issue queue is not intended for support questions, but I was speaking in the #django irc channel and they told me to check and see if the migrations were autogenerated. They said that normally migrations refer to the one before it and not the squashed ones.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('wagtailcore', '0001_squashed_0016_change_page_url_path_to_text_field'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='grouppagepermission',\n name='permission_type',\n field=models.CharField(choices=[('add', 'Add/edit pages you own'), ('edit', 'Edit any page'), ('publish', 'Publish any page'), ('lock', 'Lock/unlock any page')], max_length=20, verbose_name='Permission type'),\n preserve_default=True,\n ),\n ]\n", "path": "wagtail/wagtailcore/migrations/0017_change_edit_page_permission_description.py"}]} | 1,192 | 172 |
gh_patches_debug_33817 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-contrib-530 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
opentelemetry-instrument command fails if incompatible instrumentation is found
If an instrumentation is installed for a library that is not found in the environment, the instrument command raises the following exception:
```
❯ opentelemetry-instrument python main.py
Instrumenting of flask failed
Traceback (most recent call last):
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py", line 71, in _load_instrumentors
conflict = get_dist_dependency_conflicts(entry_point.dist)
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/dependencies.py", line 33, in get_dist_dependency_conflicts
return get_dependency_conflicts(deps)
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/dependencies.py", line 41, in get_dependency_conflicts
get_distribution(str(dep))
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/pkg_resources/__init__.py", line 482, in get_distribution
dist = get_provider(dist)
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/pkg_resources/__init__.py", line 358, in get_provider
return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
IndexError: list index out of range
Failed to auto initialize opentelemetry
Traceback (most recent call last):
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py", line 111, in initialize
_load_instrumentors(distro)
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py", line 85, in _load_instrumentors
raise exc
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py", line 71, in _load_instrumentors
conflict = get_dist_dependency_conflicts(entry_point.dist)
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/dependencies.py", line 33, in get_dist_dependency_conflicts
return get_dependency_conflicts(deps)
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/dependencies.py", line 41, in get_dependency_conflicts
get_distribution(str(dep))
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/pkg_resources/__init__.py", line 482, in get_distribution
dist = get_provider(dist)
File "/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/pkg_resources/__init__.py", line 358, in get_provider
return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
IndexError: list index out of range
```
The bootstrap command does not install any instrumentations for libraries that are not present in the environment, so this would only happen if someone manually installed an instrumentation package for a library they're not using. This is not a deal breaker and doesn't require an immediate hotfix. That said, this IS a bug, as the intended behavior of the instrument command is to silently ignore such instrumentations.
</issue>
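Below is a minimal sketch, not taken from the repository, of the defensive check the traceback points at: any requirement string that `pkg_resources.get_distribution()` cannot resolve (including ones it fails to parse) is reported as a conflict instead of crashing the `instrument` command. The function name and message format are illustrative assumptions.

```python
from pkg_resources import DistributionNotFound, VersionConflict, get_distribution


def describe_conflict(dep):
    """Return a conflict description for the requirement string `dep`,
    or None when the installed distribution satisfies it."""
    try:
        get_distribution(dep)          # e.g. "flask >= 1.0"
    except VersionConflict as exc:
        return 'requested "%s" but found "%s"' % (dep, exc.dist)
    except DistributionNotFound:
        return '"%s" is not installed' % dep
    except Exception as exc:           # e.g. the IndexError shown in the traceback
        return 'could not resolve "%s": %s' % (dep, exc)
    return None
```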
<code>
[start of opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py]
1 from typing import Collection, Optional
2
3 from pkg_resources import (
4 Distribution,
5 DistributionNotFound,
6 VersionConflict,
7 get_distribution,
8 )
9
10
11 class DependencyConflict:
12 required: str = None
13 found: Optional[str] = None
14
15 def __init__(self, required, found=None):
16 self.required = required
17 self.found = found
18
19 def __str__(self):
20 return 'DependencyConflict: requested: "{0}" but found: "{1}"'.format(
21 self.required, self.found
22 )
23
24
25 def get_dist_dependency_conflicts(
26 dist: Distribution,
27 ) -> Optional[DependencyConflict]:
28 deps = [
29 dep
30 for dep in dist.requires(("instruments",))
31 if dep not in dist.requires()
32 ]
33 return get_dependency_conflicts(deps)
34
35
36 def get_dependency_conflicts(
37 deps: Collection[str],
38 ) -> Optional[DependencyConflict]:
39 for dep in deps:
40 try:
41 get_distribution(str(dep))
42 except VersionConflict as exc:
43 return DependencyConflict(dep, exc.dist)
44 except DistributionNotFound:
45 return DependencyConflict(dep)
46 return None
47
[end of opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py
+++ b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py
@@ -1,12 +1,16 @@
+from logging import getLogger
from typing import Collection, Optional
from pkg_resources import (
Distribution,
DistributionNotFound,
+ RequirementParseError,
VersionConflict,
get_distribution,
)
+logger = getLogger(__file__)
+
class DependencyConflict:
required: str = None
@@ -25,12 +29,19 @@
def get_dist_dependency_conflicts(
dist: Distribution,
) -> Optional[DependencyConflict]:
- deps = [
- dep
- for dep in dist.requires(("instruments",))
- if dep not in dist.requires()
- ]
- return get_dependency_conflicts(deps)
+ main_deps = dist.requires()
+ instrumentation_deps = []
+ for dep in dist.requires(("instruments",)):
+ if dep not in main_deps:
+ # we set marker to none so string representation of the dependency looks like
+ # requests ~= 1.0
+ # instead of
+ # requests ~= 1.0; extra = "instruments"
+ # which does not work with `get_distribution()`
+ dep.marker = None
+ instrumentation_deps.append(str(dep))
+
+ return get_dependency_conflicts(instrumentation_deps)
def get_dependency_conflicts(
@@ -38,9 +49,16 @@
) -> Optional[DependencyConflict]:
for dep in deps:
try:
- get_distribution(str(dep))
+ get_distribution(dep)
except VersionConflict as exc:
return DependencyConflict(dep, exc.dist)
except DistributionNotFound:
return DependencyConflict(dep)
+ except RequirementParseError as exc:
+ logger.warning(
+ 'error parsing dependency, reporting as a conflict: "%s" - %s',
+ dep,
+ exc,
+ )
+ return DependencyConflict(dep)
return None
| {"golden_diff": "diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py\n--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py\n+++ b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py\n@@ -1,12 +1,16 @@\n+from logging import getLogger\n from typing import Collection, Optional\n \n from pkg_resources import (\n Distribution,\n DistributionNotFound,\n+ RequirementParseError,\n VersionConflict,\n get_distribution,\n )\n \n+logger = getLogger(__file__)\n+\n \n class DependencyConflict:\n required: str = None\n@@ -25,12 +29,19 @@\n def get_dist_dependency_conflicts(\n dist: Distribution,\n ) -> Optional[DependencyConflict]:\n- deps = [\n- dep\n- for dep in dist.requires((\"instruments\",))\n- if dep not in dist.requires()\n- ]\n- return get_dependency_conflicts(deps)\n+ main_deps = dist.requires()\n+ instrumentation_deps = []\n+ for dep in dist.requires((\"instruments\",)):\n+ if dep not in main_deps:\n+ # we set marker to none so string representation of the dependency looks like\n+ # requests ~= 1.0\n+ # instead of\n+ # requests ~= 1.0; extra = \"instruments\"\n+ # which does not work with `get_distribution()`\n+ dep.marker = None\n+ instrumentation_deps.append(str(dep))\n+\n+ return get_dependency_conflicts(instrumentation_deps)\n \n \n def get_dependency_conflicts(\n@@ -38,9 +49,16 @@\n ) -> Optional[DependencyConflict]:\n for dep in deps:\n try:\n- get_distribution(str(dep))\n+ get_distribution(dep)\n except VersionConflict as exc:\n return DependencyConflict(dep, exc.dist)\n except DistributionNotFound:\n return DependencyConflict(dep)\n+ except RequirementParseError as exc:\n+ logger.warning(\n+ 'error parsing dependency, reporting as a conflict: \"%s\" - %s',\n+ dep,\n+ exc,\n+ )\n+ return DependencyConflict(dep)\n return None\n", "issue": "opentelemetry-instrument command fails if incompatible instrumentation is found\nIf an instrumentation is installed for a library that is not found in the environment, the instrument command raises the following exception:\r\n\r\n\r\n```\r\n\u276f opentelemetry-instrument python main.py\r\nInstrumenting of flask failed\r\nTraceback (most recent call last):\r\n File \"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py\", line 71, in _load_instrumentors\r\n conflict = get_dist_dependency_conflicts(entry_point.dist)\r\n File \"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/dependencies.py\", line 33, in get_dist_dependency_conflicts\r\n return get_dependency_conflicts(deps)\r\n File \"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/dependencies.py\", line 41, in get_dependency_conflicts\r\n get_distribution(str(dep))\r\n File \"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/pkg_resources/__init__.py\", line 482, in get_distribution\r\n dist = get_provider(dist)\r\n File \"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/pkg_resources/__init__.py\", line 358, in get_provider\r\n return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]\r\nIndexError: list index out of range\r\nFailed to auto initialize opentelemetry\r\nTraceback (most recent call last):\r\n File 
\"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py\", line 111, in initialize\r\n _load_instrumentors(distro)\r\n File \"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py\", line 85, in _load_instrumentors\r\n raise exc\r\n File \"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py\", line 71, in _load_instrumentors\r\n conflict = get_dist_dependency_conflicts(entry_point.dist)\r\n File \"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/dependencies.py\", line 33, in get_dist_dependency_conflicts\r\n return get_dependency_conflicts(deps)\r\n File \"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/opentelemetry/instrumentation/dependencies.py\", line 41, in get_dependency_conflicts\r\n get_distribution(str(dep))\r\n File \"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/pkg_resources/__init__.py\", line 482, in get_distribution\r\n dist = get_provider(dist)\r\n File \"/Users/olone/playground/splunk-otel-py/venv/lib/python3.8/site-packages/pkg_resources/__init__.py\", line 358, in get_provider\r\n return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]\r\nIndexError: list index out of range\r\n```\r\n\r\nbootstrap command does not install any instrumentations for libraries that are not present in the environment so this would only happen if someone manually installed an instrumentation package for a library they're not using. So this is not a deal breaker and doesn't require an immediate hotfix. That said, this IS a bug as the intended behavior of instrument command is to silently ignore such instrumentations.\n", "before_files": [{"content": "from typing import Collection, Optional\n\nfrom pkg_resources import (\n Distribution,\n DistributionNotFound,\n VersionConflict,\n get_distribution,\n)\n\n\nclass DependencyConflict:\n required: str = None\n found: Optional[str] = None\n\n def __init__(self, required, found=None):\n self.required = required\n self.found = found\n\n def __str__(self):\n return 'DependencyConflict: requested: \"{0}\" but found: \"{1}\"'.format(\n self.required, self.found\n )\n\n\ndef get_dist_dependency_conflicts(\n dist: Distribution,\n) -> Optional[DependencyConflict]:\n deps = [\n dep\n for dep in dist.requires((\"instruments\",))\n if dep not in dist.requires()\n ]\n return get_dependency_conflicts(deps)\n\n\ndef get_dependency_conflicts(\n deps: Collection[str],\n) -> Optional[DependencyConflict]:\n for dep in deps:\n try:\n get_distribution(str(dep))\n except VersionConflict as exc:\n return DependencyConflict(dep, exc.dist)\n except DistributionNotFound:\n return DependencyConflict(dep)\n return None\n", "path": "opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py"}]} | 1,742 | 484 |
gh_patches_debug_29287 | rasdani/github-patches | git_diff | weni-ai__bothub-engine-77 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
It is possible to translate an example to the same language as the original.
</issue>
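A sketch of one way to enforce this at the serializer layer is shown below; it is an assumption, not code from the repository. The attribute chain `original_example.repository_update.language` follows the serializers listed in the code section, while the class name and error message are illustrative.

```python
from django.utils.translation import gettext as _
from rest_framework.exceptions import ValidationError


class SameLanguageValidator(object):
    """Reject a translation whose target language equals the example's own language."""

    def __call__(self, attrs):
        original_example = attrs.get('original_example')
        language = attrs.get('language')
        if original_example.repository_update.language == language:
            raise ValidationError(
                {'language': _('Can\'t translate to the same language')})
```

Such a validator could be appended to the new-translation serializer's `validators` list in `__init__`, alongside the existing entities validator.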
<code>
[start of bothub/api/serializers/translate.py]
1 from rest_framework import serializers
2
3 from django.utils.translation import gettext as _
4
5 from bothub.common.models import RepositoryTranslatedExampleEntity
6 from bothub.common.models import RepositoryTranslatedExample
7 from bothub.common.models import RepositoryExample
8
9 from ..validators import CanContributeInRepositoryTranslatedExampleValidator
10 from ..validators import CanContributeInRepositoryExampleValidator
11 from ..validators import TranslatedExampleEntitiesValidator
12
13
14 class RepositoryTranslatedExampleEntitySeralizer(serializers.ModelSerializer):
15 class Meta:
16 model = RepositoryTranslatedExampleEntity
17 fields = [
18 'id',
19 'repository_translated_example',
20 'start',
21 'end',
22 'entity',
23 'created_at',
24 'value',
25 ]
26
27 repository_translated_example = serializers.PrimaryKeyRelatedField(
28 queryset=RepositoryTranslatedExample.objects,
29 validators=[
30 CanContributeInRepositoryTranslatedExampleValidator(),
31 ],
32 help_text='Example translation ID')
33 value = serializers.SerializerMethodField()
34
35 def get_value(self, obj):
36 return obj.value
37
38
39 class RepositoryTranslatedExampleSerializer(serializers.ModelSerializer):
40 class Meta:
41 model = RepositoryTranslatedExample
42 fields = [
43 'id',
44 'original_example',
45 'from_language',
46 'language',
47 'text',
48 'has_valid_entities',
49 'entities',
50 'created_at',
51 ]
52
53 original_example = serializers.PrimaryKeyRelatedField(
54 queryset=RepositoryExample.objects,
55 validators=[
56 CanContributeInRepositoryExampleValidator(),
57 ],
58 help_text=_('Example\'s ID'))
59 from_language = serializers.SerializerMethodField()
60 has_valid_entities = serializers.SerializerMethodField()
61 entities = RepositoryTranslatedExampleEntitySeralizer(
62 many=True,
63 read_only=True)
64
65 def get_from_language(self, obj):
66 return obj.original_example.repository_update.language
67
68 def get_has_valid_entities(self, obj):
69 return obj.has_valid_entities
70
71
72 class NewRepositoryTranslatedExampleEntitySeralizer(
73 serializers.ModelSerializer):
74 class Meta:
75 model = RepositoryTranslatedExampleEntity
76 fields = [
77 'start',
78 'end',
79 'entity',
80 ]
81
82
83 class NewRepositoryTranslatedExampleSerializer(serializers.ModelSerializer):
84 class Meta:
85 model = RepositoryTranslatedExample
86 fields = [
87 'id',
88 'original_example',
89 'language',
90 'text',
91 'has_valid_entities',
92 'entities',
93 ]
94
95 def __init__(self, *args, **kwargs):
96 super().__init__(*args, **kwargs)
97 self.validators.append(TranslatedExampleEntitiesValidator())
98
99 original_example = serializers.PrimaryKeyRelatedField(
100 queryset=RepositoryExample.objects,
101 validators=[
102 CanContributeInRepositoryExampleValidator(),
103 ],
104 help_text=_('Example\'s ID'))
105 has_valid_entities = serializers.SerializerMethodField()
106 entities = NewRepositoryTranslatedExampleEntitySeralizer(
107 many=True,
108 style={'text_field': 'text'})
109
110 def get_has_valid_entities(self, obj):
111 return obj.has_valid_entities
112
113 def create(self, validated_data):
114 entities_data = validated_data.pop('entities')
115
116 translated = self.Meta.model.objects.create(**validated_data)
117 for entity_data in entities_data:
118 RepositoryTranslatedExampleEntity.objects.create(
119 repository_translated_example=translated,
120 **entity_data)
121 return translated
122
[end of bothub/api/serializers/translate.py]
[start of bothub/api/validators.py]
1 from django.utils.translation import gettext as _
2 from rest_framework.exceptions import PermissionDenied
3 from rest_framework.exceptions import ValidationError
4
5 from bothub.common.models import RepositoryTranslatedExample
6
7
8 class CanContributeInRepositoryValidator(object):
9 def __call__(self, value):
10 user_authorization = value.get_user_authorization(
11 self.request.user)
12 if not user_authorization.can_contribute:
13 raise PermissionDenied(
14 _('You can\'t contribute in this repository'))
15
16 def set_context(self, serializer):
17 self.request = serializer.context.get('request')
18
19
20 class CanContributeInRepositoryExampleValidator(object):
21 def __call__(self, value):
22 repository = value.repository_update.repository
23 user_authorization = repository.get_user_authorization(
24 self.request.user)
25 if not user_authorization.can_contribute:
26 raise PermissionDenied(
27 _('You can\'t contribute in this repository'))
28
29 def set_context(self, serializer):
30 self.request = serializer.context.get('request')
31
32
33 class CanContributeInRepositoryTranslatedExampleValidator(object):
34 def __call__(self, value):
35 repository = value.original_example.repository_update.repository
36 user_authorization = repository.get_user_authorization(
37 self.request.user)
38 if not user_authorization.can_contribute:
39 raise PermissionDenied(
40 _('You can\'t contribute in this repository'))
41
42 def set_context(self, serializer):
43 self.request = serializer.context.get('request')
44
45
46 class TranslatedExampleEntitiesValidator(object):
47 def __call__(self, attrs):
48 original_example = attrs.get('original_example')
49 entities_valid = RepositoryTranslatedExample.same_entities_validator(
50 list(map(lambda x: dict(x), attrs.get('entities'))),
51 list(map(lambda x: x.to_dict, original_example.entities.all())))
52 if not entities_valid:
53 raise ValidationError({'entities': _('Invalid entities')})
54
[end of bothub/api/validators.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/bothub/api/serializers/translate.py b/bothub/api/serializers/translate.py
--- a/bothub/api/serializers/translate.py
+++ b/bothub/api/serializers/translate.py
@@ -9,6 +9,7 @@
from ..validators import CanContributeInRepositoryTranslatedExampleValidator
from ..validators import CanContributeInRepositoryExampleValidator
from ..validators import TranslatedExampleEntitiesValidator
+from ..validators import TranslatedExampleLanguageValidator
class RepositoryTranslatedExampleEntitySeralizer(serializers.ModelSerializer):
@@ -95,6 +96,7 @@
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.validators.append(TranslatedExampleEntitiesValidator())
+ self.validators.append(TranslatedExampleLanguageValidator())
original_example = serializers.PrimaryKeyRelatedField(
queryset=RepositoryExample.objects,
diff --git a/bothub/api/validators.py b/bothub/api/validators.py
--- a/bothub/api/validators.py
+++ b/bothub/api/validators.py
@@ -51,3 +51,11 @@
list(map(lambda x: x.to_dict, original_example.entities.all())))
if not entities_valid:
raise ValidationError({'entities': _('Invalid entities')})
+
+
+class TranslatedExampleLanguageValidator(object):
+ def __call__(self, attrs):
+ original_example = attrs.get('original_example')
+ language = attrs.get('language')
+ if original_example.repository_update.language == language:
+ raise ValidationError({'language': _('Can\'t translate to same language')})
| {"golden_diff": "diff --git a/bothub/api/serializers/translate.py b/bothub/api/serializers/translate.py\n--- a/bothub/api/serializers/translate.py\n+++ b/bothub/api/serializers/translate.py\n@@ -9,6 +9,7 @@\n from ..validators import CanContributeInRepositoryTranslatedExampleValidator\n from ..validators import CanContributeInRepositoryExampleValidator\n from ..validators import TranslatedExampleEntitiesValidator\n+from ..validators import TranslatedExampleLanguageValidator\n \n \n class RepositoryTranslatedExampleEntitySeralizer(serializers.ModelSerializer):\n@@ -95,6 +96,7 @@\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.validators.append(TranslatedExampleEntitiesValidator())\n+ self.validators.append(TranslatedExampleLanguageValidator())\n \n original_example = serializers.PrimaryKeyRelatedField(\n queryset=RepositoryExample.objects,\ndiff --git a/bothub/api/validators.py b/bothub/api/validators.py\n--- a/bothub/api/validators.py\n+++ b/bothub/api/validators.py\n@@ -51,3 +51,11 @@\n list(map(lambda x: x.to_dict, original_example.entities.all())))\n if not entities_valid:\n raise ValidationError({'entities': _('Invalid entities')})\n+\n+\n+class TranslatedExampleLanguageValidator(object):\n+ def __call__(self, attrs):\n+ original_example = attrs.get('original_example')\n+ language = attrs.get('language')\n+ if original_example.repository_update.language == language:\n+ raise ValidationError({'language': _('Can\\'t translate to same language')})\n", "issue": "Is possible translate example to same language\n\n", "before_files": [{"content": "from rest_framework import serializers\n\nfrom django.utils.translation import gettext as _\n\nfrom bothub.common.models import RepositoryTranslatedExampleEntity\nfrom bothub.common.models import RepositoryTranslatedExample\nfrom bothub.common.models import RepositoryExample\n\nfrom ..validators import CanContributeInRepositoryTranslatedExampleValidator\nfrom ..validators import CanContributeInRepositoryExampleValidator\nfrom ..validators import TranslatedExampleEntitiesValidator\n\n\nclass RepositoryTranslatedExampleEntitySeralizer(serializers.ModelSerializer):\n class Meta:\n model = RepositoryTranslatedExampleEntity\n fields = [\n 'id',\n 'repository_translated_example',\n 'start',\n 'end',\n 'entity',\n 'created_at',\n 'value',\n ]\n\n repository_translated_example = serializers.PrimaryKeyRelatedField(\n queryset=RepositoryTranslatedExample.objects,\n validators=[\n CanContributeInRepositoryTranslatedExampleValidator(),\n ],\n help_text='Example translation ID')\n value = serializers.SerializerMethodField()\n\n def get_value(self, obj):\n return obj.value\n\n\nclass RepositoryTranslatedExampleSerializer(serializers.ModelSerializer):\n class Meta:\n model = RepositoryTranslatedExample\n fields = [\n 'id',\n 'original_example',\n 'from_language',\n 'language',\n 'text',\n 'has_valid_entities',\n 'entities',\n 'created_at',\n ]\n\n original_example = serializers.PrimaryKeyRelatedField(\n queryset=RepositoryExample.objects,\n validators=[\n CanContributeInRepositoryExampleValidator(),\n ],\n help_text=_('Example\\'s ID'))\n from_language = serializers.SerializerMethodField()\n has_valid_entities = serializers.SerializerMethodField()\n entities = RepositoryTranslatedExampleEntitySeralizer(\n many=True,\n read_only=True)\n\n def get_from_language(self, obj):\n return obj.original_example.repository_update.language\n\n def get_has_valid_entities(self, obj):\n return obj.has_valid_entities\n\n\nclass 
NewRepositoryTranslatedExampleEntitySeralizer(\n serializers.ModelSerializer):\n class Meta:\n model = RepositoryTranslatedExampleEntity\n fields = [\n 'start',\n 'end',\n 'entity',\n ]\n\n\nclass NewRepositoryTranslatedExampleSerializer(serializers.ModelSerializer):\n class Meta:\n model = RepositoryTranslatedExample\n fields = [\n 'id',\n 'original_example',\n 'language',\n 'text',\n 'has_valid_entities',\n 'entities',\n ]\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.validators.append(TranslatedExampleEntitiesValidator())\n\n original_example = serializers.PrimaryKeyRelatedField(\n queryset=RepositoryExample.objects,\n validators=[\n CanContributeInRepositoryExampleValidator(),\n ],\n help_text=_('Example\\'s ID'))\n has_valid_entities = serializers.SerializerMethodField()\n entities = NewRepositoryTranslatedExampleEntitySeralizer(\n many=True,\n style={'text_field': 'text'})\n\n def get_has_valid_entities(self, obj):\n return obj.has_valid_entities\n\n def create(self, validated_data):\n entities_data = validated_data.pop('entities')\n\n translated = self.Meta.model.objects.create(**validated_data)\n for entity_data in entities_data:\n RepositoryTranslatedExampleEntity.objects.create(\n repository_translated_example=translated,\n **entity_data)\n return translated\n", "path": "bothub/api/serializers/translate.py"}, {"content": "from django.utils.translation import gettext as _\nfrom rest_framework.exceptions import PermissionDenied\nfrom rest_framework.exceptions import ValidationError\n\nfrom bothub.common.models import RepositoryTranslatedExample\n\n\nclass CanContributeInRepositoryValidator(object):\n def __call__(self, value):\n user_authorization = value.get_user_authorization(\n self.request.user)\n if not user_authorization.can_contribute:\n raise PermissionDenied(\n _('You can\\'t contribute in this repository'))\n\n def set_context(self, serializer):\n self.request = serializer.context.get('request')\n\n\nclass CanContributeInRepositoryExampleValidator(object):\n def __call__(self, value):\n repository = value.repository_update.repository\n user_authorization = repository.get_user_authorization(\n self.request.user)\n if not user_authorization.can_contribute:\n raise PermissionDenied(\n _('You can\\'t contribute in this repository'))\n\n def set_context(self, serializer):\n self.request = serializer.context.get('request')\n\n\nclass CanContributeInRepositoryTranslatedExampleValidator(object):\n def __call__(self, value):\n repository = value.original_example.repository_update.repository\n user_authorization = repository.get_user_authorization(\n self.request.user)\n if not user_authorization.can_contribute:\n raise PermissionDenied(\n _('You can\\'t contribute in this repository'))\n\n def set_context(self, serializer):\n self.request = serializer.context.get('request')\n\n\nclass TranslatedExampleEntitiesValidator(object):\n def __call__(self, attrs):\n original_example = attrs.get('original_example')\n entities_valid = RepositoryTranslatedExample.same_entities_validator(\n list(map(lambda x: dict(x), attrs.get('entities'))),\n list(map(lambda x: x.to_dict, original_example.entities.all())))\n if not entities_valid:\n raise ValidationError({'entities': _('Invalid entities')})\n", "path": "bothub/api/validators.py"}]} | 1,996 | 348 |
gh_patches_debug_18321 | rasdani/github-patches | git_diff | crytic__slither-2394 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
filter `name-reused` to only run on Truffle projects
The detector should check which platform was used with https://crytic.github.io/crytic-compile/crytic_compile/crytic_compile.html#CryticCompile.platform and https://crytic.github.io/slither/slither/core/compilation_unit.html#SlitherCompilationUnit.crytic_compile
https://github.com/crytic/slither/blob/13d7d9f66a6be4f798478fa3735fb63444b46c3d/slither/detectors/slither/name_reused.py#L51-L61
https://github.com/crytic/crytic-compile/blob/b5c538aaa66be44b7a68d9723881a7eba2c20898/crytic_compile/platform/truffle.py#L83-L90
</issue>
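A sketch of the early exit the issue asks for is below. It assumes the platform object reachable through the two links above compares against the `Type` enum; exact attribute names can differ between crytic-compile versions, so treat this as illustrative.

```python
from crytic_compile.platform import Type as PlatformType


def is_truffle_project(compilation_unit):
    """True when the underlying crytic-compile platform is Truffle."""
    return compilation_unit.core.crytic_compile.platform == PlatformType.TRUFFLE
```

With such a helper, `NameReused._detect` could return an empty result list up front for every non-Truffle platform and leave the rest of the analysis untouched.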
<code>
[start of slither/detectors/slither/name_reused.py]
1 from collections import defaultdict
2 from typing import List
3
4 from slither.core.compilation_unit import SlitherCompilationUnit
5 from slither.core.declarations import Contract
6 from slither.detectors.abstract_detector import (
7 AbstractDetector,
8 DetectorClassification,
9 DETECTOR_INFO,
10 )
11 from slither.utils.output import Output
12
13
14 def _find_missing_inheritance(compilation_unit: SlitherCompilationUnit) -> List[Contract]:
15 """
16 Filter contracts with missing inheritance to return only the "most base" contracts
17 in the inheritance tree.
18 :param slither:
19 :return:
20 """
21 missings = compilation_unit.contracts_with_missing_inheritance
22
23 ret = []
24 for b in missings:
25 is_most_base = True
26 for inheritance in b.immediate_inheritance:
27 if inheritance in missings:
28 is_most_base = False
29 if is_most_base:
30 ret.append(b)
31
32 return ret
33
34
35 class NameReused(AbstractDetector):
36 ARGUMENT = "name-reused"
37 HELP = "Contract's name reused"
38 IMPACT = DetectorClassification.HIGH
39 CONFIDENCE = DetectorClassification.HIGH
40
41 WIKI = "https://github.com/crytic/slither/wiki/Detector-Documentation#name-reused"
42
43 WIKI_TITLE = "Name reused"
44
45 # region wiki_description
46 WIKI_DESCRIPTION = """If a codebase has two contracts the similar names, the compilation artifacts
47 will not contain one of the contracts with the duplicate name."""
48 # endregion wiki_description
49
50 # region wiki_exploit_scenario
51 WIKI_EXPLOIT_SCENARIO = """
52 Bob's `truffle` codebase has two contracts named `ERC20`.
53 When `truffle compile` runs, only one of the two contracts will generate artifacts in `build/contracts`.
54 As a result, the second contract cannot be analyzed.
55 """
56 # endregion wiki_exploit_scenario
57
58 WIKI_RECOMMENDATION = "Rename the contract."
59
60 # pylint: disable=too-many-locals,too-many-branches
61 def _detect(self) -> List[Output]:
62 results = []
63 compilation_unit = self.compilation_unit
64
65 all_contracts = compilation_unit.contracts
66 all_contracts_name = [c.name for c in all_contracts]
67 contracts_name_reused = {
68 contract for contract in all_contracts_name if all_contracts_name.count(contract) > 1
69 }
70
71 names_reused = {
72 name: compilation_unit.get_contract_from_name(name) for name in contracts_name_reused
73 }
74
75 # First show the contracts that we know are missing
76 incorrectly_constructed = [
77 contract
78 for contract in compilation_unit.contracts
79 if contract.is_incorrectly_constructed
80 ]
81
82 inheritance_corrupted = defaultdict(list)
83 for contract in incorrectly_constructed:
84 for father in contract.inheritance:
85 inheritance_corrupted[father.name].append(contract)
86
87 for contract_name, files in names_reused.items():
88 info: DETECTOR_INFO = [contract_name, " is re-used:\n"]
89 for file in files:
90 if file is None:
91 info += ["\t- In an file not found, most likely in\n"]
92 else:
93 info += ["\t- ", file, "\n"]
94
95 if contract_name in inheritance_corrupted:
96 info += ["\tAs a result, the inherited contracts are not correctly analyzed:\n"]
97 for corrupted in inheritance_corrupted[contract_name]:
98 info += ["\t\t- ", corrupted, "\n"]
99 res = self.generate_result(info)
100 results.append(res)
101
102 # Then show the contracts for which one of the father was not found
103 # Here we are not able to know
104 most_base_with_missing_inheritance = _find_missing_inheritance(compilation_unit)
105
106 for b in most_base_with_missing_inheritance:
107 info = [b, " inherits from a contract for which the name is reused.\n"]
108 if b.inheritance:
109 info += ["\t- Slither could not determine which contract has a duplicate name:\n"]
110 for inheritance in b.inheritance:
111 info += ["\t\t-", inheritance, "\n"]
112 info += ["\t- Check if:\n"]
113 info += ["\t\t- A inherited contract is missing from this list,\n"]
114 info += ["\t\t- The contract are imported from the correct files.\n"]
115 if b.derived_contracts:
116 info += [f"\t- This issue impacts the contracts inheriting from {b.name}:\n"]
117 for derived in b.derived_contracts:
118 info += ["\t\t-", derived, "\n"]
119 res = self.generate_result(info)
120 results.append(res)
121 return results
122
[end of slither/detectors/slither/name_reused.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/slither/detectors/slither/name_reused.py b/slither/detectors/slither/name_reused.py
--- a/slither/detectors/slither/name_reused.py
+++ b/slither/detectors/slither/name_reused.py
@@ -1,6 +1,8 @@
from collections import defaultdict
from typing import List
+from crytic_compile.platform import Type as PlatformType
+
from slither.core.compilation_unit import SlitherCompilationUnit
from slither.core.declarations import Contract
from slither.detectors.abstract_detector import (
@@ -61,6 +63,8 @@
def _detect(self) -> List[Output]:
results = []
compilation_unit = self.compilation_unit
+ if compilation_unit.core.crytic_compile.platform != PlatformType.TRUFFLE:
+ return []
all_contracts = compilation_unit.contracts
all_contracts_name = [c.name for c in all_contracts]
| {"golden_diff": "diff --git a/slither/detectors/slither/name_reused.py b/slither/detectors/slither/name_reused.py\n--- a/slither/detectors/slither/name_reused.py\n+++ b/slither/detectors/slither/name_reused.py\n@@ -1,6 +1,8 @@\n from collections import defaultdict\n from typing import List\n \n+from crytic_compile.platform import Type as PlatformType\n+\n from slither.core.compilation_unit import SlitherCompilationUnit\n from slither.core.declarations import Contract\n from slither.detectors.abstract_detector import (\n@@ -61,6 +63,8 @@\n def _detect(self) -> List[Output]:\n results = []\n compilation_unit = self.compilation_unit\n+ if compilation_unit.core.crytic_compile.platform != PlatformType.TRUFFLE:\n+ return []\n \n all_contracts = compilation_unit.contracts\n all_contracts_name = [c.name for c in all_contracts]\n", "issue": "filter `name-reused` to only run on Truffle projects\nThe detector should check which platform was used with https://crytic.github.io/crytic-compile/crytic_compile/crytic_compile.html#CryticCompile.platform and https://crytic.github.io/slither/slither/core/compilation_unit.html#SlitherCompilationUnit.crytic_compile \r\nhttps://github.com/crytic/slither/blob/13d7d9f66a6be4f798478fa3735fb63444b46c3d/slither/detectors/slither/name_reused.py#L51-L61\r\n\r\nhttps://github.com/crytic/crytic-compile/blob/b5c538aaa66be44b7a68d9723881a7eba2c20898/crytic_compile/platform/truffle.py#L83-L90\n", "before_files": [{"content": "from collections import defaultdict\nfrom typing import List\n\nfrom slither.core.compilation_unit import SlitherCompilationUnit\nfrom slither.core.declarations import Contract\nfrom slither.detectors.abstract_detector import (\n AbstractDetector,\n DetectorClassification,\n DETECTOR_INFO,\n)\nfrom slither.utils.output import Output\n\n\ndef _find_missing_inheritance(compilation_unit: SlitherCompilationUnit) -> List[Contract]:\n \"\"\"\n Filter contracts with missing inheritance to return only the \"most base\" contracts\n in the inheritance tree.\n :param slither:\n :return:\n \"\"\"\n missings = compilation_unit.contracts_with_missing_inheritance\n\n ret = []\n for b in missings:\n is_most_base = True\n for inheritance in b.immediate_inheritance:\n if inheritance in missings:\n is_most_base = False\n if is_most_base:\n ret.append(b)\n\n return ret\n\n\nclass NameReused(AbstractDetector):\n ARGUMENT = \"name-reused\"\n HELP = \"Contract's name reused\"\n IMPACT = DetectorClassification.HIGH\n CONFIDENCE = DetectorClassification.HIGH\n\n WIKI = \"https://github.com/crytic/slither/wiki/Detector-Documentation#name-reused\"\n\n WIKI_TITLE = \"Name reused\"\n\n # region wiki_description\n WIKI_DESCRIPTION = \"\"\"If a codebase has two contracts the similar names, the compilation artifacts\nwill not contain one of the contracts with the duplicate name.\"\"\"\n # endregion wiki_description\n\n # region wiki_exploit_scenario\n WIKI_EXPLOIT_SCENARIO = \"\"\"\nBob's `truffle` codebase has two contracts named `ERC20`.\nWhen `truffle compile` runs, only one of the two contracts will generate artifacts in `build/contracts`.\nAs a result, the second contract cannot be analyzed.\n\"\"\"\n # endregion wiki_exploit_scenario\n\n WIKI_RECOMMENDATION = \"Rename the contract.\"\n\n # pylint: disable=too-many-locals,too-many-branches\n def _detect(self) -> List[Output]:\n results = []\n compilation_unit = self.compilation_unit\n\n all_contracts = compilation_unit.contracts\n all_contracts_name = [c.name for c in all_contracts]\n contracts_name_reused = {\n contract for 
contract in all_contracts_name if all_contracts_name.count(contract) > 1\n }\n\n names_reused = {\n name: compilation_unit.get_contract_from_name(name) for name in contracts_name_reused\n }\n\n # First show the contracts that we know are missing\n incorrectly_constructed = [\n contract\n for contract in compilation_unit.contracts\n if contract.is_incorrectly_constructed\n ]\n\n inheritance_corrupted = defaultdict(list)\n for contract in incorrectly_constructed:\n for father in contract.inheritance:\n inheritance_corrupted[father.name].append(contract)\n\n for contract_name, files in names_reused.items():\n info: DETECTOR_INFO = [contract_name, \" is re-used:\\n\"]\n for file in files:\n if file is None:\n info += [\"\\t- In an file not found, most likely in\\n\"]\n else:\n info += [\"\\t- \", file, \"\\n\"]\n\n if contract_name in inheritance_corrupted:\n info += [\"\\tAs a result, the inherited contracts are not correctly analyzed:\\n\"]\n for corrupted in inheritance_corrupted[contract_name]:\n info += [\"\\t\\t- \", corrupted, \"\\n\"]\n res = self.generate_result(info)\n results.append(res)\n\n # Then show the contracts for which one of the father was not found\n # Here we are not able to know\n most_base_with_missing_inheritance = _find_missing_inheritance(compilation_unit)\n\n for b in most_base_with_missing_inheritance:\n info = [b, \" inherits from a contract for which the name is reused.\\n\"]\n if b.inheritance:\n info += [\"\\t- Slither could not determine which contract has a duplicate name:\\n\"]\n for inheritance in b.inheritance:\n info += [\"\\t\\t-\", inheritance, \"\\n\"]\n info += [\"\\t- Check if:\\n\"]\n info += [\"\\t\\t- A inherited contract is missing from this list,\\n\"]\n info += [\"\\t\\t- The contract are imported from the correct files.\\n\"]\n if b.derived_contracts:\n info += [f\"\\t- This issue impacts the contracts inheriting from {b.name}:\\n\"]\n for derived in b.derived_contracts:\n info += [\"\\t\\t-\", derived, \"\\n\"]\n res = self.generate_result(info)\n results.append(res)\n return results\n", "path": "slither/detectors/slither/name_reused.py"}]} | 2,028 | 206 |
gh_patches_debug_64325 | rasdani/github-patches | git_diff | pex-tool__pex-1725 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Release 2.1.80
On the docket:
+ [x] Support booting via `/bin/sh` with `--sh-boot`. (#1721)
+ [x] Fix more pathologic lock creation slowness. (#1723)
</issue>
<code>
[start of pex/version.py]
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.79"
5
[end of pex/version.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.79"
+__version__ = "2.1.80"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.79\"\n+__version__ = \"2.1.80\"\n", "issue": "Release 2.1.80\nOn the docket:\r\n+ [x] Support booting via `/bin/sh` with `--sh-boot`. (#1721)\r\n+ [x] Fix more pathologic lock creation slowness. (#1723)\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.79\"\n", "path": "pex/version.py"}]} | 641 | 97 |
gh_patches_debug_43869 | rasdani/github-patches | git_diff | aws__aws-cli-3331 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
aws configure get and aws configure set with multiword profile names are inconsistent
It seems that `aws configure set --profile "two words"` will add single quotes around the profile name, but `aws configure get --profile "two words"` will search for a profile name that does not have single quotes around the profile name.
These two methods should behave in a similar manner.
To reproduce:
```
$ aws --version
aws-cli/1.15.10 Python/3.6.5 Darwin/17.4.0 botocore/1.10.10
$ aws configure set aws_access_key_id test --profile "test profile"
$ aws configure get aws_access_key_id --profile "test profile"
The config profile (test profile) could not be found
$ aws configure get aws_access_key_id --profile "'test profile'"
test
```
</issue>
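A sketch of a single helper that both `get` and `set` could use to derive the section name is shown below. It is illustrative rather than the CLI's actual logic, and it assumes the config file quotes multi-word profile names (e.g. `[profile 'test profile']`) while the credentials file uses the bare profile name.

```python
def section_for(profile_name, credentials_file=False):
    """Section header a profile should map to, identical for reads and writes."""
    if credentials_file or profile_name == 'default':
        return profile_name                    # credentials file: [test profile]
    if any(c.isspace() for c in profile_name):
        return "profile '%s'" % profile_name   # config file: [profile 'test profile']
    return 'profile %s' % profile_name         # config file: [profile dev]
```

Routing both commands through one such function is what keeps a multi-word profile written by `set` findable by `get`.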
<code>
[start of awscli/customizations/configure/set.py]
1 # Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License"). You
4 # may not use this file except in compliance with the License. A copy of
5 # the License is located at
6 #
7 # http://aws.amazon.com/apache2.0/
8 #
9 # or in the "license" file accompanying this file. This file is
10 # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 # ANY KIND, either express or implied. See the License for the specific
12 # language governing permissions and limitations under the License.
13 import os
14
15 from awscli.customizations.commands import BasicCommand
16 from awscli.customizations.configure.writer import ConfigFileWriter
17
18 from . import PREDEFINED_SECTION_NAMES, profile_to_section
19
20
21 class ConfigureSetCommand(BasicCommand):
22 NAME = 'set'
23 DESCRIPTION = BasicCommand.FROM_FILE('configure', 'set',
24 '_description.rst')
25 SYNOPSIS = 'aws configure set varname value [--profile profile-name]'
26 EXAMPLES = BasicCommand.FROM_FILE('configure', 'set', '_examples.rst')
27 ARG_TABLE = [
28 {'name': 'varname',
29 'help_text': 'The name of the config value to set.',
30 'action': 'store',
31 'cli_type_name': 'string', 'positional_arg': True},
32 {'name': 'value',
33 'help_text': 'The value to set.',
34 'action': 'store',
35 'no_paramfile': True, # To disable the default paramfile behavior
36 'cli_type_name': 'string', 'positional_arg': True},
37 ]
38 # Any variables specified in this list will be written to
39 # the ~/.aws/credentials file instead of ~/.aws/config.
40 _WRITE_TO_CREDS_FILE = ['aws_access_key_id', 'aws_secret_access_key',
41 'aws_session_token']
42
43 def __init__(self, session, config_writer=None):
44 super(ConfigureSetCommand, self).__init__(session)
45 if config_writer is None:
46 config_writer = ConfigFileWriter()
47 self._config_writer = config_writer
48
49 def _run_main(self, args, parsed_globals):
50 varname = args.varname
51 value = args.value
52 section = 'default'
53 # Before handing things off to the config writer,
54 # we need to find out three things:
55 # 1. What section we're writing to (section).
56 # 2. The name of the config key (varname)
57 # 3. The actual value (value).
58 if '.' not in varname:
59 # unqualified name, scope it to the current
60 # profile (or leave it as the 'default' section if
61 # no profile is set).
62 if self._session.profile is not None:
63 section = profile_to_section(self._session.profile)
64 else:
65 # First figure out if it's been scoped to a profile.
66 parts = varname.split('.')
67 if parts[0] in ('default', 'profile'):
68 # Then we know we're scoped to a profile.
69 if parts[0] == 'default':
70 section = 'default'
71 remaining = parts[1:]
72 else:
73 # [profile, profile_name, ...]
74 section = profile_to_section(parts[1])
75 remaining = parts[2:]
76 varname = remaining[0]
77 if len(remaining) == 2:
78 value = {remaining[1]: value}
79 elif parts[0] not in PREDEFINED_SECTION_NAMES:
80 if self._session.profile is not None:
81 section = profile_to_section(self._session.profile)
82 else:
83 profile_name = self._session.get_config_variable('profile')
84 if profile_name is not None:
85 section = profile_name
86 varname = parts[0]
87 if len(parts) == 2:
88 value = {parts[1]: value}
89 elif len(parts) == 2:
90 # Otherwise it's something like "set preview.service true"
91 # of something in the [plugin] section.
92 section, varname = parts
93 config_filename = os.path.expanduser(
94 self._session.get_config_variable('config_file'))
95 updated_config = {'__section__': section, varname: value}
96 if varname in self._WRITE_TO_CREDS_FILE:
97 config_filename = os.path.expanduser(
98 self._session.get_config_variable('credentials_file'))
99 section_name = updated_config['__section__']
100 if section_name.startswith('profile '):
101 updated_config['__section__'] = section_name[8:]
102 self._config_writer.update_config(updated_config, config_filename)
103
[end of awscli/customizations/configure/set.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/awscli/customizations/configure/set.py b/awscli/customizations/configure/set.py
--- a/awscli/customizations/configure/set.py
+++ b/awscli/customizations/configure/set.py
@@ -46,13 +46,17 @@
config_writer = ConfigFileWriter()
self._config_writer = config_writer
+ def _get_config_file(self, path):
+ config_path = self._session.get_config_variable(path)
+ return os.path.expanduser(config_path)
+
def _run_main(self, args, parsed_globals):
varname = args.varname
value = args.value
- section = 'default'
+ profile = 'default'
# Before handing things off to the config writer,
# we need to find out three things:
- # 1. What section we're writing to (section).
+ # 1. What section we're writing to (profile).
# 2. The name of the config key (varname)
# 3. The actual value (value).
if '.' not in varname:
@@ -60,43 +64,44 @@
# profile (or leave it as the 'default' section if
# no profile is set).
if self._session.profile is not None:
- section = profile_to_section(self._session.profile)
+ profile = self._session.profile
else:
# First figure out if it's been scoped to a profile.
parts = varname.split('.')
if parts[0] in ('default', 'profile'):
# Then we know we're scoped to a profile.
if parts[0] == 'default':
- section = 'default'
+ profile = 'default'
remaining = parts[1:]
else:
# [profile, profile_name, ...]
- section = profile_to_section(parts[1])
+ profile = parts[1]
remaining = parts[2:]
varname = remaining[0]
if len(remaining) == 2:
value = {remaining[1]: value}
elif parts[0] not in PREDEFINED_SECTION_NAMES:
if self._session.profile is not None:
- section = profile_to_section(self._session.profile)
+ profile = self._session.profile
else:
profile_name = self._session.get_config_variable('profile')
if profile_name is not None:
- section = profile_name
+ profile = profile_name
varname = parts[0]
if len(parts) == 2:
value = {parts[1]: value}
elif len(parts) == 2:
# Otherwise it's something like "set preview.service true"
# of something in the [plugin] section.
- section, varname = parts
- config_filename = os.path.expanduser(
- self._session.get_config_variable('config_file'))
- updated_config = {'__section__': section, varname: value}
+ profile, varname = parts
+ config_filename = self._get_config_file('config_file')
if varname in self._WRITE_TO_CREDS_FILE:
- config_filename = os.path.expanduser(
- self._session.get_config_variable('credentials_file'))
- section_name = updated_config['__section__']
- if section_name.startswith('profile '):
- updated_config['__section__'] = section_name[8:]
+ # When writing to the creds file, the section is just the profile
+ section = profile
+ config_filename = self._get_config_file('credentials_file')
+ elif profile in PREDEFINED_SECTION_NAMES or profile == 'default':
+ section = profile
+ else:
+ section = profile_to_section(profile)
+ updated_config = {'__section__': section, varname: value}
self._config_writer.update_config(updated_config, config_filename)
| {"golden_diff": "diff --git a/awscli/customizations/configure/set.py b/awscli/customizations/configure/set.py\n--- a/awscli/customizations/configure/set.py\n+++ b/awscli/customizations/configure/set.py\n@@ -46,13 +46,17 @@\n config_writer = ConfigFileWriter()\n self._config_writer = config_writer\n \n+ def _get_config_file(self, path):\n+ config_path = self._session.get_config_variable(path)\n+ return os.path.expanduser(config_path)\n+\n def _run_main(self, args, parsed_globals):\n varname = args.varname\n value = args.value\n- section = 'default'\n+ profile = 'default'\n # Before handing things off to the config writer,\n # we need to find out three things:\n- # 1. What section we're writing to (section).\n+ # 1. What section we're writing to (profile).\n # 2. The name of the config key (varname)\n # 3. The actual value (value).\n if '.' not in varname:\n@@ -60,43 +64,44 @@\n # profile (or leave it as the 'default' section if\n # no profile is set).\n if self._session.profile is not None:\n- section = profile_to_section(self._session.profile)\n+ profile = self._session.profile\n else:\n # First figure out if it's been scoped to a profile.\n parts = varname.split('.')\n if parts[0] in ('default', 'profile'):\n # Then we know we're scoped to a profile.\n if parts[0] == 'default':\n- section = 'default'\n+ profile = 'default'\n remaining = parts[1:]\n else:\n # [profile, profile_name, ...]\n- section = profile_to_section(parts[1])\n+ profile = parts[1]\n remaining = parts[2:]\n varname = remaining[0]\n if len(remaining) == 2:\n value = {remaining[1]: value}\n elif parts[0] not in PREDEFINED_SECTION_NAMES:\n if self._session.profile is not None:\n- section = profile_to_section(self._session.profile)\n+ profile = self._session.profile\n else:\n profile_name = self._session.get_config_variable('profile')\n if profile_name is not None:\n- section = profile_name\n+ profile = profile_name\n varname = parts[0]\n if len(parts) == 2:\n value = {parts[1]: value}\n elif len(parts) == 2:\n # Otherwise it's something like \"set preview.service true\"\n # of something in the [plugin] section.\n- section, varname = parts\n- config_filename = os.path.expanduser(\n- self._session.get_config_variable('config_file'))\n- updated_config = {'__section__': section, varname: value}\n+ profile, varname = parts\n+ config_filename = self._get_config_file('config_file')\n if varname in self._WRITE_TO_CREDS_FILE:\n- config_filename = os.path.expanduser(\n- self._session.get_config_variable('credentials_file'))\n- section_name = updated_config['__section__']\n- if section_name.startswith('profile '):\n- updated_config['__section__'] = section_name[8:]\n+ # When writing to the creds file, the section is just the profile\n+ section = profile\n+ config_filename = self._get_config_file('credentials_file')\n+ elif profile in PREDEFINED_SECTION_NAMES or profile == 'default':\n+ section = profile\n+ else:\n+ section = profile_to_section(profile)\n+ updated_config = {'__section__': section, varname: value}\n self._config_writer.update_config(updated_config, config_filename)\n", "issue": "aws configure get and aws configure set with multiword profile names are inconsistent\nIt seems that `aws configure set --profile \"two words\"` will add single quotes around the profile name, but `aws configure get --profile \"two words\"` will search for a profile name that does not have single quotes around the profile name.\r\n\r\nThese two methods should behave in a similar manner.\r\n\r\nTo reproduce:\r\n\r\n```\r\n$ aws 
--version\r\naws-cli/1.15.10 Python/3.6.5 Darwin/17.4.0 botocore/1.10.10\r\n$ aws configure set aws_access_key_id test --profile \"test profile\"\r\n$ aws configure get aws_access_key_id --profile \"test profile\"\r\nThe config profile (test profile) could not be found\r\n$ aws configure get aws_access_key_id --profile \"'test profile'\"\r\ntest\r\n```\n", "before_files": [{"content": "# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://aws.amazon.com/apache2.0/\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\nimport os\n\nfrom awscli.customizations.commands import BasicCommand\nfrom awscli.customizations.configure.writer import ConfigFileWriter\n\nfrom . import PREDEFINED_SECTION_NAMES, profile_to_section\n\n\nclass ConfigureSetCommand(BasicCommand):\n NAME = 'set'\n DESCRIPTION = BasicCommand.FROM_FILE('configure', 'set',\n '_description.rst')\n SYNOPSIS = 'aws configure set varname value [--profile profile-name]'\n EXAMPLES = BasicCommand.FROM_FILE('configure', 'set', '_examples.rst')\n ARG_TABLE = [\n {'name': 'varname',\n 'help_text': 'The name of the config value to set.',\n 'action': 'store',\n 'cli_type_name': 'string', 'positional_arg': True},\n {'name': 'value',\n 'help_text': 'The value to set.',\n 'action': 'store',\n 'no_paramfile': True, # To disable the default paramfile behavior\n 'cli_type_name': 'string', 'positional_arg': True},\n ]\n # Any variables specified in this list will be written to\n # the ~/.aws/credentials file instead of ~/.aws/config.\n _WRITE_TO_CREDS_FILE = ['aws_access_key_id', 'aws_secret_access_key',\n 'aws_session_token']\n\n def __init__(self, session, config_writer=None):\n super(ConfigureSetCommand, self).__init__(session)\n if config_writer is None:\n config_writer = ConfigFileWriter()\n self._config_writer = config_writer\n\n def _run_main(self, args, parsed_globals):\n varname = args.varname\n value = args.value\n section = 'default'\n # Before handing things off to the config writer,\n # we need to find out three things:\n # 1. What section we're writing to (section).\n # 2. The name of the config key (varname)\n # 3. The actual value (value).\n if '.' 
not in varname:\n # unqualified name, scope it to the current\n # profile (or leave it as the 'default' section if\n # no profile is set).\n if self._session.profile is not None:\n section = profile_to_section(self._session.profile)\n else:\n # First figure out if it's been scoped to a profile.\n parts = varname.split('.')\n if parts[0] in ('default', 'profile'):\n # Then we know we're scoped to a profile.\n if parts[0] == 'default':\n section = 'default'\n remaining = parts[1:]\n else:\n # [profile, profile_name, ...]\n section = profile_to_section(parts[1])\n remaining = parts[2:]\n varname = remaining[0]\n if len(remaining) == 2:\n value = {remaining[1]: value}\n elif parts[0] not in PREDEFINED_SECTION_NAMES:\n if self._session.profile is not None:\n section = profile_to_section(self._session.profile)\n else:\n profile_name = self._session.get_config_variable('profile')\n if profile_name is not None:\n section = profile_name\n varname = parts[0]\n if len(parts) == 2:\n value = {parts[1]: value}\n elif len(parts) == 2:\n # Otherwise it's something like \"set preview.service true\"\n # of something in the [plugin] section.\n section, varname = parts\n config_filename = os.path.expanduser(\n self._session.get_config_variable('config_file'))\n updated_config = {'__section__': section, varname: value}\n if varname in self._WRITE_TO_CREDS_FILE:\n config_filename = os.path.expanduser(\n self._session.get_config_variable('credentials_file'))\n section_name = updated_config['__section__']\n if section_name.startswith('profile '):\n updated_config['__section__'] = section_name[8:]\n self._config_writer.update_config(updated_config, config_filename)\n", "path": "awscli/customizations/configure/set.py"}]} | 1,921 | 834 |
gh_patches_debug_6234 | rasdani/github-patches | git_diff | liqd__a4-meinberlin-3564 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
moderators for b-plans
If an initiator starts a b-plan via meinBerlin (as e.g. SenWohn does, since they don't have Imperia) or an external project, he/she is automatically added as a moderator and gets emails like the one below. This is confusing because:
a) you don't see moderators in dashboard
b) you can't follow a b-plan/external project
c) the link does not go to the external page (in this case it goes here: https://mein.berlin.de/projects/bebauungsplan-8-66-buckower-felder/)
Should we take out this rule for these two templates or stop sending mails?
<img width="698" alt="bildschirmfoto 2019-02-04 um 13 32 08" src="https://user-images.githubusercontent.com/35491681/52208589-762c0780-2881-11e9-9781-21826347abe4.png">
</issue>
<code>
[start of meinberlin/apps/notifications/signals.py]
1 from django.contrib.auth import get_user_model
2 from django.db.models import signals
3 from django.dispatch import receiver
4
5 from adhocracy4.actions.models import Action
6 from adhocracy4.actions.verbs import Verbs
7 from adhocracy4.dashboard import signals as dashboard_signals
8 from adhocracy4.follows.models import Follow
9 from adhocracy4.projects.models import Project
10
11 from . import emails
12
13 User = get_user_model()
14
15
16 @receiver(signals.post_save, sender=Action)
17 def send_notifications(instance, created, **kwargs):
18 action = instance
19 verb = Verbs(action.verb)
20
21 if action.type in ('item', 'comment') \
22 and verb in (Verbs.CREATE, Verbs.ADD):
23 emails.NotifyCreatorEmail.send(action)
24
25 if action.project:
26 emails.NotifyModeratorsEmail.send(action)
27
28 elif action.type == 'phase':
29 if verb == Verbs.START:
30 emails.NotifyFollowersOnPhaseStartedEmail.send(action)
31 elif verb == Verbs.SCHEDULE:
32 emails.NotifyFollowersOnPhaseIsOverSoonEmail.send(action)
33
34 elif action.type == 'offlineevent' and verb == Verbs.START:
35 emails.NotifyFollowersOnUpcommingEventEmail.send(action)
36
37
38 @receiver(dashboard_signals.project_created)
39 def send_project_created_notifications(**kwargs):
40 project = kwargs.get('project')
41 creator = kwargs.get('user')
42 emails.NotifyInitiatorsOnProjectCreatedEmail.send(
43 project, creator_pk=creator.pk)
44
45
46 @receiver(signals.m2m_changed, sender=Project.moderators.through)
47 def autofollow_project_moderators(instance, action, pk_set, reverse, **kwargs):
48 if action == 'post_add':
49 autofollow_project(instance, pk_set, reverse)
50
51
52 def autofollow_project(instance, pk_set, reverse):
53 if not reverse:
54 project = instance
55 users_pks = pk_set
56
57 for user_pk in users_pks:
58 Follow.objects.update_or_create(
59 project=project,
60 creator_id=user_pk,
61 defaults={
62 'enabled': True
63 }
64 )
65 else:
66 user = instance
67 project_pks = pk_set
68
69 for project_pk in project_pks:
70 Follow.objects.update_or_create(
71 project_id=project_pk,
72 creator=user,
73 defaults={
74 'enabled': True
75 }
76 )
77
[end of meinberlin/apps/notifications/signals.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/meinberlin/apps/notifications/signals.py b/meinberlin/apps/notifications/signals.py
--- a/meinberlin/apps/notifications/signals.py
+++ b/meinberlin/apps/notifications/signals.py
@@ -25,7 +25,8 @@
if action.project:
emails.NotifyModeratorsEmail.send(action)
- elif action.type == 'phase':
+ elif (action.type == 'phase' and
+ action.project.project_type == 'a4projects.Project'):
if verb == Verbs.START:
emails.NotifyFollowersOnPhaseStartedEmail.send(action)
elif verb == Verbs.SCHEDULE:
| {"golden_diff": "diff --git a/meinberlin/apps/notifications/signals.py b/meinberlin/apps/notifications/signals.py\n--- a/meinberlin/apps/notifications/signals.py\n+++ b/meinberlin/apps/notifications/signals.py\n@@ -25,7 +25,8 @@\n if action.project:\n emails.NotifyModeratorsEmail.send(action)\n \n- elif action.type == 'phase':\n+ elif (action.type == 'phase' and\n+ action.project.project_type == 'a4projects.Project'):\n if verb == Verbs.START:\n emails.NotifyFollowersOnPhaseStartedEmail.send(action)\n elif verb == Verbs.SCHEDULE:\n", "issue": "moderators for b-plans\nif an initiator starts a b-plan via meinBerlin (as e.g. SenWohn does, they don't have imperia) or an external project he/she is automatically added as moderator and gets mails as the one below. This is confusing because:\r\na) you don't see moderators in dashboard\r\nb) you can't follow a b-plan/external project\r\nc) the link does not go to the external page (in this case it goes here: https://mein.berlin.de/projects/bebauungsplan-8-66-buckower-felder/)\r\n\r\nShould we take out this rule for these two templates or stop sending mails?\r\n\r\n\r\n<img width=\"698\" alt=\"bildschirmfoto 2019-02-04 um 13 32 08\" src=\"https://user-images.githubusercontent.com/35491681/52208589-762c0780-2881-11e9-9781-21826347abe4.png\">\r\n\n", "before_files": [{"content": "from django.contrib.auth import get_user_model\nfrom django.db.models import signals\nfrom django.dispatch import receiver\n\nfrom adhocracy4.actions.models import Action\nfrom adhocracy4.actions.verbs import Verbs\nfrom adhocracy4.dashboard import signals as dashboard_signals\nfrom adhocracy4.follows.models import Follow\nfrom adhocracy4.projects.models import Project\n\nfrom . import emails\n\nUser = get_user_model()\n\n\n@receiver(signals.post_save, sender=Action)\ndef send_notifications(instance, created, **kwargs):\n action = instance\n verb = Verbs(action.verb)\n\n if action.type in ('item', 'comment') \\\n and verb in (Verbs.CREATE, Verbs.ADD):\n emails.NotifyCreatorEmail.send(action)\n\n if action.project:\n emails.NotifyModeratorsEmail.send(action)\n\n elif action.type == 'phase':\n if verb == Verbs.START:\n emails.NotifyFollowersOnPhaseStartedEmail.send(action)\n elif verb == Verbs.SCHEDULE:\n emails.NotifyFollowersOnPhaseIsOverSoonEmail.send(action)\n\n elif action.type == 'offlineevent' and verb == Verbs.START:\n emails.NotifyFollowersOnUpcommingEventEmail.send(action)\n\n\n@receiver(dashboard_signals.project_created)\ndef send_project_created_notifications(**kwargs):\n project = kwargs.get('project')\n creator = kwargs.get('user')\n emails.NotifyInitiatorsOnProjectCreatedEmail.send(\n project, creator_pk=creator.pk)\n\n\n@receiver(signals.m2m_changed, sender=Project.moderators.through)\ndef autofollow_project_moderators(instance, action, pk_set, reverse, **kwargs):\n if action == 'post_add':\n autofollow_project(instance, pk_set, reverse)\n\n\ndef autofollow_project(instance, pk_set, reverse):\n if not reverse:\n project = instance\n users_pks = pk_set\n\n for user_pk in users_pks:\n Follow.objects.update_or_create(\n project=project,\n creator_id=user_pk,\n defaults={\n 'enabled': True\n }\n )\n else:\n user = instance\n project_pks = pk_set\n\n for project_pk in project_pks:\n Follow.objects.update_or_create(\n project_id=project_pk,\n creator=user,\n defaults={\n 'enabled': True\n }\n )\n", "path": "meinberlin/apps/notifications/signals.py"}]} | 1,432 | 143 |
gh_patches_debug_13285 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-341 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
La Veneciana (Argentina)
Ice cream shop.
HTML webpage to scrape: http://www.laveneciana.com.ar/sucursales.html
</issue>
<code>
[start of locations/spiders/laveneciana.py]
1 import scrapy
2 import re
3 from locations.items import GeojsonPointItem
4 class LavenecianaSpider(scrapy.Spider):
5 name = "laveneciana"
6 allowed_domains = ["www.laveneciana.com.ar"]
7 download_delay = 0.5
8 start_urls = (
9 'http://www.laveneciana.com.ar/sucursales.html',
10 )
11 def parse(self, response):
12 stores = response.xpath('//div[@class="navigation-container"]/div[@id="thumbs"]/ul[@class="thumbs noscript"]/li')
13 for store in stores:
14 addr_full_tel = store.xpath('normalize-space(./div[@class="caption"]/div[@class="image-desc"]/text())').extract_first()
15 location = store.xpath('normalize-space(./div[@class="caption"]/div[@class="ubicacion"]/iframe/@src)').extract_first()
16 position = re.findall(r"ll=[0-9-.,]+" ,location)
17 id = re.findall(r"cid=[0-9]+" ,location)
18 if(len(position)>0):
19 lat =float( position[0][3:].split(',')[0])
20 lon = float(position[0][3:].split(',')[1])
21 id = id[0][4:]
22 else:
23 lat=''
24 lon=''
25 id=''
26 addr_full = re.findall(r"^[^()]{4}[^(.)]+" , addr_full_tel)[0]
27 phone_number = re.findall(r"[0-9]{4}-[0-9]{4}",addr_full_tel)
28 if(len(phone_number)>0):
29 phone_number = phone_number[0]
30 else:
31 phone_number =''
32 if(addr_full!="Direccion"):
33 properties = {
34 'addr_full': addr_full,
35 'phone':phone_number,
36 'city': '',
37 'state': '',
38 'postcode':'',
39 'ref': id,
40 'website': response.url,
41 'lat': lat,
42 'lon': lon,
43 }
44 yield GeojsonPointItem(**properties)
45
[end of locations/spiders/laveneciana.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/locations/spiders/laveneciana.py b/locations/spiders/laveneciana.py
--- a/locations/spiders/laveneciana.py
+++ b/locations/spiders/laveneciana.py
@@ -23,13 +23,9 @@
lat=''
lon=''
id=''
- addr_full = re.findall(r"^[^()]{4}[^(.)]+" , addr_full_tel)[0]
- phone_number = re.findall(r"[0-9]{4}-[0-9]{4}",addr_full_tel)
- if(len(phone_number)>0):
- phone_number = phone_number[0]
- else:
- phone_number =''
- if(addr_full!="Direccion"):
+ addr_full = addr_full_tel.split('Tel.: ')[0]
+ phone_number = addr_full_tel.split('Tel.: ')[1]
+ if(addr_full!="Direccion... "):
properties = {
'addr_full': addr_full,
'phone':phone_number,
| {"golden_diff": "diff --git a/locations/spiders/laveneciana.py b/locations/spiders/laveneciana.py\n--- a/locations/spiders/laveneciana.py\n+++ b/locations/spiders/laveneciana.py\n@@ -23,13 +23,9 @@\n lat=''\n lon=''\n id=''\n- addr_full = re.findall(r\"^[^()]{4}[^(.)]+\" , addr_full_tel)[0]\n- phone_number = re.findall(r\"[0-9]{4}-[0-9]{4}\",addr_full_tel)\n- if(len(phone_number)>0):\n- phone_number = phone_number[0]\n- else:\n- phone_number =''\n- if(addr_full!=\"Direccion\"):\n+ addr_full = addr_full_tel.split('Tel.: ')[0]\n+ phone_number = addr_full_tel.split('Tel.: ')[1]\n+ if(addr_full!=\"Direccion... \"):\n properties = {\n 'addr_full': addr_full,\n 'phone':phone_number,\n", "issue": "La Veneciana (Argentina)\nIce cream shop.\r\n\r\nHTML webpage to scrape: http://www.laveneciana.com.ar/sucursales.html\n", "before_files": [{"content": "import scrapy\nimport re\nfrom locations.items import GeojsonPointItem\nclass LavenecianaSpider(scrapy.Spider):\n name = \"laveneciana\"\n allowed_domains = [\"www.laveneciana.com.ar\"]\n download_delay = 0.5\n start_urls = (\n 'http://www.laveneciana.com.ar/sucursales.html',\n )\n def parse(self, response):\n stores = response.xpath('//div[@class=\"navigation-container\"]/div[@id=\"thumbs\"]/ul[@class=\"thumbs noscript\"]/li')\n for store in stores:\n addr_full_tel = store.xpath('normalize-space(./div[@class=\"caption\"]/div[@class=\"image-desc\"]/text())').extract_first()\n location = store.xpath('normalize-space(./div[@class=\"caption\"]/div[@class=\"ubicacion\"]/iframe/@src)').extract_first()\n position = re.findall(r\"ll=[0-9-.,]+\" ,location)\n id = re.findall(r\"cid=[0-9]+\" ,location)\n if(len(position)>0):\n lat =float( position[0][3:].split(',')[0])\n lon = float(position[0][3:].split(',')[1])\n id = id[0][4:]\n else:\n lat=''\n lon=''\n id=''\n addr_full = re.findall(r\"^[^()]{4}[^(.)]+\" , addr_full_tel)[0]\n phone_number = re.findall(r\"[0-9]{4}-[0-9]{4}\",addr_full_tel)\n if(len(phone_number)>0):\n phone_number = phone_number[0]\n else:\n phone_number =''\n if(addr_full!=\"Direccion\"):\n properties = {\n 'addr_full': addr_full,\n 'phone':phone_number,\n 'city': '',\n 'state': '',\n 'postcode':'',\n 'ref': id,\n 'website': response.url,\n 'lat': lat,\n 'lon': lon,\n }\n yield GeojsonPointItem(**properties)\n", "path": "locations/spiders/laveneciana.py"}]} | 1,081 | 220 |
gh_patches_debug_60373 | rasdani/github-patches | git_diff | UTNkar__moore-151 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Paragraph block alignment
<!-- Do you want to ask a question? Are you looking for support? The system administrator can help you: [email protected] -->
See image:

[Description of the issue]
### Steps to Reproduce
1. [First Step]
2. [Second Step]
3. [and so on...]
<!-- Please select the appropriate "topic category"/blue and "issue type"/yellow label -->
</issue>
<code>
[start of website/blocks/models.py]
1 from wagtail.wagtailcore import blocks
2 from wagtail.wagtailimages.blocks import ImageChooserBlock
3
4 from django.utils.translation import ugettext_lazy as _
5
6
7 class CountersBlock(blocks.StructBlock):
8 title = blocks.CharBlock()
9 counters = blocks.ListBlock(blocks.StructBlock([
10 ('icon', blocks.CharBlock(
11 help_text=_('Material icon font icon text, as found on: '
12 'https://material.io/icons'),
13 )),
14 ('value', blocks.CharBlock()),
15 ('description', blocks.CharBlock(required=False))
16 ]))
17 style = blocks.ChoiceBlock(choices=[
18 ('light', _('Light')),
19 ('dark', _('Dark')),
20 ])
21
22 class Meta:
23 label = _('Counters')
24 icon = 'fa-balance-scale'
25 template = 'blocks/counter.html'
26
27
28 class HeadingBlock(blocks.StructBlock):
29 title = blocks.CharBlock(required=True)
30 subtitle = blocks.CharBlock(required=False)
31
32 class Meta:
33 label = _('Heading')
34 icon = 'fa-header'
35 template = 'blocks/title.html'
36
37
38 class ImageDescriptionBlock(blocks.StructBlock):
39 description = blocks.RichTextBlock()
40 image = ImageChooserBlock()
41 image_alignment = blocks.ChoiceBlock(choices=[
42 ('left', _('Left')),
43 ('right', _('Right')),
44 ])
45 hide_on_med = blocks.BooleanBlock(required=False)
46
47 class Meta:
48 label = _('Image + Description')
49 icon = 'fa-file-image-o '
50 template = 'blocks/image_description.html'
51
52
53 class ImageIconsBlock(blocks.StructBlock):
54 title = blocks.CharBlock()
55 image = ImageChooserBlock()
56 image_alignment = blocks.ChoiceBlock(choices=[
57 ('left', _('Left')),
58 ('right', _('Right')),
59 ])
60 icons = blocks.ListBlock(blocks.StructBlock([
61 ('icon', blocks.CharBlock(
62 help_text=_('Material icon font icon text, as found on: '
63 'https://material.io/icons'),
64 )),
65 ('title', blocks.CharBlock()),
66 ('description', blocks.CharBlock())
67 ]))
68 hide_on_med = blocks.BooleanBlock(required=False)
69
70 class Meta:
71 label = _('Image + Icons')
72 icon = 'fa-file-excel-o'
73 template = 'blocks/image_icons.html'
74
75
76 class OverlayBlock(blocks.StructBlock):
77 image = ImageChooserBlock()
78 title = blocks.CharBlock(required=False)
79 description = blocks.CharBlock(required=False)
80
81 link = blocks.URLBlock(required=False)
82 button = blocks.CharBlock(required=False)
83
84 class Meta:
85 label = _('Image overlay')
86 icon = 'fa-clone'
87 template = 'blocks/overlay.html'
88
89
90 WAGTAIL_STATIC_BLOCKTYPES = [
91 ('heading', HeadingBlock()),
92 ('paragraph', blocks.RichTextBlock()),
93 ('image_description', ImageIconsBlock()),
94 ('image_icons', ImageDescriptionBlock()),
95 ('overlay', OverlayBlock()),
96 ('logos', blocks.ListBlock(
97 ImageChooserBlock(),
98 icon='fa-pied-piper',
99 template='blocks/logos.html',
100 label=_('Logos'),
101 )),
102 ('counters', CountersBlock()),
103 ('image', ImageChooserBlock(template='blocks/image.html')),
104 ]
105
[end of website/blocks/models.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/website/blocks/models.py b/website/blocks/models.py
--- a/website/blocks/models.py
+++ b/website/blocks/models.py
@@ -89,7 +89,7 @@
WAGTAIL_STATIC_BLOCKTYPES = [
('heading', HeadingBlock()),
- ('paragraph', blocks.RichTextBlock()),
+ ('paragraph', blocks.RichTextBlock(template='blocks/paragraph.html')),
('image_description', ImageIconsBlock()),
('image_icons', ImageDescriptionBlock()),
('overlay', OverlayBlock()),
| {"golden_diff": "diff --git a/website/blocks/models.py b/website/blocks/models.py\n--- a/website/blocks/models.py\n+++ b/website/blocks/models.py\n@@ -89,7 +89,7 @@\n \n WAGTAIL_STATIC_BLOCKTYPES = [\n ('heading', HeadingBlock()),\n- ('paragraph', blocks.RichTextBlock()),\n+ ('paragraph', blocks.RichTextBlock(template='blocks/paragraph.html')),\n ('image_description', ImageIconsBlock()),\n ('image_icons', ImageDescriptionBlock()),\n ('overlay', OverlayBlock()),\n", "issue": "Paragraph block alignment\n<!-- Do you want to ask a question? Are you looking for support? The system administrator can help you: [email protected] -->\r\n\r\nSee image:\r\n\r\n\r\n\r\n[Description of the issue]\r\n\r\n### Steps to Reproduce\r\n\r\n1. [First Step]\r\n2. [Second Step]\r\n3. [and so on...]\r\n\r\n<!-- Please select the appropriate \"topic category\"/blue and \"issue type\"/yellow label -->\r\n\n", "before_files": [{"content": "from wagtail.wagtailcore import blocks\nfrom wagtail.wagtailimages.blocks import ImageChooserBlock\n\nfrom django.utils.translation import ugettext_lazy as _\n\n\nclass CountersBlock(blocks.StructBlock):\n title = blocks.CharBlock()\n counters = blocks.ListBlock(blocks.StructBlock([\n ('icon', blocks.CharBlock(\n help_text=_('Material icon font icon text, as found on: '\n 'https://material.io/icons'),\n )),\n ('value', blocks.CharBlock()),\n ('description', blocks.CharBlock(required=False))\n ]))\n style = blocks.ChoiceBlock(choices=[\n ('light', _('Light')),\n ('dark', _('Dark')),\n ])\n\n class Meta:\n label = _('Counters')\n icon = 'fa-balance-scale'\n template = 'blocks/counter.html'\n\n\nclass HeadingBlock(blocks.StructBlock):\n title = blocks.CharBlock(required=True)\n subtitle = blocks.CharBlock(required=False)\n\n class Meta:\n label = _('Heading')\n icon = 'fa-header'\n template = 'blocks/title.html'\n\n\nclass ImageDescriptionBlock(blocks.StructBlock):\n description = blocks.RichTextBlock()\n image = ImageChooserBlock()\n image_alignment = blocks.ChoiceBlock(choices=[\n ('left', _('Left')),\n ('right', _('Right')),\n ])\n hide_on_med = blocks.BooleanBlock(required=False)\n\n class Meta:\n label = _('Image + Description')\n icon = 'fa-file-image-o '\n template = 'blocks/image_description.html'\n\n\nclass ImageIconsBlock(blocks.StructBlock):\n title = blocks.CharBlock()\n image = ImageChooserBlock()\n image_alignment = blocks.ChoiceBlock(choices=[\n ('left', _('Left')),\n ('right', _('Right')),\n ])\n icons = blocks.ListBlock(blocks.StructBlock([\n ('icon', blocks.CharBlock(\n help_text=_('Material icon font icon text, as found on: '\n 'https://material.io/icons'),\n )),\n ('title', blocks.CharBlock()),\n ('description', blocks.CharBlock())\n ]))\n hide_on_med = blocks.BooleanBlock(required=False)\n\n class Meta:\n label = _('Image + Icons')\n icon = 'fa-file-excel-o'\n template = 'blocks/image_icons.html'\n\n\nclass OverlayBlock(blocks.StructBlock):\n image = ImageChooserBlock()\n title = blocks.CharBlock(required=False)\n description = blocks.CharBlock(required=False)\n\n link = blocks.URLBlock(required=False)\n button = blocks.CharBlock(required=False)\n\n class Meta:\n label = _('Image overlay')\n icon = 'fa-clone'\n template = 'blocks/overlay.html'\n\n\nWAGTAIL_STATIC_BLOCKTYPES = [\n ('heading', HeadingBlock()),\n ('paragraph', blocks.RichTextBlock()),\n ('image_description', ImageIconsBlock()),\n ('image_icons', ImageDescriptionBlock()),\n ('overlay', OverlayBlock()),\n ('logos', blocks.ListBlock(\n ImageChooserBlock(),\n icon='fa-pied-piper',\n 
template='blocks/logos.html',\n label=_('Logos'),\n )),\n ('counters', CountersBlock()),\n ('image', ImageChooserBlock(template='blocks/image.html')),\n]\n", "path": "website/blocks/models.py"}]} | 1,556 | 118 |
gh_patches_debug_16424 | rasdani/github-patches | git_diff | pyinstaller__pyinstaller-3520 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
libeay32.dll and ssleay32.dll needs to be manually bundled to use PyQt5.QNetwork with SSL
If you are having errors like:
```
qt.network.ssl: QSslSocket: cannot call unresolved function SSLv23_client_method
qt.network.ssl: QSslSocket: cannot call unresolved function SSL_CTX_new
qt.network.ssl: QSslSocket: cannot call unresolved function SSL_library_init
qt.network.ssl: QSslSocket: cannot call unresolved function ERR_get_error
qt.network.ssl: QSslSocket: cannot call unresolved function ERR_get_error
```
with PyInstaller and PyQt5 on Windows, you need to manually add libeay32.dll and ssleay32.dll from your PyQt5 site-packages (probably located somewhere in `PyQt5\Qt\bin\`) to your output dir, or next to your frozen binary under a similar relative path.
In my final specfile, it looks like this:
```python
# -*- mode: python -*-
block_cipher = None
a = Analysis(['cddagl\\launcher.py'],
pathex=['C:\\Program Files (x86)\\Windows Kits\\10\\Redist\\ucrt\\DLLs\\x86\\', 'C:\\Users\\remy\\Projects\\CDDA-Game-Launcher'],
binaries=[('C:\\Users\\remy\\VirtualEnvs\\CDDA-Game-Launcher\\lib\\site-packages\\PyQt5\\Qt\\bin\\libeay32.dll', 'PyQt5\\Qt\\bin'), ('C:\\Users\\remy\\VirtualEnvs\\CDDA-Game-Launcher\\lib\\site-packages\\PyQt5\\Qt\\bin\\ssleay32.dll', 'PyQt5\\Qt\\bin')],
datas=[('alembic', 'alembic'), ('bin/updated.bat', '.'), ('data', 'data'), ('cddagl/resources', 'cddagl/resources'), ('C:\\Users\\remy\\AppData\\Local\\Programs\\Python\\Python36-32\\unrar.exe', '.'), ('cddagl/locale/en/LC_MESSAGES/cddagl.mo', 'cddagl/locale/en/LC_MESSAGES'), ('cddagl/locale/fr/LC_MESSAGES/cddagl.mo', 'cddagl/locale/fr/LC_MESSAGES'), ('cddagl/locale/it/LC_MESSAGES/cddagl.mo', 'cddagl/locale/it/LC_MESSAGES'), ('cddagl/locale/ru/LC_MESSAGES/cddagl.mo', 'cddagl/locale/ru/LC_MESSAGES')],
hiddenimports=['lxml.cssselect', 'babel.numbers'],
hookspath=[],
runtime_hooks=[],
excludes=[],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher)
pyz = PYZ(a.pure, a.zipped_data,
cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
name='launcher',
debug=True,
strip=False,
upx=False,
runtime_tmpdir=None,
console=True , icon='cddagl\\resources\\launcher.ico')
```
I'm not sure how this can be improved, but this solution should work if you are having a similar issue. This was tested with **PyInstaller==3.4.dev0+1033a8770**
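
Distilled from the spec above, the workaround amounts to two extra `binaries` entries. The snippet below is only a sketch; the site-packages path is a placeholder for your own environment, not a value taken from the original report:

```python
import os.path

# Placeholder path: point this at the PyQt5 package inside your virtualenv's site-packages.
qt_bin = os.path.join('C:\\path\\to\\site-packages', 'PyQt5', 'Qt', 'bin')

# (source, destination dir) pairs to pass via Analysis(..., binaries=ssl_binaries, ...) in the spec.
ssl_binaries = [
    (os.path.join(qt_bin, 'libeay32.dll'), os.path.join('PyQt5', 'Qt', 'bin')),
    (os.path.join(qt_bin, 'ssleay32.dll'), os.path.join('PyQt5', 'Qt', 'bin')),
]
```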
</issue>
<code>
[start of PyInstaller/hooks/hook-PyQt5.QtNetwork.py]
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2013-2018, PyInstaller Development Team.
3 #
4 # Distributed under the terms of the GNU General Public License with exception
5 # for distributing bootloader.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #-----------------------------------------------------------------------------
9 from PyInstaller.utils.hooks import add_qt5_dependencies
10
11 hiddenimports, binaries, datas = add_qt5_dependencies(__file__)
12
[end of PyInstaller/hooks/hook-PyQt5.QtNetwork.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/PyInstaller/hooks/hook-PyQt5.QtNetwork.py b/PyInstaller/hooks/hook-PyQt5.QtNetwork.py
--- a/PyInstaller/hooks/hook-PyQt5.QtNetwork.py
+++ b/PyInstaller/hooks/hook-PyQt5.QtNetwork.py
@@ -6,6 +6,21 @@
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
-from PyInstaller.utils.hooks import add_qt5_dependencies
+import os.path
+
+from PyInstaller.utils.hooks import pyqt5_library_info, add_qt5_dependencies
+from PyInstaller.compat import is_win
hiddenimports, binaries, datas = add_qt5_dependencies(__file__)
+
+# Add libraries needed for SSL. See issue #3520.
+if is_win:
+ rel_data_path = ['PyQt5', 'Qt', 'bin']
+ binaries += [
+ (os.path.join(pyqt5_library_info.location['BinariesPath'],
+ 'libeay32.dll'),
+ os.path.join(*rel_data_path)),
+ (os.path.join(pyqt5_library_info.location['BinariesPath'],
+ 'ssleay32.dll'),
+ os.path.join(*rel_data_path))
+ ]
| {"golden_diff": "diff --git a/PyInstaller/hooks/hook-PyQt5.QtNetwork.py b/PyInstaller/hooks/hook-PyQt5.QtNetwork.py\n--- a/PyInstaller/hooks/hook-PyQt5.QtNetwork.py\n+++ b/PyInstaller/hooks/hook-PyQt5.QtNetwork.py\n@@ -6,6 +6,21 @@\n #\n # The full license is in the file COPYING.txt, distributed with this software.\n #-----------------------------------------------------------------------------\n-from PyInstaller.utils.hooks import add_qt5_dependencies\n+import os.path\n+\n+from PyInstaller.utils.hooks import pyqt5_library_info, add_qt5_dependencies\n+from PyInstaller.compat import is_win\n \n hiddenimports, binaries, datas = add_qt5_dependencies(__file__)\n+\n+# Add libraries needed for SSL. See issue #3520.\n+if is_win:\n+ rel_data_path = ['PyQt5', 'Qt', 'bin']\n+ binaries += [\n+ (os.path.join(pyqt5_library_info.location['BinariesPath'],\n+ 'libeay32.dll'),\n+ os.path.join(*rel_data_path)),\n+ (os.path.join(pyqt5_library_info.location['BinariesPath'],\n+ 'ssleay32.dll'),\n+ os.path.join(*rel_data_path))\n+ ]\n", "issue": "libeay32.dll and ssleay32.dll needs to be manually bundled to use PyQt5.QNetwork with SSL\nIf you are having errors like:\r\n\r\n```\r\nqt.network.ssl: QSslSocket: cannot call unresolved function SSLv23_client_method\r\nqt.network.ssl: QSslSocket: cannot call unresolved function SSL_CTX_new\r\nqt.network.ssl: QSslSocket: cannot call unresolved function SSL_library_init\r\nqt.network.ssl: QSslSocket: cannot call unresolved function ERR_get_error\r\nqt.network.ssl: QSslSocket: cannot call unresolved function ERR_get_error\r\n```\r\n\r\nwith PyInstaller and PyQt5 on Windows, you need to manually add libeay32.dll and ssleay32.dll from your PyQt5 site-packages (probably located somewhere in `PyQt5\\Qt\\bin\\`) to your output dir or your frozen binary in a similar path.\r\n\r\nIn my final specfile, it looks like this:\r\n\r\n```python\r\n# -*- mode: python -*-\r\n\r\nblock_cipher = None\r\n\r\n\r\na = Analysis(['cddagl\\\\launcher.py'],\r\n pathex=['C:\\\\Program Files (x86)\\\\Windows Kits\\\\10\\\\Redist\\\\ucrt\\\\DLLs\\\\x86\\\\', 'C:\\\\Users\\\\remy\\\\Projects\\\\CDDA-Game-Launcher'],\r\n binaries=[('C:\\\\Users\\\\remy\\\\VirtualEnvs\\\\CDDA-Game-Launcher\\\\lib\\\\site-packages\\\\PyQt5\\\\Qt\\\\bin\\\\libeay32.dll', 'PyQt5\\\\Qt\\\\bin'), ('C:\\\\Users\\\\remy\\\\VirtualEnvs\\\\CDDA-Game-Launcher\\\\lib\\\\site-packages\\\\PyQt5\\\\Qt\\\\bin\\\\ssleay32.dll', 'PyQt5\\\\Qt\\\\bin')],\r\n datas=[('alembic', 'alembic'), ('bin/updated.bat', '.'), ('data', 'data'), ('cddagl/resources', 'cddagl/resources'), ('C:\\\\Users\\\\remy\\\\AppData\\\\Local\\\\Programs\\\\Python\\\\Python36-32\\\\unrar.exe', '.'), ('cddagl/locale/en/LC_MESSAGES/cddagl.mo', 'cddagl/locale/en/LC_MESSAGES'), ('cddagl/locale/fr/LC_MESSAGES/cddagl.mo', 'cddagl/locale/fr/LC_MESSAGES'), ('cddagl/locale/it/LC_MESSAGES/cddagl.mo', 'cddagl/locale/it/LC_MESSAGES'), ('cddagl/locale/ru/LC_MESSAGES/cddagl.mo', 'cddagl/locale/ru/LC_MESSAGES')],\r\n hiddenimports=['lxml.cssselect', 'babel.numbers'],\r\n hookspath=[],\r\n runtime_hooks=[],\r\n excludes=[],\r\n win_no_prefer_redirects=False,\r\n win_private_assemblies=False,\r\n cipher=block_cipher)\r\npyz = PYZ(a.pure, a.zipped_data,\r\n cipher=block_cipher)\r\nexe = EXE(pyz,\r\n a.scripts,\r\n a.binaries,\r\n a.zipfiles,\r\n a.datas,\r\n name='launcher',\r\n debug=True,\r\n strip=False,\r\n upx=False,\r\n runtime_tmpdir=None,\r\n console=True , icon='cddagl\\\\resources\\\\launcher.ico')\r\n```\r\n\r\nI'm not sure how this can be improved, but this solution 
should work if you are having a similar issue. This was tested with **PyInstaller==3.4.dev0+1033a8770**\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2013-2018, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\nfrom PyInstaller.utils.hooks import add_qt5_dependencies\n\nhiddenimports, binaries, datas = add_qt5_dependencies(__file__)\n", "path": "PyInstaller/hooks/hook-PyQt5.QtNetwork.py"}]} | 1,405 | 279 |
gh_patches_debug_9014 | rasdani/github-patches | git_diff | stephenmcd__mezzanine-1517 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Duplicate form fields in admin for user profiles
As discussed here:
https://groups.google.com/forum/#!topic/mezzanine-users/3QmiqfNZjUM
</issue>
<code>
[start of mezzanine/accounts/admin.py]
1 from __future__ import unicode_literals
2
3 from django.contrib import admin
4 from django.contrib.auth import get_user_model
5 from mezzanine.accounts import get_profile_model, ProfileNotConfigured
6
7 from mezzanine.core.admin import SitePermissionUserAdmin
8 from mezzanine.conf import settings
9 from mezzanine.utils.email import send_approved_mail, send_verification_mail
10
11
12 User = get_user_model()
13
14 user_list_display = SitePermissionUserAdmin.list_display
15 user_list_display += ("is_active", "date_joined", "last_login")
16
17
18 class UserProfileAdmin(SitePermissionUserAdmin):
19
20 list_display = user_list_display
21
22 def save_model(self, request, obj, form, change):
23 """
24 If the ``ACCOUNTS_APPROVAL_REQUIRED`` setting is ``True``,
25 send a notification email to the user being saved if their
26 ``active`` status has changed to ``True``.
27 If the ``ACCOUNTS_VERIFICATION_REQUIRED`` setting is ``True``,
28 send a verification email instead.
29 """
30 must_send_verification_mail_after_save = False
31 if change and settings.ACCOUNTS_APPROVAL_REQUIRED:
32 if obj.is_active and not User.objects.get(id=obj.id).is_active:
33 if settings.ACCOUNTS_VERIFICATION_REQUIRED:
34 # Accounts verification requires an inactive account
35 obj.is_active = False
36 # The token generated by send_verification_mail()
37 # must match the _saved_ User object,
38 # so postpone send_verification_mail() until later
39 must_send_verification_mail_after_save = True
40 else:
41 send_approved_mail(request, obj)
42 super(UserProfileAdmin, self).save_model(request, obj, form, change)
43 if must_send_verification_mail_after_save:
44 user = User.objects.get(id=obj.id)
45 send_verification_mail(request, user, "signup_verify")
46
47
48 try:
49 class ProfileInline(admin.StackedInline):
50 model = get_profile_model()
51 can_delete = False
52 template = "admin/profile_inline.html"
53 extra = 0
54 UserProfileAdmin.inlines += (ProfileInline,)
55 except ProfileNotConfigured:
56 pass
57
58
59 if User in admin.site._registry:
60 admin.site.unregister(User)
61 admin.site.register(User, UserProfileAdmin)
62
[end of mezzanine/accounts/admin.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mezzanine/accounts/admin.py b/mezzanine/accounts/admin.py
--- a/mezzanine/accounts/admin.py
+++ b/mezzanine/accounts/admin.py
@@ -51,6 +51,13 @@
can_delete = False
template = "admin/profile_inline.html"
extra = 0
+
+ def get_min_num(self, request, obj=None, **kwargs):
+ """This causes profile forms to be shown when editing but hidden
+ when creating. If min_num is fixed at 1, Django's initial user
+ creation form fails if the profile model has a required field."""
+ return 0 if obj is None else 1
+
UserProfileAdmin.inlines += (ProfileInline,)
except ProfileNotConfigured:
pass
| {"golden_diff": "diff --git a/mezzanine/accounts/admin.py b/mezzanine/accounts/admin.py\n--- a/mezzanine/accounts/admin.py\n+++ b/mezzanine/accounts/admin.py\n@@ -51,6 +51,13 @@\n can_delete = False\n template = \"admin/profile_inline.html\"\n extra = 0\n+\n+ def get_min_num(self, request, obj=None, **kwargs):\n+ \"\"\"This causes profile forms to be shown when editing but hidden\n+ when creating. If min_num is fixed at 1, Django's initial user\n+ creation form fails if the profile model has a required field.\"\"\"\n+ return 0 if obj is None else 1\n+\n UserProfileAdmin.inlines += (ProfileInline,)\n except ProfileNotConfigured:\n pass\n", "issue": "Duplicate form fields in admin for user profiles\nAs discussed here:\n\nhttps://groups.google.com/forum/#!topic/mezzanine-users/3QmiqfNZjUM\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nfrom django.contrib import admin\nfrom django.contrib.auth import get_user_model\nfrom mezzanine.accounts import get_profile_model, ProfileNotConfigured\n\nfrom mezzanine.core.admin import SitePermissionUserAdmin\nfrom mezzanine.conf import settings\nfrom mezzanine.utils.email import send_approved_mail, send_verification_mail\n\n\nUser = get_user_model()\n\nuser_list_display = SitePermissionUserAdmin.list_display\nuser_list_display += (\"is_active\", \"date_joined\", \"last_login\")\n\n\nclass UserProfileAdmin(SitePermissionUserAdmin):\n\n list_display = user_list_display\n\n def save_model(self, request, obj, form, change):\n \"\"\"\n If the ``ACCOUNTS_APPROVAL_REQUIRED`` setting is ``True``,\n send a notification email to the user being saved if their\n ``active`` status has changed to ``True``.\n If the ``ACCOUNTS_VERIFICATION_REQUIRED`` setting is ``True``,\n send a verification email instead.\n \"\"\"\n must_send_verification_mail_after_save = False\n if change and settings.ACCOUNTS_APPROVAL_REQUIRED:\n if obj.is_active and not User.objects.get(id=obj.id).is_active:\n if settings.ACCOUNTS_VERIFICATION_REQUIRED:\n # Accounts verification requires an inactive account\n obj.is_active = False\n # The token generated by send_verification_mail()\n # must match the _saved_ User object,\n # so postpone send_verification_mail() until later\n must_send_verification_mail_after_save = True\n else:\n send_approved_mail(request, obj)\n super(UserProfileAdmin, self).save_model(request, obj, form, change)\n if must_send_verification_mail_after_save:\n user = User.objects.get(id=obj.id)\n send_verification_mail(request, user, \"signup_verify\")\n\n\ntry:\n class ProfileInline(admin.StackedInline):\n model = get_profile_model()\n can_delete = False\n template = \"admin/profile_inline.html\"\n extra = 0\n UserProfileAdmin.inlines += (ProfileInline,)\nexcept ProfileNotConfigured:\n pass\n\n\nif User in admin.site._registry:\n admin.site.unregister(User)\nadmin.site.register(User, UserProfileAdmin)\n", "path": "mezzanine/accounts/admin.py"}]} | 1,157 | 169 |
gh_patches_debug_2536 | rasdani/github-patches | git_diff | optuna__optuna-122 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
`TPESampler._sample_categorical` fails with PostgreSQL backend
`TPESampler._sample_categorical` fails with PostgreSQL backend. This happens because:
- `TPESampler._sample_categorical` returns an integer as `numpy.int32`.
- The integer value is passed to the storage class without any cast.
- SQLAlchemy with the psycopg2 backend does not accept `numpy.int32` input, but does accept plain `int` (see the sketch below).
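
For illustration, here is a minimal sketch of the type mismatch (the value and variable names are made up; only the `numpy.int32` vs. plain `int` distinction matters):

```python
import numpy as np

idx = np.int32(2)  # the kind of value the categorical sampler currently returns
param = int(idx)   # a plain Python int, which psycopg2 can bind without a custom adapter
```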
**Repro Steps**
With any objective function using categorical sampling (e.g., example one in `chainer_mnist.py`), invoke `minimize` as:
```
study = pfnopt.create_study(storage=SOME_POSTGRES_URL)
pfnopt.minimize(objective, n_trials=100, study=study)
```
It fails after running `n_startup_trials` trials.
</issue>
<code>
[start of pfnopt/samplers/tpe.py]
1 import math
2 import numpy
3 from typing import List # NOQA
4 from typing import Optional # NOQA
5
6 from pfnopt import distributions # NOQA
7 from pfnopt.samplers import _hyperopt
8 from pfnopt.samplers import base
9 from pfnopt.samplers import random
10 from pfnopt.storages.base import BaseStorage # NOQA
11
12
13 class TPESampler(base.BaseSampler):
14
15 def __init__(self,
16 prior_weight=_hyperopt.default_prior_weight,
17 n_startup_trials=_hyperopt.default_n_startup_trials,
18 n_ei_candidates=_hyperopt.default_n_ei_candidates,
19 gamma=_hyperopt.default_gamma,
20 seed=None):
21 # type: (float, int, int, float, Optional[int]) -> None
22 self.prior_weight = prior_weight
23 self.n_startup_trials = n_startup_trials
24 self.n_ei_candidates = n_ei_candidates
25 self.gamma = gamma
26 self.seed = seed
27
28 self.rng = numpy.random.RandomState(seed)
29 self.random_sampler = random.RandomSampler(seed=seed)
30
31 def sample(self, storage, study_id, param_name, param_distribution):
32 # type: (BaseStorage, int, str, distributions.BaseDistribution) -> float
33 observation_pairs = storage.get_trial_param_result_pairs(
34 study_id, param_name)
35 n = len(observation_pairs)
36
37 # TODO(Akiba): this behavior is slightly different from hyperopt
38 if n < self.n_startup_trials:
39 return self.random_sampler.sample(storage, study_id, param_name, param_distribution)
40
41 below_param_values, above_param_values = _hyperopt.ap_filter_trials(
42 range(n), [p[0] for p in observation_pairs],
43 range(n), [p[1] for p in observation_pairs],
44 self.gamma)
45
46 if isinstance(param_distribution, distributions.UniformDistribution):
47 return self._sample_uniform(
48 param_distribution, below_param_values, above_param_values)
49 elif isinstance(param_distribution, distributions.LogUniformDistribution):
50 return self._sample_loguniform(
51 param_distribution, below_param_values, above_param_values)
52 elif isinstance(param_distribution, distributions.CategoricalDistribution):
53 return self._sample_categorical(
54 param_distribution, below_param_values, above_param_values)
55 else:
56 raise NotImplementedError
57
58 def _sample_uniform(self, distribution, below, above):
59 # type: (distributions.UniformDistribution, List[float], List[float]) -> float
60 return _hyperopt.sample_uniform(
61 obs_below=below, obs_above=above, prior_weight=self.prior_weight,
62 low=distribution.low, high=distribution.high,
63 size=(self.n_ei_candidates,), rng=self.rng)
64
65 def _sample_loguniform(self, distribution, below, above):
66 # type: (distributions.LogUniformDistribution, List[float], List[float]) -> float
67
68 return _hyperopt.sample_loguniform(
69 obs_below=below, obs_above=above, prior_weight=self.prior_weight,
70 # `sample_loguniform` generates values in [exp(low), exp(high)]
71 low=math.log(distribution.low),
72 high=math.log(distribution.high),
73 size=(self.n_ei_candidates,), rng=self.rng)
74
75 def _sample_categorical(self, distribution, below, above):
76 # type: (distributions.CategoricalDistribution, List[float], List[float]) -> float
77 choices = distribution.choices
78 below = list(map(int, below))
79 above = list(map(int, above))
80 idx = _hyperopt.sample_categorical(
81 obs_below=below, obs_above=above, prior_weight=self.prior_weight,
82 upper=len(choices), size=(self.n_ei_candidates, ), rng=self.rng)
83 return idx
84
[end of pfnopt/samplers/tpe.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pfnopt/samplers/tpe.py b/pfnopt/samplers/tpe.py
--- a/pfnopt/samplers/tpe.py
+++ b/pfnopt/samplers/tpe.py
@@ -80,4 +80,4 @@
idx = _hyperopt.sample_categorical(
obs_below=below, obs_above=above, prior_weight=self.prior_weight,
upper=len(choices), size=(self.n_ei_candidates, ), rng=self.rng)
- return idx
+ return int(idx)
| {"golden_diff": "diff --git a/pfnopt/samplers/tpe.py b/pfnopt/samplers/tpe.py\n--- a/pfnopt/samplers/tpe.py\n+++ b/pfnopt/samplers/tpe.py\n@@ -80,4 +80,4 @@\n idx = _hyperopt.sample_categorical(\n obs_below=below, obs_above=above, prior_weight=self.prior_weight,\n upper=len(choices), size=(self.n_ei_candidates, ), rng=self.rng)\n- return idx\n+ return int(idx)\n", "issue": "`TPESampler._sample_categorical` fails with PostgreSQL backend\n`TPESampler._sample_categorical` fails with PostgreSQL backend. This happens because:\r\n- `TPESampler._sample_categorical` returns an integer as `numpy.int32`.\r\n- The integer value is input to storage class without any cast.\r\n- SQLAlchemy with psycopg2 backend does not support `numpy.int32` input but does `int` one.\r\n\r\n**Repro Steps**\r\nWith any objective function using categorical sampling (e.g., example one in `chainer_mnist.py`), invoke `minimize` as:\r\n```\r\nstudy = pfnopt.create_study(storage=SOME_POSTGRES_URL)\r\npfnopt.minimize(objective, n_trials=100, study=study)\r\n```\r\n\r\nIt fails after running trials `n_startup_trails` times.\n", "before_files": [{"content": "import math\nimport numpy\nfrom typing import List # NOQA\nfrom typing import Optional # NOQA\n\nfrom pfnopt import distributions # NOQA\nfrom pfnopt.samplers import _hyperopt\nfrom pfnopt.samplers import base\nfrom pfnopt.samplers import random\nfrom pfnopt.storages.base import BaseStorage # NOQA\n\n\nclass TPESampler(base.BaseSampler):\n\n def __init__(self,\n prior_weight=_hyperopt.default_prior_weight,\n n_startup_trials=_hyperopt.default_n_startup_trials,\n n_ei_candidates=_hyperopt.default_n_ei_candidates,\n gamma=_hyperopt.default_gamma,\n seed=None):\n # type: (float, int, int, float, Optional[int]) -> None\n self.prior_weight = prior_weight\n self.n_startup_trials = n_startup_trials\n self.n_ei_candidates = n_ei_candidates\n self.gamma = gamma\n self.seed = seed\n\n self.rng = numpy.random.RandomState(seed)\n self.random_sampler = random.RandomSampler(seed=seed)\n\n def sample(self, storage, study_id, param_name, param_distribution):\n # type: (BaseStorage, int, str, distributions.BaseDistribution) -> float\n observation_pairs = storage.get_trial_param_result_pairs(\n study_id, param_name)\n n = len(observation_pairs)\n\n # TODO(Akiba): this behavior is slightly different from hyperopt\n if n < self.n_startup_trials:\n return self.random_sampler.sample(storage, study_id, param_name, param_distribution)\n\n below_param_values, above_param_values = _hyperopt.ap_filter_trials(\n range(n), [p[0] for p in observation_pairs],\n range(n), [p[1] for p in observation_pairs],\n self.gamma)\n\n if isinstance(param_distribution, distributions.UniformDistribution):\n return self._sample_uniform(\n param_distribution, below_param_values, above_param_values)\n elif isinstance(param_distribution, distributions.LogUniformDistribution):\n return self._sample_loguniform(\n param_distribution, below_param_values, above_param_values)\n elif isinstance(param_distribution, distributions.CategoricalDistribution):\n return self._sample_categorical(\n param_distribution, below_param_values, above_param_values)\n else:\n raise NotImplementedError\n\n def _sample_uniform(self, distribution, below, above):\n # type: (distributions.UniformDistribution, List[float], List[float]) -> float\n return _hyperopt.sample_uniform(\n obs_below=below, obs_above=above, prior_weight=self.prior_weight,\n low=distribution.low, high=distribution.high,\n size=(self.n_ei_candidates,), rng=self.rng)\n\n def 
_sample_loguniform(self, distribution, below, above):\n # type: (distributions.LogUniformDistribution, List[float], List[float]) -> float\n\n return _hyperopt.sample_loguniform(\n obs_below=below, obs_above=above, prior_weight=self.prior_weight,\n # `sample_loguniform` generates values in [exp(low), exp(high)]\n low=math.log(distribution.low),\n high=math.log(distribution.high),\n size=(self.n_ei_candidates,), rng=self.rng)\n\n def _sample_categorical(self, distribution, below, above):\n # type: (distributions.CategoricalDistribution, List[float], List[float]) -> float\n choices = distribution.choices\n below = list(map(int, below))\n above = list(map(int, above))\n idx = _hyperopt.sample_categorical(\n obs_below=below, obs_above=above, prior_weight=self.prior_weight,\n upper=len(choices), size=(self.n_ei_candidates, ), rng=self.rng)\n return idx\n", "path": "pfnopt/samplers/tpe.py"}]} | 1,666 | 122 |
gh_patches_debug_599 | rasdani/github-patches | git_diff | pex-tool__pex-1834 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Release 2.1.95
On the docket:
+ [x] Lock creation should skip Windows-only requirements and / or allow selecting target platforms (OS classes). #1821
+ [x] Feature request: "universal" lock mode can reject unsupported platforms #1595
+ [x] Avoid ENOEXEC for --venv shebangs. #1828
+ [x] pex3 lock export does't seem to respect the platform flag. #1826
+ [x] Clarify pex3 lock export command. #1645
+ [x] Support exporting PYTHONPATH before running user code #1825
</issue>
<code>
[start of pex/version.py]
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.94"
5
[end of pex/version.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.94"
+__version__ = "2.1.95"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.94\"\n+__version__ = \"2.1.95\"\n", "issue": "Release 2.1.95\nOn the docket:\r\n+ [x] Lock creation should skip Windows-only requirements and / or allow selecting target platforms (OS classes). #1821\r\n+ [x] Feature request: \"universal\" lock mode can reject unsupported platforms #1595\r\n+ [x] Avoid ENOEXEC for --venv shebangs. #1828 \r\n+ [x] pex3 lock export does't seem to respect the platform flag. #1826\r\n+ [x] Clarify pex3 lock export command. #1645\r\n+ [x] Support exporting PYTHONPATH before running user code #1825\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.94\"\n", "path": "pex/version.py"}]} | 730 | 97 |
gh_patches_debug_20686 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-3325
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Spider labreweries is broken
During the global build at 2021-07-21-14-42-39, spider **labreweries** failed with **0 features** and **88 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-07-21-14-42-39/logs/labreweries.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-07-21-14-42-39/output/labreweries.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-07-21-14-42-39/output/labreweries.geojson))
</issue>
<code>
[start of locations/spiders/labreweries.py]
1 # -*- coding: utf-8 -*-
2 import scrapy
3 import re
4
5 from locations.items import GeojsonPointItem
6
7
8 class LaBreweriesSpider(scrapy.Spider):
9 name = "labreweries"
10 allowed_domains = ["labeerhop.com"]
11 start_urls = (
12 'http://labeerhop.com/breweries-sitemap.xml',
13 )
14
15 def store_hours(self, store_hours):
16 day_groups = []
17 this_day_group = None
18 for day in store_hours:
19 day = day.replace(' :-', ' 12:00 -')
20 day = day.split('<h5>Hours</h5>')[1].strip('<br>').strip('</aside>')
21 match = re.search(r'(closed|(\d{1,2})\S.\s*-\s*(\d{1,2})\S.)', day.lower())
22 open('/tmp/test1.txt', 'w').write(str(day))
23 (dow, f_hr, t_hr) = match.groups()
24 day_short = dow[:2]
25
26 f_hr = int(f_hr)
27 t_hr = int(t_hr)
28
29 hours = '{:02d}-{:02d}'.format(
30 f_hr,
31 t_hr,
32 )
33
34 if not this_day_group:
35 this_day_group = {
36 'from_day': day_short,
37 'to_day': day_short,
38 'hours': hours
39 }
40 elif this_day_group['hours'] != hours:
41 day_groups.append(this_day_group)
42 this_day_group = {
43 'from_day': day_short,
44 'to_day': day_short,
45 'hours': hours
46 }
47 elif this_day_group['hours'] == hours:
48 this_day_group['to_day'] = day_short
49
50 day_groups.append(this_day_group)
51
52 opening_hours = ""
53 if len(day_groups) == 1 and day_groups[0]['hours'] in ('00:00-23:59', '00:00-00:00'):
54 opening_hours = '24/7'
55 else:
56 for day_group in day_groups:
57 if day_group['from_day'] == day_group['to_day']:
58 opening_hours += '{from_day} {hours}; '.format(**day_group)
59 elif day_group['from_day'] == 'Su' and day_group['to_day'] == 'Sa':
60 opening_hours += '{hours}; '.format(**day_group)
61 else:
62 opening_hours += '{from_day}-{to_day} {hours}; '.format(**day_group)
63 opening_hours = opening_hours[:-2]
64
65 return opening_hours
66
67 def address(self, address):
68 if not address:
69 return None
70
71 addr_tags = {
72 "addr_full": address[0].split(',')[0].strip(),
73 "city": address[0].split(',')[1].strip(),
74 "state": address[0].split(' ')[-2].strip(),
75 "postcode": address[0].split(' ')[-1].strip(),
76 }
77
78 return addr_tags
79
80 def parse(self, response):
81 response.selector.remove_namespaces()
82 city_urls = response.xpath('//url/loc/text()').extract()
83 for path in city_urls:
84 if path not in "http://labeerhop.com/breweries/1056/":
85 yield scrapy.Request(
86 path.strip(),
87 callback=self.parse_store,
88 )
89
90 def parse_store(self, response):
91
92 properties = {
93 'website': response.xpath('//head/link[@rel="canonical"]/@href').extract_first(),
94 'ref': str(response.xpath('/html/body/div[1]/div[1]/header/h1/text()').extract()).strip("['']"),
95 'opening_hours': re.sub(r'\s+', ' ', response.css('#secondary').extract()[0].split('<h5>Hours</h5>')[1].replace('<br>','').replace('</aside>','').replace('\t',' ').replace('\n','').replace('\r',' ')).strip(),
96 # 'lon': float(data['geo']['longitude']), # not lon on page
97 # 'lat': float(data['geo']['latitude']), # not lat on page
98 }
99
100 address = self.address(response.xpath('/html/body/div[1]/div[1]/aside/address/text()').extract())
101 if address:
102 properties.update(address)
103
104
105 yield GeojsonPointItem(**properties)
106
[end of locations/spiders/labreweries.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply.
diff --git a/locations/spiders/labreweries.py b/locations/spiders/labreweries.py
--- a/locations/spiders/labreweries.py
+++ b/locations/spiders/labreweries.py
@@ -92,7 +92,7 @@
properties = {
'website': response.xpath('//head/link[@rel="canonical"]/@href').extract_first(),
'ref': str(response.xpath('/html/body/div[1]/div[1]/header/h1/text()').extract()).strip("['']"),
- 'opening_hours': re.sub(r'\s+', ' ', response.css('#secondary').extract()[0].split('<h5>Hours</h5>')[1].replace('<br>','').replace('</aside>','').replace('\t',' ').replace('\n','').replace('\r',' ')).strip(),
+ 'opening_hours': re.sub(r'\s+', ' ', response.xpath('//*[@id="content"]/div/div[2]/div[3]').extract()[0].split('<h5 class="mb-2">Hours</h5>')[1].replace('<br>','').replace('</div>','').replace('\t',' ').replace('\n','').replace('\r',' ')).strip(),
# 'lon': float(data['geo']['longitude']), # not lon on page
# 'lat': float(data['geo']['latitude']), # not lat on page
}
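A minimal sketch of how the updated selector could be exercised outside a full crawl, assuming `parsel` is installed and `page.html` is a locally saved copy of a brewery page (both assumptions, not part of the spider):

```python
# Illustrative only: check the new XPath and the new <h5 class="mb-2"> split marker.
import re

from parsel import Selector

html = open("page.html", encoding="utf-8").read()  # assumed local fixture
sel = Selector(text=html)
raw = sel.xpath('//*[@id="content"]/div/div[2]/div[3]').get()
if raw and '<h5 class="mb-2">Hours</h5>' in raw:
    hours = re.sub(r"\s+", " ", raw.split('<h5 class="mb-2">Hours</h5>')[1]).strip()
    print(hours)
```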
gh_patches_debug_47927 | rasdani/github-patches | git_diff | uccser__cs-unplugged-885
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Minor adjustments to navbar and homepage
## Navbar
- [x] There should be more space between logo and 'Topics'.
- [x] The search bar can be ~20% smaller.
## Homepage
- [x] Navbar should be transparent and fade in when user scrolls down.
</issue>
<code>
[start of csunplugged/general/views.py]
1 """Views for the general application."""
2
3 from django.views.generic import TemplateView
4 from django.http import HttpResponse
5
6
7 class GeneralIndexView(TemplateView):
8 """View for the homepage that renders from a template."""
9
10 template_name = "general/index.html"
11
12
13 class GeneralAboutView(TemplateView):
14 """View for the about page that renders from a template."""
15
16 template_name = "general/about.html"
17
18
19 class GeneralContactView(TemplateView):
20 """View for the contact page that renders from a template."""
21
22 template_name = "general/contact.html"
23
24
25 class GeneralPeopleView(TemplateView):
26 """View for the people page that renders from a template."""
27
28 template_name = "general/people.html"
29
30
31 class GeneralPrinciplesView(TemplateView):
32 """View for the princples page that renders from a template."""
33
34 template_name = "general/principles.html"
35
36
37 class WhatIsCSView(TemplateView):
38 """View for the 'What is Computer Science?' page that renders from a template."""
39
40 template_name = "general/what-is-computer-science.html"
41
42
43 class ComputationalThinkingView(TemplateView):
44 """View for the Computational Thinking page that renders from a template."""
45
46 template_name = "general/computational-thinking.html"
47
48
49 class HowDoITeachCSUnpluggedView(TemplateView):
50 """View for the 'How do I teach CS Unplugged?' page that renders from a template."""
51
52 template_name = "general/how-do-i-teach-cs-unplugged.html"
53
54
55 def health_check(request):
56 """Return heath check response for Google App Engine.
57
58 Returns a 200 HTTP response for Google App Engine to detect the system
59 is running.
60 """
61 return HttpResponse(status=200)
62
[end of csunplugged/general/views.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply.
diff --git a/csunplugged/general/views.py b/csunplugged/general/views.py
--- a/csunplugged/general/views.py
+++ b/csunplugged/general/views.py
@@ -9,6 +9,16 @@
template_name = "general/index.html"
+ def get_context_data(self, **kwargs):
+ """Provide the context data for the homepage.
+
+ Returns:
+ Dictionary of context data.
+ """
+ context = super(GeneralIndexView, self).get_context_data(**kwargs)
+ context["homepage"] = True
+ return context
+
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
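A small sketch of how the new `homepage` flag could be verified and consumed, assuming a pytest-django style test client and that the homepage is served at `/`:

```python
# Illustrative test only: the flag added to the context is what lets the template
# switch the navbar to its transparent, fade-in variant on the homepage.
def test_homepage_context_flag(client):
    response = client.get("/")
    assert response.context["homepage"] is True
```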
gh_patches_debug_40993 | rasdani/github-patches | git_diff | apluslms__a-plus-1062
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Feature request: send email when teacher adds news
Hi
It would be very nice to be able to notify students via email when a teacher adds news. This should be an option so the teacher could decide on a case-by-case basis whether to send the email or not.
What do you think?
Thanks!
</issue>
<code>
[start of news/forms.py]
1 from django import forms
2
3 from .models import News
4
5
6 class NewsForm(forms.ModelForm):
7
8 class Meta:
9 model = News
10 fields = [
11 'audience',
12 'publish',
13 'pin',
14 'title',
15 'body',
16 ]
17
[end of news/forms.py]
[start of lib/email_messages.py]
1 import logging
2 import traceback
3 from django.conf import settings
4 from django.core.mail import send_mail
5 from django.urls import reverse
6 from .helpers import build_aplus_url
7
8
9 logger = logging.getLogger('aplus.lib.email_messages')
10
11
12 def email_course_instance(instance, subject, message, everyone=False) -> bool:
13 """
14 Sends an email to a course instance's technical support emails or teachers if technical support not set.
15 If everyone == True, sends emails to teachers anyway.
16 """
17 recipients = []
18 if instance.technical_error_emails:
19 recipients = instance.technical_error_emails.split(",")
20 if everyone or not recipients:
21 recipients = instance.teachers.exclude(user__email='').values_list("user__email", flat=True)
22
23 if not recipients:
24 raise ValueError("No recipients")
25
26 try:
27 return send_mail(subject, message, settings.SERVER_EMAIL, recipients, True) == 1
28 except:
29 logger.exception('Failed to send course instance emails.')
30 raise
31
32
33 def email_course_error(request, exercise, message, exception=True):
34 """
35 Sends error message to course instance's teachers or technical support emails if set.
36 """
37 instance = exercise.course_instance
38
39 error_trace = "-"
40 if exception:
41 error_trace = traceback.format_exc()
42
43 if request:
44 request_fields = repr(request)
45 else:
46 request_fields = "No request available"
47
48 subject = settings.EXERCISE_ERROR_SUBJECT.format(
49 course=instance.course.code,
50 exercise=str(exercise))
51 body = settings.EXERCISE_ERROR_DESCRIPTION.format(
52 message=message,
53 exercise_url=build_aplus_url(
54 exercise.get_absolute_url(), user_url=True),
55 course_edit_url=build_aplus_url(
56 instance.get_url('course-details'), user_url=True),
57 error_trace=error_trace,
58 request_fields=request_fields)
59
60 try:
61 email_course_instance(instance, subject, body)
62 except:
63 pass
64
[end of lib/email_messages.py]
[start of news/views.py]
1 from django.core.exceptions import PermissionDenied
2 from django.http import Http404
3 from django.shortcuts import get_object_or_404
4
5 from authorization.permissions import ACCESS
6 from course.viewbase import CourseInstanceBaseView, CourseInstanceMixin
7 from lib.viewbase import BaseFormView, BaseRedirectView
8 from .forms import NewsForm
9 from .models import News
10
11
12 class ListNewsView(CourseInstanceBaseView):
13 access_mode = ACCESS.TEACHER
14 template_name = "news/list.html"
15
16 def get_common_objects(self):
17 super().get_common_objects()
18 self.news = self.instance.news.all()
19 self.note("news")
20
21
22 class EditNewsView(CourseInstanceMixin, BaseFormView):
23 access_mode = ACCESS.TEACHER
24 template_name = "news/edit.html"
25 form_class = NewsForm
26 news_item_kw = "news_id"
27
28 def get_form_kwargs(self):
29 kwargs = super().get_form_kwargs()
30
31 news_id = self._get_kwarg(self.news_item_kw, default=None)
32 if news_id:
33 self.news_item = get_object_or_404(
34 News,
35 pk=news_id,
36 course_instance=self.instance
37 )
38 self.note("news_item")
39 else:
40 self.news_item = News(course_instance=self.instance)
41
42 kwargs["instance"] = self.news_item
43 return kwargs
44
45 def get_success_url(self):
46 return self.instance.get_url("news-list")
47
48 def form_valid(self, form):
49 form.save()
50 return super().form_valid(form)
51
52
53 class RemoveNewsView(CourseInstanceMixin, BaseRedirectView):
54 access_mode = ACCESS.TEACHER
55 news_item_kw = "news_id"
56
57 def get_resource_objects(self):
58 super().get_resource_objects()
59 self.news_item = get_object_or_404(
60 News,
61 id=self._get_kwarg(self.news_item_kw),
62 course_instance=self.instance,
63 )
64 self.note("news_item")
65
66 def post(self, request, *args, **kwargs):
67 self.news_item.delete()
68 return self.redirect(self.instance.get_url("news-list"))
69
[end of news/views.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply.
diff --git a/lib/email_messages.py b/lib/email_messages.py
--- a/lib/email_messages.py
+++ b/lib/email_messages.py
@@ -1,9 +1,10 @@
import logging
import traceback
from django.conf import settings
-from django.core.mail import send_mail
-from django.urls import reverse
-from .helpers import build_aplus_url
+from django.core.mail import send_mail, send_mass_mail
+
+from .helpers import Enum, build_aplus_url
+from course.models import CourseInstance
logger = logging.getLogger('aplus.lib.email_messages')
@@ -61,3 +62,30 @@
email_course_instance(instance, subject, body)
except:
pass
+
+
+def email_course_students(
+ instance: CourseInstance,
+ subject: str,
+ message: str,
+ audience: Enum = CourseInstance.ENROLLMENT_AUDIENCE.ALL_USERS,
+ ) -> int:
+ """
+ Sends an email to students on the course. Audience parameter controls whether the mail goes
+ to all (default), just internal, or just external students.
+ Returns number of emails sent, or -1 in case of error.
+ """
+ students = instance.students
+ if audience == CourseInstance.ENROLLMENT_AUDIENCE.INTERNAL_USERS:
+ students = students.filter(organization=settings.LOCAL_ORGANIZATION)
+ elif audience == CourseInstance.ENROLLMENT_AUDIENCE.EXTERNAL_USERS:
+ students = students.exclude(organization=settings.LOCAL_ORGANIZATION)
+
+ recipients = students.exclude(user__email='').values_list("user__email", flat=True)
+ emails = tuple(map(lambda x: (subject, message, settings.SERVER_EMAIL, [x]), recipients))
+
+ try:
+ return send_mass_mail(emails)
+ except:
+ logger.exception('Failed to send course instance emails.')
+ return -1
diff --git a/news/forms.py b/news/forms.py
--- a/news/forms.py
+++ b/news/forms.py
@@ -1,16 +1,25 @@
+from typing import Any
+
from django import forms
+from django.utils.translation import gettext_lazy as _
from .models import News
class NewsForm(forms.ModelForm):
+ email = forms.BooleanField(
+ required=False,
+ label=_("SEND_EMAIL_TO_STUDENTS"),
+ )
+
class Meta:
model = News
fields = [
'audience',
'publish',
'pin',
+ 'email',
'title',
'body',
]
diff --git a/news/views.py b/news/views.py
--- a/news/views.py
+++ b/news/views.py
@@ -1,10 +1,14 @@
+from django.conf import settings
+from django.contrib import messages
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.shortcuts import get_object_or_404
+from django.utils.translation import gettext_lazy as _
from authorization.permissions import ACCESS
from course.viewbase import CourseInstanceBaseView, CourseInstanceMixin
from lib.viewbase import BaseFormView, BaseRedirectView
+from lib.email_messages import email_course_students
from .forms import NewsForm
from .models import News
@@ -47,6 +51,15 @@
def form_valid(self, form):
form.save()
+ if form.cleaned_data['email']:
+ subject = f"[{settings.BRAND_NAME} course news] {self.instance.course.code}: {self.news_item.title}"
+ if email_course_students(
+ self.instance,
+ subject,
+ self.news_item.body,
+ self.news_item.audience,
+ ) < 0:
+ messages.error(self.request, _('FAILED_TO_SEND_EMAIL'))
return super().form_valid(form)
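A minimal usage sketch for the new helper, assuming `instance` is a `CourseInstance` and the default audience (all enrolled students) is wanted:

```python
# Illustrative only: send a one-off announcement through the new helper.
from lib.email_messages import email_course_students

sent = email_course_students(
    instance,
    "Course news: schedule change",
    "The Friday lecture moves to room A1.",
)
if sent < 0:
    # A negative value signals that send_mass_mail raised; the caller decides how to report it.
    print("Sending the announcement failed.")
```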
gh_patches_debug_358 | rasdani/github-patches | git_diff | spacetelescope__jwql-550
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Cron jobs for monitors currently failing
Traceback (most recent call last):
File "/home/jwqladm/repositories/jwql/jwql/instrument_monitors/common_monitors/bias_monitor.py", line 58, in <module>
from jwql.instrument_monitors.common_monitors.dark_monitor import mast_query_darks
File "/home/jwqladm/repositories/jwql/jwql/instrument_monitors/common_monitors/dark_monitor.py", line 77, in <module>
from jwql.jwql_monitors import monitor_mast
File "/home/jwqladm/repositories/jwql/jwql/jwql_monitors/monitor_mast.py", line 25, in <module>
from bokeh.embed import components
File "/grp/jwst/ins/jwql/envs/miniconda3/envs/jwql-3.6/lib/python3.6/site-packages/bokeh/__init__.py", line 81, in <module>
from .util import logconfig
File "/grp/jwst/ins/jwql/envs/miniconda3/envs/jwql-3.6/lib/python3.6/site-packages/bokeh/util/logconfig.py", line 87, in <module>
level = settings.py_log_level()
File "/grp/jwst/ins/jwql/envs/miniconda3/envs/jwql-3.6/lib/python3.6/site-packages/bokeh/settings.py", line 310, in __call__
return self._convert(os.environ[self._env_var])
File "/grp/jwst/ins/jwql/envs/miniconda3/envs/jwql-3.6/lib/python3.6/site-packages/bokeh/settings.py", line 236, in convert_logging
raise ValueError("Cannot convert {} to log level, valid values are: {}".format(value, ", ".join(_log_levels)))
ValueError: Cannot convert WARN to log level, valid values are: CRITICAL, ERROR, WARNING, INFO, DEBUG, TRACE, NONE
</issue>
<code>
[start of setup.py]
1 import numpy as np
2 from setuptools import setup
3 from setuptools import find_packages
4
5 VERSION = '0.22.0'
6
7 AUTHORS = 'Matthew Bourque, Misty Cracraft, Joe Filippazzo, Bryan Hilbert, '
8 AUTHORS += 'Graham Kanarek, Catherine Martlin, Johannes Sahlmann, Ben Sunnquist'
9
10 DESCRIPTION = 'The James Webb Space Telescope Quicklook Project'
11
12 DEPENDENCY_LINKS = ['git+https://github.com/spacetelescope/jwst#0.13.0']
13 REQUIRES = [
14 'asdf>=2.3.3',
15 'astropy>=3.2.1',
16 'astroquery>=0.3.9',
17 'authlib',
18 'bokeh>=1.0',
19 'codecov',
20 'django>=2.0',
21 'flake8',
22 'inflection',
23 'ipython',
24 'jinja2',
25 'jsonschema==2.6.0',
26 'jwedb>=0.0.3',
27 'matplotlib',
28 'numpy',
29 'numpydoc',
30 'pandas',
31 'psycopg2',
32 'pysiaf',
33 'pytest',
34 'pytest-cov',
35 'scipy',
36 'sphinx',
37 'sqlalchemy',
38 'stsci_rtd_theme',
39 'twine'
40 ]
41
42 setup(
43 name='jwql',
44 version=VERSION,
45 description=DESCRIPTION,
46 url='https://github.com/spacetelescope/jwql.git',
47 author=AUTHORS,
48 author_email='[email protected]',
49 license='BSD',
50 keywords=['astronomy', 'python'],
51 classifiers=['Programming Language :: Python'],
52 packages=find_packages(),
53 install_requires=REQUIRES,
54 dependency_links=DEPENDENCY_LINKS,
55 include_package_data=True,
56 include_dirs=[np.get_include()],
57 )
58
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply.
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@
'astropy>=3.2.1',
'astroquery>=0.3.9',
'authlib',
- 'bokeh>=1.0',
+ 'bokeh>=1.0,<1.4',
'codecov',
'django>=2.0',
'flake8',
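A rough illustration of the failure mode the version pin works around; the environment variable name here is an assumption based on the traceback, not something stated in the issue:

```python
# Illustrative only: with a truncated level name in the environment, importing bokeh
# under the stricter settings validation raises the ValueError shown above.
import os

os.environ["BOKEH_PY_LOG_LEVEL"] = "WARN"  # "WARNING" would be accepted
import bokeh  # noqa: F401  # raises ValueError under the affected bokeh release
```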
gh_patches_debug_1710 | rasdani/github-patches | git_diff | encode__httpx-407
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ASGIDispatch and WSGIDispatch should be importable from the top-level httpx package
From #396:
> ``ASGIDispatch`` and ``WSGIDispatch`` are documented as top-level but aren't exposed at the top level. This is definitely an issue, I'd recommend the route of making both available top-level.
</issue>
<code>
[start of httpx/dispatch/__init__.py]
1 """
2 Dispatch classes handle the raw network connections and the implementation
3 details of making the HTTP request and receiving the response.
4 """
5
[end of httpx/dispatch/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply.
diff --git a/httpx/dispatch/__init__.py b/httpx/dispatch/__init__.py
--- a/httpx/dispatch/__init__.py
+++ b/httpx/dispatch/__init__.py
@@ -2,3 +2,7 @@
Dispatch classes handle the raw network connections and the implementation
details of making the HTTP request and receiving the response.
"""
+from .asgi import ASGIDispatch
+from .wsgi import WSGIDispatch
+
+__all__ = ["ASGIDispatch", "WSGIDispatch"]
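A short sketch of the imports this change enables; note that the diff re-exports the classes from `httpx.dispatch`, so a genuinely top-level `httpx.ASGIDispatch` would still need an extra re-export in `httpx/__init__.py` (assumed, not shown above):

```python
# After the change, both dispatchers resolve from the subpackage:
from httpx.dispatch import ASGIDispatch, WSGIDispatch

# A hypothetical top-level spelling, if httpx/__init__.py also re-exported them:
# from .dispatch import ASGIDispatch, WSGIDispatch
```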
gh_patches_debug_21028 | rasdani/github-patches | git_diff | techmatters__terraso-backend-141
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Landscape creation and update
## Description
- Default landscape group should be created when a new landscape is created
- Manager should be assigned at the creation of a landscape
- Only managers can update landscape data
</issue>
<code>
[start of terraso_backend/apps/core/models/landscapes.py]
1 import structlog
2 from django.db import models
3
4 from apps.core import permission_rules as perm_rules
5
6 from .commons import BaseModel, SlugModel
7 from .groups import Group
8 from .users import User
9
10 logger = structlog.get_logger(__name__)
11
12
13 class Landscape(SlugModel):
14 """
15 This model represents a Landscape on Terraso platform.
16
17 A Landscape is a socio-ecological system that consists of natural
18 and/or human-modified ecosystems. Defined by its stakeholds, a
19 Landscape usually has geographical boundaries. It may correspond to,
20 or be a combination of, natural boundaries, distinct land features,
21 socially defined areas such as indigenous territories, and/or
22 jurisdictional and administrative boundaries. The boundaries of a
23 Landscape can cross several countries.
24 """
25
26 name = models.CharField(max_length=128, unique=True)
27 description = models.TextField(max_length=512, blank=True, default="")
28 website = models.URLField(blank=True, default="")
29 location = models.CharField(max_length=128, blank=True, default="")
30 area_polygon = models.JSONField(blank=True, null=True)
31
32 created_by = models.ForeignKey(
33 User,
34 blank=True,
35 null=True,
36 on_delete=models.PROTECT,
37 related_name="created_landscapes",
38 )
39 groups = models.ManyToManyField(Group, through="LandscapeGroup")
40
41 field_to_slug = "name"
42
43 class Meta(SlugModel.Meta):
44 rules_permissions = {
45 "change": perm_rules.allowed_to_change_landscape,
46 "delete": perm_rules.allowed_to_delete_landscape,
47 }
48
49 def get_default_group(self):
50 """
51 A default Group in a Landscape is that Group where any
52 individual (associated or not with other Groups) is added when
53 associating directly with a Landscape.
54 """
55 try:
56 # associated_groups is the related_name defined on
57 # LandscapeGroup relationship with Landscape. It returns a
58 # queryset of LandscapeGroup
59 landscape_group = self.associated_groups.get(is_default_landscape_group=True)
60 except LandscapeGroup.DoesNotExist:
61 logger.error(
62 "Landscape has no default group, but it must have", extra={"landscape_id": self.pk}
63 )
64 return None
65
66 return landscape_group.group
67
68 def __str__(self):
69 return self.name
70
71
72 class LandscapeGroup(BaseModel):
73 """
74 This model represents the association between a Landscape and a Group on
75 Terraso platform.
76 """
77
78 landscape = models.ForeignKey(
79 Landscape, on_delete=models.CASCADE, related_name="associated_groups"
80 )
81 group = models.ForeignKey(Group, on_delete=models.CASCADE, related_name="associated_landscapes")
82
83 is_default_landscape_group = models.BooleanField(blank=True, default=False)
84
85 class Meta:
86 rules_permissions = {
87 "add": perm_rules.allowed_to_add_landscape_group,
88 "delete": perm_rules.allowed_to_delete_landscape_group,
89 }
90 constraints = (
91 models.UniqueConstraint(
92 fields=("group", "landscape"),
93 condition=models.Q(deleted_at__isnull=True),
94 name="unique_active_landscape_group",
95 ),
96 )
97
[end of terraso_backend/apps/core/models/landscapes.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply.
diff --git a/terraso_backend/apps/core/models/landscapes.py b/terraso_backend/apps/core/models/landscapes.py
--- a/terraso_backend/apps/core/models/landscapes.py
+++ b/terraso_backend/apps/core/models/landscapes.py
@@ -1,5 +1,5 @@
import structlog
-from django.db import models
+from django.db import models, transaction
from apps.core import permission_rules as perm_rules
@@ -46,6 +46,24 @@
"delete": perm_rules.allowed_to_delete_landscape,
}
+ def save(self, *args, **kwargs):
+ with transaction.atomic():
+ creating = not Landscape.objects.filter(pk=self.pk).exists()
+
+ super().save(*args, **kwargs)
+
+ if creating and self.created_by:
+ group = Group(
+ name="Group {}".format(self.slug),
+ description="",
+ created_by=self.created_by,
+ )
+ group.save()
+ landscape_group = LandscapeGroup(
+ group=group, landscape=self, is_default_landscape_group=True
+ )
+ landscape_group.save()
+
def get_default_group(self):
"""
A default Group in a Landscape is that Group where any
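A minimal sketch of the behaviour the new `save()` is meant to guarantee, assuming a `user` object and the fields shown in the model above:

```python
# Illustrative only: creating a landscape with created_by should also create its
# default group inside the same transaction.
landscape = Landscape(name="Example Landscape", created_by=user)
landscape.save()

default_group = landscape.get_default_group()
assert default_group is not None
assert default_group.name == "Group {}".format(landscape.slug)
```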
gh_patches_debug_20437 | rasdani/github-patches | git_diff | graspologic-org__graspologic-488
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Remove * imports in subgraph, utils
The `__init__.py` files in `graspy.subgraph` and `graspy.utils` shouldn't have `import *`.
</issue>
<code>
[start of graspy/utils/__init__.py]
1 # Copyright (c) Microsoft Corporation and contributors.
2 # Licensed under the MIT License.
3
4 from .utils import *
5 from .ptr import *
6
[end of graspy/utils/__init__.py]
[start of graspy/subgraph/__init__.py]
1 # Copyright (c) Microsoft Corporation and contributors.
2 # Licensed under the MIT License.
3
4 from .sg import *
5
[end of graspy/subgraph/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply.
</patch> | diff --git a/graspy/subgraph/__init__.py b/graspy/subgraph/__init__.py
--- a/graspy/subgraph/__init__.py
+++ b/graspy/subgraph/__init__.py
@@ -1,4 +1,7 @@
# Copyright (c) Microsoft Corporation and contributors.
# Licensed under the MIT License.
-from .sg import *
+
+from .sg import SignalSubgraph
+
+__all__ = [SignalSubgraph]
diff --git a/graspy/utils/__init__.py b/graspy/utils/__init__.py
--- a/graspy/utils/__init__.py
+++ b/graspy/utils/__init__.py
@@ -1,5 +1,42 @@
# Copyright (c) Microsoft Corporation and contributors.
# Licensed under the MIT License.
-from .utils import *
-from .ptr import *
+from .utils import (
+ import_graph,
+ import_edgelist,
+ is_symmetric,
+ is_loopless,
+ is_unweighted,
+ is_almost_symmetric,
+ symmetrize,
+ remove_loops,
+ to_laplace,
+ is_fully_connected,
+ get_lcc,
+ get_multigraph_union_lcc,
+ get_multigraph_intersect_lcc,
+ augment_diagonal,
+ binarize,
+ cartprod,
+)
+from .ptr import pass_to_ranks
+
+__all__ = [
+ import_graph,
+ import_edgelist,
+ is_symmetric,
+ is_loopless,
+ is_unweighted,
+ is_almost_symmetric,
+ symmetrize,
+ remove_loops,
+ to_laplace,
+ is_fully_connected,
+ get_lcc,
+ get_multigraph_union_lcc,
+ get_multigraph_intersect_lcc,
+ augment_diagonal,
+ binarize,
+ cartprod,
+ pass_to_ranks,
+]
| {"golden_diff": "diff --git a/graspy/subgraph/__init__.py b/graspy/subgraph/__init__.py\n--- a/graspy/subgraph/__init__.py\n+++ b/graspy/subgraph/__init__.py\n@@ -1,4 +1,7 @@\n # Copyright (c) Microsoft Corporation and contributors.\n # Licensed under the MIT License.\n \n-from .sg import *\n+\n+from .sg import SignalSubgraph\n+\n+__all__ = [SignalSubgraph]\ndiff --git a/graspy/utils/__init__.py b/graspy/utils/__init__.py\n--- a/graspy/utils/__init__.py\n+++ b/graspy/utils/__init__.py\n@@ -1,5 +1,42 @@\n # Copyright (c) Microsoft Corporation and contributors.\n # Licensed under the MIT License.\n \n-from .utils import *\n-from .ptr import *\n+from .utils import (\n+ import_graph,\n+ import_edgelist,\n+ is_symmetric,\n+ is_loopless,\n+ is_unweighted,\n+ is_almost_symmetric,\n+ symmetrize,\n+ remove_loops,\n+ to_laplace,\n+ is_fully_connected,\n+ get_lcc,\n+ get_multigraph_union_lcc,\n+ get_multigraph_intersect_lcc,\n+ augment_diagonal,\n+ binarize,\n+ cartprod,\n+)\n+from .ptr import pass_to_ranks\n+\n+__all__ = [\n+ import_graph,\n+ import_edgelist,\n+ is_symmetric,\n+ is_loopless,\n+ is_unweighted,\n+ is_almost_symmetric,\n+ symmetrize,\n+ remove_loops,\n+ to_laplace,\n+ is_fully_connected,\n+ get_lcc,\n+ get_multigraph_union_lcc,\n+ get_multigraph_intersect_lcc,\n+ augment_diagonal,\n+ binarize,\n+ cartprod,\n+ pass_to_ranks,\n+]\n", "issue": "Remove * imports in subgraph, utils\nthe` __init__.py` files in `graspy.subgraph` and `graspy.utils` shouldn't have import *.\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation and contributors.\n# Licensed under the MIT License.\n\nfrom .utils import *\nfrom .ptr import *\n", "path": "graspy/utils/__init__.py"}, {"content": "# Copyright (c) Microsoft Corporation and contributors.\n# Licensed under the MIT License.\n\nfrom .sg import *\n", "path": "graspy/subgraph/__init__.py"}]} | 663 | 430 |
gh_patches_debug_1928 | rasdani/github-patches | git_diff | goauthentik__authentik-3299 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Get username from mailcow source
**Is your feature request related to a problem? Please describe.**
I like to get a username from mailcow. With username the enrollment for new users is more simple.
**Describe the solution you'd like**
Set username to full_name provided by mailcow oauths source.
**Additional context**
For other sources the username is also set redundant to another attribute if there is no special source attribute:
azure_ad.py:
```
"username": info.get("displayName"),
"name": info.get("displayName"),
```
discord.py:
```
"username": info.get("username"),
"name": info.get("username"),
```
facebook.py:
```
"username": info.get("name"),
"name": info.get("name"),
```
reddit.py
```
"username": info.get("name"),
"name": info.get("name"),
```
</issue>
<code>
[start of authentik/sources/oauth/types/mailcow.py]
1 """Mailcow OAuth Views"""
2 from typing import Any, Optional
3
4 from requests.exceptions import RequestException
5 from structlog.stdlib import get_logger
6
7 from authentik.sources.oauth.clients.oauth2 import OAuth2Client
8 from authentik.sources.oauth.types.manager import MANAGER, SourceType
9 from authentik.sources.oauth.views.callback import OAuthCallback
10 from authentik.sources.oauth.views.redirect import OAuthRedirect
11
12 LOGGER = get_logger()
13
14
15 class MailcowOAuthRedirect(OAuthRedirect):
16 """Mailcow OAuth2 Redirect"""
17
18 def get_additional_parameters(self, source): # pragma: no cover
19 return {
20 "scope": ["profile"],
21 }
22
23
24 class MailcowOAuth2Client(OAuth2Client):
25 """MailcowOAuth2Client, for some reason, mailcow does not like the default headers"""
26
27 def get_profile_info(self, token: dict[str, str]) -> Optional[dict[str, Any]]:
28 "Fetch user profile information."
29 profile_url = self.source.type.profile_url or ""
30 if self.source.type.urls_customizable and self.source.profile_url:
31 profile_url = self.source.profile_url
32 try:
33 response = self.session.request(
34 "get",
35 f"{profile_url}?access_token={token['access_token']}",
36 )
37 response.raise_for_status()
38 except RequestException as exc:
39 LOGGER.warning("Unable to fetch user profile", exc=exc, body=response.text)
40 return None
41 else:
42 return response.json()
43
44
45 class MailcowOAuth2Callback(OAuthCallback):
46 """Mailcow OAuth2 Callback"""
47
48 client_class = MailcowOAuth2Client
49
50 def get_user_enroll_context(
51 self,
52 info: dict[str, Any],
53 ) -> dict[str, Any]:
54 return {
55 "email": info.get("email"),
56 "name": info.get("full_name"),
57 }
58
59
60 @MANAGER.type()
61 class MailcowType(SourceType):
62 """Mailcow Type definition"""
63
64 callback_view = MailcowOAuth2Callback
65 redirect_view = MailcowOAuthRedirect
66 name = "Mailcow"
67 slug = "mailcow"
68
69 urls_customizable = True
70
[end of authentik/sources/oauth/types/mailcow.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/authentik/sources/oauth/types/mailcow.py b/authentik/sources/oauth/types/mailcow.py
--- a/authentik/sources/oauth/types/mailcow.py
+++ b/authentik/sources/oauth/types/mailcow.py
@@ -52,6 +52,7 @@
info: dict[str, Any],
) -> dict[str, Any]:
return {
+ "username": info.get("full_name"),
"email": info.get("email"),
"name": info.get("full_name"),
}
| {"golden_diff": "diff --git a/authentik/sources/oauth/types/mailcow.py b/authentik/sources/oauth/types/mailcow.py\n--- a/authentik/sources/oauth/types/mailcow.py\n+++ b/authentik/sources/oauth/types/mailcow.py\n@@ -52,6 +52,7 @@\n info: dict[str, Any],\n ) -> dict[str, Any]:\n return {\n+ \"username\": info.get(\"full_name\"),\n \"email\": info.get(\"email\"),\n \"name\": info.get(\"full_name\"),\n }\n", "issue": "Get username from mailcow source\n**Is your feature request related to a problem? Please describe.**\r\nI like to get a username from mailcow. With username the enrollment for new users is more simple.\r\n\r\n**Describe the solution you'd like**\r\nSet username to full_name provided by mailcow oauths source.\r\n\r\n**Additional context**\r\nFor other sources the username is also set redundant to another attribute if there is no special source attribute:\r\nazure_ad.py:\r\n```\r\n \"username\": info.get(\"displayName\"),\r\n \"name\": info.get(\"displayName\"),\r\n```\r\n\r\ndiscord.py:\r\n```\r\n \"username\": info.get(\"username\"),\r\n \"name\": info.get(\"username\"),\r\n```\r\n\r\nfacebook.py:\r\n```\r\n \"username\": info.get(\"name\"),\r\n \"name\": info.get(\"name\"),\r\n```\r\n\r\nreddit.py\r\n```\r\n \"username\": info.get(\"name\"),\r\n \"name\": info.get(\"name\"),\r\n```\r\n\n", "before_files": [{"content": "\"\"\"Mailcow OAuth Views\"\"\"\nfrom typing import Any, Optional\n\nfrom requests.exceptions import RequestException\nfrom structlog.stdlib import get_logger\n\nfrom authentik.sources.oauth.clients.oauth2 import OAuth2Client\nfrom authentik.sources.oauth.types.manager import MANAGER, SourceType\nfrom authentik.sources.oauth.views.callback import OAuthCallback\nfrom authentik.sources.oauth.views.redirect import OAuthRedirect\n\nLOGGER = get_logger()\n\n\nclass MailcowOAuthRedirect(OAuthRedirect):\n \"\"\"Mailcow OAuth2 Redirect\"\"\"\n\n def get_additional_parameters(self, source): # pragma: no cover\n return {\n \"scope\": [\"profile\"],\n }\n\n\nclass MailcowOAuth2Client(OAuth2Client):\n \"\"\"MailcowOAuth2Client, for some reason, mailcow does not like the default headers\"\"\"\n\n def get_profile_info(self, token: dict[str, str]) -> Optional[dict[str, Any]]:\n \"Fetch user profile information.\"\n profile_url = self.source.type.profile_url or \"\"\n if self.source.type.urls_customizable and self.source.profile_url:\n profile_url = self.source.profile_url\n try:\n response = self.session.request(\n \"get\",\n f\"{profile_url}?access_token={token['access_token']}\",\n )\n response.raise_for_status()\n except RequestException as exc:\n LOGGER.warning(\"Unable to fetch user profile\", exc=exc, body=response.text)\n return None\n else:\n return response.json()\n\n\nclass MailcowOAuth2Callback(OAuthCallback):\n \"\"\"Mailcow OAuth2 Callback\"\"\"\n\n client_class = MailcowOAuth2Client\n\n def get_user_enroll_context(\n self,\n info: dict[str, Any],\n ) -> dict[str, Any]:\n return {\n \"email\": info.get(\"email\"),\n \"name\": info.get(\"full_name\"),\n }\n\n\[email protected]()\nclass MailcowType(SourceType):\n \"\"\"Mailcow Type definition\"\"\"\n\n callback_view = MailcowOAuth2Callback\n redirect_view = MailcowOAuthRedirect\n name = \"Mailcow\"\n slug = \"mailcow\"\n\n urls_customizable = True\n", "path": "authentik/sources/oauth/types/mailcow.py"}]} | 1,320 | 112 |
gh_patches_debug_3662 | rasdani/github-patches | git_diff | scikit-hep__awkward-2169 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
run cpp tests in CI
### Version of Awkward Array
2.0.6
### Description and code to reproduce
@agoose77 and @jpivarski - I think, we need to have at least one node to run the cpp tests.
</issue>
<code>
[start of dev/copy-cpp-headers.py]
1 """Copy the header-only cpp headers into the various package directories that they are required"""
2 import pathlib
3 import shutil
4
5 root_path = pathlib.Path(__file__).absolute().parents[1]
6 source_path = root_path / "header-only"
7 dest_paths = (
8 root_path / "awkward-cpp" / "header-only",
9 root_path / "src" / "awkward" / "_connect" / "header-only",
10 )
11
12 if __name__ == "__main__":
13 for path in dest_paths:
14 if path.exists():
15 shutil.rmtree(path)
16 shutil.copytree(source_path, path)
17
[end of dev/copy-cpp-headers.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/dev/copy-cpp-headers.py b/dev/copy-cpp-headers.py
--- a/dev/copy-cpp-headers.py
+++ b/dev/copy-cpp-headers.py
@@ -3,7 +3,7 @@
import shutil
root_path = pathlib.Path(__file__).absolute().parents[1]
-source_path = root_path / "header-only"
+source_path = root_path / "header-only" / "include"
dest_paths = (
root_path / "awkward-cpp" / "header-only",
root_path / "src" / "awkward" / "_connect" / "header-only",
| {"golden_diff": "diff --git a/dev/copy-cpp-headers.py b/dev/copy-cpp-headers.py\n--- a/dev/copy-cpp-headers.py\n+++ b/dev/copy-cpp-headers.py\n@@ -3,7 +3,7 @@\n import shutil\n \n root_path = pathlib.Path(__file__).absolute().parents[1]\n-source_path = root_path / \"header-only\"\n+source_path = root_path / \"header-only\" / \"include\"\n dest_paths = (\n root_path / \"awkward-cpp\" / \"header-only\",\n root_path / \"src\" / \"awkward\" / \"_connect\" / \"header-only\",\n", "issue": "run cpp tests in CI \n### Version of Awkward Array\n\n2.0.6\n\n### Description and code to reproduce\n\n@agoose77 and @jpivarski - I think, we need to have at least one node to run the cpp tests.\n", "before_files": [{"content": "\"\"\"Copy the header-only cpp headers into the various package directories that they are required\"\"\"\nimport pathlib\nimport shutil\n\nroot_path = pathlib.Path(__file__).absolute().parents[1]\nsource_path = root_path / \"header-only\"\ndest_paths = (\n root_path / \"awkward-cpp\" / \"header-only\",\n root_path / \"src\" / \"awkward\" / \"_connect\" / \"header-only\",\n)\n\nif __name__ == \"__main__\":\n for path in dest_paths:\n if path.exists():\n shutil.rmtree(path)\n shutil.copytree(source_path, path)\n", "path": "dev/copy-cpp-headers.py"}]} | 747 | 137 |
gh_patches_debug_11962 | rasdani/github-patches | git_diff | svthalia__concrexit-2591 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Updating promo requests
**Describe the bug**
It is not possible to update the promo request within a week of the publish date
**How to reproduce**
Create a promo request
Try to update the designer within a week to publish date
**Expected behavior**
The request can only be created more than a week before the publishing date, but assigned to, status and drive folder can always be edited.
</issue>
<code>
[start of website/promotion/forms.py]
1 from django import forms
2 from django.utils import timezone
3
4 from promotion.models import PromotionRequest
5 from thaliawebsite.settings import PROMO_PUBLISH_DATE_TIMEDELTA
6
7
8 class PromotionRequestForm(forms.ModelForm):
9 class Meta:
10 model = PromotionRequest
11 fields = [
12 "event",
13 "publish_date",
14 "channel",
15 "assigned_to",
16 "status",
17 "drive_folder",
18 "remarks",
19 ]
20
21 def clean_publish_date(self):
22 publish_date = self.cleaned_data.get("publish_date")
23 create_time_minimum = publish_date - PROMO_PUBLISH_DATE_TIMEDELTA
24 if timezone.localdate() > create_time_minimum:
25 raise forms.ValidationError(
26 "Publish date cannot be within a week from now."
27 )
28 if "publish_date" in self.changed_data:
29 create_time_minimum = publish_date - PROMO_PUBLISH_DATE_TIMEDELTA
30 if timezone.localdate() > create_time_minimum:
31 raise forms.ValidationError(
32 "Publish date cannot be within a week from now."
33 )
34 return publish_date
35
[end of website/promotion/forms.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/website/promotion/forms.py b/website/promotion/forms.py
--- a/website/promotion/forms.py
+++ b/website/promotion/forms.py
@@ -20,11 +20,6 @@
def clean_publish_date(self):
publish_date = self.cleaned_data.get("publish_date")
- create_time_minimum = publish_date - PROMO_PUBLISH_DATE_TIMEDELTA
- if timezone.localdate() > create_time_minimum:
- raise forms.ValidationError(
- "Publish date cannot be within a week from now."
- )
if "publish_date" in self.changed_data:
create_time_minimum = publish_date - PROMO_PUBLISH_DATE_TIMEDELTA
if timezone.localdate() > create_time_minimum:
| {"golden_diff": "diff --git a/website/promotion/forms.py b/website/promotion/forms.py\n--- a/website/promotion/forms.py\n+++ b/website/promotion/forms.py\n@@ -20,11 +20,6 @@\n \n def clean_publish_date(self):\n publish_date = self.cleaned_data.get(\"publish_date\")\n- create_time_minimum = publish_date - PROMO_PUBLISH_DATE_TIMEDELTA\n- if timezone.localdate() > create_time_minimum:\n- raise forms.ValidationError(\n- \"Publish date cannot be within a week from now.\"\n- )\n if \"publish_date\" in self.changed_data:\n create_time_minimum = publish_date - PROMO_PUBLISH_DATE_TIMEDELTA\n if timezone.localdate() > create_time_minimum:\n", "issue": "Updating promo requests \n**Describe the bug**\r\nIt is not possible to update the promo request within a week of the publish date \r\n\r\n**How to reproduce** \r\nCreate a promo request \r\nTry to update the designer within a week to publish date \r\n\r\n**Expected behavior** \r\nThe request can only be created more than a week before the publishing date, but assigned to, status and drive folder can always be edited. \n", "before_files": [{"content": "from django import forms\nfrom django.utils import timezone\n\nfrom promotion.models import PromotionRequest\nfrom thaliawebsite.settings import PROMO_PUBLISH_DATE_TIMEDELTA\n\n\nclass PromotionRequestForm(forms.ModelForm):\n class Meta:\n model = PromotionRequest\n fields = [\n \"event\",\n \"publish_date\",\n \"channel\",\n \"assigned_to\",\n \"status\",\n \"drive_folder\",\n \"remarks\",\n ]\n\n def clean_publish_date(self):\n publish_date = self.cleaned_data.get(\"publish_date\")\n create_time_minimum = publish_date - PROMO_PUBLISH_DATE_TIMEDELTA\n if timezone.localdate() > create_time_minimum:\n raise forms.ValidationError(\n \"Publish date cannot be within a week from now.\"\n )\n if \"publish_date\" in self.changed_data:\n create_time_minimum = publish_date - PROMO_PUBLISH_DATE_TIMEDELTA\n if timezone.localdate() > create_time_minimum:\n raise forms.ValidationError(\n \"Publish date cannot be within a week from now.\"\n )\n return publish_date\n", "path": "website/promotion/forms.py"}]} | 901 | 161 |
gh_patches_debug_2192 | rasdani/github-patches | git_diff | LMFDB__lmfdb-5179 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
PIP dependencies
We have several deprecated dependencies that we should fix ASAP
```
flask<=1.1.4
markupsafe<=2.0.1
itsdangerous<=2.0.1
```
in particular, this prevents using lmfdb in an environment with jupyterlab installed, which is something we would like to have working on a short time basis.
</issue>
<code>
[start of lmfdb/local_fields/__init__.py]
1 # -*- coding: utf-8 -*-
2 from lmfdb.app import app
3 from lmfdb.logger import make_logger
4 from flask import Blueprint, request, redirect
5
6 local_fields_page = Blueprint("local_fields", __name__, template_folder='templates', static_folder="static")
7 logger = make_logger(local_fields_page)
8
9
10 @local_fields_page.context_processor
11 def body_class():
12 return {'body_class': 'local_fields'}
13
14 from . import main
15 assert main
16
17 from urllib.parse import urlparse, urlunparse
18
19
20 @local_fields_page.before_request
21 def redirect_local():
22 urlparts = urlparse(request.url)
23 if 'LocalNumberField' in urlparts.path:
24 urlparts = urlparts._replace(path=urlparts.path.replace('LocalNumberField', 'padicField'))
25 return redirect(urlunparse(urlparts), 301)
26 return
27
28
29 app.register_blueprint(local_fields_page, url_prefix="/padicField")
30 app.register_blueprint(local_fields_page, url_prefix="/LocalNumberField")
31
32 # API2 has been disabled for now
33 #from lmfdb.api2.searchers import register_search_function
34 #register_search_function(
35 # "$p$-adic_fields",
36 # "$p$-adic fields",
37 # "Search over $p$-adic fields",
38 # auto_search = 'lf_fields'
39 #)
40
[end of lmfdb/local_fields/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/lmfdb/local_fields/__init__.py b/lmfdb/local_fields/__init__.py
--- a/lmfdb/local_fields/__init__.py
+++ b/lmfdb/local_fields/__init__.py
@@ -27,7 +27,6 @@
app.register_blueprint(local_fields_page, url_prefix="/padicField")
-app.register_blueprint(local_fields_page, url_prefix="/LocalNumberField")
# API2 has been disabled for now
#from lmfdb.api2.searchers import register_search_function
| {"golden_diff": "diff --git a/lmfdb/local_fields/__init__.py b/lmfdb/local_fields/__init__.py\n--- a/lmfdb/local_fields/__init__.py\n+++ b/lmfdb/local_fields/__init__.py\n@@ -27,7 +27,6 @@\n \n \n app.register_blueprint(local_fields_page, url_prefix=\"/padicField\")\n-app.register_blueprint(local_fields_page, url_prefix=\"/LocalNumberField\")\n \n # API2 has been disabled for now\n #from lmfdb.api2.searchers import register_search_function\n", "issue": "PIP dependencies\nWe have several deprecated dependencies that we should fix ASAP\r\n```\r\nflask<=1.1.4\r\nmarkupsafe<=2.0.1\r\nitsdangerous<=2.0.1\r\n```\r\n\r\nin particular, this prevents using lmfdb in an environment with jupyterlab installed, which is something we would like to have working on a short time basis. \n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom lmfdb.app import app\nfrom lmfdb.logger import make_logger\nfrom flask import Blueprint, request, redirect\n\nlocal_fields_page = Blueprint(\"local_fields\", __name__, template_folder='templates', static_folder=\"static\")\nlogger = make_logger(local_fields_page)\n\n\n@local_fields_page.context_processor\ndef body_class():\n return {'body_class': 'local_fields'}\n\nfrom . import main\nassert main\n\nfrom urllib.parse import urlparse, urlunparse\n\n\n@local_fields_page.before_request\ndef redirect_local():\n urlparts = urlparse(request.url)\n if 'LocalNumberField' in urlparts.path:\n urlparts = urlparts._replace(path=urlparts.path.replace('LocalNumberField', 'padicField'))\n return redirect(urlunparse(urlparts), 301)\n return\n\n\napp.register_blueprint(local_fields_page, url_prefix=\"/padicField\")\napp.register_blueprint(local_fields_page, url_prefix=\"/LocalNumberField\")\n\n# API2 has been disabled for now\n#from lmfdb.api2.searchers import register_search_function\n#register_search_function(\n# \"$p$-adic_fields\",\n# \"$p$-adic fields\",\n# \"Search over $p$-adic fields\",\n# auto_search = 'lf_fields'\n#)\n", "path": "lmfdb/local_fields/__init__.py"}]} | 977 | 114 |
gh_patches_debug_30051 | rasdani/github-patches | git_diff | doccano__doccano-2228 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add a function to filter labels
When I rechecked the labels of the annotated data, I had no way of filtering out the labels I wanted to see. For example, when I am doing a check of dichotomous annotations, I would like to filter the data set to find out which labels are positive and which are negative, so that I can save time on the check. However, due to the lack of this function, I have to filter one by one from dataset, which wastes a lot of time and manpower.
Thanks for every contributor!
</issue>
<code>
[start of backend/examples/filters.py]
1 from django.db.models import Count, Q
2 from django_filters.rest_framework import BooleanFilter, FilterSet
3
4 from .models import Example
5
6
7 class ExampleFilter(FilterSet):
8 confirmed = BooleanFilter(field_name="states", method="filter_by_state")
9
10 def filter_by_state(self, queryset, field_name, is_confirmed: bool):
11 queryset = queryset.annotate(
12 num_confirm=Count(
13 expression=field_name,
14 filter=Q(**{f"{field_name}__confirmed_by": self.request.user})
15 | Q(project__collaborative_annotation=True),
16 )
17 )
18 if is_confirmed:
19 queryset = queryset.filter(num_confirm__gte=1)
20 else:
21 queryset = queryset.filter(num_confirm__lte=0)
22 return queryset
23
24 class Meta:
25 model = Example
26 fields = ("project", "text", "created_at", "updated_at")
27
[end of backend/examples/filters.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/backend/examples/filters.py b/backend/examples/filters.py
--- a/backend/examples/filters.py
+++ b/backend/examples/filters.py
@@ -1,11 +1,12 @@
-from django.db.models import Count, Q
-from django_filters.rest_framework import BooleanFilter, FilterSet
+from django.db.models import Count, Q, QuerySet
+from django_filters.rest_framework import BooleanFilter, CharFilter, FilterSet
from .models import Example
class ExampleFilter(FilterSet):
confirmed = BooleanFilter(field_name="states", method="filter_by_state")
+ label = CharFilter(method="filter_by_label")
def filter_by_state(self, queryset, field_name, is_confirmed: bool):
queryset = queryset.annotate(
@@ -21,6 +22,35 @@
queryset = queryset.filter(num_confirm__lte=0)
return queryset
+ def filter_by_label(self, queryset: QuerySet, field_name: str, label: str) -> QuerySet:
+ """Filter examples by a given label name.
+
+ This performs filtering on all of the following labels at once:
+ - categories
+ - spans
+ - relations
+ - bboxes
+ - segmentations
+
+ Todo: Consider project type to make filtering more efficient.
+
+ Args:
+ queryset (QuerySet): QuerySet to filter.
+ field_name (str): This equals to `label`.
+ label (str): The label name to filter.
+
+ Returns:
+ QuerySet: Filtered examples.
+ """
+ queryset = queryset.filter(
+ Q(categories__label__text=label)
+ | Q(spans__label__text=label)
+ | Q(relations__type__text=label)
+ | Q(bboxes__label__text=label)
+ | Q(segmentations__label__text=label)
+ )
+ return queryset
+
class Meta:
model = Example
- fields = ("project", "text", "created_at", "updated_at")
+ fields = ("project", "text", "created_at", "updated_at", "label")
| {"golden_diff": "diff --git a/backend/examples/filters.py b/backend/examples/filters.py\n--- a/backend/examples/filters.py\n+++ b/backend/examples/filters.py\n@@ -1,11 +1,12 @@\n-from django.db.models import Count, Q\n-from django_filters.rest_framework import BooleanFilter, FilterSet\n+from django.db.models import Count, Q, QuerySet\n+from django_filters.rest_framework import BooleanFilter, CharFilter, FilterSet\n \n from .models import Example\n \n \n class ExampleFilter(FilterSet):\n confirmed = BooleanFilter(field_name=\"states\", method=\"filter_by_state\")\n+ label = CharFilter(method=\"filter_by_label\")\n \n def filter_by_state(self, queryset, field_name, is_confirmed: bool):\n queryset = queryset.annotate(\n@@ -21,6 +22,35 @@\n queryset = queryset.filter(num_confirm__lte=0)\n return queryset\n \n+ def filter_by_label(self, queryset: QuerySet, field_name: str, label: str) -> QuerySet:\n+ \"\"\"Filter examples by a given label name.\n+\n+ This performs filtering on all of the following labels at once:\n+ - categories\n+ - spans\n+ - relations\n+ - bboxes\n+ - segmentations\n+\n+ Todo: Consider project type to make filtering more efficient.\n+\n+ Args:\n+ queryset (QuerySet): QuerySet to filter.\n+ field_name (str): This equals to `label`.\n+ label (str): The label name to filter.\n+\n+ Returns:\n+ QuerySet: Filtered examples.\n+ \"\"\"\n+ queryset = queryset.filter(\n+ Q(categories__label__text=label)\n+ | Q(spans__label__text=label)\n+ | Q(relations__type__text=label)\n+ | Q(bboxes__label__text=label)\n+ | Q(segmentations__label__text=label)\n+ )\n+ return queryset\n+\n class Meta:\n model = Example\n- fields = (\"project\", \"text\", \"created_at\", \"updated_at\")\n+ fields = (\"project\", \"text\", \"created_at\", \"updated_at\", \"label\")\n", "issue": "Add a function to filter labels\nWhen I rechecked the labels of the annotated data, I had no way of filtering out the labels I wanted to see. For example, when I am doing a check of dichotomous annotations, I would like to filter the data set to find out which labels are positive and which are negative, so that I can save time on the check. However, due to the lack of this function, I have to filter one by one from dataset, which wastes a lot of time and manpower.\r\n\r\nThanks for every contributor!\n", "before_files": [{"content": "from django.db.models import Count, Q\nfrom django_filters.rest_framework import BooleanFilter, FilterSet\n\nfrom .models import Example\n\n\nclass ExampleFilter(FilterSet):\n confirmed = BooleanFilter(field_name=\"states\", method=\"filter_by_state\")\n\n def filter_by_state(self, queryset, field_name, is_confirmed: bool):\n queryset = queryset.annotate(\n num_confirm=Count(\n expression=field_name,\n filter=Q(**{f\"{field_name}__confirmed_by\": self.request.user})\n | Q(project__collaborative_annotation=True),\n )\n )\n if is_confirmed:\n queryset = queryset.filter(num_confirm__gte=1)\n else:\n queryset = queryset.filter(num_confirm__lte=0)\n return queryset\n\n class Meta:\n model = Example\n fields = (\"project\", \"text\", \"created_at\", \"updated_at\")\n", "path": "backend/examples/filters.py"}]} | 880 | 464 |