| column | dtype | value statistics |
| --- | --- | --- |
| problem_id | string | lengths 18 to 22 |
| source | string | 1 distinct value |
| task_type | string | 1 distinct value |
| in_source_id | string | lengths 13 to 58 |
| prompt | string | lengths 1.71k to 9.01k |
| golden_diff | string | lengths 151 to 4.94k |
| verification_info | string | lengths 465 to 11.3k |
| num_tokens_prompt | int64 | 557 to 2.05k |
| num_tokens_diff | int64 | 48 to 1.02k |
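Each record pairs a GitHub issue prompt with its golden patch and the verification data needed to check a candidate fix. Below is a minimal sketch of loading and inspecting one record with the Hugging Face `datasets` library; the repository id and split name in the snippet are placeholders (not the actual hosting path of this dataset), so substitute whatever path and split it is published under.

```python
# Minimal sketch: load the dataset and inspect one record.
# "your-org/github-patches-debug" and the "train" split are assumptions --
# replace them with the real repository id and split for this dataset.
import json

from datasets import load_dataset

ds = load_dataset("your-org/github-patches-debug", split="train")

row = ds[0]
print(row["problem_id"])       # e.g. "gh_patches_debug_53980"
print(row["in_source_id"])     # upstream repo/issue id, e.g. "scikit-hep__pyhf-2135"
print(row["num_tokens_prompt"], row["num_tokens_diff"])

# verification_info is stored as a JSON string; it repeats the golden diff and
# carries the issue text plus the original file contents ("before_files").
info = json.loads(row["verification_info"])
print(sorted(info.keys()))
```

The example records that follow are rendered field by field in the same column order as the table above.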
gh_patches_debug_53980
rasdani/github-patches
git_diff
scikit-hep__pyhf-2135
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Matplotlib broken in Pyodide demo in docs In the docs https://pyhf.readthedocs.io/en/v0.7.0/, the Pyodide example is broken for me: ```pytb --------------------------------------------------------------------------- ModuleNotFoundError Traceback (most recent call last) Cell In[1], line 3 1 import piplite 2 await piplite.install(["pyhf==0.7.0"]) ----> 3 get_ipython().run_line_magic('matplotlib', 'inline') 4 import pyhf File /lib/python3.10/site-packages/IPython/core/interactiveshell.py:2369, in InteractiveShell.run_line_magic(self, magic_name, line, _stack_depth) 2367 kwargs['local_ns'] = self.get_local_scope(stack_depth) 2368 with self.builtin_trap: -> 2369 result = fn(*args, **kwargs) 2371 # The code below prevents the output from being displayed 2372 # when using magics with decodator @output_can_be_silenced 2373 # when the last Python token in the expression is a ';'. 2374 if getattr(fn, magic.MAGIC_OUTPUT_CAN_BE_SILENCED, False): File /lib/python3.10/site-packages/IPython/core/magics/pylab.py:99, in PylabMagics.matplotlib(self, line) 97 print("Available matplotlib backends: %s" % backends_list) 98 else: ---> 99 gui, backend = self.shell.enable_matplotlib(args.gui.lower() if isinstance(args.gui, str) else args.gui) 100 self._show_matplotlib_backend(args.gui, backend) File /lib/python3.10/site-packages/IPython/core/interactiveshell.py:3540, in InteractiveShell.enable_matplotlib(self, gui) 3519 def enable_matplotlib(self, gui=None): 3520 """Enable interactive matplotlib and inline figure support. 3521 3522 This takes the following steps: (...) 3538 display figures inline. 3539 """ -> 3540 from matplotlib_inline.backend_inline import configure_inline_support 3542 from IPython.core import pylabtools as pt 3543 gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select) File /lib/python3.10/site-packages/matplotlib_inline/__init__.py:1 ----> 1 from . import backend_inline, config # noqa 2 __version__ = "0.1.6" # noqa File /lib/python3.10/site-packages/matplotlib_inline/backend_inline.py:6 1 """A matplotlib backend for publishing figures via display_data""" 3 # Copyright (c) IPython Development Team. 4 # Distributed under the terms of the BSD 3-Clause License. ----> 6 import matplotlib 7 from matplotlib import colors 8 from matplotlib.backends import backend_agg ModuleNotFoundError: The module 'matplotlib' is included in the Pyodide distribution, but it is not installed. You can install it by calling: await micropip.install("matplotlib") in Python, or await pyodide.loadPackage("matplotlib") in JavaScript See https://pyodide.org/en/stable/usage/loading-packages.html for more details. ``` It used to work previously, though I can not say for sure when it last worked for me. Running on MacOS (ARM), tried Firefox and Chrome (resulting in the above), while Safari seems stuck executing the import commands provided. 
</issue> <code> [start of docs/generate_jupyterlite_iframe.py] 1 import urllib.parse 2 3 4 def main(): 5 code = """\ 6 import piplite 7 await piplite.install(["pyhf==0.7.0"]) 8 %matplotlib inline 9 import pyhf\ 10 """ 11 12 parsed_url = urllib.parse.quote(code) 13 url_base = "https://jupyterlite.github.io/demo/repl/index.html" 14 jupyterlite_options = "?kernel=python&toolbar=1&code=" 15 jupyterlite_url = url_base + jupyterlite_options + parsed_url 16 17 print(f"# jupyterlite URL:\n{jupyterlite_url}") 18 19 jupyterlite_iframe_rst = f"""\ 20 <iframe 21 src="{jupyterlite_url}" 22 width="100%" 23 height="500px" 24 ></iframe>\ 25 """ 26 print(f"\n# RST for iframe for jupyterlite.rst:\n{jupyterlite_iframe_rst}") 27 28 29 if __name__ == "__main__": 30 raise SystemExit(main()) 31 [end of docs/generate_jupyterlite_iframe.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/docs/generate_jupyterlite_iframe.py b/docs/generate_jupyterlite_iframe.py --- a/docs/generate_jupyterlite_iframe.py +++ b/docs/generate_jupyterlite_iframe.py @@ -4,7 +4,7 @@ def main(): code = """\ import piplite -await piplite.install(["pyhf==0.7.0"]) +await piplite.install(["pyhf==0.7.0", "matplotlib>=3.0.0"]) %matplotlib inline import pyhf\ """
{"golden_diff": "diff --git a/docs/generate_jupyterlite_iframe.py b/docs/generate_jupyterlite_iframe.py\n--- a/docs/generate_jupyterlite_iframe.py\n+++ b/docs/generate_jupyterlite_iframe.py\n@@ -4,7 +4,7 @@\n def main():\n code = \"\"\"\\\n import piplite\n-await piplite.install([\"pyhf==0.7.0\"])\n+await piplite.install([\"pyhf==0.7.0\", \"matplotlib>=3.0.0\"])\n %matplotlib inline\n import pyhf\\\n \"\"\"\n", "issue": "Matplotlib broken in Pyodide demo in docs\nIn the docs https://pyhf.readthedocs.io/en/v0.7.0/, the Pyodide example is broken for me:\r\n```pytb\r\n---------------------------------------------------------------------------\r\nModuleNotFoundError Traceback (most recent call last)\r\nCell In[1], line 3\r\n 1 import piplite\r\n 2 await piplite.install([\"pyhf==0.7.0\"])\r\n----> 3 get_ipython().run_line_magic('matplotlib', 'inline')\r\n 4 import pyhf\r\n\r\nFile /lib/python3.10/site-packages/IPython/core/interactiveshell.py:2369, in InteractiveShell.run_line_magic(self, magic_name, line, _stack_depth)\r\n 2367 kwargs['local_ns'] = self.get_local_scope(stack_depth)\r\n 2368 with self.builtin_trap:\r\n-> 2369 result = fn(*args, **kwargs)\r\n 2371 # The code below prevents the output from being displayed\r\n 2372 # when using magics with decodator @output_can_be_silenced\r\n 2373 # when the last Python token in the expression is a ';'.\r\n 2374 if getattr(fn, magic.MAGIC_OUTPUT_CAN_BE_SILENCED, False):\r\n\r\nFile /lib/python3.10/site-packages/IPython/core/magics/pylab.py:99, in PylabMagics.matplotlib(self, line)\r\n 97 print(\"Available matplotlib backends: %s\" % backends_list)\r\n 98 else:\r\n---> 99 gui, backend = self.shell.enable_matplotlib(args.gui.lower() if isinstance(args.gui, str) else args.gui)\r\n 100 self._show_matplotlib_backend(args.gui, backend)\r\n\r\nFile /lib/python3.10/site-packages/IPython/core/interactiveshell.py:3540, in InteractiveShell.enable_matplotlib(self, gui)\r\n 3519 def enable_matplotlib(self, gui=None):\r\n 3520 \"\"\"Enable interactive matplotlib and inline figure support.\r\n 3521 \r\n 3522 This takes the following steps:\r\n (...)\r\n 3538 display figures inline.\r\n 3539 \"\"\"\r\n-> 3540 from matplotlib_inline.backend_inline import configure_inline_support\r\n 3542 from IPython.core import pylabtools as pt\r\n 3543 gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select)\r\n\r\nFile /lib/python3.10/site-packages/matplotlib_inline/__init__.py:1\r\n----> 1 from . import backend_inline, config # noqa\r\n 2 __version__ = \"0.1.6\" # noqa\r\n\r\nFile /lib/python3.10/site-packages/matplotlib_inline/backend_inline.py:6\r\n 1 \"\"\"A matplotlib backend for publishing figures via display_data\"\"\"\r\n 3 # Copyright (c) IPython Development Team.\r\n 4 # Distributed under the terms of the BSD 3-Clause License.\r\n----> 6 import matplotlib\r\n 7 from matplotlib import colors\r\n 8 from matplotlib.backends import backend_agg\r\n\r\nModuleNotFoundError: The module 'matplotlib' is included in the Pyodide distribution, but it is not installed.\r\nYou can install it by calling:\r\n await micropip.install(\"matplotlib\") in Python, or\r\n await pyodide.loadPackage(\"matplotlib\") in JavaScript\r\nSee https://pyodide.org/en/stable/usage/loading-packages.html for more details.\r\n```\r\nIt used to work previously, though I can not say for sure when it last worked for me. 
Running on MacOS (ARM), tried Firefox and Chrome (resulting in the above), while Safari seems stuck executing the import commands provided.\n", "before_files": [{"content": "import urllib.parse\n\n\ndef main():\n code = \"\"\"\\\nimport piplite\nawait piplite.install([\"pyhf==0.7.0\"])\n%matplotlib inline\nimport pyhf\\\n\"\"\"\n\n parsed_url = urllib.parse.quote(code)\n url_base = \"https://jupyterlite.github.io/demo/repl/index.html\"\n jupyterlite_options = \"?kernel=python&toolbar=1&code=\"\n jupyterlite_url = url_base + jupyterlite_options + parsed_url\n\n print(f\"# jupyterlite URL:\\n{jupyterlite_url}\")\n\n jupyterlite_iframe_rst = f\"\"\"\\\n <iframe\n src=\"{jupyterlite_url}\"\n width=\"100%\"\n height=\"500px\"\n ></iframe>\\\n\"\"\"\n print(f\"\\n# RST for iframe for jupyterlite.rst:\\n{jupyterlite_iframe_rst}\")\n\n\nif __name__ == \"__main__\":\n raise SystemExit(main())\n", "path": "docs/generate_jupyterlite_iframe.py"}]}
1,633
121
gh_patches_debug_36049
rasdani/github-patches
git_diff
mozilla__pontoon-2716
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Pretranslated Fluent string has the ID translated or modified It happened for at least two strings. ``` remember-pw-link = Remember your password? Sign in ``` Became ``` Remember-pw-link = Ricordi la password? Accedi ``` No clue why it changed to uppercase. On the other hand, for ``` plan-price-interval-year = { $intervalCount -> [one] { $amount } all’anno *[other] { $amount } ogni { $intervalCount } anni } .title = { $intervalCount -> [one] { $amount } all’anno *[other] { $amount } ogni { $intervalCount } anni } ``` The id was translated to `piano-prezzo-intervallo-anno`(but the translation was good besides that). </issue> <code> [start of pontoon/pretranslation/pretranslate.py] 1 import operator 2 3 from fluent.syntax import FluentSerializer 4 from functools import reduce 5 6 from django.db.models import CharField, Value as V 7 from django.db.models.functions import Concat 8 9 from pontoon.base.models import User, TranslatedResource 10 from pontoon.machinery.utils import ( 11 get_google_translate_data, 12 get_translation_memory_data, 13 ) 14 15 from pontoon.base.templatetags.helpers import ( 16 as_simple_translation, 17 is_single_input_ftl_string, 18 get_reconstructed_message, 19 ) 20 21 22 serializer = FluentSerializer() 23 24 25 def get_translations(entity, locale): 26 """ 27 Get pretranslations for the entity-locale pair 28 29 :arg Entity entity: the Entity object 30 :arg Locale locale: the Locale object 31 32 :returns: a list of tuple with: 33 - a pretranslation of the entity 34 - plural form 35 - user - tm_user/gt_user 36 """ 37 tm_user = User.objects.get(email="[email protected]") 38 gt_user = User.objects.get(email="[email protected]") 39 40 strings = [] 41 plural_forms = range(0, locale.nplurals or 1) 42 43 entity_string = ( 44 as_simple_translation(entity.string) 45 if is_single_input_ftl_string(entity.string) 46 else entity.string 47 ) 48 49 # Try to get matches from translation_memory 50 tm_response = get_translation_memory_data( 51 text=entity_string, 52 locale=locale, 53 ) 54 55 tm_response = [t for t in tm_response if int(t["quality"]) == 100] 56 57 if tm_response: 58 if entity.string_plural == "": 59 translation = tm_response[0]["target"] 60 61 if entity.string != entity_string: 62 translation = serializer.serialize_entry( 63 get_reconstructed_message(entity.string, translation) 64 ) 65 66 strings = [(translation, None, tm_user)] 67 else: 68 for plural_form in plural_forms: 69 strings.append((tm_response[0]["target"], plural_form, tm_user)) 70 71 # Else fetch from google translate 72 elif locale.google_translate_code: 73 gt_response = get_google_translate_data( 74 text=entity.string, 75 locale=locale, 76 ) 77 78 if gt_response["status"]: 79 if entity.string_plural == "": 80 strings = [(gt_response["translation"], None, gt_user)] 81 else: 82 for plural_form in plural_forms: 83 strings.append((gt_response["translation"], plural_form, gt_user)) 84 return strings 85 86 87 def update_changed_instances(tr_filter, tr_dict, translations): 88 """ 89 Update the latest activity and stats for changed Locales, ProjectLocales 90 & TranslatedResources 91 """ 92 tr_filter = tuple(tr_filter) 93 # Combine all generated filters with an OK operator. 94 # `operator.ior` is the '|' Python operator, which turns into a logical OR 95 # when used between django ORM query objects. 
96 tr_query = reduce(operator.ior, tr_filter) 97 98 translatedresources = TranslatedResource.objects.filter(tr_query).annotate( 99 locale_resource=Concat( 100 "locale_id", V("-"), "resource_id", output_field=CharField() 101 ) 102 ) 103 104 translatedresources.update_stats() 105 106 for tr in translatedresources: 107 index = tr_dict[tr.locale_resource] 108 translation = translations[index] 109 translation.update_latest_translation() 110 [end of pontoon/pretranslation/pretranslate.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pontoon/pretranslation/pretranslate.py b/pontoon/pretranslation/pretranslate.py --- a/pontoon/pretranslation/pretranslate.py +++ b/pontoon/pretranslation/pretranslate.py @@ -18,6 +18,7 @@ get_reconstructed_message, ) +UNTRANSLATABLE_KEY = "AIzaSyDX3R5Y1kxh_8lJ4OAO" serializer = FluentSerializer() @@ -40,7 +41,7 @@ strings = [] plural_forms = range(0, locale.nplurals or 1) - entity_string = ( + tm_input = ( as_simple_translation(entity.string) if is_single_input_ftl_string(entity.string) else entity.string @@ -48,7 +49,7 @@ # Try to get matches from translation_memory tm_response = get_translation_memory_data( - text=entity_string, + text=tm_input, locale=locale, ) @@ -58,7 +59,7 @@ if entity.string_plural == "": translation = tm_response[0]["target"] - if entity.string != entity_string: + if entity.string != tm_input: translation = serializer.serialize_entry( get_reconstructed_message(entity.string, translation) ) @@ -70,12 +71,23 @@ # Else fetch from google translate elif locale.google_translate_code: + gt_input = ( + entity.string.replace(entity.key, UNTRANSLATABLE_KEY, 1) + if entity.resource.format == "ftl" + else entity.string + ) + gt_response = get_google_translate_data( - text=entity.string, + text=gt_input, locale=locale, ) if gt_response["status"]: + if entity.string != gt_input: + gt_response["translation"] = gt_response["translation"].replace( + UNTRANSLATABLE_KEY, entity.key + ) + if entity.string_plural == "": strings = [(gt_response["translation"], None, gt_user)] else:
{"golden_diff": "diff --git a/pontoon/pretranslation/pretranslate.py b/pontoon/pretranslation/pretranslate.py\n--- a/pontoon/pretranslation/pretranslate.py\n+++ b/pontoon/pretranslation/pretranslate.py\n@@ -18,6 +18,7 @@\n get_reconstructed_message,\n )\n \n+UNTRANSLATABLE_KEY = \"AIzaSyDX3R5Y1kxh_8lJ4OAO\"\n \n serializer = FluentSerializer()\n \n@@ -40,7 +41,7 @@\n strings = []\n plural_forms = range(0, locale.nplurals or 1)\n \n- entity_string = (\n+ tm_input = (\n as_simple_translation(entity.string)\n if is_single_input_ftl_string(entity.string)\n else entity.string\n@@ -48,7 +49,7 @@\n \n # Try to get matches from translation_memory\n tm_response = get_translation_memory_data(\n- text=entity_string,\n+ text=tm_input,\n locale=locale,\n )\n \n@@ -58,7 +59,7 @@\n if entity.string_plural == \"\":\n translation = tm_response[0][\"target\"]\n \n- if entity.string != entity_string:\n+ if entity.string != tm_input:\n translation = serializer.serialize_entry(\n get_reconstructed_message(entity.string, translation)\n )\n@@ -70,12 +71,23 @@\n \n # Else fetch from google translate\n elif locale.google_translate_code:\n+ gt_input = (\n+ entity.string.replace(entity.key, UNTRANSLATABLE_KEY, 1)\n+ if entity.resource.format == \"ftl\"\n+ else entity.string\n+ )\n+\n gt_response = get_google_translate_data(\n- text=entity.string,\n+ text=gt_input,\n locale=locale,\n )\n \n if gt_response[\"status\"]:\n+ if entity.string != gt_input:\n+ gt_response[\"translation\"] = gt_response[\"translation\"].replace(\n+ UNTRANSLATABLE_KEY, entity.key\n+ )\n+\n if entity.string_plural == \"\":\n strings = [(gt_response[\"translation\"], None, gt_user)]\n else:\n", "issue": "Pretranslated Fluent string has the ID translated or modified\nIt happened for at least two strings.\r\n\r\n```\r\nremember-pw-link = Remember your password? Sign in\r\n```\r\n\r\nBecame\r\n\r\n```\r\nRemember-pw-link = Ricordi la password? 
Accedi\r\n```\r\n\r\nNo clue why it changed to uppercase.\r\n\r\nOn the other hand, for \r\n\r\n```\r\nplan-price-interval-year =\r\n { $intervalCount ->\r\n [one] { $amount } all\u2019anno\r\n *[other] { $amount } ogni { $intervalCount } anni\r\n }\r\n .title =\r\n { $intervalCount ->\r\n [one] { $amount } all\u2019anno\r\n *[other] { $amount } ogni { $intervalCount } anni\r\n }\r\n```\r\n\r\nThe id was translated to `piano-prezzo-intervallo-anno`(but the translation was good besides that).\r\n\n", "before_files": [{"content": "import operator\n\nfrom fluent.syntax import FluentSerializer\nfrom functools import reduce\n\nfrom django.db.models import CharField, Value as V\nfrom django.db.models.functions import Concat\n\nfrom pontoon.base.models import User, TranslatedResource\nfrom pontoon.machinery.utils import (\n get_google_translate_data,\n get_translation_memory_data,\n)\n\nfrom pontoon.base.templatetags.helpers import (\n as_simple_translation,\n is_single_input_ftl_string,\n get_reconstructed_message,\n)\n\n\nserializer = FluentSerializer()\n\n\ndef get_translations(entity, locale):\n \"\"\"\n Get pretranslations for the entity-locale pair\n\n :arg Entity entity: the Entity object\n :arg Locale locale: the Locale object\n\n :returns: a list of tuple with:\n - a pretranslation of the entity\n - plural form\n - user - tm_user/gt_user\n \"\"\"\n tm_user = User.objects.get(email=\"[email protected]\")\n gt_user = User.objects.get(email=\"[email protected]\")\n\n strings = []\n plural_forms = range(0, locale.nplurals or 1)\n\n entity_string = (\n as_simple_translation(entity.string)\n if is_single_input_ftl_string(entity.string)\n else entity.string\n )\n\n # Try to get matches from translation_memory\n tm_response = get_translation_memory_data(\n text=entity_string,\n locale=locale,\n )\n\n tm_response = [t for t in tm_response if int(t[\"quality\"]) == 100]\n\n if tm_response:\n if entity.string_plural == \"\":\n translation = tm_response[0][\"target\"]\n\n if entity.string != entity_string:\n translation = serializer.serialize_entry(\n get_reconstructed_message(entity.string, translation)\n )\n\n strings = [(translation, None, tm_user)]\n else:\n for plural_form in plural_forms:\n strings.append((tm_response[0][\"target\"], plural_form, tm_user))\n\n # Else fetch from google translate\n elif locale.google_translate_code:\n gt_response = get_google_translate_data(\n text=entity.string,\n locale=locale,\n )\n\n if gt_response[\"status\"]:\n if entity.string_plural == \"\":\n strings = [(gt_response[\"translation\"], None, gt_user)]\n else:\n for plural_form in plural_forms:\n strings.append((gt_response[\"translation\"], plural_form, gt_user))\n return strings\n\n\ndef update_changed_instances(tr_filter, tr_dict, translations):\n \"\"\"\n Update the latest activity and stats for changed Locales, ProjectLocales\n & TranslatedResources\n \"\"\"\n tr_filter = tuple(tr_filter)\n # Combine all generated filters with an OK operator.\n # `operator.ior` is the '|' Python operator, which turns into a logical OR\n # when used between django ORM query objects.\n tr_query = reduce(operator.ior, tr_filter)\n\n translatedresources = TranslatedResource.objects.filter(tr_query).annotate(\n locale_resource=Concat(\n \"locale_id\", V(\"-\"), \"resource_id\", output_field=CharField()\n )\n )\n\n translatedresources.update_stats()\n\n for tr in translatedresources:\n index = tr_dict[tr.locale_resource]\n translation = translations[index]\n translation.update_latest_translation()\n", "path": 
"pontoon/pretranslation/pretranslate.py"}]}
1,652
468
gh_patches_debug_2042
rasdani/github-patches
git_diff
aws__aws-cli-357
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> pip install awscli fails I tried `pip install awscli` from https://github.com/aws/aws-cli/blob/develop/README.rst and failed: http://sprunge.us/NfbW /home/hendry/.pip/pip.log = http://ix.io/7SC Hilarious how bad Python packaging is. I'm running Archlinux with Python 3.3.2. </issue> <code> [start of setup.py] 1 #!/usr/bin/env python 2 import sys 3 4 from setuptools import setup, find_packages 5 6 import awscli 7 8 9 requires = ['botocore>=0.16.0,<0.17.0', 10 'bcdoc>=0.9.0,<0.10.0', 11 'six>=1.1.0', 12 'colorama==0.2.5', 13 'docutils>=0.10', 14 'rsa==3.1.1'] 15 16 if sys.version_info[:2] == (2, 6): 17 # For python2.6 we have to require argparse since it 18 # was not in stdlib until 2.7. 19 requires.append('argparse>=1.1') 20 21 22 setup_options = dict( 23 name='awscli', 24 version=awscli.__version__, 25 description='Universal Command Line Environment for AWS.', 26 long_description=open('README.rst').read(), 27 author='Mitch Garnaat', 28 author_email='[email protected]', 29 url='http://aws.amazon.com/cli/', 30 scripts=['bin/aws', 'bin/aws.cmd', 31 'bin/aws_completer', 'bin/aws_zsh_completer.sh'], 32 packages=find_packages('.', exclude=['tests*']), 33 package_dir={'awscli': 'awscli'}, 34 package_data={'awscli': ['data/*.json', 'examples/*/*']}, 35 install_requires=requires, 36 license="Apache License 2.0", 37 classifiers=( 38 'Development Status :: 5 - Production/Stable', 39 'Intended Audience :: Developers', 40 'Intended Audience :: System Administrators', 41 'Natural Language :: English', 42 'License :: OSI Approved :: Apache Software License', 43 'Programming Language :: Python', 44 'Programming Language :: Python :: 2.6', 45 'Programming Language :: Python :: 2.7', 46 'Programming Language :: Python :: 3', 47 'Programming Language :: Python :: 3.3', 48 ), 49 ) 50 51 if 'py2exe' in sys.argv: 52 # This will actually give us a py2exe command. 53 import py2exe 54 # And we have some py2exe specific options. 55 setup_options['options'] = { 56 'py2exe': { 57 'optimize': 0, 58 'skip_archive': True, 59 'includes': ['ConfigParser', 'urllib', 'httplib', 60 'docutils.readers.standalone', 61 'docutils.parsers.rst', 62 'docutils.languages.en', 63 'xml.etree.ElementTree', 'HTMLParser', 64 'awscli.handlers'], 65 } 66 } 67 setup_options['console'] = ['bin/aws'] 68 69 70 setup(**setup_options) 71 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -11,7 +11,7 @@ 'six>=1.1.0', 'colorama==0.2.5', 'docutils>=0.10', - 'rsa==3.1.1'] + 'rsa==3.1.2'] if sys.version_info[:2] == (2, 6): # For python2.6 we have to require argparse since it
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -11,7 +11,7 @@\n 'six>=1.1.0',\n 'colorama==0.2.5',\n 'docutils>=0.10',\n- 'rsa==3.1.1']\n+ 'rsa==3.1.2']\n \n if sys.version_info[:2] == (2, 6):\n # For python2.6 we have to require argparse since it\n", "issue": "pip install awscli fails\nI tried `pip install awscli` from https://github.com/aws/aws-cli/blob/develop/README.rst and failed:\n\nhttp://sprunge.us/NfbW\n/home/hendry/.pip/pip.log = http://ix.io/7SC\n\nHilarious how bad Python packaging is. I'm running Archlinux with Python 3.3.2.\n\n", "before_files": [{"content": "#!/usr/bin/env python\nimport sys\n\nfrom setuptools import setup, find_packages\n\nimport awscli\n\n\nrequires = ['botocore>=0.16.0,<0.17.0',\n 'bcdoc>=0.9.0,<0.10.0',\n 'six>=1.1.0',\n 'colorama==0.2.5',\n 'docutils>=0.10',\n 'rsa==3.1.1']\n\nif sys.version_info[:2] == (2, 6):\n # For python2.6 we have to require argparse since it\n # was not in stdlib until 2.7.\n requires.append('argparse>=1.1')\n\n\nsetup_options = dict(\n name='awscli',\n version=awscli.__version__,\n description='Universal Command Line Environment for AWS.',\n long_description=open('README.rst').read(),\n author='Mitch Garnaat',\n author_email='[email protected]',\n url='http://aws.amazon.com/cli/',\n scripts=['bin/aws', 'bin/aws.cmd',\n 'bin/aws_completer', 'bin/aws_zsh_completer.sh'],\n packages=find_packages('.', exclude=['tests*']),\n package_dir={'awscli': 'awscli'},\n package_data={'awscli': ['data/*.json', 'examples/*/*']},\n install_requires=requires,\n license=\"Apache License 2.0\",\n classifiers=(\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n ),\n)\n\nif 'py2exe' in sys.argv:\n # This will actually give us a py2exe command.\n import py2exe\n # And we have some py2exe specific options.\n setup_options['options'] = {\n 'py2exe': {\n 'optimize': 0,\n 'skip_archive': True,\n 'includes': ['ConfigParser', 'urllib', 'httplib',\n 'docutils.readers.standalone',\n 'docutils.parsers.rst',\n 'docutils.languages.en',\n 'xml.etree.ElementTree', 'HTMLParser',\n 'awscli.handlers'],\n }\n }\n setup_options['console'] = ['bin/aws']\n\n\nsetup(**setup_options)\n", "path": "setup.py"}]}
1,317
116
gh_patches_debug_8933
rasdani/github-patches
git_diff
akvo__akvo-rsr-1945
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Public projects filter in REST API not working correctly ## Test plan All projects (and other objects) should be displayed in the REST API. ## Issue description As a quick fix, just display all projects (public and private) in the API. </issue> <code> [start of akvo/rest/viewsets.py] 1 # -*- coding: utf-8 -*- 2 3 # Akvo RSR is covered by the GNU Affero General Public License. 4 # See more details in the license.txt file located at the root folder of the Akvo RSR module. 5 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >. 6 7 from rest_framework import filters 8 from rest_framework import viewsets 9 from rest_framework.authentication import SessionAuthentication 10 from rest_framework.permissions import DjangoObjectPermissions 11 12 from .models import TastyTokenAuthentication 13 14 from akvo.rsr.models import Project 15 16 17 class BaseRSRViewSet(viewsets.ModelViewSet): 18 """ 19 Base class used for the view sets for RSR models. Provides unified auth and perms settings. 20 Only public projects will be shown by filtering the queryset. 21 """ 22 authentication_classes = (SessionAuthentication, TastyTokenAuthentication, ) 23 permission_classes = (DjangoObjectPermissions, ) 24 filter_backends = (filters.DjangoFilterBackend, filters.OrderingFilter, ) 25 ordering_fields = '__all__' 26 27 def get_queryset(self): 28 """Filter out any private projects.""" 29 for related_obj in self.queryset.model._meta.get_all_related_objects(): 30 if related_obj.model == Project: 31 self.queryset = self.queryset.filter(project__is_public=True) 32 break 33 return super(BaseRSRViewSet, self).get_queryset() 34 [end of akvo/rest/viewsets.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/akvo/rest/viewsets.py b/akvo/rest/viewsets.py --- a/akvo/rest/viewsets.py +++ b/akvo/rest/viewsets.py @@ -23,11 +23,3 @@ permission_classes = (DjangoObjectPermissions, ) filter_backends = (filters.DjangoFilterBackend, filters.OrderingFilter, ) ordering_fields = '__all__' - - def get_queryset(self): - """Filter out any private projects.""" - for related_obj in self.queryset.model._meta.get_all_related_objects(): - if related_obj.model == Project: - self.queryset = self.queryset.filter(project__is_public=True) - break - return super(BaseRSRViewSet, self).get_queryset()
{"golden_diff": "diff --git a/akvo/rest/viewsets.py b/akvo/rest/viewsets.py\n--- a/akvo/rest/viewsets.py\n+++ b/akvo/rest/viewsets.py\n@@ -23,11 +23,3 @@\n permission_classes = (DjangoObjectPermissions, )\n filter_backends = (filters.DjangoFilterBackend, filters.OrderingFilter, )\n ordering_fields = '__all__'\n-\n- def get_queryset(self):\n- \"\"\"Filter out any private projects.\"\"\"\n- for related_obj in self.queryset.model._meta.get_all_related_objects():\n- if related_obj.model == Project:\n- self.queryset = self.queryset.filter(project__is_public=True)\n- break\n- return super(BaseRSRViewSet, self).get_queryset()\n", "issue": "Public projects filter in REST API not working correctly\n## Test plan\n\nAll projects (and other objects) should be displayed in the REST API.\n## Issue description\n\nAs a quick fix, just display all projects (public and private) in the API.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\nfrom rest_framework import filters\nfrom rest_framework import viewsets\nfrom rest_framework.authentication import SessionAuthentication\nfrom rest_framework.permissions import DjangoObjectPermissions\n\nfrom .models import TastyTokenAuthentication\n\nfrom akvo.rsr.models import Project\n\n\nclass BaseRSRViewSet(viewsets.ModelViewSet):\n \"\"\"\n Base class used for the view sets for RSR models. Provides unified auth and perms settings.\n Only public projects will be shown by filtering the queryset.\n \"\"\"\n authentication_classes = (SessionAuthentication, TastyTokenAuthentication, )\n permission_classes = (DjangoObjectPermissions, )\n filter_backends = (filters.DjangoFilterBackend, filters.OrderingFilter, )\n ordering_fields = '__all__'\n\n def get_queryset(self):\n \"\"\"Filter out any private projects.\"\"\"\n for related_obj in self.queryset.model._meta.get_all_related_objects():\n if related_obj.model == Project:\n self.queryset = self.queryset.filter(project__is_public=True)\n break\n return super(BaseRSRViewSet, self).get_queryset()\n", "path": "akvo/rest/viewsets.py"}]}
936
168
gh_patches_debug_13127
rasdani/github-patches
git_diff
GoogleCloudPlatform__PerfKitBenchmarker-680
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> silo benchmark fails behind proxy From @mateusz-blaszkowski in #475: > silo - failed with Clone of 'git://github.com/kohler/masstree-beta.git' into submodule path 'masstree' failed. I run the test behind the proxy and this is the case. I would have changed the path to Git repository to https:// but it is hidden somewhere in 'dbtest' (look a the command which failed: cd /tmp/pkb/silo && MODE=perf DEBUG=0 CHECK_INVARIANTS=0 make -j80 dbtest). Oh, i found that the exact path is specified here: https://github.com/stephentu/silo/blob/cc11ca1ea949ef266ee12a9b1c310392519d9e3b/.gitmodules We should switch it to `https://`. </issue> <code> [start of perfkitbenchmarker/linux_packages/silo.py] 1 # Copyright 2014 PerfKitBenchmarker Authors. All rights reserved. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 16 """Module containing Silo installation and cleanup functions.""" 17 18 from perfkitbenchmarker import vm_util 19 20 GIT_REPO = 'https://github.com/stephentu/silo.git' 21 GIT_TAG = '62d2d498984bf69d3b46a74e310e1fd12fd1f692' 22 SILO_DIR = '%s/silo' % vm_util.VM_TMP_DIR 23 APT_PACKAGES = ('libjemalloc-dev libnuma-dev libdb++-dev ' 24 'libmysqld-dev libaio-dev libssl-dev') 25 YUM_PACKAGES = ('jemalloc-devel numactl-devel libdb-cxx-devel mysql-devel ' 26 'libaio-devel openssl-devel') 27 28 29 def _Install(vm): 30 """Installs the Silo package on the VM.""" 31 nthreads = vm.num_cpus * 2 32 vm.Install('build_tools') 33 vm.RemoteCommand('git clone {0} {1}'.format(GIT_REPO, SILO_DIR)) 34 vm.RemoteCommand('cd {0} && git checkout {1}'.format(SILO_DIR, 35 GIT_TAG)) 36 vm.RemoteCommand('cd {0} && MODE=perf DEBUG=0 CHECK_INVARIANTS=0 make\ 37 -j{1} dbtest'.format(SILO_DIR, nthreads)) 38 39 40 def YumInstall(vm): 41 """Installs the Silo package on the VM.""" 42 vm.InstallPackages(YUM_PACKAGES) 43 _Install(vm) 44 45 46 def AptInstall(vm): 47 """Installs the Silo package on the VM.""" 48 vm.InstallPackages(APT_PACKAGES) 49 _Install(vm) 50 [end of perfkitbenchmarker/linux_packages/silo.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/perfkitbenchmarker/linux_packages/silo.py b/perfkitbenchmarker/linux_packages/silo.py --- a/perfkitbenchmarker/linux_packages/silo.py +++ b/perfkitbenchmarker/linux_packages/silo.py @@ -33,6 +33,9 @@ vm.RemoteCommand('git clone {0} {1}'.format(GIT_REPO, SILO_DIR)) vm.RemoteCommand('cd {0} && git checkout {1}'.format(SILO_DIR, GIT_TAG)) + # This is due to a failing clone command when executing behind a proxy. + # Replacing the protocol to https instead of git fixes the issue. + vm.RemoteCommand('git config --global url."https://".insteadOf git://') vm.RemoteCommand('cd {0} && MODE=perf DEBUG=0 CHECK_INVARIANTS=0 make\ -j{1} dbtest'.format(SILO_DIR, nthreads))
{"golden_diff": "diff --git a/perfkitbenchmarker/linux_packages/silo.py b/perfkitbenchmarker/linux_packages/silo.py\n--- a/perfkitbenchmarker/linux_packages/silo.py\n+++ b/perfkitbenchmarker/linux_packages/silo.py\n@@ -33,6 +33,9 @@\n vm.RemoteCommand('git clone {0} {1}'.format(GIT_REPO, SILO_DIR))\n vm.RemoteCommand('cd {0} && git checkout {1}'.format(SILO_DIR,\n GIT_TAG))\n+ # This is due to a failing clone command when executing behind a proxy.\n+ # Replacing the protocol to https instead of git fixes the issue.\n+ vm.RemoteCommand('git config --global url.\"https://\".insteadOf git://')\n vm.RemoteCommand('cd {0} && MODE=perf DEBUG=0 CHECK_INVARIANTS=0 make\\\n -j{1} dbtest'.format(SILO_DIR, nthreads))\n", "issue": "silo benchmark fails behind proxy\nFrom @mateusz-blaszkowski in #475: \n\n> silo - failed with Clone of 'git://github.com/kohler/masstree-beta.git' into submodule path 'masstree' failed. I run the test behind the proxy and this is the case. I would have changed the path to Git repository to https:// but it is hidden somewhere in 'dbtest' (look a the command which failed: cd /tmp/pkb/silo && MODE=perf DEBUG=0 CHECK_INVARIANTS=0 make -j80 dbtest). Oh, i found that the exact path is specified here: https://github.com/stephentu/silo/blob/cc11ca1ea949ef266ee12a9b1c310392519d9e3b/.gitmodules\n\nWe should switch it to `https://`.\n\n", "before_files": [{"content": "# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\n\"\"\"Module containing Silo installation and cleanup functions.\"\"\"\n\nfrom perfkitbenchmarker import vm_util\n\nGIT_REPO = 'https://github.com/stephentu/silo.git'\nGIT_TAG = '62d2d498984bf69d3b46a74e310e1fd12fd1f692'\nSILO_DIR = '%s/silo' % vm_util.VM_TMP_DIR\nAPT_PACKAGES = ('libjemalloc-dev libnuma-dev libdb++-dev '\n 'libmysqld-dev libaio-dev libssl-dev')\nYUM_PACKAGES = ('jemalloc-devel numactl-devel libdb-cxx-devel mysql-devel '\n 'libaio-devel openssl-devel')\n\n\ndef _Install(vm):\n \"\"\"Installs the Silo package on the VM.\"\"\"\n nthreads = vm.num_cpus * 2\n vm.Install('build_tools')\n vm.RemoteCommand('git clone {0} {1}'.format(GIT_REPO, SILO_DIR))\n vm.RemoteCommand('cd {0} && git checkout {1}'.format(SILO_DIR,\n GIT_TAG))\n vm.RemoteCommand('cd {0} && MODE=perf DEBUG=0 CHECK_INVARIANTS=0 make\\\n -j{1} dbtest'.format(SILO_DIR, nthreads))\n\n\ndef YumInstall(vm):\n \"\"\"Installs the Silo package on the VM.\"\"\"\n vm.InstallPackages(YUM_PACKAGES)\n _Install(vm)\n\n\ndef AptInstall(vm):\n \"\"\"Installs the Silo package on the VM.\"\"\"\n vm.InstallPackages(APT_PACKAGES)\n _Install(vm)\n", "path": "perfkitbenchmarker/linux_packages/silo.py"}]}
1,332
207
gh_patches_debug_32491
rasdani/github-patches
git_diff
openai__gym-1573
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Support wrapper transformations to VecEnv @tristandeleu @pzhokhov @christopherhesse It might be rather convenient for the user experience to provide a list of wrapper transformations for the atomic environments when creating vectorized environment, e.g. ```python transforms = [AtariPreprocessing, SignReward, ...] env = gym.vector.make('Pong-v0', 16, True, transforms=transforms) ``` For additional arguments, the user is required to use `partial()` to define them within the transform list. So that each internal environment is wrapped according to the transformation list. </issue> <code> [start of gym/vector/__init__.py] 1 from gym.vector.async_vector_env import AsyncVectorEnv 2 from gym.vector.sync_vector_env import SyncVectorEnv 3 from gym.vector.vector_env import VectorEnv 4 5 __all__ = ['AsyncVectorEnv', 'SyncVectorEnv', 'VectorEnv', 'make'] 6 7 def make(id, num_envs=1, asynchronous=True, **kwargs): 8 """Create a vectorized environment from multiple copies of an environment, 9 from its id 10 11 Parameters 12 ---------- 13 id : str 14 The environment ID. This must be a valid ID from the registry. 15 16 num_envs : int 17 Number of copies of the environment. 18 19 asynchronous : bool (default: `True`) 20 If `True`, wraps the environments in an `AsyncVectorEnv` (which uses 21 `multiprocessing` to run the environments in parallel). If `False`, 22 wraps the environments in a `SyncVectorEnv`. 23 24 Returns 25 ------- 26 env : `gym.vector.VectorEnv` instance 27 The vectorized environment. 28 29 Example 30 ------- 31 >>> import gym 32 >>> env = gym.vector.make('CartPole-v1', 3) 33 >>> env.reset() 34 array([[-0.04456399, 0.04653909, 0.01326909, -0.02099827], 35 [ 0.03073904, 0.00145001, -0.03088818, -0.03131252], 36 [ 0.03468829, 0.01500225, 0.01230312, 0.01825218]], 37 dtype=float32) 38 """ 39 from gym.envs import make as make_ 40 def _make_env(): 41 return make_(id, **kwargs) 42 env_fns = [_make_env for _ in range(num_envs)] 43 return AsyncVectorEnv(env_fns) if asynchronous else SyncVectorEnv(env_fns) 44 [end of gym/vector/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/gym/vector/__init__.py b/gym/vector/__init__.py --- a/gym/vector/__init__.py +++ b/gym/vector/__init__.py @@ -1,10 +1,15 @@ +try: + from collections.abc import Iterable +except ImportError: + Iterable = (tuple, list) + from gym.vector.async_vector_env import AsyncVectorEnv from gym.vector.sync_vector_env import SyncVectorEnv from gym.vector.vector_env import VectorEnv __all__ = ['AsyncVectorEnv', 'SyncVectorEnv', 'VectorEnv', 'make'] -def make(id, num_envs=1, asynchronous=True, **kwargs): +def make(id, num_envs=1, asynchronous=True, wrappers=None, **kwargs): """Create a vectorized environment from multiple copies of an environment, from its id @@ -20,6 +25,10 @@ If `True`, wraps the environments in an `AsyncVectorEnv` (which uses `multiprocessing` to run the environments in parallel). If `False`, wraps the environments in a `SyncVectorEnv`. + + wrappers : Callable or Iterable of Callables (default: `None`) + If not `None`, then apply the wrappers to each internal + environment during creation. Returns ------- @@ -38,6 +47,15 @@ """ from gym.envs import make as make_ def _make_env(): - return make_(id, **kwargs) + env = make_(id, **kwargs) + if wrappers is not None: + if callable(wrappers): + env = wrappers(env) + elif isinstance(wrappers, Iterable) and all([callable(w) for w in wrappers]): + for wrapper in wrappers: + env = wrapper(env) + else: + raise NotImplementedError + return env env_fns = [_make_env for _ in range(num_envs)] return AsyncVectorEnv(env_fns) if asynchronous else SyncVectorEnv(env_fns)
{"golden_diff": "diff --git a/gym/vector/__init__.py b/gym/vector/__init__.py\n--- a/gym/vector/__init__.py\n+++ b/gym/vector/__init__.py\n@@ -1,10 +1,15 @@\n+try:\n+ from collections.abc import Iterable\n+except ImportError:\n+ Iterable = (tuple, list)\n+\n from gym.vector.async_vector_env import AsyncVectorEnv\n from gym.vector.sync_vector_env import SyncVectorEnv\n from gym.vector.vector_env import VectorEnv\n \n __all__ = ['AsyncVectorEnv', 'SyncVectorEnv', 'VectorEnv', 'make']\n \n-def make(id, num_envs=1, asynchronous=True, **kwargs):\n+def make(id, num_envs=1, asynchronous=True, wrappers=None, **kwargs):\n \"\"\"Create a vectorized environment from multiple copies of an environment,\n from its id\n \n@@ -20,6 +25,10 @@\n If `True`, wraps the environments in an `AsyncVectorEnv` (which uses \n `multiprocessing` to run the environments in parallel). If `False`,\n wraps the environments in a `SyncVectorEnv`.\n+ \n+ wrappers : Callable or Iterable of Callables (default: `None`)\n+ If not `None`, then apply the wrappers to each internal \n+ environment during creation. \n \n Returns\n -------\n@@ -38,6 +47,15 @@\n \"\"\"\n from gym.envs import make as make_\n def _make_env():\n- return make_(id, **kwargs)\n+ env = make_(id, **kwargs)\n+ if wrappers is not None:\n+ if callable(wrappers):\n+ env = wrappers(env)\n+ elif isinstance(wrappers, Iterable) and all([callable(w) for w in wrappers]):\n+ for wrapper in wrappers:\n+ env = wrapper(env)\n+ else:\n+ raise NotImplementedError\n+ return env\n env_fns = [_make_env for _ in range(num_envs)]\n return AsyncVectorEnv(env_fns) if asynchronous else SyncVectorEnv(env_fns)\n", "issue": "Support wrapper transformations to VecEnv\n@tristandeleu @pzhokhov @christopherhesse It might be rather convenient for the user experience to provide a list of wrapper transformations for the atomic environments when creating vectorized environment, e.g.\r\n\r\n```python\r\ntransforms = [AtariPreprocessing, SignReward, ...]\r\nenv = gym.vector.make('Pong-v0', 16, True, transforms=transforms)\r\n```\r\nFor additional arguments, the user is required to use `partial()` to define them within the transform list. So that each internal environment is wrapped according to the transformation list. \n", "before_files": [{"content": "from gym.vector.async_vector_env import AsyncVectorEnv\nfrom gym.vector.sync_vector_env import SyncVectorEnv\nfrom gym.vector.vector_env import VectorEnv\n\n__all__ = ['AsyncVectorEnv', 'SyncVectorEnv', 'VectorEnv', 'make']\n\ndef make(id, num_envs=1, asynchronous=True, **kwargs):\n \"\"\"Create a vectorized environment from multiple copies of an environment,\n from its id\n\n Parameters\n ----------\n id : str\n The environment ID. This must be a valid ID from the registry.\n\n num_envs : int\n Number of copies of the environment. \n\n asynchronous : bool (default: `True`)\n If `True`, wraps the environments in an `AsyncVectorEnv` (which uses \n `multiprocessing` to run the environments in parallel). 
If `False`,\n wraps the environments in a `SyncVectorEnv`.\n\n Returns\n -------\n env : `gym.vector.VectorEnv` instance\n The vectorized environment.\n\n Example\n -------\n >>> import gym\n >>> env = gym.vector.make('CartPole-v1', 3)\n >>> env.reset()\n array([[-0.04456399, 0.04653909, 0.01326909, -0.02099827],\n [ 0.03073904, 0.00145001, -0.03088818, -0.03131252],\n [ 0.03468829, 0.01500225, 0.01230312, 0.01825218]],\n dtype=float32)\n \"\"\"\n from gym.envs import make as make_\n def _make_env():\n return make_(id, **kwargs)\n env_fns = [_make_env for _ in range(num_envs)]\n return AsyncVectorEnv(env_fns) if asynchronous else SyncVectorEnv(env_fns)\n", "path": "gym/vector/__init__.py"}]}
1,215
450
gh_patches_debug_16813
rasdani/github-patches
git_diff
nautobot__nautobot-5593
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Plugins not loaded with Gunicorn ### Environment * Nautobot version (Docker tag too if applicable): 2.2.1 * Python version: 3.11 * Database platform, version: psql * Middleware(s): ### Steps to Reproduce 1. Use systemd 2. With gunicorn 21.2.0 or 22.0.0 <!-- What did you expect to happen? --> ### Expected Behavior All applications to show ### Observed Behavior We attempted to upgrade our dev environment from 2.1.9 to 2.2.1 but are hitting a weird issue where our plugins are reported as missing. We are only loading 1 or 2 basic plugins right now while we work on updating all our other plugins for 2.x. Oddly we are only seeing this issue on 1 out of 3 identical servers with identical Nautobot installs. This looks very much like this issue from 2021: [Plugin Load Failure · Issue #95 · nautobot/nautobot (github.com)](https://github.com/nautobot/nautobot/issues/95) </issue> <code> [start of nautobot/core/wsgi.py] 1 import logging 2 import os 3 4 from django.core import cache 5 from django.core.wsgi import get_wsgi_application 6 from django.db import connections 7 8 os.environ["DJANGO_SETTINGS_MODULE"] = "nautobot_config" 9 10 # Use try/except because we might not be running uWSGI. If `settings.WEBSERVER_WARMUP` is `True`, 11 # will first call `get_internal_wsgi_application` which does not have `uwsgi` module loaded 12 # already. Therefore, `settings.WEBSERVER_WARMUP` to `False` for this code to be loaded. 13 try: 14 import uwsgidecorators 15 16 @uwsgidecorators.postfork 17 def fix_uwsgi(): 18 import uwsgi 19 20 logging.getLogger(__name__).info( 21 f"Closing existing DB and cache connections on worker {uwsgi.worker_id()} after uWSGI forked ..." 22 ) 23 connections.close_all() 24 cache.close_caches() 25 26 except ImportError: 27 pass 28 29 application = get_wsgi_application() 30 [end of nautobot/core/wsgi.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/nautobot/core/wsgi.py b/nautobot/core/wsgi.py --- a/nautobot/core/wsgi.py +++ b/nautobot/core/wsgi.py @@ -1,11 +1,18 @@ import logging -import os from django.core import cache from django.core.wsgi import get_wsgi_application from django.db import connections -os.environ["DJANGO_SETTINGS_MODULE"] = "nautobot_config" +import nautobot + +# This is the Django default left here for visibility on how the Nautobot pattern +# differs. +# os.environ.setdefault("DJANGO_SETTINGS_MODULE", "nautobot.core.settings") + +# Instead of just pointing to `DJANGO_SETTINGS_MODULE` and letting Django run with it, +# we're using the custom Nautobot loader code to read environment or config path for us. +nautobot.setup() # Use try/except because we might not be running uWSGI. If `settings.WEBSERVER_WARMUP` is `True`, # will first call `get_internal_wsgi_application` which does not have `uwsgi` module loaded
{"golden_diff": "diff --git a/nautobot/core/wsgi.py b/nautobot/core/wsgi.py\n--- a/nautobot/core/wsgi.py\n+++ b/nautobot/core/wsgi.py\n@@ -1,11 +1,18 @@\n import logging\n-import os\n \n from django.core import cache\n from django.core.wsgi import get_wsgi_application\n from django.db import connections\n \n-os.environ[\"DJANGO_SETTINGS_MODULE\"] = \"nautobot_config\"\n+import nautobot\n+\n+# This is the Django default left here for visibility on how the Nautobot pattern\n+# differs.\n+# os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"nautobot.core.settings\")\n+\n+# Instead of just pointing to `DJANGO_SETTINGS_MODULE` and letting Django run with it,\n+# we're using the custom Nautobot loader code to read environment or config path for us.\n+nautobot.setup()\n \n # Use try/except because we might not be running uWSGI. If `settings.WEBSERVER_WARMUP` is `True`,\n # will first call `get_internal_wsgi_application` which does not have `uwsgi` module loaded\n", "issue": "Plugins not loaded with Gunicorn\n\r\n### Environment\r\n\r\n* Nautobot version (Docker tag too if applicable): 2.2.1\r\n* Python version: 3.11\r\n* Database platform, version: psql\r\n* Middleware(s):\r\n\r\n\r\n### Steps to Reproduce\r\n1. Use systemd\r\n2. With gunicorn 21.2.0 or 22.0.0\r\n\r\n\r\n<!-- What did you expect to happen? -->\r\n### Expected Behavior\r\n\r\nAll applications to show \r\n\r\n### Observed Behavior\r\n\r\nWe attempted to upgrade our dev environment from 2.1.9 to 2.2.1 but are hitting a weird issue where our plugins are reported as missing. We are only loading 1 or 2 basic plugins right now while we work on updating all our other plugins for 2.x. Oddly we are only seeing this issue on 1 out of 3 identical servers with identical Nautobot installs.\r\n\r\nThis looks very much like this issue from 2021: [Plugin Load Failure \u00b7 Issue #95 \u00b7 nautobot/nautobot (github.com)](https://github.com/nautobot/nautobot/issues/95)\n", "before_files": [{"content": "import logging\nimport os\n\nfrom django.core import cache\nfrom django.core.wsgi import get_wsgi_application\nfrom django.db import connections\n\nos.environ[\"DJANGO_SETTINGS_MODULE\"] = \"nautobot_config\"\n\n# Use try/except because we might not be running uWSGI. If `settings.WEBSERVER_WARMUP` is `True`,\n# will first call `get_internal_wsgi_application` which does not have `uwsgi` module loaded\n# already. Therefore, `settings.WEBSERVER_WARMUP` to `False` for this code to be loaded.\ntry:\n import uwsgidecorators\n\n @uwsgidecorators.postfork\n def fix_uwsgi():\n import uwsgi\n\n logging.getLogger(__name__).info(\n f\"Closing existing DB and cache connections on worker {uwsgi.worker_id()} after uWSGI forked ...\"\n )\n connections.close_all()\n cache.close_caches()\n\nexcept ImportError:\n pass\n\napplication = get_wsgi_application()\n", "path": "nautobot/core/wsgi.py"}]}
1058
240
gh_patches_debug_12067
rasdani/github-patches
git_diff
sktime__sktime-1453
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [BUG] sktime.datatypes._panel._examples raises pandas.core.common.SettingWithCopyError **Describe the bug** Attempting to install [tsai](https://pypi.org/project/tsai/) as an upstream package also installs this package, but the install raises an error that traces to line 67 in "/opt/conda/lib/python3.8/site-packages/sktime/datatypes/_panel/_examples.py" ``` X.iloc[0][0] = pd.Series([1, 2, 3]) ``` **To Reproduce** Importing any code that executes the code starting at line 67 of /sktime/datatypes/_panel/_examples.py can raise a Pandas error, depending on Pandas version that may be installed **Expected behavior** No error should be raised on install or import of sktime as a dependency. **Additional context** <!-- Add any other context about the problem here. --> **Versions** System: python: 3.9.1 (default, Sep 16 2021, 11:42:30) [Clang 12.0.5 (clang-1205.0.22.11)] executable: /.../.pyenv/versions/3.9.1/bin/python machine: macOS-11.6-x86_64-i386-64bit Python dependencies: pip: 21.2.4 setuptools: 49.2.1 sklearn: 1.0 sktime: 0.8.0 statsmodels: 0.12.2 numpy: 1.20.3 scipy: 1.7.1 Cython: None pandas: 1.3.3 matplotlib: 3.4.3 joblib: 1.0.1 numba: 0.53.1 pmdarima: None tsfresh: 0.18.0 </issue> <code> [start of sktime/datatypes/_panel/_examples.py] 1 # -*- coding: utf-8 -*- 2 """Example generation for testing. 3 4 Exports dict of examples, useful for testing as fixtures. 5 6 example_dict: dict indexed by triple 7 1st element = mtype - str 8 2nd element = considered as this scitype - str 9 3rd element = int - index of example 10 elements are data objects, considered examples for the mtype 11 all examples with same index are considered "same" on scitype content 12 if None, indicates that representation is not possible 13 14 example_lossy: dict of bool indexed by pairs of str 15 1st element = mtype - str 16 2nd element = considered as this scitype - str 17 3rd element = int - index of example 18 elements are bool, indicate whether representation has information removed 19 all examples with same index are considered "same" on scitype content 20 21 overall, conversions from non-lossy representations to any other ones 22 should yield the element exactly, identidally (given same index) 23 """ 24 25 import pandas as pd 26 import numpy as np 27 28 example_dict = dict() 29 example_dict_lossy = dict() 30 31 ### 32 33 34 X = np.array( 35 [[[1, 2, 3], [4, 5, 6]], [[1, 2, 3], [4, 55, 6]], [[1, 2, 3], [42, 5, 6]]], 36 dtype=np.int64, 37 ) 38 39 example_dict[("numpy3D", "Panel", 0)] = X 40 example_dict_lossy[("numpy3D", "Panel", 0)] = False 41 42 cols = [f"var_{i}" for i in range(2)] 43 Xlist = [ 44 pd.DataFrame([[1, 4], [2, 5], [3, 6]], columns=cols), 45 pd.DataFrame([[1, 4], [2, 55], [3, 6]], columns=cols), 46 pd.DataFrame([[1, 42], [2, 5], [3, 6]], columns=cols), 47 ] 48 49 example_dict[("df-list", "Panel", 0)] = Xlist 50 example_dict_lossy[("df-list", "Panel", 0)] = False 51 52 cols = ["instances", "timepoints"] + [f"var_{i}" for i in range(2)] 53 54 Xlist = [ 55 pd.DataFrame([[0, 0, 1, 4], [0, 1, 2, 5], [0, 2, 3, 6]], columns=cols), 56 pd.DataFrame([[1, 0, 1, 4], [1, 1, 2, 55], [1, 2, 3, 6]], columns=cols), 57 pd.DataFrame([[2, 0, 1, 42], [2, 1, 2, 5], [2, 2, 3, 6]], columns=cols), 58 ] 59 X = pd.concat(Xlist) 60 X = X.set_index(["instances", "timepoints"]) 61 62 example_dict[("pd-multiindex", "Panel", 0)] = X 63 example_dict_lossy[("pd-multiindex", "Panel", 0)] = False 64 65 cols = [f"var_{i}" for i in 
range(2)] 66 X = pd.DataFrame(columns=cols, index=[0, 1, 2]) 67 X.iloc[0][0] = pd.Series([1, 2, 3]) 68 X.iloc[0][1] = pd.Series([4, 5, 6]) 69 X.iloc[1][0] = pd.Series([1, 2, 3]) 70 X.iloc[1][1] = pd.Series([4, 55, 6]) 71 X.iloc[2][0] = pd.Series([1, 2, 3]) 72 X.iloc[2][1] = pd.Series([42, 5, 6]) 73 74 example_dict[("nested_univ", "Panel", 0)] = X 75 example_dict_lossy[("nested_univ", "Panel", 0)] = False 76 [end of sktime/datatypes/_panel/_examples.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/sktime/datatypes/_panel/_examples.py b/sktime/datatypes/_panel/_examples.py --- a/sktime/datatypes/_panel/_examples.py +++ b/sktime/datatypes/_panel/_examples.py @@ -64,12 +64,13 @@ cols = [f"var_{i}" for i in range(2)] X = pd.DataFrame(columns=cols, index=[0, 1, 2]) -X.iloc[0][0] = pd.Series([1, 2, 3]) -X.iloc[0][1] = pd.Series([4, 5, 6]) -X.iloc[1][0] = pd.Series([1, 2, 3]) -X.iloc[1][1] = pd.Series([4, 55, 6]) -X.iloc[2][0] = pd.Series([1, 2, 3]) -X.iloc[2][1] = pd.Series([42, 5, 6]) +X["var_0"] = pd.Series( + [pd.Series([1, 2, 3]), pd.Series([1, 2, 3]), pd.Series([1, 2, 3])] +) + +X["var_1"] = pd.Series( + [pd.Series([4, 5, 6]), pd.Series([4, 55, 6]), pd.Series([42, 5, 6])] +) example_dict[("nested_univ", "Panel", 0)] = X example_dict_lossy[("nested_univ", "Panel", 0)] = False
{"golden_diff": "diff --git a/sktime/datatypes/_panel/_examples.py b/sktime/datatypes/_panel/_examples.py\n--- a/sktime/datatypes/_panel/_examples.py\n+++ b/sktime/datatypes/_panel/_examples.py\n@@ -64,12 +64,13 @@\n \n cols = [f\"var_{i}\" for i in range(2)]\n X = pd.DataFrame(columns=cols, index=[0, 1, 2])\n-X.iloc[0][0] = pd.Series([1, 2, 3])\n-X.iloc[0][1] = pd.Series([4, 5, 6])\n-X.iloc[1][0] = pd.Series([1, 2, 3])\n-X.iloc[1][1] = pd.Series([4, 55, 6])\n-X.iloc[2][0] = pd.Series([1, 2, 3])\n-X.iloc[2][1] = pd.Series([42, 5, 6])\n+X[\"var_0\"] = pd.Series(\n+ [pd.Series([1, 2, 3]), pd.Series([1, 2, 3]), pd.Series([1, 2, 3])]\n+)\n+\n+X[\"var_1\"] = pd.Series(\n+ [pd.Series([4, 5, 6]), pd.Series([4, 55, 6]), pd.Series([42, 5, 6])]\n+)\n \n example_dict[(\"nested_univ\", \"Panel\", 0)] = X\n example_dict_lossy[(\"nested_univ\", \"Panel\", 0)] = False\n", "issue": "[BUG] sktime.datatypes._panel._examples raises pandas.core.common.SettingWithCopyError\n**Describe the bug**\r\nAttempting to install [tsai](https://pypi.org/project/tsai/) as an upstream package also installs this package, but the install raises an error that traces to line 67 in \"/opt/conda/lib/python3.8/site-packages/sktime/datatypes/_panel/_examples.py\"\r\n```\r\n X.iloc[0][0] = pd.Series([1, 2, 3])\r\n```\r\n\r\n**To Reproduce**\r\nImporting any code that executes the code starting at line 67 of /sktime/datatypes/_panel/_examples.py can raise a Pandas error, depending on Pandas version that may be installed\r\n\r\n**Expected behavior**\r\nNo error should be raised on install or import of sktime as a dependency. \r\n\r\n**Additional context**\r\n<!--\r\nAdd any other context about the problem here.\r\n-->\r\n\r\n**Versions**\r\nSystem:\r\n python: 3.9.1 (default, Sep 16 2021, 11:42:30) [Clang 12.0.5 (clang-1205.0.22.11)]\r\nexecutable: /.../.pyenv/versions/3.9.1/bin/python\r\n machine: macOS-11.6-x86_64-i386-64bit\r\n\r\nPython dependencies:\r\n pip: 21.2.4\r\n setuptools: 49.2.1\r\n sklearn: 1.0\r\n sktime: 0.8.0\r\n statsmodels: 0.12.2\r\n numpy: 1.20.3\r\n scipy: 1.7.1\r\n Cython: None\r\n pandas: 1.3.3\r\n matplotlib: 3.4.3\r\n joblib: 1.0.1\r\n numba: 0.53.1\r\n pmdarima: None\r\n tsfresh: 0.18.0\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"Example generation for testing.\n\nExports dict of examples, useful for testing as fixtures.\n\nexample_dict: dict indexed by triple\n 1st element = mtype - str\n 2nd element = considered as this scitype - str\n 3rd element = int - index of example\nelements are data objects, considered examples for the mtype\n all examples with same index are considered \"same\" on scitype content\n if None, indicates that representation is not possible\n\nexample_lossy: dict of bool indexed by pairs of str\n 1st element = mtype - str\n 2nd element = considered as this scitype - str\n 3rd element = int - index of example\nelements are bool, indicate whether representation has information removed\n all examples with same index are considered \"same\" on scitype content\n\noverall, conversions from non-lossy representations to any other ones\n should yield the element exactly, identidally (given same index)\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\n\nexample_dict = dict()\nexample_dict_lossy = dict()\n\n###\n\n\nX = np.array(\n [[[1, 2, 3], [4, 5, 6]], [[1, 2, 3], [4, 55, 6]], [[1, 2, 3], [42, 5, 6]]],\n dtype=np.int64,\n)\n\nexample_dict[(\"numpy3D\", \"Panel\", 0)] = X\nexample_dict_lossy[(\"numpy3D\", \"Panel\", 0)] = False\n\ncols = [f\"var_{i}\" for i in 
range(2)]\nXlist = [\n pd.DataFrame([[1, 4], [2, 5], [3, 6]], columns=cols),\n pd.DataFrame([[1, 4], [2, 55], [3, 6]], columns=cols),\n pd.DataFrame([[1, 42], [2, 5], [3, 6]], columns=cols),\n]\n\nexample_dict[(\"df-list\", \"Panel\", 0)] = Xlist\nexample_dict_lossy[(\"df-list\", \"Panel\", 0)] = False\n\ncols = [\"instances\", \"timepoints\"] + [f\"var_{i}\" for i in range(2)]\n\nXlist = [\n pd.DataFrame([[0, 0, 1, 4], [0, 1, 2, 5], [0, 2, 3, 6]], columns=cols),\n pd.DataFrame([[1, 0, 1, 4], [1, 1, 2, 55], [1, 2, 3, 6]], columns=cols),\n pd.DataFrame([[2, 0, 1, 42], [2, 1, 2, 5], [2, 2, 3, 6]], columns=cols),\n]\nX = pd.concat(Xlist)\nX = X.set_index([\"instances\", \"timepoints\"])\n\nexample_dict[(\"pd-multiindex\", \"Panel\", 0)] = X\nexample_dict_lossy[(\"pd-multiindex\", \"Panel\", 0)] = False\n\ncols = [f\"var_{i}\" for i in range(2)]\nX = pd.DataFrame(columns=cols, index=[0, 1, 2])\nX.iloc[0][0] = pd.Series([1, 2, 3])\nX.iloc[0][1] = pd.Series([4, 5, 6])\nX.iloc[1][0] = pd.Series([1, 2, 3])\nX.iloc[1][1] = pd.Series([4, 55, 6])\nX.iloc[2][0] = pd.Series([1, 2, 3])\nX.iloc[2][1] = pd.Series([42, 5, 6])\n\nexample_dict[(\"nested_univ\", \"Panel\", 0)] = X\nexample_dict_lossy[(\"nested_univ\", \"Panel\", 0)] = False\n", "path": "sktime/datatypes/_panel/_examples.py"}]}
2036
356
gh_patches_debug_319
rasdani/github-patches
git_diff
pwr-Solaar__Solaar-730
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> better identification of Solaar versions `git describe` produces 0.9.2-339-g39791be Instead it should produce something based on 1.0.1 `git describe --tags` produces 1.0.1-58-g39791be which is much better. I think that all that is required is to upgrade the 1.0.1 tag that already exists. </issue> <code> [start of lib/solaar/__init__.py] 1 # -*- python-mode -*- 2 # -*- coding: UTF-8 -*- 3 4 ## Copyright (C) 2012-2013 Daniel Pavel 5 ## 6 ## This program is free software; you can redistribute it and/or modify 7 ## it under the terms of the GNU General Public License as published by 8 ## the Free Software Foundation; either version 2 of the License, or 9 ## (at your option) any later version. 10 ## 11 ## This program is distributed in the hope that it will be useful, 12 ## but WITHOUT ANY WARRANTY; without even the implied warranty of 13 ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 ## GNU General Public License for more details. 15 ## 16 ## You should have received a copy of the GNU General Public License along 17 ## with this program; if not, write to the Free Software Foundation, Inc., 18 ## 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 19 20 from __future__ import absolute_import, division, print_function, unicode_literals 21 22 __version__ = '1.0.1' 23 NAME = 'Solaar' 24 [end of lib/solaar/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/lib/solaar/__init__.py b/lib/solaar/__init__.py --- a/lib/solaar/__init__.py +++ b/lib/solaar/__init__.py @@ -19,5 +19,5 @@ from __future__ import absolute_import, division, print_function, unicode_literals -__version__ = '1.0.1' +__version__ = '1.0.2-rc1' NAME = 'Solaar'
{"golden_diff": "diff --git a/lib/solaar/__init__.py b/lib/solaar/__init__.py\n--- a/lib/solaar/__init__.py\n+++ b/lib/solaar/__init__.py\n@@ -19,5 +19,5 @@\n \n from __future__ import absolute_import, division, print_function, unicode_literals\n \n-__version__ = '1.0.1'\n+__version__ = '1.0.2-rc1'\n NAME = 'Solaar'\n", "issue": "better identification of Solaar versions\n`git describe` produces\r\n0.9.2-339-g39791be\r\nInstead it should produce something based on 1.0.1\r\n`git describe --tags` produces\r\n1.0.1-58-g39791be\r\nwhich is much better.\r\n\r\nI think that all that is required is to upgrade the 1.0.1 tag that already exists.\n", "before_files": [{"content": "# -*- python-mode -*-\n# -*- coding: UTF-8 -*-\n\n## Copyright (C) 2012-2013 Daniel Pavel\n##\n## This program is free software; you can redistribute it and/or modify\n## it under the terms of the GNU General Public License as published by\n## the Free Software Foundation; either version 2 of the License, or\n## (at your option) any later version.\n##\n## This program is distributed in the hope that it will be useful,\n## but WITHOUT ANY WARRANTY; without even the implied warranty of\n## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n## GNU General Public License for more details.\n##\n## You should have received a copy of the GNU General Public License along\n## with this program; if not, write to the Free Software Foundation, Inc.,\n## 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\n__version__ = '1.0.1'\nNAME = 'Solaar'\n", "path": "lib/solaar/__init__.py"}]}
912
108
gh_patches_debug_9857
rasdani/github-patches
git_diff
saulpw__visidata-2160
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [parquet] can't load parquet directory anymore: `IsADirectoryError` **Small description** Hi @saulpw @anjakefala @takacsd - it seems that forcing opening the path as file with `.open()` - introduced with #2133 - breaks the use case where the multiple parquet files are stored in a directory, and this directory is then read by visidata. This is common with Hive partitioning or when working with spark. A simple fix would be to check if the path is a directory with `os.path.is_dir()` and then retaining old behavior of passing it as a string to `read_table()`. If it is not an existing directory, we move to the new way of opening as a binary buffer. I have already added this workaround to my clone of visidata, and it fixes my issue, but maybe you have some better ideas how to handle it instead of `if-else` statement in the `ParquetSheet`. **Expected result** ```bash vd -f parquet parquet_dir ``` should load a parquet into visidata **Actual result with screenshot** ![image](https://github.com/saulpw/visidata/assets/9008816/f262c403-704e-47f0-8994-68bf214b3db6) **Additional context** ```bash # freshest develop visidata@9fd728b72c115e50e99c24b455caaf020381b48e pyarrow==12.0.0 python 3.10.2 ``` </issue> <code> [start of visidata/loaders/parquet.py] 1 from visidata import Sheet, VisiData, TypedWrapper, anytype, date, vlen, Column, vd 2 from collections import defaultdict 3 4 5 @VisiData.api 6 def open_parquet(vd, p): 7 return ParquetSheet(p.name, source=p) 8 9 10 class ParquetColumn(Column): 11 def calcValue(self, row): 12 val = self.source[row["__rownum__"]] 13 if val.type == 'large_string': 14 return memoryview(val.as_buffer())[:2**20].tobytes().decode('utf-8') 15 else: 16 return val.as_py() 17 18 19 class ParquetSheet(Sheet): 20 # rowdef: {'__rownum__':int, parquet_col:overridden_value, ...} 21 def iterload(self): 22 pa = vd.importExternal("pyarrow", "pyarrow") 23 pq = vd.importExternal("pyarrow.parquet", "pyarrow") 24 from visidata.loaders.arrow import arrow_to_vdtype 25 26 with self.source.open('rb') as f: 27 self.tbl = pq.read_table(f) 28 29 self.columns = [] 30 for colname, col in zip(self.tbl.column_names, self.tbl.columns): 31 c = ParquetColumn(colname, 32 type=arrow_to_vdtype(col.type), 33 source=col, 34 cache=(col.type.id == pa.lib.Type_LARGE_STRING)) 35 self.addColumn(c) 36 37 for i in range(self.tbl.num_rows): 38 yield dict(__rownum__=i) 39 40 41 @VisiData.api 42 def save_parquet(vd, p, sheet): 43 pa = vd.importExternal("pyarrow") 44 pq = vd.importExternal("pyarrow.parquet", "pyarrow") 45 46 typemap = { 47 anytype: pa.string(), 48 int: pa.int64(), 49 vlen: pa.int64(), 50 float: pa.float64(), 51 str: pa.string(), 52 date: pa.date64(), 53 # list: pa.array(), 54 } 55 56 for t in vd.numericTypes: 57 if t not in typemap: 58 typemap[t] = pa.float64() 59 60 databycol = defaultdict(list) # col -> [values] 61 62 for typedvals in sheet.iterdispvals(format=False): 63 for col, val in typedvals.items(): 64 if isinstance(val, TypedWrapper): 65 val = None 66 67 databycol[col].append(val) 68 69 data = [ 70 pa.array(vals, type=typemap.get(col.type, pa.string())) 71 for col, vals in databycol.items() 72 ] 73 74 schema = pa.schema( 75 [(c.name, typemap.get(c.type, pa.string())) for c in sheet.visibleCols] 76 ) 77 with p.open_bytes(mode="w") as outf: 78 with pq.ParquetWriter(outf, schema) as writer: 79 writer.write_batch( 80 pa.record_batch(data, names=[c.name for c in sheet.visibleCols]) 81 ) 82 
[end of visidata/loaders/parquet.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/visidata/loaders/parquet.py b/visidata/loaders/parquet.py --- a/visidata/loaders/parquet.py +++ b/visidata/loaders/parquet.py @@ -23,8 +23,11 @@ pq = vd.importExternal("pyarrow.parquet", "pyarrow") from visidata.loaders.arrow import arrow_to_vdtype - with self.source.open('rb') as f: - self.tbl = pq.read_table(f) + if self.source.is_dir(): + self.tbl = pq.read_table(str(self.source)) + else: + with self.source.open('rb') as f: + self.tbl = pq.read_table(f) self.columns = [] for colname, col in zip(self.tbl.column_names, self.tbl.columns):
{"golden_diff": "diff --git a/visidata/loaders/parquet.py b/visidata/loaders/parquet.py\n--- a/visidata/loaders/parquet.py\n+++ b/visidata/loaders/parquet.py\n@@ -23,8 +23,11 @@\n pq = vd.importExternal(\"pyarrow.parquet\", \"pyarrow\")\n from visidata.loaders.arrow import arrow_to_vdtype\n \n- with self.source.open('rb') as f:\n- self.tbl = pq.read_table(f)\n+ if self.source.is_dir():\n+ self.tbl = pq.read_table(str(self.source))\n+ else: \n+ with self.source.open('rb') as f:\n+ self.tbl = pq.read_table(f)\n \n self.columns = []\n for colname, col in zip(self.tbl.column_names, self.tbl.columns):\n", "issue": "[parquet] can't load parquet directory anymore: `IsADirectoryError`\n**Small description**\r\n\r\nHi @saulpw @anjakefala @takacsd - it seems that forcing opening the path as file with `.open()` - introduced with #2133 - breaks the use case where the multiple parquet files are stored in a directory, and this directory is then read by visidata. This is common with Hive partitioning or when working with spark. A simple fix would be to check if the path is a directory with `os.path.is_dir()` and then retaining old behavior of passing it as a string to `read_table()`. If it is not an existing directory, we move to the new way of opening as a binary buffer.\r\n\r\nI have already added this workaround to my clone of visidata, and it fixes my issue, but maybe you have some better ideas how to handle it instead of `if-else` statement in the `ParquetSheet`.\r\n\r\n**Expected result**\r\n\r\n```bash\r\nvd -f parquet parquet_dir\r\n```\r\nshould load a parquet into visidata\r\n\r\n**Actual result with screenshot**\r\n![image](https://github.com/saulpw/visidata/assets/9008816/f262c403-704e-47f0-8994-68bf214b3db6)\r\n\r\n**Additional context**\r\n\r\n```bash\r\n# freshest develop\r\nvisidata@9fd728b72c115e50e99c24b455caaf020381b48e\r\n\r\npyarrow==12.0.0\r\npython 3.10.2\r\n```\r\n\n", "before_files": [{"content": "from visidata import Sheet, VisiData, TypedWrapper, anytype, date, vlen, Column, vd\nfrom collections import defaultdict\n\n\[email protected]\ndef open_parquet(vd, p):\n return ParquetSheet(p.name, source=p)\n\n\nclass ParquetColumn(Column):\n def calcValue(self, row):\n val = self.source[row[\"__rownum__\"]]\n if val.type == 'large_string':\n return memoryview(val.as_buffer())[:2**20].tobytes().decode('utf-8')\n else:\n return val.as_py()\n\n\nclass ParquetSheet(Sheet):\n # rowdef: {'__rownum__':int, parquet_col:overridden_value, ...}\n def iterload(self):\n pa = vd.importExternal(\"pyarrow\", \"pyarrow\")\n pq = vd.importExternal(\"pyarrow.parquet\", \"pyarrow\")\n from visidata.loaders.arrow import arrow_to_vdtype\n\n with self.source.open('rb') as f:\n self.tbl = pq.read_table(f)\n\n self.columns = []\n for colname, col in zip(self.tbl.column_names, self.tbl.columns):\n c = ParquetColumn(colname,\n type=arrow_to_vdtype(col.type),\n source=col,\n cache=(col.type.id == pa.lib.Type_LARGE_STRING))\n self.addColumn(c)\n\n for i in range(self.tbl.num_rows):\n yield dict(__rownum__=i)\n\n\[email protected]\ndef save_parquet(vd, p, sheet):\n pa = vd.importExternal(\"pyarrow\")\n pq = vd.importExternal(\"pyarrow.parquet\", \"pyarrow\")\n\n typemap = {\n anytype: pa.string(),\n int: pa.int64(),\n vlen: pa.int64(),\n float: pa.float64(),\n str: pa.string(),\n date: pa.date64(),\n # list: pa.array(),\n }\n\n for t in vd.numericTypes:\n if t not in typemap:\n typemap[t] = pa.float64()\n\n databycol = defaultdict(list) # col -> [values]\n\n for typedvals in sheet.iterdispvals(format=False):\n 
for col, val in typedvals.items():\n if isinstance(val, TypedWrapper):\n val = None\n\n databycol[col].append(val)\n\n data = [\n pa.array(vals, type=typemap.get(col.type, pa.string()))\n for col, vals in databycol.items()\n ]\n\n schema = pa.schema(\n [(c.name, typemap.get(c.type, pa.string())) for c in sheet.visibleCols]\n )\n with p.open_bytes(mode=\"w\") as outf:\n with pq.ParquetWriter(outf, schema) as writer:\n writer.write_batch(\n pa.record_batch(data, names=[c.name for c in sheet.visibleCols])\n )\n", "path": "visidata/loaders/parquet.py"}]}
1695
183
gh_patches_debug_17528
rasdani/github-patches
git_diff
allegro__ralph-3222
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Table 'ralph_ng.transitions_transition' doesn't exist when I follow the document to setup a develop environment, I met the error" default: django.db.utils.ProgrammingError: (1146, "Table 'ralph_ng.transitions_transition' doesn't exist") ". I think it is because there are no such tables when newly install ralph3 develop environment but ralph3 try to migrate them(from ralph2). I am on mac and have download the default box manually which will be used in vagrant up. </issue> <code> [start of src/ralph/lib/transitions/checks.py] 1 from django.core.checks import Error 2 from django.db.utils import OperationalError 3 from django.template.base import TemplateDoesNotExist 4 from django.template.loader import get_template 5 6 7 def check_transition_templates(transition_templates): 8 # to prevent AppRegistryNotReady 9 from ralph.lib.transitions.models import Transition 10 11 errors = [] 12 if transition_templates: 13 if not isinstance(transition_templates, (list, tuple)): 14 errors.append(Error( 15 'TRANSITION_TEMPLATES must be a list or a tuple', 16 id='transitions.E001' 17 )) 18 else: 19 for index, item in enumerate(transition_templates): 20 try: 21 path, template = item 22 except (ValueError, TypeError): 23 errors.append(Error( 24 'Element #{} must be a two elements tuple'.format( 25 index 26 ), 27 id='transitions.E003' 28 )) 29 continue 30 try: 31 get_template(path) 32 except TemplateDoesNotExist: 33 errors.append(Error( 34 'Template {} ({}) doesn\'t exist'.format( 35 template, path 36 ), 37 hint='Check TRANSITION_TEMPLATES settings', 38 id='transitions.E002' 39 )) 40 excluded_templates = [''] 41 if transition_templates: 42 try: 43 excluded_templates.extend( 44 {template for template, _ in transition_templates} 45 ) 46 except ValueError: 47 pass 48 transitions_with_custom_templates = Transition.objects.exclude( 49 template_name__in=excluded_templates 50 ) 51 try: 52 for transition in transitions_with_custom_templates: 53 errors.append(Error( 54 'Template {} for {} transition is ' 55 'defined only in transition'.format( 56 transition.template_name, transition 57 ), 58 hint=( 59 'Change your TRANSITION_TEMPLATES settings by adding' 60 ' ({}, "Your template name") and then ' 61 'edit {} transition').format( 62 transition.template_name, transition 63 ), 64 id='transitions.E004' 65 )) 66 except OperationalError: 67 pass 68 return errors 69 [end of src/ralph/lib/transitions/checks.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/ralph/lib/transitions/checks.py b/src/ralph/lib/transitions/checks.py --- a/src/ralph/lib/transitions/checks.py +++ b/src/ralph/lib/transitions/checks.py @@ -1,9 +1,14 @@ +import logging + from django.core.checks import Error -from django.db.utils import OperationalError +from django.db.utils import DatabaseError from django.template.base import TemplateDoesNotExist from django.template.loader import get_template +logger = logging.getLogger(__name__) + + def check_transition_templates(transition_templates): # to prevent AppRegistryNotReady from ralph.lib.transitions.models import Transition @@ -63,6 +68,6 @@ ), id='transitions.E004' )) - except OperationalError: - pass + except DatabaseError as e: + logger.error(e) return errors
{"golden_diff": "diff --git a/src/ralph/lib/transitions/checks.py b/src/ralph/lib/transitions/checks.py\n--- a/src/ralph/lib/transitions/checks.py\n+++ b/src/ralph/lib/transitions/checks.py\n@@ -1,9 +1,14 @@\n+import logging\n+\n from django.core.checks import Error\n-from django.db.utils import OperationalError\n+from django.db.utils import DatabaseError\n from django.template.base import TemplateDoesNotExist\n from django.template.loader import get_template\n \n \n+logger = logging.getLogger(__name__)\n+\n+\n def check_transition_templates(transition_templates):\n # to prevent AppRegistryNotReady\n from ralph.lib.transitions.models import Transition\n@@ -63,6 +68,6 @@\n ),\n id='transitions.E004'\n ))\n- except OperationalError:\n- pass\n+ except DatabaseError as e:\n+ logger.error(e)\n return errors\n", "issue": "Table 'ralph_ng.transitions_transition' doesn't exist\nwhen I follow the document to setup a develop environment, I met the error\" default: django.db.utils.ProgrammingError: (1146, \"Table 'ralph_ng.transitions_transition' doesn't exist\") \". I think it is because there are no such tables when newly install ralph3 develop environment but ralph3 try to migrate them(from ralph2). I am on mac and have download the default box manually which will be used in vagrant up.\n", "before_files": [{"content": "from django.core.checks import Error\nfrom django.db.utils import OperationalError\nfrom django.template.base import TemplateDoesNotExist\nfrom django.template.loader import get_template\n\n\ndef check_transition_templates(transition_templates):\n # to prevent AppRegistryNotReady\n from ralph.lib.transitions.models import Transition\n\n errors = []\n if transition_templates:\n if not isinstance(transition_templates, (list, tuple)):\n errors.append(Error(\n 'TRANSITION_TEMPLATES must be a list or a tuple',\n id='transitions.E001'\n ))\n else:\n for index, item in enumerate(transition_templates):\n try:\n path, template = item\n except (ValueError, TypeError):\n errors.append(Error(\n 'Element #{} must be a two elements tuple'.format(\n index\n ),\n id='transitions.E003'\n ))\n continue\n try:\n get_template(path)\n except TemplateDoesNotExist:\n errors.append(Error(\n 'Template {} ({}) doesn\\'t exist'.format(\n template, path\n ),\n hint='Check TRANSITION_TEMPLATES settings',\n id='transitions.E002'\n ))\n excluded_templates = ['']\n if transition_templates:\n try:\n excluded_templates.extend(\n {template for template, _ in transition_templates}\n )\n except ValueError:\n pass\n transitions_with_custom_templates = Transition.objects.exclude(\n template_name__in=excluded_templates\n )\n try:\n for transition in transitions_with_custom_templates:\n errors.append(Error(\n 'Template {} for {} transition is '\n 'defined only in transition'.format(\n transition.template_name, transition\n ),\n hint=(\n 'Change your TRANSITION_TEMPLATES settings by adding'\n ' ({}, \"Your template name\") and then '\n 'edit {} transition').format(\n transition.template_name, transition\n ),\n id='transitions.E004'\n ))\n except OperationalError:\n pass\n return errors\n", "path": "src/ralph/lib/transitions/checks.py"}]}
1208
203
gh_patches_debug_33816
rasdani/github-patches
git_diff
marshmallow-code__webargs-464
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> RFC: Only accept delimited string in DelimitedList `DelimitedList` accepts either a list or a delimited string (e.g. "foo,bar,baz"). I'd like to make it more strict by only accepting a delimited list. Rather than adding a `strict` parameter, I'm thinking of dropping the whole "also accept a list" feature. Any reason to support both? I understand it inherits from `List` because once the string is parsed, it can be deserialized as a normal list. But are there cases where you'd expect either a list or a delimited string? </issue> <code> [start of src/webargs/fields.py] 1 """Field classes. 2 3 Includes all fields from `marshmallow.fields` in addition to a custom 4 `Nested` field and `DelimitedList`. 5 6 All fields can optionally take a special `location` keyword argument, which 7 tells webargs where to parse the request argument from. 8 9 .. code-block:: python 10 11 args = { 12 "active": fields.Bool(location="query"), 13 "content_type": fields.Str(data_key="Content-Type", location="headers"), 14 } 15 16 Note: `data_key` replaced `load_from` in marshmallow 3. 17 When using marshmallow 2, use `load_from`. 18 """ 19 import marshmallow as ma 20 21 # Expose all fields from marshmallow.fields. 22 from marshmallow.fields import * # noqa: F40 23 from webargs.compat import MARSHMALLOW_VERSION_INFO 24 from webargs.dict2schema import dict2schema 25 26 __all__ = ["DelimitedList"] + ma.fields.__all__ 27 28 29 class Nested(ma.fields.Nested): 30 """Same as `marshmallow.fields.Nested`, except can be passed a dictionary as 31 the first argument, which will be converted to a `marshmallow.Schema`. 32 33 .. note:: 34 35 The schema class here will always be `marshmallow.Schema`, regardless 36 of whether a custom schema class is set on the parser. Pass an explicit schema 37 class if necessary. 38 """ 39 40 def __init__(self, nested, *args, **kwargs): 41 if isinstance(nested, dict): 42 nested = dict2schema(nested) 43 super().__init__(nested, *args, **kwargs) 44 45 46 class DelimitedList(ma.fields.List): 47 """Same as `marshmallow.fields.List`, except can load from either a list or 48 a delimited string (e.g. "foo,bar,baz"). 49 50 :param Field cls_or_instance: A field class or instance. 51 :param str delimiter: Delimiter between values. 52 :param bool as_string: Dump values to string. 53 """ 54 55 delimiter = "," 56 57 def __init__(self, cls_or_instance, delimiter=None, as_string=False, **kwargs): 58 self.delimiter = delimiter or self.delimiter 59 self.as_string = as_string 60 super().__init__(cls_or_instance, **kwargs) 61 62 def _serialize(self, value, attr, obj): 63 ret = super()._serialize(value, attr, obj) 64 if self.as_string: 65 return self.delimiter.join(format(each) for each in ret) 66 return ret 67 68 def _deserialize(self, value, attr, data, **kwargs): 69 try: 70 ret = ( 71 value 72 if ma.utils.is_iterable_but_not_string(value) 73 else value.split(self.delimiter) 74 ) 75 except AttributeError: 76 if MARSHMALLOW_VERSION_INFO[0] < 3: 77 self.fail("invalid") 78 else: 79 raise self.make_error("invalid") 80 return super()._deserialize(ret, attr, data, **kwargs) 81 [end of src/webargs/fields.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/webargs/fields.py b/src/webargs/fields.py --- a/src/webargs/fields.py +++ b/src/webargs/fields.py @@ -44,37 +44,35 @@ class DelimitedList(ma.fields.List): - """Same as `marshmallow.fields.List`, except can load from either a list or - a delimited string (e.g. "foo,bar,baz"). + """A field which is similar to a List, but takes its input as a delimited + string (e.g. "foo,bar,baz"). + + Like List, it can be given a nested field type which it will use to + de/serialize each element of the list. :param Field cls_or_instance: A field class or instance. :param str delimiter: Delimiter between values. - :param bool as_string: Dump values to string. """ + default_error_messages = {"invalid": "Not a valid delimited list."} delimiter = "," - def __init__(self, cls_or_instance, delimiter=None, as_string=False, **kwargs): + def __init__(self, cls_or_instance, delimiter=None, **kwargs): self.delimiter = delimiter or self.delimiter - self.as_string = as_string super().__init__(cls_or_instance, **kwargs) def _serialize(self, value, attr, obj): - ret = super()._serialize(value, attr, obj) - if self.as_string: - return self.delimiter.join(format(each) for each in ret) - return ret + # serializing will start with List serialization, so that we correctly + # output lists of non-primitive types, e.g. DelimitedList(DateTime) + return self.delimiter.join( + format(each) for each in super()._serialize(value, attr, obj) + ) def _deserialize(self, value, attr, data, **kwargs): - try: - ret = ( - value - if ma.utils.is_iterable_but_not_string(value) - else value.split(self.delimiter) - ) - except AttributeError: + # attempting to deserialize from a non-string source is an error + if not isinstance(value, (str, bytes)): if MARSHMALLOW_VERSION_INFO[0] < 3: self.fail("invalid") else: raise self.make_error("invalid") - return super()._deserialize(ret, attr, data, **kwargs) + return super()._deserialize(value.split(self.delimiter), attr, data, **kwargs)
{"golden_diff": "diff --git a/src/webargs/fields.py b/src/webargs/fields.py\n--- a/src/webargs/fields.py\n+++ b/src/webargs/fields.py\n@@ -44,37 +44,35 @@\n \n \n class DelimitedList(ma.fields.List):\n- \"\"\"Same as `marshmallow.fields.List`, except can load from either a list or\n- a delimited string (e.g. \"foo,bar,baz\").\n+ \"\"\"A field which is similar to a List, but takes its input as a delimited\n+ string (e.g. \"foo,bar,baz\").\n+\n+ Like List, it can be given a nested field type which it will use to\n+ de/serialize each element of the list.\n \n :param Field cls_or_instance: A field class or instance.\n :param str delimiter: Delimiter between values.\n- :param bool as_string: Dump values to string.\n \"\"\"\n \n+ default_error_messages = {\"invalid\": \"Not a valid delimited list.\"}\n delimiter = \",\"\n \n- def __init__(self, cls_or_instance, delimiter=None, as_string=False, **kwargs):\n+ def __init__(self, cls_or_instance, delimiter=None, **kwargs):\n self.delimiter = delimiter or self.delimiter\n- self.as_string = as_string\n super().__init__(cls_or_instance, **kwargs)\n \n def _serialize(self, value, attr, obj):\n- ret = super()._serialize(value, attr, obj)\n- if self.as_string:\n- return self.delimiter.join(format(each) for each in ret)\n- return ret\n+ # serializing will start with List serialization, so that we correctly\n+ # output lists of non-primitive types, e.g. DelimitedList(DateTime)\n+ return self.delimiter.join(\n+ format(each) for each in super()._serialize(value, attr, obj)\n+ )\n \n def _deserialize(self, value, attr, data, **kwargs):\n- try:\n- ret = (\n- value\n- if ma.utils.is_iterable_but_not_string(value)\n- else value.split(self.delimiter)\n- )\n- except AttributeError:\n+ # attempting to deserialize from a non-string source is an error\n+ if not isinstance(value, (str, bytes)):\n if MARSHMALLOW_VERSION_INFO[0] < 3:\n self.fail(\"invalid\")\n else:\n raise self.make_error(\"invalid\")\n- return super()._deserialize(ret, attr, data, **kwargs)\n+ return super()._deserialize(value.split(self.delimiter), attr, data, **kwargs)\n", "issue": "RFC: Only accept delimited string in DelimitedList\n`DelimitedList` accepts either a list or a delimited string (e.g. \"foo,bar,baz\").\r\n\r\nI'd like to make it more strict by only accepting a delimited list. Rather than adding a `strict` parameter, I'm thinking of dropping the whole \"also accept a list\" feature.\r\n\r\nAny reason to support both?\r\n\r\nI understand it inherits from `List` because once the string is parsed, it can be deserialized as a normal list. But are there cases where you'd expect either a list or a delimited string?\n", "before_files": [{"content": "\"\"\"Field classes.\n\nIncludes all fields from `marshmallow.fields` in addition to a custom\n`Nested` field and `DelimitedList`.\n\nAll fields can optionally take a special `location` keyword argument, which\ntells webargs where to parse the request argument from.\n\n.. 
code-block:: python\n\n args = {\n \"active\": fields.Bool(location=\"query\"),\n \"content_type\": fields.Str(data_key=\"Content-Type\", location=\"headers\"),\n }\n\nNote: `data_key` replaced `load_from` in marshmallow 3.\nWhen using marshmallow 2, use `load_from`.\n\"\"\"\nimport marshmallow as ma\n\n# Expose all fields from marshmallow.fields.\nfrom marshmallow.fields import * # noqa: F40\nfrom webargs.compat import MARSHMALLOW_VERSION_INFO\nfrom webargs.dict2schema import dict2schema\n\n__all__ = [\"DelimitedList\"] + ma.fields.__all__\n\n\nclass Nested(ma.fields.Nested):\n \"\"\"Same as `marshmallow.fields.Nested`, except can be passed a dictionary as\n the first argument, which will be converted to a `marshmallow.Schema`.\n\n .. note::\n\n The schema class here will always be `marshmallow.Schema`, regardless\n of whether a custom schema class is set on the parser. Pass an explicit schema\n class if necessary.\n \"\"\"\n\n def __init__(self, nested, *args, **kwargs):\n if isinstance(nested, dict):\n nested = dict2schema(nested)\n super().__init__(nested, *args, **kwargs)\n\n\nclass DelimitedList(ma.fields.List):\n \"\"\"Same as `marshmallow.fields.List`, except can load from either a list or\n a delimited string (e.g. \"foo,bar,baz\").\n\n :param Field cls_or_instance: A field class or instance.\n :param str delimiter: Delimiter between values.\n :param bool as_string: Dump values to string.\n \"\"\"\n\n delimiter = \",\"\n\n def __init__(self, cls_or_instance, delimiter=None, as_string=False, **kwargs):\n self.delimiter = delimiter or self.delimiter\n self.as_string = as_string\n super().__init__(cls_or_instance, **kwargs)\n\n def _serialize(self, value, attr, obj):\n ret = super()._serialize(value, attr, obj)\n if self.as_string:\n return self.delimiter.join(format(each) for each in ret)\n return ret\n\n def _deserialize(self, value, attr, data, **kwargs):\n try:\n ret = (\n value\n if ma.utils.is_iterable_but_not_string(value)\n else value.split(self.delimiter)\n )\n except AttributeError:\n if MARSHMALLOW_VERSION_INFO[0] < 3:\n self.fail(\"invalid\")\n else:\n raise self.make_error(\"invalid\")\n return super()._deserialize(ret, attr, data, **kwargs)\n", "path": "src/webargs/fields.py"}]}
1440
566
gh_patches_debug_49452
rasdani/github-patches
git_diff
wagtail__wagtail-840
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Paginator and search pagination expect different parameters for page The Paginator (as in `django.core.paginator`) used pretty much everywhere uses `page` as the query parameter. The search view, however, [expects](https://github.com/torchbox/wagtail/blob/100797796df0bc8ca96035092f32a9275d2b3713/wagtail/wagtailsearch/views/queries.py#L28) a `p` query parameter for pagination. While not a bug, it is a bit confusing and makes it less elegant to share a pagination include. Certainly made me scratch my head. Worth a PR? Cheers, Dan </issue> <code> [start of wagtail/wagtailsearch/views/frontend.py] 1 import json 2 3 from django.conf import settings 4 from django.shortcuts import render 5 from django.http import HttpResponse 6 from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger 7 8 from wagtail.wagtailcore import models 9 from wagtail.wagtailsearch.models import Query 10 11 12 def search( 13 request, 14 template=None, 15 template_ajax=None, 16 results_per_page=10, 17 use_json=False, 18 json_attrs=['title', 'url'], 19 show_unpublished=False, 20 search_title_only=False, 21 extra_filters={}, 22 path=None, 23 ): 24 25 # Get default templates 26 if template is None: 27 if hasattr(settings, 'WAGTAILSEARCH_RESULTS_TEMPLATE'): 28 template = settings.WAGTAILSEARCH_RESULTS_TEMPLATE 29 else: 30 template = 'wagtailsearch/search_results.html' 31 32 if template_ajax is None: 33 if hasattr(settings, 'WAGTAILSEARCH_RESULTS_TEMPLATE_AJAX'): 34 template_ajax = settings.WAGTAILSEARCH_RESULTS_TEMPLATE_AJAX 35 else: 36 template_ajax = template 37 38 # Get query string and page from GET paramters 39 query_string = request.GET.get('q', '') 40 page = request.GET.get('p', 1) 41 42 # Search 43 if query_string != '': 44 search_results = models.Page.search( 45 query_string, 46 show_unpublished=show_unpublished, 47 search_title_only=search_title_only, 48 extra_filters=extra_filters, 49 path=path if path else request.site.root_page.path 50 ) 51 52 # Get query object 53 query = Query.get(query_string) 54 55 # Add hit 56 query.add_hit() 57 58 # Pagination 59 paginator = Paginator(search_results, results_per_page) 60 try: 61 search_results = paginator.page(page) 62 except PageNotAnInteger: 63 search_results = paginator.page(1) 64 except EmptyPage: 65 search_results = paginator.page(paginator.num_pages) 66 else: 67 query = None 68 search_results = None 69 70 if use_json: # Return a json response 71 if search_results: 72 search_results_json = [] 73 for result in search_results: 74 result_specific = result.specific 75 76 search_results_json.append(dict( 77 (attr, getattr(result_specific, attr)) 78 for attr in json_attrs 79 if hasattr(result_specific, attr) 80 )) 81 82 return HttpResponse(json.dumps(search_results_json)) 83 else: 84 return HttpResponse('[]') 85 else: # Render a template 86 if request.is_ajax() and template_ajax: 87 template = template_ajax 88 89 return render(request, template, dict( 90 query_string=query_string, 91 search_results=search_results, 92 is_ajax=request.is_ajax(), 93 query=query 94 )) 95 [end of wagtail/wagtailsearch/views/frontend.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/wagtail/wagtailsearch/views/frontend.py b/wagtail/wagtailsearch/views/frontend.py --- a/wagtail/wagtailsearch/views/frontend.py +++ b/wagtail/wagtailsearch/views/frontend.py @@ -37,7 +37,7 @@ # Get query string and page from GET paramters query_string = request.GET.get('q', '') - page = request.GET.get('p', 1) + page = request.GET.get('page', request.GET.get('p', 1)) # Search if query_string != '':
{"golden_diff": "diff --git a/wagtail/wagtailsearch/views/frontend.py b/wagtail/wagtailsearch/views/frontend.py\n--- a/wagtail/wagtailsearch/views/frontend.py\n+++ b/wagtail/wagtailsearch/views/frontend.py\n@@ -37,7 +37,7 @@\n \n # Get query string and page from GET paramters\n query_string = request.GET.get('q', '')\n- page = request.GET.get('p', 1)\n+ page = request.GET.get('page', request.GET.get('p', 1))\n \n # Search\n if query_string != '':\n", "issue": "Paginator and search pagination expect different parameters for page\nThe Paginator (as in `django.core.paginator`) used pretty much everywhere uses `page` as the query parameter. The search view, however, [expects](https://github.com/torchbox/wagtail/blob/100797796df0bc8ca96035092f32a9275d2b3713/wagtail/wagtailsearch/views/queries.py#L28) a `p` query parameter for pagination.\n\nWhile not a bug, it is a bit confusing and makes it less elegant to share a pagination include. Certainly made me scratch my head.\n\nWorth a PR?\n\nCheers,\nDan\n\n", "before_files": [{"content": "import json\n\nfrom django.conf import settings\nfrom django.shortcuts import render\nfrom django.http import HttpResponse\nfrom django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n\nfrom wagtail.wagtailcore import models\nfrom wagtail.wagtailsearch.models import Query\n\n\ndef search(\n request,\n template=None,\n template_ajax=None,\n results_per_page=10,\n use_json=False,\n json_attrs=['title', 'url'],\n show_unpublished=False,\n search_title_only=False,\n extra_filters={},\n path=None,\n ):\n\n # Get default templates\n if template is None:\n if hasattr(settings, 'WAGTAILSEARCH_RESULTS_TEMPLATE'):\n template = settings.WAGTAILSEARCH_RESULTS_TEMPLATE\n else:\n template = 'wagtailsearch/search_results.html'\n\n if template_ajax is None:\n if hasattr(settings, 'WAGTAILSEARCH_RESULTS_TEMPLATE_AJAX'):\n template_ajax = settings.WAGTAILSEARCH_RESULTS_TEMPLATE_AJAX\n else:\n template_ajax = template\n\n # Get query string and page from GET paramters\n query_string = request.GET.get('q', '')\n page = request.GET.get('p', 1)\n\n # Search\n if query_string != '':\n search_results = models.Page.search(\n query_string,\n show_unpublished=show_unpublished,\n search_title_only=search_title_only,\n extra_filters=extra_filters,\n path=path if path else request.site.root_page.path\n )\n\n # Get query object\n query = Query.get(query_string)\n\n # Add hit\n query.add_hit()\n\n # Pagination\n paginator = Paginator(search_results, results_per_page)\n try:\n search_results = paginator.page(page)\n except PageNotAnInteger:\n search_results = paginator.page(1)\n except EmptyPage:\n search_results = paginator.page(paginator.num_pages)\n else:\n query = None\n search_results = None\n\n if use_json: # Return a json response\n if search_results:\n search_results_json = []\n for result in search_results:\n result_specific = result.specific\n\n search_results_json.append(dict(\n (attr, getattr(result_specific, attr))\n for attr in json_attrs\n if hasattr(result_specific, attr)\n ))\n\n return HttpResponse(json.dumps(search_results_json))\n else:\n return HttpResponse('[]')\n else: # Render a template\n if request.is_ajax() and template_ajax:\n template = template_ajax\n\n return render(request, template, dict(\n query_string=query_string,\n search_results=search_results,\n is_ajax=request.is_ajax(),\n query=query\n ))\n", "path": "wagtail/wagtailsearch/views/frontend.py"}]}
1446
132
gh_patches_debug_10307
rasdani/github-patches
git_diff
getnikola__nikola-2238
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> deploy crashes with state system Will investigate later. ``` pytb Traceback (most recent call last): File "/home/kwpolska/virtualenvs/nikola-py3/lib/python3.5/site-packages/doit/doit_cmd.py", line 168, in run return command.parse_execute(args) File "/home/kwpolska/virtualenvs/nikola-py3/lib/python3.5/site-packages/doit/cmd_base.py", line 122, in parse_execute return self.execute(params, args) File "/home/kwpolska/git/nikola/nikola/plugin_categories.py", line 124, in execute return self._execute(options, args) File "/home/kwpolska/git/nikola/nikola/plugins/command/deploy.py", line 135, in _execute self.site.state.set('last_deploy', new_deploy.isoformat()) File "/home/kwpolska/git/nikola/nikola/state.py", line 64, in set self._save() File "/home/kwpolska/git/nikola/nikola/state.py", line 82, in _save json.dump(self._local.data, outf, sort_keys=True, indent=2) File "/usr/lib64/python3.5/json/__init__.py", line 179, in dump fp.write(chunk) File "/home/kwpolska/virtualenvs/nikola-py3/lib/python3.5/tempfile.py", line 483, in func_wrapper return func(*args, **kwargs) TypeError: a bytes-like object is required, not 'str' ``` </issue> <code> [start of nikola/state.py] 1 # -*- coding: utf-8 -*- 2 3 # Copyright © 2012-2016 Roberto Alsina and others. 4 5 # Permission is hereby granted, free of charge, to any 6 # person obtaining a copy of this software and associated 7 # documentation files (the "Software"), to deal in the 8 # Software without restriction, including without limitation 9 # the rights to use, copy, modify, merge, publish, 10 # distribute, sublicense, and/or sell copies of the 11 # Software, and to permit persons to whom the Software is 12 # furnished to do so, subject to the following conditions: 13 # 14 # The above copyright notice and this permission notice 15 # shall be included in all copies or substantial portions of 16 # the Software. 17 # 18 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY 19 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 20 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR 21 # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS 22 # OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR 23 # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 24 # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 25 # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 26 27 """Persistent state implementation.""" 28 29 import json 30 import os 31 import shutil 32 import tempfile 33 import threading 34 35 from . import utils 36 37 38 class Persistor(): 39 """Persist stuff in a place. 40 41 This is an intentionally dumb implementation. It is *not* meant to be 42 fast, or useful for arbitrarily large data. Use lightly. 43 44 Intentionally it has no namespaces, sections, etc. Use as a 45 responsible adult. 
46 """ 47 48 def __init__(self, path): 49 """Where do you want it persisted.""" 50 self._path = path 51 utils.makedirs(os.path.dirname(path)) 52 self._local = threading.local() 53 self._local.data = {} 54 55 def get(self, key): 56 """Get data stored in key.""" 57 self._read() 58 return self._local.data.get(key) 59 60 def set(self, key, value): 61 """Store value in key.""" 62 self._read() 63 self._local.data[key] = value 64 self._save() 65 66 def delete(self, key): 67 """Delete key and the value it contains.""" 68 self._read() 69 if key in self._local.data: 70 self._local.data.pop(key) 71 self._save() 72 73 def _read(self): 74 if os.path.isfile(self._path): 75 with open(self._path) as inf: 76 self._local.data = json.load(inf) 77 78 def _save(self): 79 dname = os.path.dirname(self._path) 80 with tempfile.NamedTemporaryFile(dir=dname, delete=False) as outf: 81 tname = outf.name 82 json.dump(self._local.data, outf, sort_keys=True, indent=2) 83 shutil.move(tname, self._path) 84 [end of nikola/state.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/nikola/state.py b/nikola/state.py --- a/nikola/state.py +++ b/nikola/state.py @@ -78,6 +78,11 @@ def _save(self): dname = os.path.dirname(self._path) with tempfile.NamedTemporaryFile(dir=dname, delete=False) as outf: + # TODO replace with encoding='utf-8' and mode 'w+' in v8 tname = outf.name - json.dump(self._local.data, outf, sort_keys=True, indent=2) + data = json.dumps(self._local.data, sort_keys=True, indent=2) + try: + outf.write(data) + except TypeError: + outf.write(data.encode('utf-8')) shutil.move(tname, self._path)
{"golden_diff": "diff --git a/nikola/state.py b/nikola/state.py\n--- a/nikola/state.py\n+++ b/nikola/state.py\n@@ -78,6 +78,11 @@\n def _save(self):\n dname = os.path.dirname(self._path)\n with tempfile.NamedTemporaryFile(dir=dname, delete=False) as outf:\n+ # TODO replace with encoding='utf-8' and mode 'w+' in v8\n tname = outf.name\n- json.dump(self._local.data, outf, sort_keys=True, indent=2)\n+ data = json.dumps(self._local.data, sort_keys=True, indent=2)\n+ try:\n+ outf.write(data)\n+ except TypeError:\n+ outf.write(data.encode('utf-8'))\n shutil.move(tname, self._path)\n", "issue": "deploy crashes with state system\nWill investigate later.\n\n``` pytb\nTraceback (most recent call last):\n File \"/home/kwpolska/virtualenvs/nikola-py3/lib/python3.5/site-packages/doit/doit_cmd.py\", line 168, in run\n return command.parse_execute(args)\n File \"/home/kwpolska/virtualenvs/nikola-py3/lib/python3.5/site-packages/doit/cmd_base.py\", line 122, in parse_execute\n return self.execute(params, args)\n File \"/home/kwpolska/git/nikola/nikola/plugin_categories.py\", line 124, in execute\n return self._execute(options, args)\n File \"/home/kwpolska/git/nikola/nikola/plugins/command/deploy.py\", line 135, in _execute\n self.site.state.set('last_deploy', new_deploy.isoformat())\n File \"/home/kwpolska/git/nikola/nikola/state.py\", line 64, in set\n self._save()\n File \"/home/kwpolska/git/nikola/nikola/state.py\", line 82, in _save\n json.dump(self._local.data, outf, sort_keys=True, indent=2)\n File \"/usr/lib64/python3.5/json/__init__.py\", line 179, in dump\n fp.write(chunk)\n File \"/home/kwpolska/virtualenvs/nikola-py3/lib/python3.5/tempfile.py\", line 483, in func_wrapper\n return func(*args, **kwargs)\nTypeError: a bytes-like object is required, not 'str'\n```\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Copyright \u00a9 2012-2016 Roberto Alsina and others.\n\n# Permission is hereby granted, free of charge, to any\n# person obtaining a copy of this software and associated\n# documentation files (the \"Software\"), to deal in the\n# Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the\n# Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice\n# shall be included in all copies or substantial portions of\n# the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR\n# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS\n# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR\n# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\"\"\"Persistent state implementation.\"\"\"\n\nimport json\nimport os\nimport shutil\nimport tempfile\nimport threading\n\nfrom . import utils\n\n\nclass Persistor():\n \"\"\"Persist stuff in a place.\n\n This is an intentionally dumb implementation. It is *not* meant to be\n fast, or useful for arbitrarily large data. Use lightly.\n\n Intentionally it has no namespaces, sections, etc. 
Use as a\n responsible adult.\n \"\"\"\n\n def __init__(self, path):\n \"\"\"Where do you want it persisted.\"\"\"\n self._path = path\n utils.makedirs(os.path.dirname(path))\n self._local = threading.local()\n self._local.data = {}\n\n def get(self, key):\n \"\"\"Get data stored in key.\"\"\"\n self._read()\n return self._local.data.get(key)\n\n def set(self, key, value):\n \"\"\"Store value in key.\"\"\"\n self._read()\n self._local.data[key] = value\n self._save()\n\n def delete(self, key):\n \"\"\"Delete key and the value it contains.\"\"\"\n self._read()\n if key in self._local.data:\n self._local.data.pop(key)\n self._save()\n\n def _read(self):\n if os.path.isfile(self._path):\n with open(self._path) as inf:\n self._local.data = json.load(inf)\n\n def _save(self):\n dname = os.path.dirname(self._path)\n with tempfile.NamedTemporaryFile(dir=dname, delete=False) as outf:\n tname = outf.name\n json.dump(self._local.data, outf, sort_keys=True, indent=2)\n shutil.move(tname, self._path)\n", "path": "nikola/state.py"}]}
1,683
181
gh_patches_debug_21053
rasdani/github-patches
git_diff
zestedesavoir__zds-site-6083
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> API: filtrer les notifications par `is_read` **Description du besoin** Pour les besoins de l'extension, il serait intéressant de pouvoir filtrer les notifications (URL `/api/notifications`) selon leur propriété `is_read` pour ne récupérer que les non lues (les autres n'ayant pas d'intérêt pour ce cas d'usage). **Description de la solution** Ajouter un filtre pour `is_read` (booléen) sur l'URL `/api/notifications` **Description des alternatives** Pouvoir trier selon cette propriété (pour avoir les non-lues d'abord), _a minima_. **Contexte additionnel** Voir le code de [notifier.js#64](https://github.com/zestedesavoir/extensions-notificateurs/blob/master/Universal/notifier.js#L64) pour voir le cas d'usage en question (qui me permettrait de supprimer le `.filter()` ligne 78 tout en récupérant des notifications potentiellement anciennes mais non lues qui sont actuellement inaccessibles). </issue> <code> [start of zds/notification/api/views.py] 1 import datetime 2 from django.core.cache import cache 3 from django.db.models.signals import post_delete 4 from django.db.models.signals import post_save 5 from dry_rest_permissions.generics import DRYPermissions 6 from rest_framework import filters 7 from rest_framework.generics import ListAPIView 8 from rest_framework.permissions import IsAuthenticated 9 from rest_framework_extensions.cache.decorators import cache_response 10 from rest_framework_extensions.etag.decorators import etag 11 from rest_framework_extensions.key_constructor import bits 12 from rest_framework_extensions.key_constructor.constructors import DefaultKeyConstructor 13 14 from zds.api.bits import DJRF3xPaginationKeyBit, UpdatedAtKeyBit 15 from zds.notification.api.serializers import NotificationSerializer 16 from zds.notification.models import Notification 17 18 19 class PagingNotificationListKeyConstructor(DefaultKeyConstructor): 20 pagination = DJRF3xPaginationKeyBit() 21 search = bits.QueryParamsKeyBit(["search", "ordering", "type"]) 22 list_sql_query = bits.ListSqlQueryKeyBit() 23 unique_view_id = bits.UniqueViewIdKeyBit() 24 user = bits.UserKeyBit() 25 updated_at = UpdatedAtKeyBit("api_updated_notification") 26 27 28 def change_api_notification_updated_at(sender=None, instance=None, *args, **kwargs): 29 cache.set("api_updated_notification", datetime.datetime.utcnow()) 30 31 32 post_save.connect(receiver=change_api_notification_updated_at, sender=Notification) 33 post_delete.connect(receiver=change_api_notification_updated_at, sender=Notification) 34 35 36 class NotificationListAPI(ListAPIView): 37 """ 38 List of notification. 39 """ 40 41 filter_backends = (filters.SearchFilter, filters.OrderingFilter) 42 search_fields = ("title",) 43 ordering_fields = ( 44 "pubdate", 45 "title", 46 ) 47 list_key_func = PagingNotificationListKeyConstructor() 48 serializer_class = NotificationSerializer 49 permission_classes = ( 50 IsAuthenticated, 51 DRYPermissions, 52 ) 53 54 @etag(list_key_func) 55 @cache_response(key_func=list_key_func) 56 def get(self, request, *args, **kwargs): 57 """ 58 Lists all notifications of a user. 59 --- 60 61 parameters: 62 - name: Authorization 63 description: Bearer token to make an authenticated request. 64 required: true 65 paramType: header 66 - name: page 67 description: Restricts output to the given page number. 68 required: false 69 paramType: query 70 - name: page_size 71 description: Sets the number of notifications per page. 
72 required: false 73 paramType: query 74 - name: search 75 description: Filters by title. 76 required: false 77 paramType: query 78 - name: ordering 79 description: Sorts the results. You can order by (-)pubdate or (-)title. 80 paramType: query 81 - name: type 82 description: Filters by notification type. 83 paramType: query 84 - name: subscription_type 85 description: Filters by subscription type. 86 paramType: query 87 - name: expand 88 description: Returns an object instead of an identifier representing the given field. 89 required: false 90 paramType: query 91 responseMessages: 92 - code: 401 93 message: Not Authenticated 94 - code: 404 95 message: Not Found 96 """ 97 return self.list(request, *args, **kwargs) 98 99 def get_queryset(self): 100 queryset = Notification.objects.get_notifications_of(self.request.user) 101 subscription_type = self.request.query_params.get("subscription_type", None) 102 if subscription_type: 103 queryset = queryset.filter(subscription__content_type__model=subscription_type) 104 _type = self.request.query_params.get("type", None) 105 if _type: 106 queryset = queryset.filter(content_type__model=_type) 107 return queryset 108 [end of zds/notification/api/views.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/zds/notification/api/views.py b/zds/notification/api/views.py --- a/zds/notification/api/views.py +++ b/zds/notification/api/views.py @@ -84,6 +84,9 @@ - name: subscription_type description: Filters by subscription type. paramType: query + - name: is_read + description: Filters by read status. + paramType: query - name: expand description: Returns an object instead of an identifier representing the given field. required: false @@ -104,4 +107,9 @@ _type = self.request.query_params.get("type", None) if _type: queryset = queryset.filter(content_type__model=_type) + is_read = str(self.request.query_params.get("is_read", None)).lower() + if is_read == "true": + queryset = queryset.filter(is_read=True) + elif is_read == "false": + queryset = queryset.filter(is_read=False) return queryset
{"golden_diff": "diff --git a/zds/notification/api/views.py b/zds/notification/api/views.py\n--- a/zds/notification/api/views.py\n+++ b/zds/notification/api/views.py\n@@ -84,6 +84,9 @@\n - name: subscription_type\n description: Filters by subscription type.\n paramType: query\n+ - name: is_read\n+ description: Filters by read status.\n+ paramType: query\n - name: expand\n description: Returns an object instead of an identifier representing the given field.\n required: false\n@@ -104,4 +107,9 @@\n _type = self.request.query_params.get(\"type\", None)\n if _type:\n queryset = queryset.filter(content_type__model=_type)\n+ is_read = str(self.request.query_params.get(\"is_read\", None)).lower()\n+ if is_read == \"true\":\n+ queryset = queryset.filter(is_read=True)\n+ elif is_read == \"false\":\n+ queryset = queryset.filter(is_read=False)\n return queryset\n", "issue": "API: filtrer les notifications par `is_read`\n**Description du besoin**\r\n\r\nPour les besoins de l'extension, il serait int\u00e9ressant de pouvoir filtrer les notifications (URL `/api/notifications`) selon leur propri\u00e9t\u00e9 `is_read` pour ne r\u00e9cup\u00e9rer que les non lues (les autres n'ayant pas d'int\u00e9r\u00eat pour ce cas d'usage).\r\n\r\n**Description de la solution**\r\n\r\nAjouter un filtre pour `is_read` (bool\u00e9en) sur l'URL `/api/notifications`\r\n\r\n**Description des alternatives**\r\n\r\nPouvoir trier selon cette propri\u00e9t\u00e9 (pour avoir les non-lues d'abord), _a minima_.\r\n\r\n**Contexte additionnel**\r\n\r\nVoir le code de [notifier.js#64](https://github.com/zestedesavoir/extensions-notificateurs/blob/master/Universal/notifier.js#L64) pour voir le cas d'usage en question (qui me permettrait de supprimer le `.filter()` ligne 78 tout en r\u00e9cup\u00e9rant des notifications potentiellement anciennes mais non lues qui sont actuellement inaccessibles).\r\n\n", "before_files": [{"content": "import datetime\nfrom django.core.cache import cache\nfrom django.db.models.signals import post_delete\nfrom django.db.models.signals import post_save\nfrom dry_rest_permissions.generics import DRYPermissions\nfrom rest_framework import filters\nfrom rest_framework.generics import ListAPIView\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework_extensions.cache.decorators import cache_response\nfrom rest_framework_extensions.etag.decorators import etag\nfrom rest_framework_extensions.key_constructor import bits\nfrom rest_framework_extensions.key_constructor.constructors import DefaultKeyConstructor\n\nfrom zds.api.bits import DJRF3xPaginationKeyBit, UpdatedAtKeyBit\nfrom zds.notification.api.serializers import NotificationSerializer\nfrom zds.notification.models import Notification\n\n\nclass PagingNotificationListKeyConstructor(DefaultKeyConstructor):\n pagination = DJRF3xPaginationKeyBit()\n search = bits.QueryParamsKeyBit([\"search\", \"ordering\", \"type\"])\n list_sql_query = bits.ListSqlQueryKeyBit()\n unique_view_id = bits.UniqueViewIdKeyBit()\n user = bits.UserKeyBit()\n updated_at = UpdatedAtKeyBit(\"api_updated_notification\")\n\n\ndef change_api_notification_updated_at(sender=None, instance=None, *args, **kwargs):\n cache.set(\"api_updated_notification\", datetime.datetime.utcnow())\n\n\npost_save.connect(receiver=change_api_notification_updated_at, sender=Notification)\npost_delete.connect(receiver=change_api_notification_updated_at, sender=Notification)\n\n\nclass NotificationListAPI(ListAPIView):\n \"\"\"\n List of notification.\n \"\"\"\n\n filter_backends = 
(filters.SearchFilter, filters.OrderingFilter)\n search_fields = (\"title\",)\n ordering_fields = (\n \"pubdate\",\n \"title\",\n )\n list_key_func = PagingNotificationListKeyConstructor()\n serializer_class = NotificationSerializer\n permission_classes = (\n IsAuthenticated,\n DRYPermissions,\n )\n\n @etag(list_key_func)\n @cache_response(key_func=list_key_func)\n def get(self, request, *args, **kwargs):\n \"\"\"\n Lists all notifications of a user.\n ---\n\n parameters:\n - name: Authorization\n description: Bearer token to make an authenticated request.\n required: true\n paramType: header\n - name: page\n description: Restricts output to the given page number.\n required: false\n paramType: query\n - name: page_size\n description: Sets the number of notifications per page.\n required: false\n paramType: query\n - name: search\n description: Filters by title.\n required: false\n paramType: query\n - name: ordering\n description: Sorts the results. You can order by (-)pubdate or (-)title.\n paramType: query\n - name: type\n description: Filters by notification type.\n paramType: query\n - name: subscription_type\n description: Filters by subscription type.\n paramType: query\n - name: expand\n description: Returns an object instead of an identifier representing the given field.\n required: false\n paramType: query\n responseMessages:\n - code: 401\n message: Not Authenticated\n - code: 404\n message: Not Found\n \"\"\"\n return self.list(request, *args, **kwargs)\n\n def get_queryset(self):\n queryset = Notification.objects.get_notifications_of(self.request.user)\n subscription_type = self.request.query_params.get(\"subscription_type\", None)\n if subscription_type:\n queryset = queryset.filter(subscription__content_type__model=subscription_type)\n _type = self.request.query_params.get(\"type\", None)\n if _type:\n queryset = queryset.filter(content_type__model=_type)\n return queryset\n", "path": "zds/notification/api/views.py"}]}
1,781
226
gh_patches_debug_8845
rasdani/github-patches
git_diff
safe-global__safe-config-service-14
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Include provider info in the serialized response of `GET /safe-apps/` The `/safe-apps` endpoint should include data about the provider if any </issue> <code> [start of src/safe_apps/serializers.py] 1 from rest_framework import serializers 2 3 from .models import SafeApp 4 5 6 class SafeAppsResponseSerializer(serializers.ModelSerializer): 7 class Meta: 8 model = SafeApp 9 fields = ['url', 'name', 'icon_url', 'description', 'networks'] 10 [end of src/safe_apps/serializers.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/safe_apps/serializers.py b/src/safe_apps/serializers.py --- a/src/safe_apps/serializers.py +++ b/src/safe_apps/serializers.py @@ -1,9 +1,17 @@ from rest_framework import serializers -from .models import SafeApp +from .models import SafeApp, Provider + + +class ProviderSerializer(serializers.ModelSerializer): + class Meta: + model = Provider + fields = ['url', 'name'] class SafeAppsResponseSerializer(serializers.ModelSerializer): + provider = ProviderSerializer() + class Meta: model = SafeApp - fields = ['url', 'name', 'icon_url', 'description', 'networks'] + fields = ['url', 'name', 'icon_url', 'description', 'networks', 'provider']
{"golden_diff": "diff --git a/src/safe_apps/serializers.py b/src/safe_apps/serializers.py\n--- a/src/safe_apps/serializers.py\n+++ b/src/safe_apps/serializers.py\n@@ -1,9 +1,17 @@\n from rest_framework import serializers\n \n-from .models import SafeApp\n+from .models import SafeApp, Provider\n+\n+\n+class ProviderSerializer(serializers.ModelSerializer):\n+ class Meta:\n+ model = Provider\n+ fields = ['url', 'name']\n \n \n class SafeAppsResponseSerializer(serializers.ModelSerializer):\n+ provider = ProviderSerializer()\n+\n class Meta:\n model = SafeApp\n- fields = ['url', 'name', 'icon_url', 'description', 'networks']\n+ fields = ['url', 'name', 'icon_url', 'description', 'networks', 'provider']\n", "issue": "Include provider info in the serialized response of `GET /safe-apps/`\nThe `/safe-apps` endpoint should include data about the provider if any\n", "before_files": [{"content": "from rest_framework import serializers\n\nfrom .models import SafeApp\n\n\nclass SafeAppsResponseSerializer(serializers.ModelSerializer):\n class Meta:\n model = SafeApp\n fields = ['url', 'name', 'icon_url', 'description', 'networks']\n", "path": "src/safe_apps/serializers.py"}]}
639
181
gh_patches_debug_29629
rasdani/github-patches
git_diff
aio-libs__aiohttp-4556
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> GET Requests to link-local IPv6 addresses don't work on Python 3.7+ 🐞 **Describe the bug** The aiohttp resolver loses information related to linklocal IPv6 addresses on Python 3.7+ due to a changes in the representation returned by `socket.getaddrinfo()` 💡 **To Reproduce** Try to get an URL like `http://[fe80::1%eth0]:8080/`, it will result in an OSError (Invalid argument) exception. This seems to be due to the way that scopeid's are handled in [resolver.py](https://github.com/aio-libs/aiohttp/blob/72c2acd4850b1cbc638b413a7c28d96882b4d7e8/aiohttp/resolver.py#L31-L37): Run `socket.getaddrinfo('fe80::1%eth0', 8080, family=socket.AF_INET6, proto=socket.IPPROTO_TCP)[0][4]` on python 3.6: ```python socket.getaddrinfo('fe80::1%eth0', 8080, family=socket.AF_INET6, proto=socket.IPPROTO_TCP)[0][4] >>> socket.getaddrinfo('fe80::1%eth0', 8080, family=socket.AF_INET6, proto=socket.IPPROTO_TCP)[0][4] ('fe80::1%eth0', 8080, 0, 4) ``` Run it on python 3.7: ```python >>> socket.getaddrinfo('fe80::1%eth0', 8080, family=socket.AF_INET6, proto=socket.IPPROTO_TCP)[0][4] ('fe80::1', 8080, 0, 4)y ``` The `address` element of the tuple no longer includes the textual representation of the scope id, it's only contained in the matching scope_id element of the tuple - which then is missing when later callings _loop.create_connection(). 💡 **Expected behavior** The URL is successfully retrieved for link local IPv6 addresses. 📋 **Logs/tracebacks** ```python-traceback (paste your traceback in the next line) N/A ``` 📋 **Your version of the Python** ```console $ python3 --version Python 3.6.6 $ python3.7 --version Python 3.7.5 ``` 📋 **Your version of the aiohttp/yarl/multidict distributions** ```console $ python -m pip show aiohttp python -m pip show aiohttp Name: aiohttp Version: 3.6.2 ``` ```console $ python -m pip show multidict Name: multidict Version: 4.7.4 ``` ```console $ python -m pip show yarl Name: yarl Version: 1.4.2 ``` 📋 **Additional context** OS: Centos7 Linux Proxy Server: No Related to: client </issue> <code> [start of aiohttp/resolver.py] 1 import socket 2 from typing import Any, Dict, List 3 4 from .abc import AbstractResolver 5 from .helpers import get_running_loop 6 7 __all__ = ('ThreadedResolver', 'AsyncResolver', 'DefaultResolver') 8 9 try: 10 import aiodns 11 12 # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname') 13 except ImportError: # pragma: no cover 14 aiodns = None 15 16 aiodns_default = False 17 18 19 class ThreadedResolver(AbstractResolver): 20 """Use Executor for synchronous getaddrinfo() calls, which defaults to 21 concurrent.futures.ThreadPoolExecutor. 
22 """ 23 24 def __init__(self) -> None: 25 self._loop = get_running_loop() 26 27 async def resolve(self, host: str, port: int=0, 28 family: int=socket.AF_INET) -> List[Dict[str, Any]]: 29 infos = await self._loop.getaddrinfo( 30 host, port, type=socket.SOCK_STREAM, family=family) 31 32 hosts = [] 33 for family, _, proto, _, address in infos: 34 hosts.append( 35 {'hostname': host, 36 'host': address[0], 'port': address[1], 37 'family': family, 'proto': proto, 38 'flags': socket.AI_NUMERICHOST}) 39 40 return hosts 41 42 async def close(self) -> None: 43 pass 44 45 46 class AsyncResolver(AbstractResolver): 47 """Use the `aiodns` package to make asynchronous DNS lookups""" 48 49 def __init__(self, *args: Any, **kwargs: Any) -> None: 50 if aiodns is None: 51 raise RuntimeError("Resolver requires aiodns library") 52 53 self._loop = get_running_loop() 54 self._resolver = aiodns.DNSResolver(*args, loop=self._loop, **kwargs) 55 56 async def resolve(self, host: str, port: int=0, 57 family: int=socket.AF_INET) -> List[Dict[str, Any]]: 58 try: 59 resp = await self._resolver.gethostbyname(host, family) 60 except aiodns.error.DNSError as exc: 61 msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" 62 raise OSError(msg) from exc 63 hosts = [] 64 for address in resp.addresses: 65 hosts.append( 66 {'hostname': host, 67 'host': address, 'port': port, 68 'family': family, 'proto': 0, 69 'flags': socket.AI_NUMERICHOST}) 70 71 if not hosts: 72 raise OSError("DNS lookup failed") 73 74 return hosts 75 76 async def close(self) -> None: 77 return self._resolver.cancel() 78 79 80 DefaultResolver = AsyncResolver if aiodns_default else ThreadedResolver 81 [end of aiohttp/resolver.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -31,11 +31,23 @@ hosts = [] for family, _, proto, _, address in infos: - hosts.append( - {'hostname': host, - 'host': address[0], 'port': address[1], - 'family': family, 'proto': proto, - 'flags': socket.AI_NUMERICHOST}) + if family == socket.AF_INET6 and address[3]: # type: ignore + # This is essential for link-local IPv6 addresses. + # LL IPv6 is a VERY rare case. Strictly speaking, we should use + # getnameinfo() unconditionally, but performance makes sense. + host, _port = socket.getnameinfo( + address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV) + port = int(_port) + else: + host, port = address[:2] + hosts.append({ + 'hostname': host, + 'host': host, + 'port': port, + 'family': family, + 'proto': proto, + 'flags': socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, + }) return hosts @@ -62,11 +74,14 @@ raise OSError(msg) from exc hosts = [] for address in resp.addresses: - hosts.append( - {'hostname': host, - 'host': address, 'port': port, - 'family': family, 'proto': 0, - 'flags': socket.AI_NUMERICHOST}) + hosts.append({ + 'hostname': host, + 'host': address, + 'port': port, + 'family': family, + 'proto': 0, + 'flags': socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, + }) if not hosts: raise OSError("DNS lookup failed")
{"golden_diff": "diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py\n--- a/aiohttp/resolver.py\n+++ b/aiohttp/resolver.py\n@@ -31,11 +31,23 @@\n \n hosts = []\n for family, _, proto, _, address in infos:\n- hosts.append(\n- {'hostname': host,\n- 'host': address[0], 'port': address[1],\n- 'family': family, 'proto': proto,\n- 'flags': socket.AI_NUMERICHOST})\n+ if family == socket.AF_INET6 and address[3]: # type: ignore\n+ # This is essential for link-local IPv6 addresses.\n+ # LL IPv6 is a VERY rare case. Strictly speaking, we should use\n+ # getnameinfo() unconditionally, but performance makes sense.\n+ host, _port = socket.getnameinfo(\n+ address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV)\n+ port = int(_port)\n+ else:\n+ host, port = address[:2]\n+ hosts.append({\n+ 'hostname': host,\n+ 'host': host,\n+ 'port': port,\n+ 'family': family,\n+ 'proto': proto,\n+ 'flags': socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,\n+ })\n \n return hosts\n \n@@ -62,11 +74,14 @@\n raise OSError(msg) from exc\n hosts = []\n for address in resp.addresses:\n- hosts.append(\n- {'hostname': host,\n- 'host': address, 'port': port,\n- 'family': family, 'proto': 0,\n- 'flags': socket.AI_NUMERICHOST})\n+ hosts.append({\n+ 'hostname': host,\n+ 'host': address,\n+ 'port': port,\n+ 'family': family,\n+ 'proto': 0,\n+ 'flags': socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,\n+ })\n \n if not hosts:\n raise OSError(\"DNS lookup failed\")\n", "issue": "GET Requests to link-local IPv6 addresses don't work on Python 3.7+\n\ud83d\udc1e **Describe the bug**\r\nThe aiohttp resolver loses information related to linklocal IPv6 addresses on Python 3.7+ due to a changes in the representation returned by `socket.getaddrinfo()`\r\n\r\n\ud83d\udca1 **To Reproduce**\r\nTry to get an URL like `http://[fe80::1%eth0]:8080/`, it will result in an OSError (Invalid argument) exception.\r\n\r\nThis seems to be due to the way that scopeid's are handled in [resolver.py](https://github.com/aio-libs/aiohttp/blob/72c2acd4850b1cbc638b413a7c28d96882b4d7e8/aiohttp/resolver.py#L31-L37):\r\n\r\nRun `socket.getaddrinfo('fe80::1%eth0', 8080, family=socket.AF_INET6, proto=socket.IPPROTO_TCP)[0][4]` on python 3.6:\r\n```python\r\nsocket.getaddrinfo('fe80::1%eth0', 8080, family=socket.AF_INET6, proto=socket.IPPROTO_TCP)[0][4]\r\n>>> socket.getaddrinfo('fe80::1%eth0', 8080, family=socket.AF_INET6, proto=socket.IPPROTO_TCP)[0][4]\r\n('fe80::1%eth0', 8080, 0, 4)\r\n```\r\n\r\nRun it on python 3.7:\r\n```python\r\n>>> socket.getaddrinfo('fe80::1%eth0', 8080, family=socket.AF_INET6, proto=socket.IPPROTO_TCP)[0][4]\r\n('fe80::1', 8080, 0, 4)y\r\n```\r\n\r\nThe `address` element of the tuple no longer includes the textual representation of the scope id, it's only contained in the matching scope_id element of the tuple - which then is missing when later callings _loop.create_connection().\r\n\r\n\ud83d\udca1 **Expected behavior**\r\nThe URL is successfully retrieved for link local IPv6 addresses.\r\n\r\n\r\n\ud83d\udccb **Logs/tracebacks**\r\n```python-traceback (paste your traceback in the next line)\r\nN/A\r\n```\r\n\r\n\ud83d\udccb **Your version of the Python**\r\n```console\r\n$ python3 --version\r\nPython 3.6.6\r\n$ python3.7 --version\r\nPython 3.7.5\r\n```\r\n\r\n\ud83d\udccb **Your version of the aiohttp/yarl/multidict distributions**\r\n```console\r\n$ python -m pip show aiohttp\r\npython -m pip show aiohttp\r\nName: aiohttp\r\nVersion: 3.6.2\r\n```\r\n```console\r\n$ python -m pip show multidict\r\nName: multidict\r\nVersion: 
4.7.4\r\n```\r\n```console\r\n$ python -m pip show yarl\r\nName: yarl\r\nVersion: 1.4.2\r\n```\r\n\r\n\ud83d\udccb **Additional context**\r\nOS: Centos7 Linux\r\nProxy Server: No\r\nRelated to: client\n", "before_files": [{"content": "import socket\nfrom typing import Any, Dict, List\n\nfrom .abc import AbstractResolver\nfrom .helpers import get_running_loop\n\n__all__ = ('ThreadedResolver', 'AsyncResolver', 'DefaultResolver')\n\ntry:\n import aiodns\n\n # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')\nexcept ImportError: # pragma: no cover\n aiodns = None\n\naiodns_default = False\n\n\nclass ThreadedResolver(AbstractResolver):\n \"\"\"Use Executor for synchronous getaddrinfo() calls, which defaults to\n concurrent.futures.ThreadPoolExecutor.\n \"\"\"\n\n def __init__(self) -> None:\n self._loop = get_running_loop()\n\n async def resolve(self, host: str, port: int=0,\n family: int=socket.AF_INET) -> List[Dict[str, Any]]:\n infos = await self._loop.getaddrinfo(\n host, port, type=socket.SOCK_STREAM, family=family)\n\n hosts = []\n for family, _, proto, _, address in infos:\n hosts.append(\n {'hostname': host,\n 'host': address[0], 'port': address[1],\n 'family': family, 'proto': proto,\n 'flags': socket.AI_NUMERICHOST})\n\n return hosts\n\n async def close(self) -> None:\n pass\n\n\nclass AsyncResolver(AbstractResolver):\n \"\"\"Use the `aiodns` package to make asynchronous DNS lookups\"\"\"\n\n def __init__(self, *args: Any, **kwargs: Any) -> None:\n if aiodns is None:\n raise RuntimeError(\"Resolver requires aiodns library\")\n\n self._loop = get_running_loop()\n self._resolver = aiodns.DNSResolver(*args, loop=self._loop, **kwargs)\n\n async def resolve(self, host: str, port: int=0,\n family: int=socket.AF_INET) -> List[Dict[str, Any]]:\n try:\n resp = await self._resolver.gethostbyname(host, family)\n except aiodns.error.DNSError as exc:\n msg = exc.args[1] if len(exc.args) >= 1 else \"DNS lookup failed\"\n raise OSError(msg) from exc\n hosts = []\n for address in resp.addresses:\n hosts.append(\n {'hostname': host,\n 'host': address, 'port': port,\n 'family': family, 'proto': 0,\n 'flags': socket.AI_NUMERICHOST})\n\n if not hosts:\n raise OSError(\"DNS lookup failed\")\n\n return hosts\n\n async def close(self) -> None:\n return self._resolver.cancel()\n\n\nDefaultResolver = AsyncResolver if aiodns_default else ThreadedResolver\n", "path": "aiohttp/resolver.py"}]}
1,959
459
gh_patches_debug_22504
rasdani/github-patches
git_diff
wright-group__WrightTools-360
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Windows Tempfile Error On Windows, tempfiles attempted to be opened using h5py cause errors. I do not have the error message in front of me at present, but I believe it was a 'file already exists' flavor of problem. We may need to remove the created tmpfile and just use the name.... </issue> <code> [start of WrightTools/_base.py] 1 """WrightTools base classes and associated.""" 2 3 4 # --- import -------------------------------------------------------------------------------------- 5 6 7 import shutil 8 import weakref 9 import tempfile 10 import posixpath 11 12 import numpy as np 13 14 import h5py 15 16 17 # --- define -------------------------------------------------------------------------------------- 18 19 20 wt5_version = '0.0.0' 21 22 23 # --- dataset ------------------------------------------------------------------------------------- 24 25 26 class Dataset(h5py.Dataset): 27 instances = {} 28 29 30 # --- group --------------------------------------------------------------------------------------- 31 32 33 class Group(h5py.Group): 34 instances = {} 35 class_name = 'Group' 36 37 def __init__(self, filepath=None, parent=None, name=None, **kwargs): 38 if filepath is None: 39 return 40 if parent == '': 41 parent = posixpath.sep 42 # file 43 self.filepath = filepath 44 path = parent + posixpath.sep + name 45 file = h5py.File(self.filepath, 'a') 46 file.require_group(parent) 47 file.require_group(path) 48 h5py.Group.__init__(self, bind=file[path].id) 49 self.__n = 0 50 self.fid = self.file.fid 51 if name is not None: 52 self.attrs['name'] = name 53 self.attrs.update(kwargs) 54 self.attrs['class'] = self.class_name 55 # load from file 56 self._items = [] 57 for name in self.item_names: 58 self._items.append(self[name]) 59 setattr(self, name, self[name]) 60 # kwargs 61 self.attrs.update(kwargs) 62 # the following are populated if not already recorded 63 self.__version__ 64 self.natural_name 65 66 def __new__(cls, *args, **kwargs): 67 # extract 68 filepath = args[0] if len(args) > 0 else kwargs.get('filepath', None) 69 parent = args[1] if len(args) > 1 else kwargs.get('parent', None) 70 name = args[2] if len(args) > 2 else kwargs.get('name', cls.class_name.lower()) 71 edit_local = args[3] if len(args) > 3 else kwargs.get('edit_local', False) 72 # tempfile 73 tmpfile = None 74 if edit_local and filepath is None: 75 raise Exception # TODO: better exception 76 if not edit_local: 77 tmpfile = tempfile.NamedTemporaryFile(prefix='', suffix='.wt5') 78 p = tmpfile.name 79 if filepath: 80 shutil.copyfile(src=filepath, dst=p) 81 elif edit_local and filepath: 82 p = filepath 83 # construct fullpath 84 if parent is None: 85 parent = '' 86 name = '/' 87 fullpath = p + '::' + parent + name 88 # create and/or return 89 if fullpath not in cls.instances.keys(): 90 kwargs['filepath'] = p 91 kwargs['parent'] = parent 92 kwargs['name'] = name 93 instance = super(Group, cls).__new__(cls) 94 cls.__init__(instance, **kwargs) 95 cls.instances[fullpath] = instance 96 if tmpfile: 97 setattr(instance, '_tmpfile', tmpfile) 98 weakref.finalize(instance, instance.close) 99 return instance 100 instance = cls.instances[fullpath] 101 return instance 102 103 @property 104 def __version__(self): 105 if '__version__' not in self.file.attrs.keys(): 106 self.file.attrs['__version__'] = wt5_version 107 return self.file.attrs['__version__'] 108 109 @property 110 def fullpath(self): 111 return self.filepath + '::' + 
self.name 112 113 @property 114 def item_names(self): 115 if 'item_names' not in self.attrs.keys(): 116 self.attrs['item_names'] = np.array([], dtype='S') 117 return self.attrs['item_names'] 118 119 @property 120 def natural_name(self): 121 if 'name' not in self.attrs.keys(): 122 self.attrs['name'] = self.__class__.default_name 123 return self.attrs['name'] 124 125 @property 126 def parent(self): 127 from .collection import Collection 128 group = super().parent 129 parent = group.parent.name 130 if parent == posixpath.sep: 131 parent = None 132 return Collection(self.filepath, parent=parent, name=group.attrs['name']) 133 134 def close(self): 135 if(self.fid.valid > 0): 136 self.__class__.instances.pop(self.fullpath) 137 self.file.flush() 138 self.file.close() 139 if hasattr(self, '_tmpfile'): 140 self._tmpfile.close() 141 142 def flush(self): 143 self.file.flush() 144 [end of WrightTools/_base.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/WrightTools/_base.py b/WrightTools/_base.py --- a/WrightTools/_base.py +++ b/WrightTools/_base.py @@ -5,6 +5,7 @@ import shutil +import os import weakref import tempfile import posixpath @@ -74,8 +75,8 @@ if edit_local and filepath is None: raise Exception # TODO: better exception if not edit_local: - tmpfile = tempfile.NamedTemporaryFile(prefix='', suffix='.wt5') - p = tmpfile.name + tmpfile = tempfile.mkstemp(prefix='', suffix='.wt5') + p = tmpfile[1] if filepath: shutil.copyfile(src=filepath, dst=p) elif edit_local and filepath: @@ -137,7 +138,8 @@ self.file.flush() self.file.close() if hasattr(self, '_tmpfile'): - self._tmpfile.close() + os.close(self._tmpfile[0]) + os.remove(self._tmpfile[1]) def flush(self): self.file.flush()
{"golden_diff": "diff --git a/WrightTools/_base.py b/WrightTools/_base.py\n--- a/WrightTools/_base.py\n+++ b/WrightTools/_base.py\n@@ -5,6 +5,7 @@\n \n \n import shutil\n+import os\n import weakref\n import tempfile\n import posixpath\n@@ -74,8 +75,8 @@\n if edit_local and filepath is None:\n raise Exception # TODO: better exception\n if not edit_local:\n- tmpfile = tempfile.NamedTemporaryFile(prefix='', suffix='.wt5')\n- p = tmpfile.name\n+ tmpfile = tempfile.mkstemp(prefix='', suffix='.wt5')\n+ p = tmpfile[1]\n if filepath:\n shutil.copyfile(src=filepath, dst=p)\n elif edit_local and filepath:\n@@ -137,7 +138,8 @@\n self.file.flush()\n self.file.close()\n if hasattr(self, '_tmpfile'):\n- self._tmpfile.close()\n+ os.close(self._tmpfile[0])\n+ os.remove(self._tmpfile[1])\n \n def flush(self):\n self.file.flush()\n", "issue": "Windows Tempfile Error\nOn Windows, tempfiles attempted to be opened using h5py cause errors.\r\n\r\nI do not have the error message in front of me at present, but I believe it was a 'file already exists' flavor of problem. \r\n\r\nWe may need to remove the created tmpfile and just use the name....\n", "before_files": [{"content": "\"\"\"WrightTools base classes and associated.\"\"\"\n\n\n# --- import --------------------------------------------------------------------------------------\n\n\nimport shutil\nimport weakref\nimport tempfile\nimport posixpath\n\nimport numpy as np\n\nimport h5py\n\n\n# --- define --------------------------------------------------------------------------------------\n\n\nwt5_version = '0.0.0'\n\n\n# --- dataset -------------------------------------------------------------------------------------\n\n\nclass Dataset(h5py.Dataset):\n instances = {}\n\n\n# --- group ---------------------------------------------------------------------------------------\n\n\nclass Group(h5py.Group):\n instances = {}\n class_name = 'Group'\n\n def __init__(self, filepath=None, parent=None, name=None, **kwargs):\n if filepath is None:\n return\n if parent == '':\n parent = posixpath.sep\n # file\n self.filepath = filepath\n path = parent + posixpath.sep + name\n file = h5py.File(self.filepath, 'a')\n file.require_group(parent)\n file.require_group(path)\n h5py.Group.__init__(self, bind=file[path].id)\n self.__n = 0\n self.fid = self.file.fid\n if name is not None:\n self.attrs['name'] = name\n self.attrs.update(kwargs)\n self.attrs['class'] = self.class_name\n # load from file\n self._items = []\n for name in self.item_names:\n self._items.append(self[name])\n setattr(self, name, self[name])\n # kwargs\n self.attrs.update(kwargs)\n # the following are populated if not already recorded\n self.__version__\n self.natural_name\n\n def __new__(cls, *args, **kwargs):\n # extract\n filepath = args[0] if len(args) > 0 else kwargs.get('filepath', None)\n parent = args[1] if len(args) > 1 else kwargs.get('parent', None)\n name = args[2] if len(args) > 2 else kwargs.get('name', cls.class_name.lower())\n edit_local = args[3] if len(args) > 3 else kwargs.get('edit_local', False)\n # tempfile\n tmpfile = None\n if edit_local and filepath is None:\n raise Exception # TODO: better exception\n if not edit_local:\n tmpfile = tempfile.NamedTemporaryFile(prefix='', suffix='.wt5')\n p = tmpfile.name\n if filepath:\n shutil.copyfile(src=filepath, dst=p)\n elif edit_local and filepath:\n p = filepath\n # construct fullpath\n if parent is None:\n parent = ''\n name = '/'\n fullpath = p + '::' + parent + name\n # create and/or return\n if fullpath not in cls.instances.keys():\n 
kwargs['filepath'] = p\n kwargs['parent'] = parent\n kwargs['name'] = name\n instance = super(Group, cls).__new__(cls)\n cls.__init__(instance, **kwargs)\n cls.instances[fullpath] = instance\n if tmpfile:\n setattr(instance, '_tmpfile', tmpfile)\n weakref.finalize(instance, instance.close)\n return instance\n instance = cls.instances[fullpath]\n return instance\n\n @property\n def __version__(self):\n if '__version__' not in self.file.attrs.keys():\n self.file.attrs['__version__'] = wt5_version\n return self.file.attrs['__version__']\n\n @property\n def fullpath(self):\n return self.filepath + '::' + self.name\n\n @property\n def item_names(self):\n if 'item_names' not in self.attrs.keys():\n self.attrs['item_names'] = np.array([], dtype='S')\n return self.attrs['item_names']\n\n @property\n def natural_name(self):\n if 'name' not in self.attrs.keys():\n self.attrs['name'] = self.__class__.default_name\n return self.attrs['name']\n\n @property\n def parent(self):\n from .collection import Collection\n group = super().parent\n parent = group.parent.name\n if parent == posixpath.sep:\n parent = None\n return Collection(self.filepath, parent=parent, name=group.attrs['name'])\n\n def close(self):\n if(self.fid.valid > 0):\n self.__class__.instances.pop(self.fullpath)\n self.file.flush()\n self.file.close()\n if hasattr(self, '_tmpfile'):\n self._tmpfile.close()\n\n def flush(self):\n self.file.flush()\n", "path": "WrightTools/_base.py"}]}
1,891
248
gh_patches_debug_25508
rasdani/github-patches
git_diff
strawberry-graphql__strawberry-728
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> UploadFile causes ASGI application's exception when it is in debug mode Apparently when we use the ASGI application in debug mode, it cannot print the value of variables if they are not JSON serializable. In my use case, when I tried to use the [file upload](https://strawberry.rocks/docs/features/file-upload) example in debug mode this issue ended up happening. I believe it is because of this: https://github.com/strawberry-graphql/strawberry/blob/de215370b247a417af8a8dd5fc382d71e305bcd7/strawberry/utils/debug.py#L26-L29 Perhaps converting variables to string might help. </issue> <code> [start of strawberry/utils/debug.py] 1 import datetime 2 import json 3 import typing 4 5 from pygments import highlight, lexers 6 from pygments.formatters import Terminal256Formatter 7 8 from .graphql_lexer import GraphQLLexer 9 10 11 def pretty_print_graphql_operation( 12 operation_name: str, query: str, variables: typing.Dict["str", typing.Any] 13 ): # pragma: no cover 14 """Pretty print a GraphQL operation using pygments. 15 16 Won't print introspection operation to prevent noise in the output.""" 17 18 if operation_name == "IntrospectionQuery": 19 return 20 21 now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") 22 23 print(f"[{now}]: {operation_name or 'No operation name'}") 24 print(highlight(query, GraphQLLexer(), Terminal256Formatter())) 25 26 if variables: 27 variables_json = json.dumps(variables, indent=4) 28 29 print(highlight(variables_json, lexers.JsonLexer(), Terminal256Formatter())) 30 [end of strawberry/utils/debug.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/strawberry/utils/debug.py b/strawberry/utils/debug.py --- a/strawberry/utils/debug.py +++ b/strawberry/utils/debug.py @@ -1,6 +1,7 @@ import datetime import json -import typing +from json import JSONEncoder +from typing import Any, Dict from pygments import highlight, lexers from pygments.formatters import Terminal256Formatter @@ -8,9 +9,14 @@ from .graphql_lexer import GraphQLLexer +class StrawberryJSONEncoder(JSONEncoder): + def default(self, o: Any) -> Any: + return repr(o) + + def pretty_print_graphql_operation( - operation_name: str, query: str, variables: typing.Dict["str", typing.Any] -): # pragma: no cover + operation_name: str, query: str, variables: Dict["str", Any] +): """Pretty print a GraphQL operation using pygments. Won't print introspection operation to prevent noise in the output.""" @@ -24,6 +30,6 @@ print(highlight(query, GraphQLLexer(), Terminal256Formatter())) if variables: - variables_json = json.dumps(variables, indent=4) + variables_json = json.dumps(variables, indent=4, cls=StrawberryJSONEncoder) print(highlight(variables_json, lexers.JsonLexer(), Terminal256Formatter()))
{"golden_diff": "diff --git a/strawberry/utils/debug.py b/strawberry/utils/debug.py\n--- a/strawberry/utils/debug.py\n+++ b/strawberry/utils/debug.py\n@@ -1,6 +1,7 @@\n import datetime\n import json\n-import typing\n+from json import JSONEncoder\n+from typing import Any, Dict\n \n from pygments import highlight, lexers\n from pygments.formatters import Terminal256Formatter\n@@ -8,9 +9,14 @@\n from .graphql_lexer import GraphQLLexer\n \n \n+class StrawberryJSONEncoder(JSONEncoder):\n+ def default(self, o: Any) -> Any:\n+ return repr(o)\n+\n+\n def pretty_print_graphql_operation(\n- operation_name: str, query: str, variables: typing.Dict[\"str\", typing.Any]\n-): # pragma: no cover\n+ operation_name: str, query: str, variables: Dict[\"str\", Any]\n+):\n \"\"\"Pretty print a GraphQL operation using pygments.\n \n Won't print introspection operation to prevent noise in the output.\"\"\"\n@@ -24,6 +30,6 @@\n print(highlight(query, GraphQLLexer(), Terminal256Formatter()))\n \n if variables:\n- variables_json = json.dumps(variables, indent=4)\n+ variables_json = json.dumps(variables, indent=4, cls=StrawberryJSONEncoder)\n \n print(highlight(variables_json, lexers.JsonLexer(), Terminal256Formatter()))\n", "issue": "UploadFile causes ASGI application's exception when it is in debug mode\nApparently when we use the ASGI application in debug mode, it cannot print the value of variables if they are not JSON serializable.\r\n\r\nIn my use case, when I tried to use the [file upload](https://strawberry.rocks/docs/features/file-upload) example in debug mode this issue ended up happening.\r\n\r\nI believe it is because of this:\r\n\r\nhttps://github.com/strawberry-graphql/strawberry/blob/de215370b247a417af8a8dd5fc382d71e305bcd7/strawberry/utils/debug.py#L26-L29\r\n\r\nPerhaps converting variables to string might help.\n", "before_files": [{"content": "import datetime\nimport json\nimport typing\n\nfrom pygments import highlight, lexers\nfrom pygments.formatters import Terminal256Formatter\n\nfrom .graphql_lexer import GraphQLLexer\n\n\ndef pretty_print_graphql_operation(\n operation_name: str, query: str, variables: typing.Dict[\"str\", typing.Any]\n): # pragma: no cover\n \"\"\"Pretty print a GraphQL operation using pygments.\n\n Won't print introspection operation to prevent noise in the output.\"\"\"\n\n if operation_name == \"IntrospectionQuery\":\n return\n\n now = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\n\n print(f\"[{now}]: {operation_name or 'No operation name'}\")\n print(highlight(query, GraphQLLexer(), Terminal256Formatter()))\n\n if variables:\n variables_json = json.dumps(variables, indent=4)\n\n print(highlight(variables_json, lexers.JsonLexer(), Terminal256Formatter()))\n", "path": "strawberry/utils/debug.py"}]}
957
318
gh_patches_debug_33721
rasdani/github-patches
git_diff
docker__docker-py-1178
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Support create network EnableIPv6 and Labels options Check the remote API: https://docs.docker.com/engine/reference/api/docker_remote_api_v1.23/#create-a-network There are two missing JSON parameters: ``` EnableIPv6 - Enable IPv6 on the network Labels - Labels to set on the network, specified as a map: {"key":"value" [,"key2":"value2"]} ``` </issue> <code> [start of docker/api/network.py] 1 import json 2 3 from ..errors import InvalidVersion 4 from ..utils import check_resource, minimum_version 5 from ..utils import version_lt 6 7 8 class NetworkApiMixin(object): 9 @minimum_version('1.21') 10 def networks(self, names=None, ids=None): 11 filters = {} 12 if names: 13 filters['name'] = names 14 if ids: 15 filters['id'] = ids 16 17 params = {'filters': json.dumps(filters)} 18 19 url = self._url("/networks") 20 res = self._get(url, params=params) 21 return self._result(res, json=True) 22 23 @minimum_version('1.21') 24 def create_network(self, name, driver=None, options=None, ipam=None, 25 check_duplicate=None, internal=False): 26 if options is not None and not isinstance(options, dict): 27 raise TypeError('options must be a dictionary') 28 29 data = { 30 'Name': name, 31 'Driver': driver, 32 'Options': options, 33 'IPAM': ipam, 34 'CheckDuplicate': check_duplicate 35 } 36 37 if internal: 38 if version_lt(self._version, '1.22'): 39 raise InvalidVersion('Internal networks are not ' 40 'supported in API version < 1.22') 41 data['Internal'] = True 42 43 url = self._url("/networks/create") 44 res = self._post_json(url, data=data) 45 return self._result(res, json=True) 46 47 @minimum_version('1.21') 48 def remove_network(self, net_id): 49 url = self._url("/networks/{0}", net_id) 50 res = self._delete(url) 51 self._raise_for_status(res) 52 53 @minimum_version('1.21') 54 def inspect_network(self, net_id): 55 url = self._url("/networks/{0}", net_id) 56 res = self._get(url) 57 return self._result(res, json=True) 58 59 @check_resource 60 @minimum_version('1.21') 61 def connect_container_to_network(self, container, net_id, 62 ipv4_address=None, ipv6_address=None, 63 aliases=None, links=None, 64 link_local_ips=None): 65 data = { 66 "Container": container, 67 "EndpointConfig": self.create_endpoint_config( 68 aliases=aliases, links=links, ipv4_address=ipv4_address, 69 ipv6_address=ipv6_address, link_local_ips=link_local_ips 70 ), 71 } 72 73 url = self._url("/networks/{0}/connect", net_id) 74 res = self._post_json(url, data=data) 75 self._raise_for_status(res) 76 77 @check_resource 78 @minimum_version('1.21') 79 def disconnect_container_from_network(self, container, net_id): 80 data = {"container": container} 81 url = self._url("/networks/{0}/disconnect", net_id) 82 res = self._post_json(url, data=data) 83 self._raise_for_status(res) 84 [end of docker/api/network.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/docker/api/network.py b/docker/api/network.py --- a/docker/api/network.py +++ b/docker/api/network.py @@ -22,7 +22,8 @@ @minimum_version('1.21') def create_network(self, name, driver=None, options=None, ipam=None, - check_duplicate=None, internal=False): + check_duplicate=None, internal=False, labels=None, + enable_ipv6=False): if options is not None and not isinstance(options, dict): raise TypeError('options must be a dictionary') @@ -34,6 +35,22 @@ 'CheckDuplicate': check_duplicate } + if labels is not None: + if version_lt(self._version, '1.23'): + raise InvalidVersion( + 'network labels were introduced in API 1.23' + ) + if not isinstance(labels, dict): + raise TypeError('labels must be a dictionary') + data["Labels"] = labels + + if enable_ipv6: + if version_lt(self._version, '1.23'): + raise InvalidVersion( + 'enable_ipv6 was introduced in API 1.23' + ) + data['EnableIPv6'] = True + if internal: if version_lt(self._version, '1.22'): raise InvalidVersion('Internal networks are not ' @@ -76,8 +93,15 @@ @check_resource @minimum_version('1.21') - def disconnect_container_from_network(self, container, net_id): - data = {"container": container} + def disconnect_container_from_network(self, container, net_id, + force=False): + data = {"Container": container} + if force: + if version_lt(self._version, '1.22'): + raise InvalidVersion( + 'Forced disconnect was introduced in API 1.22' + ) + data['Force'] = force url = self._url("/networks/{0}/disconnect", net_id) res = self._post_json(url, data=data) self._raise_for_status(res)
{"golden_diff": "diff --git a/docker/api/network.py b/docker/api/network.py\n--- a/docker/api/network.py\n+++ b/docker/api/network.py\n@@ -22,7 +22,8 @@\n \n @minimum_version('1.21')\n def create_network(self, name, driver=None, options=None, ipam=None,\n- check_duplicate=None, internal=False):\n+ check_duplicate=None, internal=False, labels=None,\n+ enable_ipv6=False):\n if options is not None and not isinstance(options, dict):\n raise TypeError('options must be a dictionary')\n \n@@ -34,6 +35,22 @@\n 'CheckDuplicate': check_duplicate\n }\n \n+ if labels is not None:\n+ if version_lt(self._version, '1.23'):\n+ raise InvalidVersion(\n+ 'network labels were introduced in API 1.23'\n+ )\n+ if not isinstance(labels, dict):\n+ raise TypeError('labels must be a dictionary')\n+ data[\"Labels\"] = labels\n+\n+ if enable_ipv6:\n+ if version_lt(self._version, '1.23'):\n+ raise InvalidVersion(\n+ 'enable_ipv6 was introduced in API 1.23'\n+ )\n+ data['EnableIPv6'] = True\n+\n if internal:\n if version_lt(self._version, '1.22'):\n raise InvalidVersion('Internal networks are not '\n@@ -76,8 +93,15 @@\n \n @check_resource\n @minimum_version('1.21')\n- def disconnect_container_from_network(self, container, net_id):\n- data = {\"container\": container}\n+ def disconnect_container_from_network(self, container, net_id,\n+ force=False):\n+ data = {\"Container\": container}\n+ if force:\n+ if version_lt(self._version, '1.22'):\n+ raise InvalidVersion(\n+ 'Forced disconnect was introduced in API 1.22'\n+ )\n+ data['Force'] = force\n url = self._url(\"/networks/{0}/disconnect\", net_id)\n res = self._post_json(url, data=data)\n self._raise_for_status(res)\n", "issue": "Support create network EnableIPv6 and Labels options \nCheck the remote API:\nhttps://docs.docker.com/engine/reference/api/docker_remote_api_v1.23/#create-a-network\n\nThere are two missing JSON parameters:\n\n```\nEnableIPv6 - Enable IPv6 on the network\nLabels - Labels to set on the network, specified as a map: {\"key\":\"value\" [,\"key2\":\"value2\"]}\n```\n\n", "before_files": [{"content": "import json\n\nfrom ..errors import InvalidVersion\nfrom ..utils import check_resource, minimum_version\nfrom ..utils import version_lt\n\n\nclass NetworkApiMixin(object):\n @minimum_version('1.21')\n def networks(self, names=None, ids=None):\n filters = {}\n if names:\n filters['name'] = names\n if ids:\n filters['id'] = ids\n\n params = {'filters': json.dumps(filters)}\n\n url = self._url(\"/networks\")\n res = self._get(url, params=params)\n return self._result(res, json=True)\n\n @minimum_version('1.21')\n def create_network(self, name, driver=None, options=None, ipam=None,\n check_duplicate=None, internal=False):\n if options is not None and not isinstance(options, dict):\n raise TypeError('options must be a dictionary')\n\n data = {\n 'Name': name,\n 'Driver': driver,\n 'Options': options,\n 'IPAM': ipam,\n 'CheckDuplicate': check_duplicate\n }\n\n if internal:\n if version_lt(self._version, '1.22'):\n raise InvalidVersion('Internal networks are not '\n 'supported in API version < 1.22')\n data['Internal'] = True\n\n url = self._url(\"/networks/create\")\n res = self._post_json(url, data=data)\n return self._result(res, json=True)\n\n @minimum_version('1.21')\n def remove_network(self, net_id):\n url = self._url(\"/networks/{0}\", net_id)\n res = self._delete(url)\n self._raise_for_status(res)\n\n @minimum_version('1.21')\n def inspect_network(self, net_id):\n url = self._url(\"/networks/{0}\", net_id)\n res = self._get(url)\n return 
self._result(res, json=True)\n\n @check_resource\n @minimum_version('1.21')\n def connect_container_to_network(self, container, net_id,\n ipv4_address=None, ipv6_address=None,\n aliases=None, links=None,\n link_local_ips=None):\n data = {\n \"Container\": container,\n \"EndpointConfig\": self.create_endpoint_config(\n aliases=aliases, links=links, ipv4_address=ipv4_address,\n ipv6_address=ipv6_address, link_local_ips=link_local_ips\n ),\n }\n\n url = self._url(\"/networks/{0}/connect\", net_id)\n res = self._post_json(url, data=data)\n self._raise_for_status(res)\n\n @check_resource\n @minimum_version('1.21')\n def disconnect_container_from_network(self, container, net_id):\n data = {\"container\": container}\n url = self._url(\"/networks/{0}/disconnect\", net_id)\n res = self._post_json(url, data=data)\n self._raise_for_status(res)\n", "path": "docker/api/network.py"}]}
1,418
481
gh_patches_debug_20448
rasdani/github-patches
git_diff
litestar-org__litestar-3454
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Docs: Document SSE ### Summary The SSE documentation is currently lacking: - Docs for `ServerSentEventMessage` - Sending messages a dicts <!-- POLAR PLEDGE BADGE START --> --- > [!NOTE] > While we are open for sponsoring on [GitHub Sponsors](https://github.com/sponsors/litestar-org/) and > [OpenCollective](https://opencollective.com/litestar), we also utilize [Polar.sh](https://polar.sh/) to engage in pledge-based sponsorship. > > Check out all issues funded or available for funding [on our Polar.sh dashboard](https://polar.sh/litestar-org) > * If you would like to see an issue prioritized, make a pledge towards it! > * We receive the pledge once the issue is completed & verified > * This, along with engagement in the community, helps us know which features are a priority to our users. <a href="https://polar.sh/litestar-org/litestar/issues/3011"> <picture> <source media="(prefers-color-scheme: dark)" srcset="https://polar.sh/api/github/litestar-org/litestar/issues/3011/pledge.svg?darkmode=1"> <img alt="Fund with Polar" src="https://polar.sh/api/github/litestar-org/litestar/issues/3011/pledge.svg"> </picture> </a> <!-- POLAR PLEDGE BADGE END --> </issue> <code> [start of docs/examples/responses/sse_responses.py] 1 from asyncio import sleep 2 from typing import AsyncGenerator 3 4 from litestar import Litestar, get 5 from litestar.response import ServerSentEvent 6 7 8 async def my_generator() -> AsyncGenerator[bytes, None]: 9 count = 0 10 while count < 10: 11 await sleep(0.01) 12 count += 1 13 yield str(count) 14 15 16 @get(path="/count", sync_to_thread=False) 17 def sse_handler() -> ServerSentEvent: 18 return ServerSentEvent(my_generator()) 19 20 21 app = Litestar(route_handlers=[sse_handler]) 22 [end of docs/examples/responses/sse_responses.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/docs/examples/responses/sse_responses.py b/docs/examples/responses/sse_responses.py --- a/docs/examples/responses/sse_responses.py +++ b/docs/examples/responses/sse_responses.py @@ -2,15 +2,28 @@ from typing import AsyncGenerator from litestar import Litestar, get -from litestar.response import ServerSentEvent +from litestar.response import ServerSentEvent, ServerSentEventMessage +from litestar.types import SSEData -async def my_generator() -> AsyncGenerator[bytes, None]: +async def my_generator() -> AsyncGenerator[SSEData, None]: count = 0 while count < 10: await sleep(0.01) count += 1 + # In the generator you can yield integers, strings, bytes, dictionaries, or ServerSentEventMessage objects + # dicts can have the following keys: data, event, id, retry, comment + + # here we yield an integer + yield count + # here a string yield str(count) + # here bytes + yield str(count).encode("utf-8") + # here a dictionary + yield {"data": 2 * count, "event": "event2", "retry": 10} + # here a ServerSentEventMessage object + yield ServerSentEventMessage(event="something-with-comment", retry=1000, comment="some comment") @get(path="/count", sync_to_thread=False)
{"golden_diff": "diff --git a/docs/examples/responses/sse_responses.py b/docs/examples/responses/sse_responses.py\n--- a/docs/examples/responses/sse_responses.py\n+++ b/docs/examples/responses/sse_responses.py\n@@ -2,15 +2,28 @@\n from typing import AsyncGenerator\n \n from litestar import Litestar, get\n-from litestar.response import ServerSentEvent\n+from litestar.response import ServerSentEvent, ServerSentEventMessage\n+from litestar.types import SSEData\n \n \n-async def my_generator() -> AsyncGenerator[bytes, None]:\n+async def my_generator() -> AsyncGenerator[SSEData, None]:\n count = 0\n while count < 10:\n await sleep(0.01)\n count += 1\n+ # In the generator you can yield integers, strings, bytes, dictionaries, or ServerSentEventMessage objects\n+ # dicts can have the following keys: data, event, id, retry, comment\n+\n+ # here we yield an integer\n+ yield count\n+ # here a string\n yield str(count)\n+ # here bytes\n+ yield str(count).encode(\"utf-8\")\n+ # here a dictionary\n+ yield {\"data\": 2 * count, \"event\": \"event2\", \"retry\": 10}\n+ # here a ServerSentEventMessage object\n+ yield ServerSentEventMessage(event=\"something-with-comment\", retry=1000, comment=\"some comment\")\n \n \n @get(path=\"/count\", sync_to_thread=False)\n", "issue": "Docs: Document SSE\n### Summary\n\nThe SSE documentation is currently lacking:\r\n\r\n- Docs for `ServerSentEventMessage`\r\n- Sending messages a dicts\r\n\r\n\n\n<!-- POLAR PLEDGE BADGE START -->\n---\n> [!NOTE] \n> While we are open for sponsoring on [GitHub Sponsors](https://github.com/sponsors/litestar-org/) and \n> [OpenCollective](https://opencollective.com/litestar), we also utilize [Polar.sh](https://polar.sh/) to engage in pledge-based sponsorship.\n>\n> Check out all issues funded or available for funding [on our Polar.sh dashboard](https://polar.sh/litestar-org)\n> * If you would like to see an issue prioritized, make a pledge towards it!\n> * We receive the pledge once the issue is completed & verified\n> * This, along with engagement in the community, helps us know which features are a priority to our users.\n\n<a href=\"https://polar.sh/litestar-org/litestar/issues/3011\">\n<picture>\n <source media=\"(prefers-color-scheme: dark)\" srcset=\"https://polar.sh/api/github/litestar-org/litestar/issues/3011/pledge.svg?darkmode=1\">\n <img alt=\"Fund with Polar\" src=\"https://polar.sh/api/github/litestar-org/litestar/issues/3011/pledge.svg\">\n</picture>\n</a>\n<!-- POLAR PLEDGE BADGE END -->\n\n", "before_files": [{"content": "from asyncio import sleep\nfrom typing import AsyncGenerator\n\nfrom litestar import Litestar, get\nfrom litestar.response import ServerSentEvent\n\n\nasync def my_generator() -> AsyncGenerator[bytes, None]:\n count = 0\n while count < 10:\n await sleep(0.01)\n count += 1\n yield str(count)\n\n\n@get(path=\"/count\", sync_to_thread=False)\ndef sse_handler() -> ServerSentEvent:\n return ServerSentEvent(my_generator())\n\n\napp = Litestar(route_handlers=[sse_handler])\n", "path": "docs/examples/responses/sse_responses.py"}]}
1,019
333
gh_patches_debug_26741
rasdani/github-patches
git_diff
pre-commit__pre-commit-893
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Incorrect shebang in .git/hooks/pre-commit for python3 only installations The shebang for `.git/hooks/pre-commit` is `#!/usr/bin/env python`. I work with setups where `python3` is the only python in env. Could the shebang be the install python instead? I.e. the installation under `INSTALL_PYTHON = '/usr/bin/python3'` </issue> <code> [start of pre_commit/commands/install_uninstall.py] 1 from __future__ import print_function 2 from __future__ import unicode_literals 3 4 import io 5 import logging 6 import os.path 7 import sys 8 9 from pre_commit import git 10 from pre_commit import output 11 from pre_commit.repository import repositories 12 from pre_commit.util import cmd_output 13 from pre_commit.util import make_executable 14 from pre_commit.util import mkdirp 15 from pre_commit.util import resource_text 16 17 18 logger = logging.getLogger(__name__) 19 20 # This is used to identify the hook file we install 21 PRIOR_HASHES = ( 22 '4d9958c90bc262f47553e2c073f14cfe', 23 'd8ee923c46731b42cd95cc869add4062', 24 '49fd668cb42069aa1b6048464be5d395', 25 '79f09a650522a87b0da915d0d983b2de', 26 'e358c9dae00eac5d06b38dfdb1e33a8c', 27 ) 28 CURRENT_HASH = '138fd403232d2ddd5efb44317e38bf03' 29 TEMPLATE_START = '# start templated\n' 30 TEMPLATE_END = '# end templated\n' 31 32 33 def _hook_paths(git_root, hook_type): 34 pth = os.path.join(git.get_git_dir(git_root), 'hooks', hook_type) 35 return pth, '{}.legacy'.format(pth) 36 37 38 def is_our_script(filename): 39 if not os.path.exists(filename): 40 return False 41 with io.open(filename) as f: 42 contents = f.read() 43 return any(h in contents for h in (CURRENT_HASH,) + PRIOR_HASHES) 44 45 46 def install( 47 runner, store, overwrite=False, hooks=False, hook_type='pre-commit', 48 skip_on_missing_conf=False, 49 ): 50 """Install the pre-commit hooks.""" 51 if cmd_output('git', 'config', 'core.hooksPath', retcode=None)[1].strip(): 52 logger.error( 53 'Cowardly refusing to install hooks with `core.hooksPath` set.\n' 54 'hint: `git config --unset-all core.hooksPath`', 55 ) 56 return 1 57 58 hook_path, legacy_path = _hook_paths(runner.git_root, hook_type) 59 60 mkdirp(os.path.dirname(hook_path)) 61 62 # If we have an existing hook, move it to pre-commit.legacy 63 if os.path.lexists(hook_path) and not is_our_script(hook_path): 64 os.rename(hook_path, legacy_path) 65 66 # If we specify overwrite, we simply delete the legacy file 67 if overwrite and os.path.exists(legacy_path): 68 os.remove(legacy_path) 69 elif os.path.exists(legacy_path): 70 output.write_line( 71 'Running in migration mode with existing hooks at {}\n' 72 'Use -f to use only pre-commit.'.format(legacy_path), 73 ) 74 75 params = { 76 'CONFIG': runner.config_file, 77 'HOOK_TYPE': hook_type, 78 'INSTALL_PYTHON': sys.executable, 79 'SKIP_ON_MISSING_CONFIG': skip_on_missing_conf, 80 } 81 82 with io.open(hook_path, 'w') as hook_file: 83 contents = resource_text('hook-tmpl') 84 before, rest = contents.split(TEMPLATE_START) 85 to_template, after = rest.split(TEMPLATE_END) 86 87 hook_file.write(before + TEMPLATE_START) 88 for line in to_template.splitlines(): 89 var = line.split()[0] 90 hook_file.write('{} = {!r}\n'.format(var, params[var])) 91 hook_file.write(TEMPLATE_END + after) 92 make_executable(hook_path) 93 94 output.write_line('pre-commit installed at {}'.format(hook_path)) 95 96 # If they requested we install all of the hooks, do so. 
97 if hooks: 98 install_hooks(runner, store) 99 100 return 0 101 102 103 def install_hooks(runner, store): 104 for repository in repositories(runner.config, store): 105 repository.require_installed() 106 107 108 def uninstall(runner, hook_type='pre-commit'): 109 """Uninstall the pre-commit hooks.""" 110 hook_path, legacy_path = _hook_paths(runner.git_root, hook_type) 111 112 # If our file doesn't exist or it isn't ours, gtfo. 113 if not os.path.exists(hook_path) or not is_our_script(hook_path): 114 return 0 115 116 os.remove(hook_path) 117 output.write_line('{} uninstalled'.format(hook_type)) 118 119 if os.path.exists(legacy_path): 120 os.rename(legacy_path, hook_path) 121 output.write_line('Restored previous hooks to {}'.format(hook_path)) 122 123 return 0 124 [end of pre_commit/commands/install_uninstall.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pre_commit/commands/install_uninstall.py b/pre_commit/commands/install_uninstall.py --- a/pre_commit/commands/install_uninstall.py +++ b/pre_commit/commands/install_uninstall.py @@ -8,6 +8,7 @@ from pre_commit import git from pre_commit import output +from pre_commit.languages import python from pre_commit.repository import repositories from pre_commit.util import cmd_output from pre_commit.util import make_executable @@ -43,6 +44,16 @@ return any(h in contents for h in (CURRENT_HASH,) + PRIOR_HASHES) +def shebang(): + if sys.platform == 'win32': + py = 'python' + else: + py = python.get_default_version() + if py == 'default': + py = 'python' + return '#!/usr/bin/env {}'.format(py) + + def install( runner, store, overwrite=False, hooks=False, hook_type='pre-commit', skip_on_missing_conf=False, @@ -84,6 +95,8 @@ before, rest = contents.split(TEMPLATE_START) to_template, after = rest.split(TEMPLATE_END) + before = before.replace('#!/usr/bin/env python', shebang()) + hook_file.write(before + TEMPLATE_START) for line in to_template.splitlines(): var = line.split()[0]
{"golden_diff": "diff --git a/pre_commit/commands/install_uninstall.py b/pre_commit/commands/install_uninstall.py\n--- a/pre_commit/commands/install_uninstall.py\n+++ b/pre_commit/commands/install_uninstall.py\n@@ -8,6 +8,7 @@\n \n from pre_commit import git\n from pre_commit import output\n+from pre_commit.languages import python\n from pre_commit.repository import repositories\n from pre_commit.util import cmd_output\n from pre_commit.util import make_executable\n@@ -43,6 +44,16 @@\n return any(h in contents for h in (CURRENT_HASH,) + PRIOR_HASHES)\n \n \n+def shebang():\n+ if sys.platform == 'win32':\n+ py = 'python'\n+ else:\n+ py = python.get_default_version()\n+ if py == 'default':\n+ py = 'python'\n+ return '#!/usr/bin/env {}'.format(py)\n+\n+\n def install(\n runner, store, overwrite=False, hooks=False, hook_type='pre-commit',\n skip_on_missing_conf=False,\n@@ -84,6 +95,8 @@\n before, rest = contents.split(TEMPLATE_START)\n to_template, after = rest.split(TEMPLATE_END)\n \n+ before = before.replace('#!/usr/bin/env python', shebang())\n+\n hook_file.write(before + TEMPLATE_START)\n for line in to_template.splitlines():\n var = line.split()[0]\n", "issue": "Incorrect shebang in .git/hooks/pre-commit for python3 only installations\nThe shebang for `.git/hooks/pre-commit` is `#!/usr/bin/env python`. I work with setups where `python3` is the only python in env.\r\n\r\nCould the shebang be the install python instead? I.e. the installation under `INSTALL_PYTHON = '/usr/bin/python3'`\n", "before_files": [{"content": "from __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport io\nimport logging\nimport os.path\nimport sys\n\nfrom pre_commit import git\nfrom pre_commit import output\nfrom pre_commit.repository import repositories\nfrom pre_commit.util import cmd_output\nfrom pre_commit.util import make_executable\nfrom pre_commit.util import mkdirp\nfrom pre_commit.util import resource_text\n\n\nlogger = logging.getLogger(__name__)\n\n# This is used to identify the hook file we install\nPRIOR_HASHES = (\n '4d9958c90bc262f47553e2c073f14cfe',\n 'd8ee923c46731b42cd95cc869add4062',\n '49fd668cb42069aa1b6048464be5d395',\n '79f09a650522a87b0da915d0d983b2de',\n 'e358c9dae00eac5d06b38dfdb1e33a8c',\n)\nCURRENT_HASH = '138fd403232d2ddd5efb44317e38bf03'\nTEMPLATE_START = '# start templated\\n'\nTEMPLATE_END = '# end templated\\n'\n\n\ndef _hook_paths(git_root, hook_type):\n pth = os.path.join(git.get_git_dir(git_root), 'hooks', hook_type)\n return pth, '{}.legacy'.format(pth)\n\n\ndef is_our_script(filename):\n if not os.path.exists(filename):\n return False\n with io.open(filename) as f:\n contents = f.read()\n return any(h in contents for h in (CURRENT_HASH,) + PRIOR_HASHES)\n\n\ndef install(\n runner, store, overwrite=False, hooks=False, hook_type='pre-commit',\n skip_on_missing_conf=False,\n):\n \"\"\"Install the pre-commit hooks.\"\"\"\n if cmd_output('git', 'config', 'core.hooksPath', retcode=None)[1].strip():\n logger.error(\n 'Cowardly refusing to install hooks with `core.hooksPath` set.\\n'\n 'hint: `git config --unset-all core.hooksPath`',\n )\n return 1\n\n hook_path, legacy_path = _hook_paths(runner.git_root, hook_type)\n\n mkdirp(os.path.dirname(hook_path))\n\n # If we have an existing hook, move it to pre-commit.legacy\n if os.path.lexists(hook_path) and not is_our_script(hook_path):\n os.rename(hook_path, legacy_path)\n\n # If we specify overwrite, we simply delete the legacy file\n if overwrite and os.path.exists(legacy_path):\n os.remove(legacy_path)\n 
elif os.path.exists(legacy_path):\n output.write_line(\n 'Running in migration mode with existing hooks at {}\\n'\n 'Use -f to use only pre-commit.'.format(legacy_path),\n )\n\n params = {\n 'CONFIG': runner.config_file,\n 'HOOK_TYPE': hook_type,\n 'INSTALL_PYTHON': sys.executable,\n 'SKIP_ON_MISSING_CONFIG': skip_on_missing_conf,\n }\n\n with io.open(hook_path, 'w') as hook_file:\n contents = resource_text('hook-tmpl')\n before, rest = contents.split(TEMPLATE_START)\n to_template, after = rest.split(TEMPLATE_END)\n\n hook_file.write(before + TEMPLATE_START)\n for line in to_template.splitlines():\n var = line.split()[0]\n hook_file.write('{} = {!r}\\n'.format(var, params[var]))\n hook_file.write(TEMPLATE_END + after)\n make_executable(hook_path)\n\n output.write_line('pre-commit installed at {}'.format(hook_path))\n\n # If they requested we install all of the hooks, do so.\n if hooks:\n install_hooks(runner, store)\n\n return 0\n\n\ndef install_hooks(runner, store):\n for repository in repositories(runner.config, store):\n repository.require_installed()\n\n\ndef uninstall(runner, hook_type='pre-commit'):\n \"\"\"Uninstall the pre-commit hooks.\"\"\"\n hook_path, legacy_path = _hook_paths(runner.git_root, hook_type)\n\n # If our file doesn't exist or it isn't ours, gtfo.\n if not os.path.exists(hook_path) or not is_our_script(hook_path):\n return 0\n\n os.remove(hook_path)\n output.write_line('{} uninstalled'.format(hook_type))\n\n if os.path.exists(legacy_path):\n os.rename(legacy_path, hook_path)\n output.write_line('Restored previous hooks to {}'.format(hook_path))\n\n return 0\n", "path": "pre_commit/commands/install_uninstall.py"}]}
1,948
304
gh_patches_debug_35746
rasdani/github-patches
git_diff
vispy__vispy-1391
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> SceneGraph: HowTo view single scene in different viewboxes Using https://github.com/vispy/vispy/blob/master/examples/basics/scene/one_scene_four_cams.py to view a single scene in four different viewboxes doesn't work. The scene is actually generated four times, not only once. There are reminders of multi-parenting commented out in the example, but this won't work any more (since removal of multi-parenting). Is it possible to have one scene viewed from different angels (eg. top view, front view and side view) without recreating the scene four times? </issue> <code> [start of examples/basics/scene/one_scene_four_cams.py] 1 # -*- coding: utf-8 -*- 2 # ----------------------------------------------------------------------------- 3 # Copyright (c) Vispy Development Team. All Rights Reserved. 4 # Distributed under the (new) BSD License. See LICENSE.txt for more info. 5 # ----------------------------------------------------------------------------- 6 # vispy: gallery 2 7 8 """ 9 Demonstrating a single scene that is shown in four different viewboxes, 10 each with a different camera. 11 """ 12 13 # todo: the panzoom camera sometimes work, sometimes not. Not sure why. 14 # we should probably make iterating over children deterministic, so that 15 # an error like this becomes easier to reproduce ... 16 17 import sys 18 19 from vispy import app, scene, io 20 21 canvas = scene.SceneCanvas(keys='interactive') 22 canvas.size = 800, 600 23 canvas.show() 24 25 # Create two ViewBoxes, place side-by-side 26 vb1 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene) 27 vb2 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene) 28 vb3 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene) 29 vb4 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene) 30 scenes = vb1.scene, vb2.scene, vb3.scene, vb4.scene 31 32 # Put viewboxes in a grid 33 grid = canvas.central_widget.add_grid() 34 grid.padding = 6 35 grid.add_widget(vb1, 0, 0) 36 grid.add_widget(vb2, 0, 1) 37 grid.add_widget(vb3, 1, 0) 38 grid.add_widget(vb4, 1, 1) 39 40 # Create some visuals to show 41 # AK: Ideally, we could just create one visual that is present in all 42 # scenes, but that results in flicker for the PanZoomCamera, I suspect 43 # due to errors in transform caching. 44 im1 = io.load_crate().astype('float32') / 255 45 #image1 = scene.visuals.Image(im1, grid=(20, 20), parent=scenes) 46 for par in scenes: 47 image = scene.visuals.Image(im1, grid=(20, 20), parent=par) 48 49 #vol1 = np.load(io.load_data_file('volume/stent.npz'))['arr_0'] 50 #volume1 = scene.visuals.Volume(vol1, parent=scenes) 51 #volume1.transform = scene.STTransform(translate=(0, 0, 10)) 52 53 # Assign cameras 54 vb1.camera = scene.BaseCamera() 55 vb2.camera = scene.PanZoomCamera() 56 vb3.camera = scene.TurntableCamera() 57 vb4.camera = scene.FlyCamera() 58 59 60 # If True, show a cuboid at each camera 61 if False: 62 cube = scene.visuals.Cube((3, 3, 5)) 63 cube.transform = scene.STTransform(translate=(0, 0, 6)) 64 for vb in (vb1, vb2, vb3, vb4): 65 vb.camera.parents = scenes 66 cube.add_parent(vb.camera) 67 68 if __name__ == '__main__': 69 if sys.flags.interactive != 1: 70 app.run() 71 [end of examples/basics/scene/one_scene_four_cams.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. 
Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/examples/basics/scene/one_scene_four_cams.py b/examples/basics/scene/one_scene_four_cams.py --- a/examples/basics/scene/one_scene_four_cams.py +++ b/examples/basics/scene/one_scene_four_cams.py @@ -8,11 +8,12 @@ """ Demonstrating a single scene that is shown in four different viewboxes, each with a different camera. -""" -# todo: the panzoom camera sometimes work, sometimes not. Not sure why. -# we should probably make iterating over children deterministic, so that -# an error like this becomes easier to reproduce ... +Note: + This example just creates four scenes using the same visual. + Multiple views are currently not available. See #1124 how this could + be achieved. +""" import sys @@ -22,7 +23,7 @@ canvas.size = 800, 600 canvas.show() -# Create two ViewBoxes, place side-by-side +# Create four ViewBoxes vb1 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene) vb2 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene) vb3 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene) @@ -38,33 +39,16 @@ grid.add_widget(vb4, 1, 1) # Create some visuals to show -# AK: Ideally, we could just create one visual that is present in all -# scenes, but that results in flicker for the PanZoomCamera, I suspect -# due to errors in transform caching. im1 = io.load_crate().astype('float32') / 255 -#image1 = scene.visuals.Image(im1, grid=(20, 20), parent=scenes) for par in scenes: image = scene.visuals.Image(im1, grid=(20, 20), parent=par) -#vol1 = np.load(io.load_data_file('volume/stent.npz'))['arr_0'] -#volume1 = scene.visuals.Volume(vol1, parent=scenes) -#volume1.transform = scene.STTransform(translate=(0, 0, 10)) - # Assign cameras vb1.camera = scene.BaseCamera() vb2.camera = scene.PanZoomCamera() vb3.camera = scene.TurntableCamera() vb4.camera = scene.FlyCamera() - -# If True, show a cuboid at each camera -if False: - cube = scene.visuals.Cube((3, 3, 5)) - cube.transform = scene.STTransform(translate=(0, 0, 6)) - for vb in (vb1, vb2, vb3, vb4): - vb.camera.parents = scenes - cube.add_parent(vb.camera) - if __name__ == '__main__': if sys.flags.interactive != 1: app.run()
{"golden_diff": "diff --git a/examples/basics/scene/one_scene_four_cams.py b/examples/basics/scene/one_scene_four_cams.py\n--- a/examples/basics/scene/one_scene_four_cams.py\n+++ b/examples/basics/scene/one_scene_four_cams.py\n@@ -8,11 +8,12 @@\n \"\"\"\n Demonstrating a single scene that is shown in four different viewboxes,\n each with a different camera.\n-\"\"\"\n \n-# todo: the panzoom camera sometimes work, sometimes not. Not sure why.\n-# we should probably make iterating over children deterministic, so that\n-# an error like this becomes easier to reproduce ...\n+Note:\n+ This example just creates four scenes using the same visual.\n+ Multiple views are currently not available. See #1124 how this could\n+ be achieved.\n+\"\"\"\n \n import sys\n \n@@ -22,7 +23,7 @@\n canvas.size = 800, 600\n canvas.show()\n \n-# Create two ViewBoxes, place side-by-side\n+# Create four ViewBoxes\n vb1 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene)\n vb2 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene)\n vb3 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene)\n@@ -38,33 +39,16 @@\n grid.add_widget(vb4, 1, 1)\n \n # Create some visuals to show\n-# AK: Ideally, we could just create one visual that is present in all\n-# scenes, but that results in flicker for the PanZoomCamera, I suspect\n-# due to errors in transform caching.\n im1 = io.load_crate().astype('float32') / 255\n-#image1 = scene.visuals.Image(im1, grid=(20, 20), parent=scenes)\n for par in scenes:\n image = scene.visuals.Image(im1, grid=(20, 20), parent=par)\n \n-#vol1 = np.load(io.load_data_file('volume/stent.npz'))['arr_0']\n-#volume1 = scene.visuals.Volume(vol1, parent=scenes)\n-#volume1.transform = scene.STTransform(translate=(0, 0, 10))\n-\n # Assign cameras\n vb1.camera = scene.BaseCamera()\n vb2.camera = scene.PanZoomCamera()\n vb3.camera = scene.TurntableCamera()\n vb4.camera = scene.FlyCamera()\n \n-\n-# If True, show a cuboid at each camera\n-if False:\n- cube = scene.visuals.Cube((3, 3, 5))\n- cube.transform = scene.STTransform(translate=(0, 0, 6))\n- for vb in (vb1, vb2, vb3, vb4):\n- vb.camera.parents = scenes\n- cube.add_parent(vb.camera)\n-\n if __name__ == '__main__':\n if sys.flags.interactive != 1:\n app.run()\n", "issue": "SceneGraph: HowTo view single scene in different viewboxes\nUsing https://github.com/vispy/vispy/blob/master/examples/basics/scene/one_scene_four_cams.py to view a single scene in four different viewboxes doesn't work.\n\nThe scene is actually generated four times, not only once. There are reminders of multi-parenting commented out in the example, but this won't work any more (since removal of multi-parenting).\n\nIs it possible to have one scene viewed from different angels (eg. top view, front view and side view) without recreating the scene four times?\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# -----------------------------------------------------------------------------\n# Copyright (c) Vispy Development Team. All Rights Reserved.\n# Distributed under the (new) BSD License. See LICENSE.txt for more info.\n# -----------------------------------------------------------------------------\n# vispy: gallery 2\n\n\"\"\"\nDemonstrating a single scene that is shown in four different viewboxes,\neach with a different camera.\n\"\"\"\n\n# todo: the panzoom camera sometimes work, sometimes not. 
Not sure why.\n# we should probably make iterating over children deterministic, so that\n# an error like this becomes easier to reproduce ...\n\nimport sys\n\nfrom vispy import app, scene, io\n\ncanvas = scene.SceneCanvas(keys='interactive')\ncanvas.size = 800, 600\ncanvas.show()\n\n# Create two ViewBoxes, place side-by-side\nvb1 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene)\nvb2 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene)\nvb3 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene)\nvb4 = scene.widgets.ViewBox(border_color='white', parent=canvas.scene)\nscenes = vb1.scene, vb2.scene, vb3.scene, vb4.scene\n\n# Put viewboxes in a grid\ngrid = canvas.central_widget.add_grid()\ngrid.padding = 6\ngrid.add_widget(vb1, 0, 0)\ngrid.add_widget(vb2, 0, 1)\ngrid.add_widget(vb3, 1, 0)\ngrid.add_widget(vb4, 1, 1)\n\n# Create some visuals to show\n# AK: Ideally, we could just create one visual that is present in all\n# scenes, but that results in flicker for the PanZoomCamera, I suspect\n# due to errors in transform caching.\nim1 = io.load_crate().astype('float32') / 255\n#image1 = scene.visuals.Image(im1, grid=(20, 20), parent=scenes)\nfor par in scenes:\n image = scene.visuals.Image(im1, grid=(20, 20), parent=par)\n\n#vol1 = np.load(io.load_data_file('volume/stent.npz'))['arr_0']\n#volume1 = scene.visuals.Volume(vol1, parent=scenes)\n#volume1.transform = scene.STTransform(translate=(0, 0, 10))\n\n# Assign cameras\nvb1.camera = scene.BaseCamera()\nvb2.camera = scene.PanZoomCamera()\nvb3.camera = scene.TurntableCamera()\nvb4.camera = scene.FlyCamera()\n\n\n# If True, show a cuboid at each camera\nif False:\n cube = scene.visuals.Cube((3, 3, 5))\n cube.transform = scene.STTransform(translate=(0, 0, 6))\n for vb in (vb1, vb2, vb3, vb4):\n vb.camera.parents = scenes\n cube.add_parent(vb.camera)\n\nif __name__ == '__main__':\n if sys.flags.interactive != 1:\n app.run()\n", "path": "examples/basics/scene/one_scene_four_cams.py"}]}
1,465
654
gh_patches_debug_26263
rasdani/github-patches
git_diff
pypa__pip-2303
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Selfcheck failure on Windows I get this warning all the time: ``` There was an error checking the latest version of pip Traceback (most recent call last): File "C:\Python27\lib\site-packages\pip\utils\outdated.py", line 115, in pip_version_check state.save(pypi_version, current_time) File "C:\Python27\lib\site-packages\pip\utils\outdated.py", line 62, in save with open(self.statefile_path) as statefile: IOError: [Errno 2] No such file or directory: u'C:\\Users\\ionel_000\\AppData\\Local\\pip\\Cache\\selfcheck.json' ``` If I create the file, it complains about invalid json. I've put `{}` inside, the warning has gone away, but this seems very wrong to me. </issue> <code> [start of pip/utils/outdated.py] 1 from __future__ import absolute_import 2 3 import datetime 4 import json 5 import logging 6 import os.path 7 import sys 8 9 from pip._vendor import lockfile 10 from pip._vendor import pkg_resources 11 12 from pip.compat import total_seconds 13 from pip.index import PyPI 14 from pip.locations import USER_CACHE_DIR, running_under_virtualenv 15 16 17 SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" 18 19 20 logger = logging.getLogger(__name__) 21 22 23 class VirtualenvSelfCheckState(object): 24 def __init__(self): 25 self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json") 26 27 # Load the existing state 28 try: 29 with open(self.statefile_path) as statefile: 30 self.state = json.load(statefile) 31 except (IOError, ValueError): 32 self.state = {} 33 34 def save(self, pypi_version, current_time): 35 # Attempt to write out our version check file 36 with open(self.statefile_path, "w") as statefile: 37 json.dump( 38 { 39 "last_check": current_time.strftime(SELFCHECK_DATE_FMT), 40 "pypi_version": pypi_version, 41 }, 42 statefile, 43 sort_keys=True, 44 separators=(",", ":") 45 ) 46 47 48 class GlobalSelfCheckState(object): 49 def __init__(self): 50 self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json") 51 52 # Load the existing state 53 try: 54 with open(self.statefile_path) as statefile: 55 self.state = json.load(statefile)[sys.prefix] 56 except (IOError, ValueError, KeyError): 57 self.state = {} 58 59 def save(self, pypi_version, current_time): 60 # Attempt to write out our version check file 61 with lockfile.LockFile(self.statefile_path): 62 with open(self.statefile_path) as statefile: 63 state = json.load(statefile) 64 65 state[sys.prefix] = { 66 "last_check": current_time.strftime(SELFCHECK_DATE_FMT), 67 "pypi_version": pypi_version, 68 } 69 70 with open(self.statefile_path, "w") as statefile: 71 json.dump(state, statefile, sort_keys=True, 72 separators=(",", ":")) 73 74 75 def load_selfcheck_statefile(): 76 if running_under_virtualenv(): 77 return VirtualenvSelfCheckState() 78 else: 79 return GlobalSelfCheckState() 80 81 82 def pip_version_check(session): 83 """Check for an update for pip. 84 85 Limit the frequency of checks to once per week. State is stored either in 86 the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix 87 of the pip script path. 
88 """ 89 import pip # imported here to prevent circular imports 90 pypi_version = None 91 92 try: 93 state = load_selfcheck_statefile() 94 95 current_time = datetime.datetime.utcnow() 96 # Determine if we need to refresh the state 97 if "last_check" in state.state and "pypi_version" in state.state: 98 last_check = datetime.datetime.strptime( 99 state.state["last_check"], 100 SELFCHECK_DATE_FMT 101 ) 102 if total_seconds(current_time - last_check) < 7 * 24 * 60 * 60: 103 pypi_version = state.state["pypi_version"] 104 105 # Refresh the version if we need to or just see if we need to warn 106 if pypi_version is None: 107 resp = session.get( 108 PyPI.pip_json_url, 109 headers={"Accept": "application/json"}, 110 ) 111 resp.raise_for_status() 112 pypi_version = resp.json()["info"]["version"] 113 114 # save that we've performed a check 115 state.save(pypi_version, current_time) 116 117 pip_version = pkg_resources.parse_version(pip.__version__) 118 119 # Determine if our pypi_version is older 120 if pip_version < pkg_resources.parse_version(pypi_version): 121 logger.warning( 122 "You are using pip version %s, however version %s is " 123 "available.\nYou should consider upgrading via the " 124 "'pip install --upgrade pip' command." % (pip.__version__, 125 pypi_version) 126 ) 127 128 except Exception: 129 logger.debug( 130 "There was an error checking the latest version of pip", 131 exc_info=True, 132 ) 133 [end of pip/utils/outdated.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pip/utils/outdated.py b/pip/utils/outdated.py --- a/pip/utils/outdated.py +++ b/pip/utils/outdated.py @@ -1,6 +1,7 @@ from __future__ import absolute_import import datetime +import errno import json import logging import os.path @@ -12,6 +13,7 @@ from pip.compat import total_seconds from pip.index import PyPI from pip.locations import USER_CACHE_DIR, running_under_virtualenv +from pip.utils.filesystem import check_path_owner SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" @@ -57,6 +59,19 @@ self.state = {} def save(self, pypi_version, current_time): + # Check to make sure that we own the directory + if not check_path_owner( + os.path.dirname(self.statefile_path), os.geteuid()): + return + + # Now that we've ensured the directory is owned by this user, we'll go + # ahead and make sure that all our directories are created. + try: + os.makedirs(os.path.dirname(self.statefile_path)) + except OSError as exc: + if exc.errno != errno.EEXIST: + raise + # Attempt to write out our version check file with lockfile.LockFile(self.statefile_path): with open(self.statefile_path) as statefile:
{"golden_diff": "diff --git a/pip/utils/outdated.py b/pip/utils/outdated.py\n--- a/pip/utils/outdated.py\n+++ b/pip/utils/outdated.py\n@@ -1,6 +1,7 @@\n from __future__ import absolute_import\n \n import datetime\n+import errno\n import json\n import logging\n import os.path\n@@ -12,6 +13,7 @@\n from pip.compat import total_seconds\n from pip.index import PyPI\n from pip.locations import USER_CACHE_DIR, running_under_virtualenv\n+from pip.utils.filesystem import check_path_owner\n \n \n SELFCHECK_DATE_FMT = \"%Y-%m-%dT%H:%M:%SZ\"\n@@ -57,6 +59,19 @@\n self.state = {}\n \n def save(self, pypi_version, current_time):\n+ # Check to make sure that we own the directory\n+ if not check_path_owner(\n+ os.path.dirname(self.statefile_path), os.geteuid()):\n+ return\n+\n+ # Now that we've ensured the directory is owned by this user, we'll go\n+ # ahead and make sure that all our directories are created.\n+ try:\n+ os.makedirs(os.path.dirname(self.statefile_path))\n+ except OSError as exc:\n+ if exc.errno != errno.EEXIST:\n+ raise\n+\n # Attempt to write out our version check file\n with lockfile.LockFile(self.statefile_path):\n with open(self.statefile_path) as statefile:\n", "issue": "Selfcheck failure on Windows\nI get this warning all the time:\n\n```\nThere was an error checking the latest version of pip\nTraceback (most recent call last):\n File \"C:\\Python27\\lib\\site-packages\\pip\\utils\\outdated.py\", line 115, in pip_version_check\n state.save(pypi_version, current_time)\n File \"C:\\Python27\\lib\\site-packages\\pip\\utils\\outdated.py\", line 62, in save\n with open(self.statefile_path) as statefile:\nIOError: [Errno 2] No such file or directory: u'C:\\\\Users\\\\ionel_000\\\\AppData\\\\Local\\\\pip\\\\Cache\\\\selfcheck.json'\n```\n\nIf I create the file, it complains about invalid json. 
I've put `{}` inside, the warning has gone away, but this seems very wrong to me.\n\n", "before_files": [{"content": "from __future__ import absolute_import\n\nimport datetime\nimport json\nimport logging\nimport os.path\nimport sys\n\nfrom pip._vendor import lockfile\nfrom pip._vendor import pkg_resources\n\nfrom pip.compat import total_seconds\nfrom pip.index import PyPI\nfrom pip.locations import USER_CACHE_DIR, running_under_virtualenv\n\n\nSELFCHECK_DATE_FMT = \"%Y-%m-%dT%H:%M:%SZ\"\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass VirtualenvSelfCheckState(object):\n def __init__(self):\n self.statefile_path = os.path.join(sys.prefix, \"pip-selfcheck.json\")\n\n # Load the existing state\n try:\n with open(self.statefile_path) as statefile:\n self.state = json.load(statefile)\n except (IOError, ValueError):\n self.state = {}\n\n def save(self, pypi_version, current_time):\n # Attempt to write out our version check file\n with open(self.statefile_path, \"w\") as statefile:\n json.dump(\n {\n \"last_check\": current_time.strftime(SELFCHECK_DATE_FMT),\n \"pypi_version\": pypi_version,\n },\n statefile,\n sort_keys=True,\n separators=(\",\", \":\")\n )\n\n\nclass GlobalSelfCheckState(object):\n def __init__(self):\n self.statefile_path = os.path.join(USER_CACHE_DIR, \"selfcheck.json\")\n\n # Load the existing state\n try:\n with open(self.statefile_path) as statefile:\n self.state = json.load(statefile)[sys.prefix]\n except (IOError, ValueError, KeyError):\n self.state = {}\n\n def save(self, pypi_version, current_time):\n # Attempt to write out our version check file\n with lockfile.LockFile(self.statefile_path):\n with open(self.statefile_path) as statefile:\n state = json.load(statefile)\n\n state[sys.prefix] = {\n \"last_check\": current_time.strftime(SELFCHECK_DATE_FMT),\n \"pypi_version\": pypi_version,\n }\n\n with open(self.statefile_path, \"w\") as statefile:\n json.dump(state, statefile, sort_keys=True,\n separators=(\",\", \":\"))\n\n\ndef load_selfcheck_statefile():\n if running_under_virtualenv():\n return VirtualenvSelfCheckState()\n else:\n return GlobalSelfCheckState()\n\n\ndef pip_version_check(session):\n \"\"\"Check for an update for pip.\n\n Limit the frequency of checks to once per week. 
State is stored either in\n the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix\n of the pip script path.\n \"\"\"\n import pip # imported here to prevent circular imports\n pypi_version = None\n\n try:\n state = load_selfcheck_statefile()\n\n current_time = datetime.datetime.utcnow()\n # Determine if we need to refresh the state\n if \"last_check\" in state.state and \"pypi_version\" in state.state:\n last_check = datetime.datetime.strptime(\n state.state[\"last_check\"],\n SELFCHECK_DATE_FMT\n )\n if total_seconds(current_time - last_check) < 7 * 24 * 60 * 60:\n pypi_version = state.state[\"pypi_version\"]\n\n # Refresh the version if we need to or just see if we need to warn\n if pypi_version is None:\n resp = session.get(\n PyPI.pip_json_url,\n headers={\"Accept\": \"application/json\"},\n )\n resp.raise_for_status()\n pypi_version = resp.json()[\"info\"][\"version\"]\n\n # save that we've performed a check\n state.save(pypi_version, current_time)\n\n pip_version = pkg_resources.parse_version(pip.__version__)\n\n # Determine if our pypi_version is older\n if pip_version < pkg_resources.parse_version(pypi_version):\n logger.warning(\n \"You are using pip version %s, however version %s is \"\n \"available.\\nYou should consider upgrading via the \"\n \"'pip install --upgrade pip' command.\" % (pip.__version__,\n pypi_version)\n )\n\n except Exception:\n logger.debug(\n \"There was an error checking the latest version of pip\",\n exc_info=True,\n )\n", "path": "pip/utils/outdated.py"}]}
1,942
320
gh_patches_debug_3852
rasdani/github-patches
git_diff
electricitymaps__electricitymaps-contrib-1773
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Taiwan real-time data has stopped working Taiwain seems to have been offline recently It used to work correctly, something may have changed in the data source? Kibana error description [here](https://kibana.electricitymap.org/app/kibana#/discover/10af54f0-0c4a-11e9-85c1-1d63df8c862c?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:now-24h,mode:quick,to:now))&_a=(columns:!(message,extra.key,level),filters:!(('$state':(store:appState),meta:(alias:!n,disabled:!f,index:'96f67170-0c49-11e9-85c1-1d63df8c862c',key:level,negate:!f,params:(query:ERROR,type:phrase),type:phrase,value:ERROR),query:(match:(level:(query:ERROR,type:phrase)))),('$state':(store:appState),meta:(alias:!n,disabled:!f,index:'96f67170-0c49-11e9-85c1-1d63df8c862c',key:'@timestamp',negate:!f,params:(query:'2019-02-13T09:56:26.971Z',type:phrase),type:phrase,value:'February%2013th%202019,%2010:56:26.971'),query:(match:('@timestamp':(query:'2019-02-13T09:56:26.971Z',type:phrase))))),index:'96f67170-0c49-11e9-85c1-1d63df8c862c',interval:auto,query:(language:lucene,query:''),sort:!('@timestamp',asc))) </issue> <code> [start of parsers/TW.py] 1 #!/usr/bin/env python3 2 import arrow 3 import requests 4 import pandas 5 import dateutil 6 7 8 def fetch_production(zone_key='TW', session=None, target_datetime=None, logger=None): 9 if target_datetime: 10 raise NotImplementedError('This parser is not yet able to parse past dates') 11 12 url = 'http://data.taipower.com.tw/opendata01/apply/file/d006001/001.txt' 13 response = requests.get(url) 14 data = response.json() 15 16 dumpDate = data[''] 17 prodData = data['aaData'] 18 19 tz = 'Asia/Taipei' 20 dumpDate = arrow.get(dumpDate, 'YYYY-MM-DD HH:mm').replace(tzinfo=dateutil.tz.gettz(tz)) 21 22 objData = pandas.DataFrame(prodData) 23 24 objData.columns = ['fueltype', 'name', 'capacity', 'output', 'percentage', 25 'additional'] 26 27 objData['fueltype'] = objData.fueltype.str.split('(').str[1] 28 objData['fueltype'] = objData.fueltype.str.split(')').str[0] 29 objData.drop('additional', axis=1, inplace=True) 30 objData.drop('percentage', axis=1, inplace=True) 31 32 objData = objData.convert_objects(convert_numeric=True) 33 production = pandas.DataFrame(objData.groupby('fueltype').sum()) 34 production.columns = ['capacity', 'output'] 35 36 coal_capacity = production.ix['Coal'].capacity + production.ix['IPP-Coal'].capacity 37 gas_capacity = production.ix['LNG'].capacity + production.ix['IPP-LNG'].capacity 38 oil_capacity = production.ix['Oil'].capacity + production.ix['Diesel'].capacity 39 40 coal_production = production.ix['Coal'].output + production.ix['IPP-Coal'].output 41 gas_production = production.ix['LNG'].output + production.ix['IPP-LNG'].output 42 oil_production = production.ix['Oil'].output + production.ix['Diesel'].output 43 44 # For storage, note that load will be negative, and generation positive. 
45 # We require the opposite 46 47 returndata = { 48 'zoneKey': zone_key, 49 'datetime': dumpDate.datetime, 50 'production': { 51 'coal': coal_production, 52 'gas': gas_production, 53 'oil': oil_production, 54 'hydro': production.ix['Hydro'].output, 55 'nuclear': production.ix['Nuclear'].output, 56 'solar': production.ix['Solar'].output, 57 'wind': production.ix['Wind'].output, 58 'unknown': production.ix['Co-Gen'].output 59 }, 60 'capacity': { 61 'coal': coal_capacity, 62 'gas': gas_capacity, 63 'oil': oil_capacity, 64 'hydro': production.ix['Hydro'].capacity, 65 'hydro storage':production.ix['Pumping Gen'].capacity, 66 'nuclear': production.ix['Nuclear'].capacity, 67 'solar': production.ix['Solar'].capacity, 68 'wind': production.ix['Wind'].capacity, 69 'unknown': production.ix['Co-Gen'].capacity 70 }, 71 'storage': { 72 'hydro': -1 * production.ix['Pumping Load'].output - production.ix['Pumping Gen'].output 73 }, 74 'source': 'taipower.com.tw' 75 } 76 77 return returndata 78 79 80 if __name__ == '__main__': 81 print(fetch_production()) 82 [end of parsers/TW.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/parsers/TW.py b/parsers/TW.py --- a/parsers/TW.py +++ b/parsers/TW.py @@ -9,7 +9,7 @@ if target_datetime: raise NotImplementedError('This parser is not yet able to parse past dates') - url = 'http://data.taipower.com.tw/opendata01/apply/file/d006001/001.txt' + url = 'http://www.taipower.com.tw/d006/loadGraph/loadGraph/data/genary.txt' response = requests.get(url) data = response.json()
{"golden_diff": "diff --git a/parsers/TW.py b/parsers/TW.py\n--- a/parsers/TW.py\n+++ b/parsers/TW.py\n@@ -9,7 +9,7 @@\n if target_datetime:\n raise NotImplementedError('This parser is not yet able to parse past dates')\n \n- url = 'http://data.taipower.com.tw/opendata01/apply/file/d006001/001.txt'\n+ url = 'http://www.taipower.com.tw/d006/loadGraph/loadGraph/data/genary.txt'\n response = requests.get(url)\n data = response.json()\n", "issue": "Taiwan real-time data has stopped working\nTaiwain seems to have been offline recently\r\nIt used to work correctly, something may have changed in the data source?\r\n\r\nKibana error description [here](https://kibana.electricitymap.org/app/kibana#/discover/10af54f0-0c4a-11e9-85c1-1d63df8c862c?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:now-24h,mode:quick,to:now))&_a=(columns:!(message,extra.key,level),filters:!(('$state':(store:appState),meta:(alias:!n,disabled:!f,index:'96f67170-0c49-11e9-85c1-1d63df8c862c',key:level,negate:!f,params:(query:ERROR,type:phrase),type:phrase,value:ERROR),query:(match:(level:(query:ERROR,type:phrase)))),('$state':(store:appState),meta:(alias:!n,disabled:!f,index:'96f67170-0c49-11e9-85c1-1d63df8c862c',key:'@timestamp',negate:!f,params:(query:'2019-02-13T09:56:26.971Z',type:phrase),type:phrase,value:'February%2013th%202019,%2010:56:26.971'),query:(match:('@timestamp':(query:'2019-02-13T09:56:26.971Z',type:phrase))))),index:'96f67170-0c49-11e9-85c1-1d63df8c862c',interval:auto,query:(language:lucene,query:''),sort:!('@timestamp',asc)))\r\n\n", "before_files": [{"content": "#!/usr/bin/env python3\nimport arrow\nimport requests\nimport pandas\nimport dateutil\n\n\ndef fetch_production(zone_key='TW', session=None, target_datetime=None, logger=None):\n if target_datetime:\n raise NotImplementedError('This parser is not yet able to parse past dates')\n\n url = 'http://data.taipower.com.tw/opendata01/apply/file/d006001/001.txt'\n response = requests.get(url)\n data = response.json()\n\n dumpDate = data['']\n prodData = data['aaData']\n\n tz = 'Asia/Taipei'\n dumpDate = arrow.get(dumpDate, 'YYYY-MM-DD HH:mm').replace(tzinfo=dateutil.tz.gettz(tz))\n\n objData = pandas.DataFrame(prodData)\n\n objData.columns = ['fueltype', 'name', 'capacity', 'output', 'percentage',\n 'additional']\n\n objData['fueltype'] = objData.fueltype.str.split('(').str[1]\n objData['fueltype'] = objData.fueltype.str.split(')').str[0]\n objData.drop('additional', axis=1, inplace=True)\n objData.drop('percentage', axis=1, inplace=True)\n\n objData = objData.convert_objects(convert_numeric=True)\n production = pandas.DataFrame(objData.groupby('fueltype').sum())\n production.columns = ['capacity', 'output']\n\n coal_capacity = production.ix['Coal'].capacity + production.ix['IPP-Coal'].capacity\n gas_capacity = production.ix['LNG'].capacity + production.ix['IPP-LNG'].capacity\n oil_capacity = production.ix['Oil'].capacity + production.ix['Diesel'].capacity\n\n coal_production = production.ix['Coal'].output + production.ix['IPP-Coal'].output\n gas_production = production.ix['LNG'].output + production.ix['IPP-LNG'].output\n oil_production = production.ix['Oil'].output + production.ix['Diesel'].output\n\n # For storage, note that load will be negative, and generation positive.\n # We require the opposite\n\n returndata = {\n 'zoneKey': zone_key,\n 'datetime': dumpDate.datetime,\n 'production': {\n 'coal': coal_production,\n 'gas': gas_production,\n 'oil': oil_production,\n 'hydro': production.ix['Hydro'].output,\n 'nuclear': 
production.ix['Nuclear'].output,\n 'solar': production.ix['Solar'].output,\n 'wind': production.ix['Wind'].output,\n 'unknown': production.ix['Co-Gen'].output\n },\n 'capacity': {\n 'coal': coal_capacity,\n 'gas': gas_capacity,\n 'oil': oil_capacity,\n 'hydro': production.ix['Hydro'].capacity,\n 'hydro storage':production.ix['Pumping Gen'].capacity,\n 'nuclear': production.ix['Nuclear'].capacity,\n 'solar': production.ix['Solar'].capacity,\n 'wind': production.ix['Wind'].capacity,\n 'unknown': production.ix['Co-Gen'].capacity\n },\n 'storage': {\n 'hydro': -1 * production.ix['Pumping Load'].output - production.ix['Pumping Gen'].output\n },\n 'source': 'taipower.com.tw'\n }\n\n return returndata\n\n\nif __name__ == '__main__':\n print(fetch_production())\n", "path": "parsers/TW.py"}]}
1,896
137
gh_patches_debug_25842
rasdani/github-patches
git_diff
amundsen-io__amundsen-1303
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Bug Report: Glue search_tables with Filters and result tables more than 100 items <!--- Provide a general summary of the issue in the Title above --> <!--- Look through existing open and closed issues to see if someone has reported the issue before --> There is a bug while applying Filters for the database which contains more than 100 items. Since glue returns 100 items per page and to move to the next page we need to specify `NextToken`. I have 138 tables, which means I will be iterating 2 times over the result. The filter: ```python { 'Key': 'DatabaseName', 'Value': glue_database_name } ``` Every time I run the code I get different results: the length of the list is always the same - 138. However, the length of the set is always different. It ranges from 1 to 30. I run my check over 10 times. I took look at the documentation and found a proper parameter `MaxResults` for further checking. Since I know precisely desired table count, I put it as 150 and the issue has totally gone. ## Expected Behavior Get the exact same result for filtered tables. ## Current Behavior Query result from [`self._glue.search_tables(**kwargs)`](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/glue_extractor.py#L78) contains duplicates ## Possible Solution I'm not sure, but I think for the next (second) iteration (page, which contains up to 100 items) we are using a new `NextToken` with previous filters. Maybe the problem lies here. ## Steps to Reproduce 1. Have more than 100 glue tables in a single DB in AWS 2. Query it using the abovementioned `DatabaseName` filter 3. Observe duplicates in the list ## Hot-fix 1. Add `MaxResults` to [`kwargs`](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/glue_extractor.py#L80) that is more than your actual size of overall tables 2. Observe a proper behavior ## Context Q: How has this issue affected you? A: It affects our production system ## Your Environment ``` amundsen-databuilder==4.5.3 amundsen-gremlin==0.0.9 Flask==1.1.4 gremlinpython==3.4.9 requests-aws4auth==1.1.1 typing-extensions==3.10.0 overrides==6.1.0 ``` </issue> <code> [start of databuilder/databuilder/extractor/glue_extractor.py] 1 # Copyright Contributors to the Amundsen project. 
2 # SPDX-License-Identifier: Apache-2.0 3 4 from typing import ( 5 Any, Dict, Iterator, List, Union, 6 ) 7 8 import boto3 9 from pyhocon import ConfigFactory, ConfigTree 10 11 from databuilder.extractor.base_extractor import Extractor 12 from databuilder.models.table_metadata import ColumnMetadata, TableMetadata 13 14 15 class GlueExtractor(Extractor): 16 """ 17 Extracts tables and columns metadata from AWS Glue metastore 18 """ 19 20 CLUSTER_KEY = 'cluster' 21 FILTER_KEY = 'filters' 22 DEFAULT_CONFIG = ConfigFactory.from_dict({CLUSTER_KEY: 'gold', FILTER_KEY: None}) 23 24 def init(self, conf: ConfigTree) -> None: 25 conf = conf.with_fallback(GlueExtractor.DEFAULT_CONFIG) 26 self._cluster = conf.get_string(GlueExtractor.CLUSTER_KEY) 27 self._filters = conf.get(GlueExtractor.FILTER_KEY) 28 self._glue = boto3.client('glue') 29 self._extract_iter: Union[None, Iterator] = None 30 31 def extract(self) -> Union[TableMetadata, None]: 32 if not self._extract_iter: 33 self._extract_iter = self._get_extract_iter() 34 try: 35 return next(self._extract_iter) 36 except StopIteration: 37 return None 38 39 def get_scope(self) -> str: 40 return 'extractor.glue' 41 42 def _get_extract_iter(self) -> Iterator[TableMetadata]: 43 """ 44 It gets all tables and yields TableMetadata 45 :return: 46 """ 47 for row in self._get_raw_extract_iter(): 48 columns, i = [], 0 49 50 for column in row['StorageDescriptor']['Columns'] \ 51 + row.get('PartitionKeys', []): 52 columns.append(ColumnMetadata( 53 column['Name'], 54 column['Comment'] if 'Comment' in column else None, 55 column['Type'], 56 i 57 )) 58 i += 1 59 60 yield TableMetadata( 61 'glue', 62 self._cluster, 63 row['DatabaseName'], 64 row['Name'], 65 row.get('Description') or row.get('Parameters', {}).get('comment'), 66 columns, 67 row.get('TableType') == 'VIRTUAL_VIEW', 68 ) 69 70 def _get_raw_extract_iter(self) -> Iterator[Dict[str, Any]]: 71 """ 72 Provides iterator of results row from glue client 73 :return: 74 """ 75 tables = self._search_tables() 76 return iter(tables) 77 78 def _search_tables(self) -> List[Dict[str, Any]]: 79 tables = [] 80 kwargs = {} 81 if self._filters is not None: 82 kwargs['Filters'] = self._filters 83 data = self._glue.search_tables(**kwargs) 84 tables += data['TableList'] 85 while 'NextToken' in data: 86 token = data['NextToken'] 87 kwargs['NextToken'] = token 88 data = self._glue.search_tables(**kwargs) 89 tables += data['TableList'] 90 return tables 91 [end of databuilder/databuilder/extractor/glue_extractor.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/databuilder/databuilder/extractor/glue_extractor.py b/databuilder/databuilder/extractor/glue_extractor.py
--- a/databuilder/databuilder/extractor/glue_extractor.py
+++ b/databuilder/databuilder/extractor/glue_extractor.py
@@ -19,12 +19,14 @@
 
     CLUSTER_KEY = 'cluster'
     FILTER_KEY = 'filters'
-    DEFAULT_CONFIG = ConfigFactory.from_dict({CLUSTER_KEY: 'gold', FILTER_KEY: None})
+    MAX_RESULTS_KEY = 'max_results'
+    DEFAULT_CONFIG = ConfigFactory.from_dict({CLUSTER_KEY: 'gold', FILTER_KEY: None, MAX_RESULTS_KEY: 500})
 
     def init(self, conf: ConfigTree) -> None:
         conf = conf.with_fallback(GlueExtractor.DEFAULT_CONFIG)
         self._cluster = conf.get_string(GlueExtractor.CLUSTER_KEY)
         self._filters = conf.get(GlueExtractor.FILTER_KEY)
+        self._max_results = conf.get(GlueExtractor.MAX_RESULTS_KEY)
         self._glue = boto3.client('glue')
         self._extract_iter: Union[None, Iterator] = None
 
@@ -80,6 +82,7 @@
         kwargs = {}
         if self._filters is not None:
             kwargs['Filters'] = self._filters
+        kwargs['MaxResults'] = self._max_results
         data = self._glue.search_tables(**kwargs)
         tables += data['TableList']
         while 'NextToken' in data:
{"golden_diff": "diff --git a/databuilder/databuilder/extractor/glue_extractor.py b/databuilder/databuilder/extractor/glue_extractor.py\n--- a/databuilder/databuilder/extractor/glue_extractor.py\n+++ b/databuilder/databuilder/extractor/glue_extractor.py\n@@ -19,12 +19,14 @@\n \n CLUSTER_KEY = 'cluster'\n FILTER_KEY = 'filters'\n- DEFAULT_CONFIG = ConfigFactory.from_dict({CLUSTER_KEY: 'gold', FILTER_KEY: None})\n+ MAX_RESULTS_KEY = 'max_results'\n+ DEFAULT_CONFIG = ConfigFactory.from_dict({CLUSTER_KEY: 'gold', FILTER_KEY: None, MAX_RESULTS_KEY: 500})\n \n def init(self, conf: ConfigTree) -> None:\n conf = conf.with_fallback(GlueExtractor.DEFAULT_CONFIG)\n self._cluster = conf.get_string(GlueExtractor.CLUSTER_KEY)\n self._filters = conf.get(GlueExtractor.FILTER_KEY)\n+ self._max_results = conf.get(GlueExtractor.MAX_RESULTS_KEY)\n self._glue = boto3.client('glue')\n self._extract_iter: Union[None, Iterator] = None\n \n@@ -80,6 +82,7 @@\n kwargs = {}\n if self._filters is not None:\n kwargs['Filters'] = self._filters\n+ kwargs['MaxResults'] = self._max_results\n data = self._glue.search_tables(**kwargs)\n tables += data['TableList']\n while 'NextToken' in data:\n", "issue": "Bug Report: Glue search_tables with Filters and result tables more than 100 items\n<!--- Provide a general summary of the issue in the Title above -->\r\n<!--- Look through existing open and closed issues to see if someone has reported the issue before -->\r\n\r\nThere is a bug while applying Filters for the database which contains more than 100 items. Since glue returns 100 items per page and to move to the next page we need to specify `NextToken`.\r\nI have 138 tables, which means I will be iterating 2 times over the result.\r\n\r\nThe filter:\r\n```python\r\n{\r\n 'Key': 'DatabaseName',\r\n 'Value': glue_database_name\r\n}\r\n```\r\n\r\nEvery time I run the code I get different results: the length of the list is always the same - 138. However, the length of the set is always different. It ranges from 1 to 30.\r\nI run my check over 10 times.\r\n\r\nI took look at the documentation and found a proper parameter `MaxResults` for further checking. Since I know precisely desired table count, I put it as 150 and the issue has totally gone.\r\n\r\n## Expected Behavior\r\nGet the exact same result for filtered tables.\r\n\r\n## Current Behavior\r\nQuery result from [`self._glue.search_tables(**kwargs)`](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/glue_extractor.py#L78) contains duplicates\r\n\r\n## Possible Solution\r\nI'm not sure, but I think for the next (second) iteration (page, which contains up to 100 items) we are using a new `NextToken` with previous filters. Maybe the problem lies here.\r\n\r\n## Steps to Reproduce\r\n1. Have more than 100 glue tables in a single DB in AWS\r\n2. Query it using the abovementioned `DatabaseName` filter\r\n3. Observe duplicates in the list\r\n\r\n## Hot-fix\r\n1. Add `MaxResults` to [`kwargs`](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/glue_extractor.py#L80) that is more than your actual size of overall tables\r\n2. 
Observe a proper behavior\r\n\r\n## Context\r\nQ: How has this issue affected you?\r\nA: It affects our production system\r\n\r\n## Your Environment\r\n```\r\namundsen-databuilder==4.5.3\r\namundsen-gremlin==0.0.9\r\nFlask==1.1.4\r\ngremlinpython==3.4.9\r\nrequests-aws4auth==1.1.1\r\ntyping-extensions==3.10.0\r\noverrides==6.1.0\r\n```\n", "before_files": [{"content": "# Copyright Contributors to the Amundsen project.\n# SPDX-License-Identifier: Apache-2.0\n\nfrom typing import (\n Any, Dict, Iterator, List, Union,\n)\n\nimport boto3\nfrom pyhocon import ConfigFactory, ConfigTree\n\nfrom databuilder.extractor.base_extractor import Extractor\nfrom databuilder.models.table_metadata import ColumnMetadata, TableMetadata\n\n\nclass GlueExtractor(Extractor):\n \"\"\"\n Extracts tables and columns metadata from AWS Glue metastore\n \"\"\"\n\n CLUSTER_KEY = 'cluster'\n FILTER_KEY = 'filters'\n DEFAULT_CONFIG = ConfigFactory.from_dict({CLUSTER_KEY: 'gold', FILTER_KEY: None})\n\n def init(self, conf: ConfigTree) -> None:\n conf = conf.with_fallback(GlueExtractor.DEFAULT_CONFIG)\n self._cluster = conf.get_string(GlueExtractor.CLUSTER_KEY)\n self._filters = conf.get(GlueExtractor.FILTER_KEY)\n self._glue = boto3.client('glue')\n self._extract_iter: Union[None, Iterator] = None\n\n def extract(self) -> Union[TableMetadata, None]:\n if not self._extract_iter:\n self._extract_iter = self._get_extract_iter()\n try:\n return next(self._extract_iter)\n except StopIteration:\n return None\n\n def get_scope(self) -> str:\n return 'extractor.glue'\n\n def _get_extract_iter(self) -> Iterator[TableMetadata]:\n \"\"\"\n It gets all tables and yields TableMetadata\n :return:\n \"\"\"\n for row in self._get_raw_extract_iter():\n columns, i = [], 0\n\n for column in row['StorageDescriptor']['Columns'] \\\n + row.get('PartitionKeys', []):\n columns.append(ColumnMetadata(\n column['Name'],\n column['Comment'] if 'Comment' in column else None,\n column['Type'],\n i\n ))\n i += 1\n\n yield TableMetadata(\n 'glue',\n self._cluster,\n row['DatabaseName'],\n row['Name'],\n row.get('Description') or row.get('Parameters', {}).get('comment'),\n columns,\n row.get('TableType') == 'VIRTUAL_VIEW',\n )\n\n def _get_raw_extract_iter(self) -> Iterator[Dict[str, Any]]:\n \"\"\"\n Provides iterator of results row from glue client\n :return:\n \"\"\"\n tables = self._search_tables()\n return iter(tables)\n\n def _search_tables(self) -> List[Dict[str, Any]]:\n tables = []\n kwargs = {}\n if self._filters is not None:\n kwargs['Filters'] = self._filters\n data = self._glue.search_tables(**kwargs)\n tables += data['TableList']\n while 'NextToken' in data:\n token = data['NextToken']\n kwargs['NextToken'] = token\n data = self._glue.search_tables(**kwargs)\n tables += data['TableList']\n return tables\n", "path": "databuilder/databuilder/extractor/glue_extractor.py"}]}
1,944
339
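The Glue record above comes down to paginating `search_tables` correctly. As a standalone illustration (separate from the Amundsen extractor code), a minimal boto3 loop that carries `NextToken` forward and pins `MaxResults` on every request could look like this; the helper name and the 100-item page size are our own choices, not part of the patch:

```python
import boto3


def search_all_tables(database_name, max_results=100):
    """Collect every Glue table matching a DatabaseName filter, page by page."""
    glue = boto3.client("glue")
    kwargs = {
        "Filters": [{"Key": "DatabaseName", "Value": database_name}],
        # Keep the page size on every call, including the follow-up pages.
        "MaxResults": max_results,
    }
    tables = []
    while True:
        page = glue.search_tables(**kwargs)
        tables.extend(page["TableList"])
        token = page.get("NextToken")
        if not token:
            break
        # Same Filters and MaxResults, only the token changes between pages.
        kwargs["NextToken"] = token
    return tables
```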
gh_patches_debug_23504
rasdani/github-patches
git_diff
iterative__dvc-8197
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> installing from Ubuntu repo does not install s3 adapter # Bug Report DVC version 2.6.3 ![image](https://user-images.githubusercontent.com/167835/186968532-080d228a-054f-4688-8967-8ec62cd17458.png) DVC version 2.21.1 ![image](https://user-images.githubusercontent.com/167835/186968664-c2b3edcb-2dca-4eba-9cee-fbdd472979eb.png) </issue> <code> [start of scripts/pyinstaller/build.py] 1 import os 2 import pathlib 3 from subprocess import STDOUT, check_call 4 5 path = pathlib.Path(__file__).parent.absolute() 6 hooks = path / "hooks" 7 dvc = path.parent.parent / "dvc" 8 entry = dvc / "__main__.py" 9 10 check_call( 11 [ 12 "pyinstaller", 13 "--additional-hooks-dir", 14 os.fspath(hooks), 15 "--name", 16 "dvc", 17 "-y", 18 os.fspath(entry), 19 ], 20 cwd=path, 21 stderr=STDOUT, 22 ) 23 24 check_call( 25 [ 26 path / "dist" / "dvc" / "dvc", 27 "doctor", 28 ], 29 stderr=STDOUT, 30 ) 31 [end of scripts/pyinstaller/build.py] [start of scripts/pyinstaller/hooks/hook-dvc.py] 1 from PyInstaller.utils.hooks import ( # pylint:disable=import-error 2 copy_metadata, 3 ) 4 5 # needed for `dvc doctor` to show dep versions 6 datas = copy_metadata("adlfs", recursive=True) 7 datas += copy_metadata("knack") 8 datas += copy_metadata("gcsfs") 9 datas += copy_metadata("pyarrow") 10 datas += copy_metadata("pydrive2") 11 datas += copy_metadata("s3fs", recursive=True) 12 datas += copy_metadata("boto3") 13 datas += copy_metadata("ossfs") 14 datas += copy_metadata("sshfs") 15 datas += copy_metadata("webdav4") 16 datas += copy_metadata("aiohttp") 17 datas += copy_metadata("aiohttp_retry") 18 19 # https://github.com/pypa/setuptools/issues/1963 20 hiddenimports = ["pkg_resources.py2_warn"] 21 [end of scripts/pyinstaller/hooks/hook-dvc.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/scripts/pyinstaller/build.py b/scripts/pyinstaller/build.py
--- a/scripts/pyinstaller/build.py
+++ b/scripts/pyinstaller/build.py
@@ -1,6 +1,6 @@
 import os
 import pathlib
-from subprocess import STDOUT, check_call
+from subprocess import STDOUT, check_call, check_output
 
 path = pathlib.Path(__file__).parent.absolute()
 hooks = path / "hooks"
@@ -21,10 +21,27 @@
     stderr=STDOUT,
 )
 
-check_call(
+out = check_output(
     [
         path / "dist" / "dvc" / "dvc",
         "doctor",
     ],
     stderr=STDOUT,
-)
+).decode()
+
+remotes = [
+    "s3",
+    "oss",
+    "gdrive",
+    "gs",
+    "hdfs",
+    "http",
+    "webhdfs",
+    "azure",
+    "ssh",
+    "webdav",
+]
+
+print(out)
+for remote in remotes:
+    assert f"\t{remote}" in out, f"Missing support for {remote}"
diff --git a/scripts/pyinstaller/hooks/hook-dvc.py b/scripts/pyinstaller/hooks/hook-dvc.py
--- a/scripts/pyinstaller/hooks/hook-dvc.py
+++ b/scripts/pyinstaller/hooks/hook-dvc.py
@@ -16,5 +16,15 @@
 datas += copy_metadata("aiohttp")
 datas += copy_metadata("aiohttp_retry")
 
-# https://github.com/pypa/setuptools/issues/1963
-hiddenimports = ["pkg_resources.py2_warn"]
+hiddenimports = [
+    "dvc_azure",
+    "dvc_gdrive",
+    "dvc_gs",
+    "dvc_hdfs",
+    "dvc_oss",
+    "dvc_s3",
+    "dvc_webdav",
+    "dvc_webhdfs",
+    # https://github.com/pypa/setuptools/issues/1963
+    "pkg_resources.py2_warn",
+]
{"golden_diff": "diff --git a/scripts/pyinstaller/build.py b/scripts/pyinstaller/build.py\n--- a/scripts/pyinstaller/build.py\n+++ b/scripts/pyinstaller/build.py\n@@ -1,6 +1,6 @@\n import os\n import pathlib\n-from subprocess import STDOUT, check_call\n+from subprocess import STDOUT, check_call, check_output\n \n path = pathlib.Path(__file__).parent.absolute()\n hooks = path / \"hooks\"\n@@ -21,10 +21,27 @@\n stderr=STDOUT,\n )\n \n-check_call(\n+out = check_output(\n [\n path / \"dist\" / \"dvc\" / \"dvc\",\n \"doctor\",\n ],\n stderr=STDOUT,\n-)\n+).decode()\n+\n+remotes = [\n+ \"s3\",\n+ \"oss\",\n+ \"gdrive\",\n+ \"gs\",\n+ \"hdfs\",\n+ \"http\",\n+ \"webhdfs\",\n+ \"azure\",\n+ \"ssh\",\n+ \"webdav\",\n+]\n+\n+print(out)\n+for remote in remotes:\n+ assert f\"\\t{remote}\" in out, f\"Missing support for {remote}\"\ndiff --git a/scripts/pyinstaller/hooks/hook-dvc.py b/scripts/pyinstaller/hooks/hook-dvc.py\n--- a/scripts/pyinstaller/hooks/hook-dvc.py\n+++ b/scripts/pyinstaller/hooks/hook-dvc.py\n@@ -16,5 +16,15 @@\n datas += copy_metadata(\"aiohttp\")\n datas += copy_metadata(\"aiohttp_retry\")\n \n-# https://github.com/pypa/setuptools/issues/1963\n-hiddenimports = [\"pkg_resources.py2_warn\"]\n+hiddenimports = [\n+ \"dvc_azure\",\n+ \"dvc_gdrive\",\n+ \"dvc_gs\",\n+ \"dvc_hdfs\",\n+ \"dvc_oss\",\n+ \"dvc_s3\",\n+ \"dvc_webdav\",\n+ \"dvc_webhdfs\",\n+ # https://github.com/pypa/setuptools/issues/1963\n+ \"pkg_resources.py2_warn\",\n+]\n", "issue": "installing from Ubuntu repo does not install s3 adapter\n# Bug Report\r\n\r\nDVC version 2.6.3\r\n\r\n![image](https://user-images.githubusercontent.com/167835/186968532-080d228a-054f-4688-8967-8ec62cd17458.png)\r\n\r\nDVC version 2.21.1\r\n\r\n![image](https://user-images.githubusercontent.com/167835/186968664-c2b3edcb-2dca-4eba-9cee-fbdd472979eb.png)\r\n\n", "before_files": [{"content": "import os\nimport pathlib\nfrom subprocess import STDOUT, check_call\n\npath = pathlib.Path(__file__).parent.absolute()\nhooks = path / \"hooks\"\ndvc = path.parent.parent / \"dvc\"\nentry = dvc / \"__main__.py\"\n\ncheck_call(\n [\n \"pyinstaller\",\n \"--additional-hooks-dir\",\n os.fspath(hooks),\n \"--name\",\n \"dvc\",\n \"-y\",\n os.fspath(entry),\n ],\n cwd=path,\n stderr=STDOUT,\n)\n\ncheck_call(\n [\n path / \"dist\" / \"dvc\" / \"dvc\",\n \"doctor\",\n ],\n stderr=STDOUT,\n)\n", "path": "scripts/pyinstaller/build.py"}, {"content": "from PyInstaller.utils.hooks import ( # pylint:disable=import-error\n copy_metadata,\n)\n\n# needed for `dvc doctor` to show dep versions\ndatas = copy_metadata(\"adlfs\", recursive=True)\ndatas += copy_metadata(\"knack\")\ndatas += copy_metadata(\"gcsfs\")\ndatas += copy_metadata(\"pyarrow\")\ndatas += copy_metadata(\"pydrive2\")\ndatas += copy_metadata(\"s3fs\", recursive=True)\ndatas += copy_metadata(\"boto3\")\ndatas += copy_metadata(\"ossfs\")\ndatas += copy_metadata(\"sshfs\")\ndatas += copy_metadata(\"webdav4\")\ndatas += copy_metadata(\"aiohttp\")\ndatas += copy_metadata(\"aiohttp_retry\")\n\n# https://github.com/pypa/setuptools/issues/1963\nhiddenimports = [\"pkg_resources.py2_warn\"]\n", "path": "scripts/pyinstaller/hooks/hook-dvc.py"}]}
1,119
457
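The DVC record above is really about catching missing remote plugins at packaging time. A stripped-down version of that smoke test, runnable against any built binary, is sketched below; the remote names and the tab-prefixed `dvc doctor` output format are taken from the patch in this record rather than verified independently:

```python
from subprocess import STDOUT, check_output

# Remotes we expect the bundle to advertise; trim or extend as needed.
EXPECTED_REMOTES = ["s3", "gs", "azure", "ssh", "hdfs", "http", "webdav"]


def assert_remotes(binary_path):
    """Run `<binary> doctor` and fail if any expected remote is not listed."""
    out = check_output([binary_path, "doctor"], stderr=STDOUT).decode()
    missing = [r for r in EXPECTED_REMOTES if f"\t{r}" not in out]
    if missing:
        raise SystemExit(f"bundle is missing remote support for: {', '.join(missing)}")
```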
gh_patches_debug_5606
rasdani/github-patches
git_diff
ansible__ansible-lint-477
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> False positive EANSIBLE0014 also flags vars in shell task # Issue Type - Bug report # Ansible and Ansible Lint details ``` ansible --version ansible 2.3.0.0 ansible-lint --version ansible-lint 3.4.13 ``` - ansible installation method: pip - ansible-lint installation method: pip # Desired Behaviour EANSIBLE0014 should validate only command task, not shell. # Actual Behaviour (Bug report only) When ansible-lint validating playbook with shell tasks with env vars ``` - hosts: "localhost" gather_facts: no become: no tasks: - shell: 'MYVAR="$(date)" env | grep MY' ``` it fails and complains about Env vars shouldn't be in command ``` test-play.yaml:5: [EANSIBLE0014] Environment variables don't work as part of command ``` </issue> <code> [start of lib/ansiblelint/rules/EnvVarsInCommandRule.py] 1 # Copyright (c) 2016 Will Thames <[email protected]> 2 # 3 # Permission is hereby granted, free of charge, to any person obtaining a copy 4 # of this software and associated documentation files (the "Software"), to deal 5 # in the Software without restriction, including without limitation the rights 6 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 # copies of the Software, and to permit persons to whom the Software is 8 # furnished to do so, subject to the following conditions: 9 # 10 # The above copyright notice and this permission notice shall be included in 11 # all copies or substantial portions of the Software. 12 # 13 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 # THE SOFTWARE. 20 21 from ansiblelint import AnsibleLintRule 22 from ansiblelint.utils import LINE_NUMBER_KEY, FILENAME_KEY, get_first_cmd_arg 23 24 25 class EnvVarsInCommandRule(AnsibleLintRule): 26 id = '304' 27 shortdesc = "Environment variables don't work as part of command" 28 description = ( 29 'Environment variables should be passed to ``shell`` or ``command`` ' 30 'through environment argument' 31 ) 32 severity = 'VERY_HIGH' 33 tags = ['command-shell', 'bug', 'ANSIBLE0014'] 34 version_added = 'historic' 35 36 expected_args = ['chdir', 'creates', 'executable', 'removes', 'stdin', 'warn', 37 'cmd', '__ansible_module__', '__ansible_arguments__', 38 LINE_NUMBER_KEY, FILENAME_KEY] 39 40 def matchtask(self, file, task): 41 if task["action"]["__ansible_module__"] in ['shell', 'command']: 42 first_cmd_arg = get_first_cmd_arg(task) 43 if not first_cmd_arg: 44 return 45 46 return any([arg not in self.expected_args for arg in task['action']] + 47 ["=" in first_cmd_arg]) 48 [end of lib/ansiblelint/rules/EnvVarsInCommandRule.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/lib/ansiblelint/rules/EnvVarsInCommandRule.py b/lib/ansiblelint/rules/EnvVarsInCommandRule.py
--- a/lib/ansiblelint/rules/EnvVarsInCommandRule.py
+++ b/lib/ansiblelint/rules/EnvVarsInCommandRule.py
@@ -38,7 +38,7 @@
                      LINE_NUMBER_KEY, FILENAME_KEY]
 
     def matchtask(self, file, task):
-        if task["action"]["__ansible_module__"] in ['shell', 'command']:
+        if task["action"]["__ansible_module__"] in ['command']:
             first_cmd_arg = get_first_cmd_arg(task)
             if not first_cmd_arg:
                 return
{"golden_diff": "diff --git a/lib/ansiblelint/rules/EnvVarsInCommandRule.py b/lib/ansiblelint/rules/EnvVarsInCommandRule.py\n--- a/lib/ansiblelint/rules/EnvVarsInCommandRule.py\n+++ b/lib/ansiblelint/rules/EnvVarsInCommandRule.py\n@@ -38,7 +38,7 @@\n LINE_NUMBER_KEY, FILENAME_KEY]\n \n def matchtask(self, file, task):\n- if task[\"action\"][\"__ansible_module__\"] in ['shell', 'command']:\n+ if task[\"action\"][\"__ansible_module__\"] in ['command']:\n first_cmd_arg = get_first_cmd_arg(task)\n if not first_cmd_arg:\n return\n", "issue": "False positive EANSIBLE0014 also flags vars in shell task\n# Issue Type\r\n- Bug report\r\n\r\n# Ansible and Ansible Lint details\r\n```\r\nansible --version\r\nansible 2.3.0.0\r\nansible-lint --version\r\nansible-lint 3.4.13\r\n```\r\n\r\n- ansible installation method: pip\r\n- ansible-lint installation method: pip\r\n\r\n# Desired Behaviour\r\n\r\nEANSIBLE0014 should validate only command task, not shell.\r\n\r\n# Actual Behaviour (Bug report only)\r\n\r\nWhen ansible-lint validating playbook with shell tasks with env vars\r\n```\r\n- hosts: \"localhost\"\r\n gather_facts: no\r\n become: no\r\n tasks:\r\n - shell: 'MYVAR=\"$(date)\" env | grep MY'\r\n```\r\nit fails and complains about Env vars shouldn't be in command\r\n```\r\ntest-play.yaml:5: [EANSIBLE0014] Environment variables don't work as part of command\r\n```\r\n\n", "before_files": [{"content": "# Copyright (c) 2016 Will Thames <[email protected]>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\n\nfrom ansiblelint import AnsibleLintRule\nfrom ansiblelint.utils import LINE_NUMBER_KEY, FILENAME_KEY, get_first_cmd_arg\n\n\nclass EnvVarsInCommandRule(AnsibleLintRule):\n id = '304'\n shortdesc = \"Environment variables don't work as part of command\"\n description = (\n 'Environment variables should be passed to ``shell`` or ``command`` '\n 'through environment argument'\n )\n severity = 'VERY_HIGH'\n tags = ['command-shell', 'bug', 'ANSIBLE0014']\n version_added = 'historic'\n\n expected_args = ['chdir', 'creates', 'executable', 'removes', 'stdin', 'warn',\n 'cmd', '__ansible_module__', '__ansible_arguments__',\n LINE_NUMBER_KEY, FILENAME_KEY]\n\n def matchtask(self, file, task):\n if task[\"action\"][\"__ansible_module__\"] in ['shell', 'command']:\n first_cmd_arg = get_first_cmd_arg(task)\n if not first_cmd_arg:\n return\n\n return any([arg not in self.expected_args for arg in task['action']] +\n [\"=\" in first_cmd_arg])\n", "path": "lib/ansiblelint/rules/EnvVarsInCommandRule.py"}]}
1,335
149
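For the ansible-lint record, the heuristic behind rule 304 is essentially "the first token of the command line contains an `=`". A toy reproduction, splitting a raw string on whitespace instead of using ansible-lint's task parsing (a deliberate simplification), shows why the shell example in the issue trips it and why the fix scopes the rule to the `command` module only:

```python
def looks_like_inline_env(cmdline: str) -> bool:
    """True when the command starts with an inline VAR=value assignment."""
    tokens = cmdline.split()
    return bool(tokens) and "=" in tokens[0]


# Legitimate in a `shell` task, but flagged when the rule also covered shell:
assert looks_like_inline_env('MYVAR="$(date)" env | grep MY')
# A plain command without inline assignments is never flagged:
assert not looks_like_inline_env('systemctl restart nginx')
```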
gh_patches_debug_20627
rasdani/github-patches
git_diff
ciudadanointeligente__votainteligente-portal-electoral-283
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Candidate has_answered siempre en false ¿Cómo se hace para que deje de mostrar el enlace a twitter para candidatos que tienen todas las respuestas? ¿Cómo se hace para cambiar "pídele" por "pedile"? </issue> <code> [start of elections/models.py] 1 # coding=utf-8 2 from django.db import models 3 from autoslug import AutoSlugField 4 from taggit.managers import TaggableManager 5 from django.core.urlresolvers import reverse 6 from popolo.models import Person, Area 7 from django.utils.translation import ugettext_lazy as _ 8 from markdown_deux.templatetags.markdown_deux_tags import markdown_allowed 9 from candidator.models import Category, Topic as CanTopic 10 from picklefield.fields import PickledObjectField 11 from django.conf import settings 12 from django.utils.encoding import python_2_unicode_compatible 13 from django.contrib.flatpages.models import FlatPage 14 import copy 15 16 17 class ExtraInfoMixin(models.Model): 18 extra_info = PickledObjectField(default={}) 19 20 class Meta: 21 abstract = True 22 23 def __init__(self, *args, **kwargs): 24 super(ExtraInfoMixin, self).__init__(*args, **kwargs) 25 default_extra_info = copy.copy(self.default_extra_info) 26 default_extra_info.update(self.extra_info) 27 self.extra_info = default_extra_info 28 29 30 class Candidate(Person, ExtraInfoMixin): 31 election = models.ForeignKey('Election', related_name='candidates', null=True) 32 33 default_extra_info = settings.DEFAULT_CANDIDATE_EXTRA_INFO 34 35 @property 36 def twitter(self): 37 links = self.contact_details.filter(contact_type="TWITTER") 38 if links: 39 return links.first() 40 41 class Meta: 42 verbose_name = _("Candidato") 43 verbose_name_plural = _("Candidatos") 44 45 46 class CandidateFlatPage(FlatPage): 47 candidate = models.ForeignKey(Candidate, related_name='flatpages') 48 49 class Meta: 50 verbose_name = _(u"Página estáticas por candidato") 51 verbose_name_plural = _(u"Páginas estáticas por candidato") 52 53 def get_absolute_url(self): 54 return reverse('candidate_flatpage', kwargs={'election_slug': self.candidate.election.slug, 55 'slug': self.candidate.id, 56 'url': self.url 57 } 58 ) 59 60 61 class PersonalData(models.Model): 62 candidate = models.ForeignKey('Candidate', related_name="personal_datas") 63 label = models.CharField(max_length=512) 64 value = models.CharField(max_length=1024) 65 66 67 class Topic(CanTopic): 68 class Meta: 69 proxy = True 70 verbose_name = _(u"Pregunta") 71 verbose_name_plural = _(u"Preguntas") 72 73 @property 74 def election(self): 75 category = QuestionCategory.objects.get(category_ptr=self.category) 76 return category.election 77 78 79 @python_2_unicode_compatible 80 class QuestionCategory(Category): 81 election = models.ForeignKey('Election', related_name='categories', null=True) 82 83 def __str__(self): 84 return u'<%s> in <%s>' % (self.name, self.election.name) 85 86 class Meta: 87 verbose_name = _(u"Categoría de pregunta") 88 verbose_name_plural = _(u"Categorías de pregunta") 89 90 91 class Election(ExtraInfoMixin, models.Model): 92 name = models.CharField(max_length=255) 93 slug = AutoSlugField(populate_from='name', unique=True) 94 description = models.TextField(blank=True) 95 tags = TaggableManager(blank=True) 96 searchable = models.BooleanField(default=True) 97 highlighted = models.BooleanField(default=False) 98 extra_info_title = models.CharField(max_length=50, blank=True, null=True) 99 extra_info_content = models.TextField(max_length=3000, 
blank=True, null=True, help_text=_("Puedes usar Markdown. <br/> ") 100 + markdown_allowed()) 101 uses_preguntales = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar preguntales?")) 102 uses_ranking = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar ranking")) 103 uses_face_to_face = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar frente a frente")) 104 uses_soul_mate = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar 1/2 naranja")) 105 uses_questionary = models.BooleanField(default=True, help_text=_(u"Esta elección debe usar cuestionario")) 106 107 default_extra_info = settings.DEFAULT_ELECTION_EXTRA_INFO 108 area = models.ForeignKey(Area, null=True, related_name="elections") 109 110 def __unicode__(self): 111 return self.name 112 113 def get_absolute_url(self): 114 return reverse('election_view', kwargs={'slug': self.slug}) 115 116 def get_extra_info_url(self): 117 return reverse('election_extra_info', kwargs={'slug': self.slug}) 118 119 class Meta: 120 verbose_name = _(u'Mi Elección') 121 verbose_name_plural = _(u'Mis Elecciones') 122 [end of elections/models.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/elections/models.py b/elections/models.py
--- a/elections/models.py
+++ b/elections/models.py
@@ -6,7 +6,7 @@
 from popolo.models import Person, Area
 from django.utils.translation import ugettext_lazy as _
 from markdown_deux.templatetags.markdown_deux_tags import markdown_allowed
-from candidator.models import Category, Topic as CanTopic
+from candidator.models import Category, Topic as CanTopic, TakenPosition
 from picklefield.fields import PickledObjectField
 from django.conf import settings
 from django.utils.encoding import python_2_unicode_compatible
@@ -38,6 +38,11 @@
         if links:
             return links.first()
 
+    @property
+    def has_answered(self):
+        are_there_answers = TakenPosition.objects.filter(person=self, position__isnull=False).exists()
+        return are_there_answers
+
     class Meta:
         verbose_name = _("Candidato")
         verbose_name_plural = _("Candidatos")
{"golden_diff": "diff --git a/elections/models.py b/elections/models.py\n--- a/elections/models.py\n+++ b/elections/models.py\n@@ -6,7 +6,7 @@\n from popolo.models import Person, Area\n from django.utils.translation import ugettext_lazy as _\n from markdown_deux.templatetags.markdown_deux_tags import markdown_allowed\n-from candidator.models import Category, Topic as CanTopic\n+from candidator.models import Category, Topic as CanTopic, TakenPosition\n from picklefield.fields import PickledObjectField\n from django.conf import settings\n from django.utils.encoding import python_2_unicode_compatible\n@@ -38,6 +38,11 @@\n if links:\n return links.first()\n \n+ @property\n+ def has_answered(self):\n+ are_there_answers = TakenPosition.objects.filter(person=self, position__isnull=False).exists()\n+ return are_there_answers\n+\n class Meta:\n verbose_name = _(\"Candidato\")\n verbose_name_plural = _(\"Candidatos\")\n", "issue": "Candidate has_answered siempre en false\n\u00bfC\u00f3mo se hace para que deje de mostrar el enlace a twitter para candidatos que tienen todas las respuestas?\n\u00bfC\u00f3mo se hace para cambiar \"p\u00eddele\" por \"pedile\"?\n\n", "before_files": [{"content": "# coding=utf-8\nfrom django.db import models\nfrom autoslug import AutoSlugField\nfrom taggit.managers import TaggableManager\nfrom django.core.urlresolvers import reverse\nfrom popolo.models import Person, Area\nfrom django.utils.translation import ugettext_lazy as _\nfrom markdown_deux.templatetags.markdown_deux_tags import markdown_allowed\nfrom candidator.models import Category, Topic as CanTopic\nfrom picklefield.fields import PickledObjectField\nfrom django.conf import settings\nfrom django.utils.encoding import python_2_unicode_compatible\nfrom django.contrib.flatpages.models import FlatPage\nimport copy\n\n\nclass ExtraInfoMixin(models.Model):\n extra_info = PickledObjectField(default={})\n\n class Meta:\n abstract = True\n\n def __init__(self, *args, **kwargs):\n super(ExtraInfoMixin, self).__init__(*args, **kwargs)\n default_extra_info = copy.copy(self.default_extra_info)\n default_extra_info.update(self.extra_info)\n self.extra_info = default_extra_info\n\n\nclass Candidate(Person, ExtraInfoMixin):\n election = models.ForeignKey('Election', related_name='candidates', null=True)\n\n default_extra_info = settings.DEFAULT_CANDIDATE_EXTRA_INFO\n\n @property\n def twitter(self):\n links = self.contact_details.filter(contact_type=\"TWITTER\")\n if links:\n return links.first()\n\n class Meta:\n verbose_name = _(\"Candidato\")\n verbose_name_plural = _(\"Candidatos\")\n\n\nclass CandidateFlatPage(FlatPage):\n candidate = models.ForeignKey(Candidate, related_name='flatpages')\n\n class Meta:\n verbose_name = _(u\"P\u00e1gina est\u00e1ticas por candidato\")\n verbose_name_plural = _(u\"P\u00e1ginas est\u00e1ticas por candidato\")\n\n def get_absolute_url(self):\n return reverse('candidate_flatpage', kwargs={'election_slug': self.candidate.election.slug,\n 'slug': self.candidate.id,\n 'url': self.url\n }\n )\n\n\nclass PersonalData(models.Model):\n candidate = models.ForeignKey('Candidate', related_name=\"personal_datas\")\n label = models.CharField(max_length=512)\n value = models.CharField(max_length=1024)\n\n\nclass Topic(CanTopic):\n class Meta:\n proxy = True\n verbose_name = _(u\"Pregunta\")\n verbose_name_plural = _(u\"Preguntas\")\n\n @property\n def election(self):\n category = QuestionCategory.objects.get(category_ptr=self.category)\n return category.election\n\n\n@python_2_unicode_compatible\nclass 
QuestionCategory(Category):\n election = models.ForeignKey('Election', related_name='categories', null=True)\n\n def __str__(self):\n return u'<%s> in <%s>' % (self.name, self.election.name)\n\n class Meta:\n verbose_name = _(u\"Categor\u00eda de pregunta\")\n verbose_name_plural = _(u\"Categor\u00edas de pregunta\")\n\n\nclass Election(ExtraInfoMixin, models.Model):\n name = models.CharField(max_length=255)\n slug = AutoSlugField(populate_from='name', unique=True)\n description = models.TextField(blank=True)\n tags = TaggableManager(blank=True)\n searchable = models.BooleanField(default=True)\n highlighted = models.BooleanField(default=False)\n extra_info_title = models.CharField(max_length=50, blank=True, null=True)\n extra_info_content = models.TextField(max_length=3000, blank=True, null=True, help_text=_(\"Puedes usar Markdown. <br/> \")\n + markdown_allowed())\n uses_preguntales = models.BooleanField(default=True, help_text=_(u\"Esta elecci\u00f3n debe usar preguntales?\"))\n uses_ranking = models.BooleanField(default=True, help_text=_(u\"Esta elecci\u00f3n debe usar ranking\"))\n uses_face_to_face = models.BooleanField(default=True, help_text=_(u\"Esta elecci\u00f3n debe usar frente a frente\"))\n uses_soul_mate = models.BooleanField(default=True, help_text=_(u\"Esta elecci\u00f3n debe usar 1/2 naranja\"))\n uses_questionary = models.BooleanField(default=True, help_text=_(u\"Esta elecci\u00f3n debe usar cuestionario\"))\n\n default_extra_info = settings.DEFAULT_ELECTION_EXTRA_INFO\n area = models.ForeignKey(Area, null=True, related_name=\"elections\")\n\n def __unicode__(self):\n return self.name\n\n def get_absolute_url(self):\n return reverse('election_view', kwargs={'slug': self.slug})\n\n def get_extra_info_url(self):\n return reverse('election_extra_info', kwargs={'slug': self.slug})\n\n class Meta:\n verbose_name = _(u'Mi Elecci\u00f3n')\n verbose_name_plural = _(u'Mis Elecciones')\n", "path": "elections/models.py"}]}
1,848
222
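The fix in the elections record is the usual Django pattern of exposing a boolean as an `EXISTS` query instead of a stored flag. Reduced to a free function for illustration (the `TakenPosition` import is the same one the patch adds; the function form is ours), the core of it is:

```python
from candidator.models import TakenPosition


def has_answered(candidate) -> bool:
    # True once at least one answer row points at a real position;
    # rows with a null position are the unanswered placeholders.
    return TakenPosition.objects.filter(person=candidate, position__isnull=False).exists()
```

A template can then gate the "ask on Twitter" call-to-action on `candidate.has_answered`, which is what the issue reporter was after.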
gh_patches_debug_6647
rasdani/github-patches
git_diff
elastic__apm-agent-python-1647
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [META 555] Add automated span type/subtype checking against shared spec Spec PR: https://github.com/elastic/apm/pull/443 To start, we would just ensure that all span types/subtypes appear in the spec. In the future we will work on cross-agent alignment. </issue> <code> [start of elasticapm/instrumentation/packages/asyncio/aiopg.py] 1 # BSD 3-Clause License 2 # 3 # Copyright (c) 2019, Elasticsearch BV 4 # All rights reserved. 5 # 6 # Redistribution and use in source and binary forms, with or without 7 # modification, are permitted provided that the following conditions are met: 8 # 9 # * Redistributions of source code must retain the above copyright notice, this 10 # list of conditions and the following disclaimer. 11 # 12 # * Redistributions in binary form must reproduce the above copyright notice, 13 # this list of conditions and the following disclaimer in the documentation 14 # and/or other materials provided with the distribution. 15 # 16 # * Neither the name of the copyright holder nor the names of its 17 # contributors may be used to endorse or promote products derived from 18 # this software without specific prior written permission. 19 # 20 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 31 from elasticapm.contrib.asyncio.traces import async_capture_span 32 from elasticapm.instrumentation.packages.asyncio.base import AsyncAbstractInstrumentedModule 33 from elasticapm.instrumentation.packages.dbapi2 import extract_signature 34 35 36 class AioPGInstrumentation(AsyncAbstractInstrumentedModule): 37 name = "aiopg" 38 39 instrument_list = [ 40 ("aiopg.cursor", "Cursor.execute"), 41 ("aiopg.cursor", "Cursor.callproc"), 42 ("aiopg.connection", "Cursor.execute"), 43 ("aiopg.connection", "Cursor.callproc"), 44 ] 45 46 async def call(self, module, method, wrapped, instance, args, kwargs): 47 if method == "Cursor.execute": 48 query = args[0] if len(args) else kwargs["operation"] 49 query = _bake_sql(instance.raw, query) 50 name = extract_signature(query) 51 context = {"db": {"type": "sql", "statement": query}} 52 action = "query" 53 elif method == "Cursor.callproc": 54 func = args[0] if len(args) else kwargs["procname"] 55 name = func + "()" 56 context = None 57 action = "exec" 58 else: 59 raise AssertionError("call from uninstrumented method") 60 async with async_capture_span( 61 name, leaf=True, span_type="db", span_subtype="postgres", span_action=action, extra=context 62 ): 63 return await wrapped(*args, **kwargs) 64 65 66 def _bake_sql(cursor, sql): 67 # if this is a Composable object, use its `as_string` method 68 # see http://initd.org/psycopg/docs/sql.html 69 if hasattr(sql, "as_string"): 70 return sql.as_string(cursor) 71 return sql 72 [end of elasticapm/instrumentation/packages/asyncio/aiopg.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/elasticapm/instrumentation/packages/asyncio/aiopg.py b/elasticapm/instrumentation/packages/asyncio/aiopg.py
--- a/elasticapm/instrumentation/packages/asyncio/aiopg.py
+++ b/elasticapm/instrumentation/packages/asyncio/aiopg.py
@@ -58,7 +58,7 @@
         else:
             raise AssertionError("call from uninstrumented method")
         async with async_capture_span(
-            name, leaf=True, span_type="db", span_subtype="postgres", span_action=action, extra=context
+            name, leaf=True, span_type="db", span_subtype="postgresql", span_action=action, extra=context
        ):
             return await wrapped(*args, **kwargs)
{"golden_diff": "diff --git a/elasticapm/instrumentation/packages/asyncio/aiopg.py b/elasticapm/instrumentation/packages/asyncio/aiopg.py\n--- a/elasticapm/instrumentation/packages/asyncio/aiopg.py\n+++ b/elasticapm/instrumentation/packages/asyncio/aiopg.py\n@@ -58,7 +58,7 @@\n else:\n raise AssertionError(\"call from uninstrumented method\")\n async with async_capture_span(\n- name, leaf=True, span_type=\"db\", span_subtype=\"postgres\", span_action=action, extra=context\n+ name, leaf=True, span_type=\"db\", span_subtype=\"postgresql\", span_action=action, extra=context\n ):\n return await wrapped(*args, **kwargs)\n", "issue": "[META 555] Add automated span type/subtype checking against shared spec\nSpec PR: https://github.com/elastic/apm/pull/443\r\n\r\nTo start, we would just ensure that all span types/subtypes appear in the spec. In the future we will work on cross-agent alignment.\r\n\n", "before_files": [{"content": "# BSD 3-Clause License\n#\n# Copyright (c) 2019, Elasticsearch BV\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\nfrom elasticapm.contrib.asyncio.traces import async_capture_span\nfrom elasticapm.instrumentation.packages.asyncio.base import AsyncAbstractInstrumentedModule\nfrom elasticapm.instrumentation.packages.dbapi2 import extract_signature\n\n\nclass AioPGInstrumentation(AsyncAbstractInstrumentedModule):\n name = \"aiopg\"\n\n instrument_list = [\n (\"aiopg.cursor\", \"Cursor.execute\"),\n (\"aiopg.cursor\", \"Cursor.callproc\"),\n (\"aiopg.connection\", \"Cursor.execute\"),\n (\"aiopg.connection\", \"Cursor.callproc\"),\n ]\n\n async def call(self, module, method, wrapped, instance, args, kwargs):\n if method == \"Cursor.execute\":\n query = args[0] if len(args) else kwargs[\"operation\"]\n query = _bake_sql(instance.raw, query)\n name = extract_signature(query)\n context = {\"db\": {\"type\": \"sql\", \"statement\": query}}\n action = \"query\"\n elif method == \"Cursor.callproc\":\n func = args[0] if len(args) else kwargs[\"procname\"]\n name = func + \"()\"\n context = None\n action = \"exec\"\n else:\n raise AssertionError(\"call from uninstrumented method\")\n async with async_capture_span(\n name, leaf=True, span_type=\"db\", span_subtype=\"postgres\", span_action=action, extra=context\n ):\n return await wrapped(*args, **kwargs)\n\n\ndef _bake_sql(cursor, sql):\n # if this is a Composable object, use its `as_string` method\n # see http://initd.org/psycopg/docs/sql.html\n if hasattr(sql, \"as_string\"):\n return sql.as_string(cursor)\n return sql\n", "path": "elasticapm/instrumentation/packages/asyncio/aiopg.py"}]}
1,471
172
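The agent record above only renames one subtype, but the META issue it serves asks for an automated check that every `span_subtype` the agent emits appears in the shared spec. One plausible shape for such a test is sketched below; the spec filename, its JSON layout, and the regex-over-sources approach are assumptions for illustration, not the actual elastic/apm tooling:

```python
import json
import pathlib
import re

# Matches literal subtype strings such as span_subtype="postgresql".
SUBTYPE_RE = re.compile(r'span_subtype="([a-z0-9_\-]+)"')


def test_span_subtypes_are_in_spec():
    # Assumed spec shape: {"db": {"subtypes": {"postgresql": {...}, ...}}, ...}
    spec = json.loads(pathlib.Path("span_types.json").read_text())
    allowed = {sub for entry in spec.values() for sub in entry.get("subtypes", {})}

    used = set()
    for path in pathlib.Path("elasticapm/instrumentation/packages").rglob("*.py"):
        used.update(SUBTYPE_RE.findall(path.read_text()))

    assert used <= allowed, f"span subtypes missing from the spec: {sorted(used - allowed)}"
```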
gh_patches_debug_34381
rasdani/github-patches
git_diff
facebookresearch__hydra-1560
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [callbacks] call on_*_end events in reverse order </issue> <code> [start of hydra/core/callbacks.py] 1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved 2 import warnings 3 from typing import Any 4 5 from omegaconf import DictConfig 6 7 from hydra.core.utils import JobReturn 8 from hydra.utils import instantiate 9 10 11 class Callbacks: 12 def __init__(self, config: DictConfig) -> None: 13 self.callbacks = [] 14 for params in config.hydra.callbacks.values(): 15 self.callbacks.append(instantiate(params)) 16 17 def _notify(self, function_name: str, **kwargs: Any) -> None: 18 for c in self.callbacks: 19 try: 20 getattr(c, function_name)(**kwargs) 21 except Exception as e: 22 warnings.warn( 23 f"Callback {type(c).__name__}.{function_name} raised {type(e).__name__}: {e}" 24 ) 25 26 def on_run_start(self, config: DictConfig, **kwargs: Any) -> None: 27 self._notify(function_name="on_run_start", config=config, **kwargs) 28 29 def on_run_end(self, config: DictConfig, **kwargs: Any) -> None: 30 self._notify(function_name="on_run_end", config=config, **kwargs) 31 32 def on_multirun_start(self, config: DictConfig, **kwargs: Any) -> None: 33 self._notify(function_name="on_multirun_start", config=config, **kwargs) 34 35 def on_multirun_end(self, config: DictConfig, **kwargs: Any) -> None: 36 self._notify(function_name="on_multirun_end", config=config, **kwargs) 37 38 def on_job_start(self, config: DictConfig, **kwargs: Any) -> None: 39 self._notify(function_name="on_job_start", config=config, **kwargs) 40 41 def on_job_end( 42 self, config: DictConfig, job_return: JobReturn, **kwargs: Any 43 ) -> None: 44 self._notify( 45 function_name="on_job_end", config=config, job_return=job_return, **kwargs 46 ) 47 [end of hydra/core/callbacks.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/hydra/core/callbacks.py b/hydra/core/callbacks.py
--- a/hydra/core/callbacks.py
+++ b/hydra/core/callbacks.py
@@ -14,8 +14,9 @@
         for params in config.hydra.callbacks.values():
             self.callbacks.append(instantiate(params))
 
-    def _notify(self, function_name: str, **kwargs: Any) -> None:
-        for c in self.callbacks:
+    def _notify(self, function_name: str, reverse: bool = False, **kwargs: Any) -> None:
+        callbacks = reversed(self.callbacks) if reverse else self.callbacks
+        for c in callbacks:
             try:
                 getattr(c, function_name)(**kwargs)
             except Exception as e:
@@ -27,13 +28,15 @@
         self._notify(function_name="on_run_start", config=config, **kwargs)
 
     def on_run_end(self, config: DictConfig, **kwargs: Any) -> None:
-        self._notify(function_name="on_run_end", config=config, **kwargs)
+        self._notify(function_name="on_run_end", config=config, reverse=True, **kwargs)
 
     def on_multirun_start(self, config: DictConfig, **kwargs: Any) -> None:
         self._notify(function_name="on_multirun_start", config=config, **kwargs)
 
     def on_multirun_end(self, config: DictConfig, **kwargs: Any) -> None:
-        self._notify(function_name="on_multirun_end", config=config, **kwargs)
+        self._notify(
+            function_name="on_multirun_end", reverse=True, config=config, **kwargs
+        )
 
     def on_job_start(self, config: DictConfig, **kwargs: Any) -> None:
         self._notify(function_name="on_job_start", config=config, **kwargs)
@@ -42,5 +45,9 @@
         self, config: DictConfig, job_return: JobReturn, **kwargs: Any
     ) -> None:
         self._notify(
-            function_name="on_job_end", config=config, job_return=job_return, **kwargs
+            function_name="on_job_end",
+            config=config,
+            job_return=job_return,
+            reverse=True,
+            **kwargs,
         )
{"golden_diff": "diff --git a/hydra/core/callbacks.py b/hydra/core/callbacks.py\n--- a/hydra/core/callbacks.py\n+++ b/hydra/core/callbacks.py\n@@ -14,8 +14,9 @@\n for params in config.hydra.callbacks.values():\n self.callbacks.append(instantiate(params))\n \n- def _notify(self, function_name: str, **kwargs: Any) -> None:\n- for c in self.callbacks:\n+ def _notify(self, function_name: str, reverse: bool = False, **kwargs: Any) -> None:\n+ callbacks = reversed(self.callbacks) if reverse else self.callbacks\n+ for c in callbacks:\n try:\n getattr(c, function_name)(**kwargs)\n except Exception as e:\n@@ -27,13 +28,15 @@\n self._notify(function_name=\"on_run_start\", config=config, **kwargs)\n \n def on_run_end(self, config: DictConfig, **kwargs: Any) -> None:\n- self._notify(function_name=\"on_run_end\", config=config, **kwargs)\n+ self._notify(function_name=\"on_run_end\", config=config, reverse=True, **kwargs)\n \n def on_multirun_start(self, config: DictConfig, **kwargs: Any) -> None:\n self._notify(function_name=\"on_multirun_start\", config=config, **kwargs)\n \n def on_multirun_end(self, config: DictConfig, **kwargs: Any) -> None:\n- self._notify(function_name=\"on_multirun_end\", config=config, **kwargs)\n+ self._notify(\n+ function_name=\"on_multirun_end\", reverse=True, config=config, **kwargs\n+ )\n \n def on_job_start(self, config: DictConfig, **kwargs: Any) -> None:\n self._notify(function_name=\"on_job_start\", config=config, **kwargs)\n@@ -42,5 +45,9 @@\n self, config: DictConfig, job_return: JobReturn, **kwargs: Any\n ) -> None:\n self._notify(\n- function_name=\"on_job_end\", config=config, job_return=job_return, **kwargs\n+ function_name=\"on_job_end\",\n+ config=config,\n+ job_return=job_return,\n+ reverse=True,\n+ **kwargs,\n )\n", "issue": "[callbacks] call on_*_end events in reverse order\n\n", "before_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport warnings\nfrom typing import Any\n\nfrom omegaconf import DictConfig\n\nfrom hydra.core.utils import JobReturn\nfrom hydra.utils import instantiate\n\n\nclass Callbacks:\n def __init__(self, config: DictConfig) -> None:\n self.callbacks = []\n for params in config.hydra.callbacks.values():\n self.callbacks.append(instantiate(params))\n\n def _notify(self, function_name: str, **kwargs: Any) -> None:\n for c in self.callbacks:\n try:\n getattr(c, function_name)(**kwargs)\n except Exception as e:\n warnings.warn(\n f\"Callback {type(c).__name__}.{function_name} raised {type(e).__name__}: {e}\"\n )\n\n def on_run_start(self, config: DictConfig, **kwargs: Any) -> None:\n self._notify(function_name=\"on_run_start\", config=config, **kwargs)\n\n def on_run_end(self, config: DictConfig, **kwargs: Any) -> None:\n self._notify(function_name=\"on_run_end\", config=config, **kwargs)\n\n def on_multirun_start(self, config: DictConfig, **kwargs: Any) -> None:\n self._notify(function_name=\"on_multirun_start\", config=config, **kwargs)\n\n def on_multirun_end(self, config: DictConfig, **kwargs: Any) -> None:\n self._notify(function_name=\"on_multirun_end\", config=config, **kwargs)\n\n def on_job_start(self, config: DictConfig, **kwargs: Any) -> None:\n self._notify(function_name=\"on_job_start\", config=config, **kwargs)\n\n def on_job_end(\n self, config: DictConfig, job_return: JobReturn, **kwargs: Any\n ) -> None:\n self._notify(\n function_name=\"on_job_end\", config=config, job_return=job_return, **kwargs\n )\n", "path": "hydra/core/callbacks.py"}]}
1,064
505
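
The fix in the record above makes the teardown hooks (`on_run_end`, `on_multirun_end`, `on_job_end`) walk the registered callbacks in reverse, so setup and teardown nest like a stack. Below is a minimal standalone sketch of that pattern; `CallbackRunner` and `Logger` are illustrative stand-ins, not Hydra classes.

```python
# Sketch of the reverse-on-teardown callback pattern: start events fire in
# registration order, end events fire in reverse, so the last callback to
# see "start" is the first to see "end".
class CallbackRunner:
    def __init__(self, callbacks):
        self.callbacks = list(callbacks)

    def _notify(self, method_name, reverse=False, **kwargs):
        callbacks = reversed(self.callbacks) if reverse else self.callbacks
        for cb in callbacks:
            try:
                getattr(cb, method_name)(**kwargs)
            except Exception as exc:  # keep going if one callback misbehaves
                print(f"callback {type(cb).__name__}.{method_name} raised {exc!r}")

    def on_run_start(self, **kwargs):
        self._notify("on_run_start", **kwargs)

    def on_run_end(self, **kwargs):
        self._notify("on_run_end", reverse=True, **kwargs)


class Logger:
    def __init__(self, name):
        self.name = name

    def on_run_start(self, **kwargs):
        print(f"{self.name}: start")

    def on_run_end(self, **kwargs):
        print(f"{self.name}: end")


runner = CallbackRunner([Logger("a"), Logger("b")])
runner.on_run_start()  # a: start, then b: start
runner.on_run_end()    # b: end, then a: end — teardown mirrors setup
```
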
gh_patches_debug_23761
rasdani/github-patches
git_diff
fossasia__open-event-server-5139
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Add custom form for attendees **Is your feature request related to a problem? Please describe.** <!-- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] --> Add custom form for attendees **Describe the solution you'd like** <!-- A clear and concise description of what you want to happen. --> **Describe alternatives you've considered** <!-- A clear and concise description of any alternative solutions or features you've considered. --> **Additional context** <!-- Add any other context or screenshots about the feature request here. --> **Working on it** </issue> <code> [start of app/models/custom_form.py] 1 import json 2 from sqlalchemy.schema import UniqueConstraint 3 4 from app.models import db 5 from app.models.base import SoftDeletionModel 6 7 SESSION_FORM = { 8 "title": {"include": 1, "require": 1}, 9 "subtitle": {"include": 0, "require": 0}, 10 "short_abstract": {"include": 1, "require": 0}, 11 "long_abstract": {"include": 0, "require": 0}, 12 "comments": {"include": 1, "require": 0}, 13 "track": {"include": 0, "require": 0}, 14 "session_type": {"include": 0, "require": 0}, 15 "language": {"include": 0, "require": 0}, 16 "slides": {"include": 1, "require": 0}, 17 "video": {"include": 0, "require": 0}, 18 "audio": {"include": 0, "require": 0} 19 } 20 21 SPEAKER_FORM = { 22 "name": {"include": 1, "require": 1}, 23 "email": {"include": 1, "require": 1}, 24 "photo": {"include": 1, "require": 0}, 25 "organisation": {"include": 1, "require": 0}, 26 "position": {"include": 1, "require": 0}, 27 "country": {"include": 1, "require": 0}, 28 "short_biography": {"include": 1, "require": 0}, 29 "long_biography": {"include": 0, "require": 0}, 30 "mobile": {"include": 0, "require": 0}, 31 "website": {"include": 1, "require": 0}, 32 "facebook": {"include": 0, "require": 0}, 33 "twitter": {"include": 1, "require": 0}, 34 "github": {"include": 0, "require": 0}, 35 "linkedin": {"include": 0, "require": 0} 36 } 37 38 session_form_str = json.dumps(SESSION_FORM, separators=(',', ':')) 39 speaker_form_str = json.dumps(SPEAKER_FORM, separators=(',', ':')) 40 41 42 class CustomForms(SoftDeletionModel): 43 """custom form model class""" 44 __tablename__ = 'custom_forms' 45 __table_args__ = (UniqueConstraint('event_id', 'field_identifier', 'form', name='custom_form_identifier'), ) 46 id = db.Column(db.Integer, primary_key=True) 47 field_identifier = db.Column(db.String, nullable=False) 48 form = db.Column(db.String, nullable=False) 49 type = db.Column(db.String, nullable=False) 50 is_required = db.Column(db.Boolean) 51 is_included = db.Column(db.Boolean) 52 is_fixed = db.Column(db.Boolean) 53 event_id = db.Column(db.Integer, db.ForeignKey('events.id', ondelete='CASCADE')) 54 55 def __init__(self, 56 event_id=None, 57 field_identifier=None, 58 form=None, 59 type=None, 60 is_required=None, 61 is_included=None, 62 is_fixed=None, 63 deleted_at=None): 64 self.event_id = event_id 65 self.field_identifier = field_identifier, 66 self.form = form, 67 self.type = type, 68 self.is_required = is_required, 69 self.is_included = is_included, 70 self.is_fixed = is_fixed 71 self.deleted_at = deleted_at 72 73 def __repr__(self): 74 return '<CustomForm %r>' % self.id 75 76 def __str__(self): 77 return self.__repr__() 78 79 @property 80 def serialize(self): 81 """Return object data in easily serializable format""" 82 83 return { 84 'id': self.id, 85 'field_identifier': 
self.field_identifier, 86 'form': self.form, 87 'type': self.type, 88 'is_required': self.is_required, 89 'is_included': self.is_included, 90 'is_fixed': self.is_fixed 91 } 92 [end of app/models/custom_form.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/app/models/custom_form.py b/app/models/custom_form.py --- a/app/models/custom_form.py +++ b/app/models/custom_form.py @@ -35,8 +35,34 @@ "linkedin": {"include": 0, "require": 0} } +ATTENDEE_FORM = { + "firstname": {"include": 1, "require": 1}, + "lastname": {"include": 1, "require": 1}, + "email": {"include": 1, "require": 0}, + "address": {"include": 1, "require": 0}, + "city": {"include": 1, "require": 0}, + "state": {"include": 1, "require": 0}, + "country": {"include": 1, "require": 0}, + "job_title": {"include": 1, "require": 0}, + "phone": {"include": 1, "require": 0}, + "tax_business_info": {"include": 0, "require": 0}, + "billing_address": {"include": 0, "require": 0}, + "home_address": {"include": 0, "require": 0}, + "shipping_address": {"include": 0, "require": 0}, + "company": {"include": 0, "require": 0}, + "work_address": {"include": 0, "require": 0}, + "work_phone": {"include": 0, "require": 0}, + "website": {"include": 1, "require": 0}, + "blog": {"include": 0, "require": 0}, + "twitter": {"include": 1, "require": 0}, + "facebook": {"include": 0, "require": 0}, + "github": {"include": 1, "require": 0}, + "gender": {"include": 0, "require": 0}, +} + session_form_str = json.dumps(SESSION_FORM, separators=(',', ':')) speaker_form_str = json.dumps(SPEAKER_FORM, separators=(',', ':')) +attendee_form_str = json.dumps(ATTENDEE_FORM, separators=(',', ':')) class CustomForms(SoftDeletionModel):
{"golden_diff": "diff --git a/app/models/custom_form.py b/app/models/custom_form.py\n--- a/app/models/custom_form.py\n+++ b/app/models/custom_form.py\n@@ -35,8 +35,34 @@\n \"linkedin\": {\"include\": 0, \"require\": 0}\n }\n \n+ATTENDEE_FORM = {\n+ \"firstname\": {\"include\": 1, \"require\": 1},\n+ \"lastname\": {\"include\": 1, \"require\": 1},\n+ \"email\": {\"include\": 1, \"require\": 0},\n+ \"address\": {\"include\": 1, \"require\": 0},\n+ \"city\": {\"include\": 1, \"require\": 0},\n+ \"state\": {\"include\": 1, \"require\": 0},\n+ \"country\": {\"include\": 1, \"require\": 0},\n+ \"job_title\": {\"include\": 1, \"require\": 0},\n+ \"phone\": {\"include\": 1, \"require\": 0},\n+ \"tax_business_info\": {\"include\": 0, \"require\": 0},\n+ \"billing_address\": {\"include\": 0, \"require\": 0},\n+ \"home_address\": {\"include\": 0, \"require\": 0},\n+ \"shipping_address\": {\"include\": 0, \"require\": 0},\n+ \"company\": {\"include\": 0, \"require\": 0},\n+ \"work_address\": {\"include\": 0, \"require\": 0},\n+ \"work_phone\": {\"include\": 0, \"require\": 0},\n+ \"website\": {\"include\": 1, \"require\": 0},\n+ \"blog\": {\"include\": 0, \"require\": 0},\n+ \"twitter\": {\"include\": 1, \"require\": 0},\n+ \"facebook\": {\"include\": 0, \"require\": 0},\n+ \"github\": {\"include\": 1, \"require\": 0},\n+ \"gender\": {\"include\": 0, \"require\": 0},\n+}\n+\n session_form_str = json.dumps(SESSION_FORM, separators=(',', ':'))\n speaker_form_str = json.dumps(SPEAKER_FORM, separators=(',', ':'))\n+attendee_form_str = json.dumps(ATTENDEE_FORM, separators=(',', ':'))\n \n \n class CustomForms(SoftDeletionModel):\n", "issue": "Add custom form for attendees\n**Is your feature request related to a problem? Please describe.**\r\n<!-- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] -->\r\n\r\nAdd custom form for attendees\r\n\r\n**Describe the solution you'd like**\r\n<!-- A clear and concise description of what you want to happen. -->\r\n\r\n**Describe alternatives you've considered**\r\n<!-- A clear and concise description of any alternative solutions or features you've considered. -->\r\n\r\n**Additional context**\r\n<!-- Add any other context or screenshots about the feature request here. 
-->\r\n\r\n**Working on it**\n", "before_files": [{"content": "import json\nfrom sqlalchemy.schema import UniqueConstraint\n\nfrom app.models import db\nfrom app.models.base import SoftDeletionModel\n\nSESSION_FORM = {\n \"title\": {\"include\": 1, \"require\": 1},\n \"subtitle\": {\"include\": 0, \"require\": 0},\n \"short_abstract\": {\"include\": 1, \"require\": 0},\n \"long_abstract\": {\"include\": 0, \"require\": 0},\n \"comments\": {\"include\": 1, \"require\": 0},\n \"track\": {\"include\": 0, \"require\": 0},\n \"session_type\": {\"include\": 0, \"require\": 0},\n \"language\": {\"include\": 0, \"require\": 0},\n \"slides\": {\"include\": 1, \"require\": 0},\n \"video\": {\"include\": 0, \"require\": 0},\n \"audio\": {\"include\": 0, \"require\": 0}\n}\n\nSPEAKER_FORM = {\n \"name\": {\"include\": 1, \"require\": 1},\n \"email\": {\"include\": 1, \"require\": 1},\n \"photo\": {\"include\": 1, \"require\": 0},\n \"organisation\": {\"include\": 1, \"require\": 0},\n \"position\": {\"include\": 1, \"require\": 0},\n \"country\": {\"include\": 1, \"require\": 0},\n \"short_biography\": {\"include\": 1, \"require\": 0},\n \"long_biography\": {\"include\": 0, \"require\": 0},\n \"mobile\": {\"include\": 0, \"require\": 0},\n \"website\": {\"include\": 1, \"require\": 0},\n \"facebook\": {\"include\": 0, \"require\": 0},\n \"twitter\": {\"include\": 1, \"require\": 0},\n \"github\": {\"include\": 0, \"require\": 0},\n \"linkedin\": {\"include\": 0, \"require\": 0}\n}\n\nsession_form_str = json.dumps(SESSION_FORM, separators=(',', ':'))\nspeaker_form_str = json.dumps(SPEAKER_FORM, separators=(',', ':'))\n\n\nclass CustomForms(SoftDeletionModel):\n \"\"\"custom form model class\"\"\"\n __tablename__ = 'custom_forms'\n __table_args__ = (UniqueConstraint('event_id', 'field_identifier', 'form', name='custom_form_identifier'), )\n id = db.Column(db.Integer, primary_key=True)\n field_identifier = db.Column(db.String, nullable=False)\n form = db.Column(db.String, nullable=False)\n type = db.Column(db.String, nullable=False)\n is_required = db.Column(db.Boolean)\n is_included = db.Column(db.Boolean)\n is_fixed = db.Column(db.Boolean)\n event_id = db.Column(db.Integer, db.ForeignKey('events.id', ondelete='CASCADE'))\n\n def __init__(self,\n event_id=None,\n field_identifier=None,\n form=None,\n type=None,\n is_required=None,\n is_included=None,\n is_fixed=None,\n deleted_at=None):\n self.event_id = event_id\n self.field_identifier = field_identifier,\n self.form = form,\n self.type = type,\n self.is_required = is_required,\n self.is_included = is_included,\n self.is_fixed = is_fixed\n self.deleted_at = deleted_at\n\n def __repr__(self):\n return '<CustomForm %r>' % self.id\n\n def __str__(self):\n return self.__repr__()\n\n @property\n def serialize(self):\n \"\"\"Return object data in easily serializable format\"\"\"\n\n return {\n 'id': self.id,\n 'field_identifier': self.field_identifier,\n 'form': self.form,\n 'type': self.type,\n 'is_required': self.is_required,\n 'is_included': self.is_included,\n 'is_fixed': self.is_fixed\n }\n", "path": "app/models/custom_form.py"}]}
1,683
518
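
The form dictionaries in the record above are plain per-field `include`/`require` flags. The record does not show the code that consumes them, so the validation below is purely an assumption for illustration — one plausible way such a config could be checked against submitted attendee data.

```python
# Illustrative only: validate a submitted attendee record against a form
# config of {"field": {"include": 0/1, "require": 0/1}} entries. The rules
# here are assumed, not taken from the open-event-server code.
ATTENDEE_FORM = {
    "firstname": {"include": 1, "require": 1},
    "lastname": {"include": 1, "require": 1},
    "email": {"include": 1, "require": 0},
    "gender": {"include": 0, "require": 0},
}


def validate(form, submitted):
    errors = []
    for field, flags in form.items():
        if flags["require"] and not submitted.get(field):
            errors.append(f"missing required field: {field}")
        if not flags["include"] and field in submitted:
            errors.append(f"field not included in this form: {field}")
    return errors


print(validate(ATTENDEE_FORM, {"firstname": "Ada"}))
# ['missing required field: lastname']
```
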
gh_patches_debug_9114
rasdani/github-patches
git_diff
UTNkar__moore-183
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Centre drive embeds <!-- Do you want to ask a question? Are you looking for support? The system administrator can help you: [email protected] --> ### Description Center drive embeds. Currently it looks a bit weird. ![image](https://user-images.githubusercontent.com/29704138/28264214-3dd2eb48-6aea-11e7-9944-fb199c8d4ecc.png) ### Steps to Reproduce 1. [First Step] 2. [Second Step] 3. [and so on...] <!-- Please select the appropriate "topic category"/blue and "issue type"/yellow label --> </issue> <code> [start of src/google/models.py] 1 from datetime import date 2 3 from django.db import models 4 from django.utils.translation import ugettext_lazy as _ 5 from wagtail.wagtailadmin.edit_handlers import FieldPanel, StreamFieldPanel, \ 6 TabbedInterface, ObjectList 7 from wagtail.wagtailcore import blocks 8 from wagtail.wagtailcore.fields import StreamField, RichTextField 9 from wagtail.wagtailcore.models import Page 10 from wagtail.wagtailsearch import index 11 12 from blocks.models import WAGTAIL_STATIC_BLOCKTYPES 13 from utils.translation import TranslatedField 14 15 16 class GoogleFormBlock(blocks.StructBlock): 17 form_id = blocks.CharBlock() 18 height = blocks.IntegerBlock() 19 20 class Meta: 21 label = _('Google Form') 22 icon = 'fa-check-square-o' 23 template = 'google/blocks/form.html' 24 group = _('Meta') 25 26 27 class GoogleFormIndex(Page): 28 title_sv = models.CharField(max_length=255) 29 translated_title = TranslatedField('title', 'title_sv') 30 31 description_en = RichTextField( 32 verbose_name=_('English description'), 33 blank=True, 34 ) 35 description_sv = RichTextField( 36 verbose_name=_('Swedish description'), 37 blank=True, 38 ) 39 description = TranslatedField('description_en', 'description_sv') 40 41 # Editor panels configuration 42 content_panels = Page.content_panels + [ 43 FieldPanel('title_sv', classname="full title"), 44 FieldPanel('description_en'), 45 FieldPanel('description_sv'), 46 ] 47 48 # Sub-page type rules 49 subpage_types = ['google.GoogleFormPage'] 50 51 def get_context(self, request, **kwargs): 52 context = super(GoogleFormIndex, self).get_context(request, **kwargs) 53 54 # Add extra variables and return the updated context 55 context['google_forms'] = GoogleFormPage.objects.child_of(self).live()\ 56 .order_by('-deadline') 57 return context 58 59 60 class GoogleFormPage(Page): 61 title_sv = models.CharField(max_length=255) 62 translated_title = TranslatedField('title', 'title_sv') 63 64 # TODO: Limit to one form! 
65 form_en = StreamField([('google_form', GoogleFormBlock())]) 66 form_sv = StreamField([('google_form', GoogleFormBlock())]) 67 form = TranslatedField('form_en', 'form_sv') 68 69 deadline = models.DateField(verbose_name=_('Form deadline')) 70 71 results_en = StreamField( 72 WAGTAIL_STATIC_BLOCKTYPES, 73 blank=True, 74 ) 75 results_sv = StreamField( 76 WAGTAIL_STATIC_BLOCKTYPES, 77 blank=True, 78 ) 79 results = TranslatedField('results_en', 'results_sv') 80 81 @property 82 def is_past_due(self) -> bool: 83 return date.today() > self.deadline 84 85 # Editor panels configuration 86 content_panels = Page.content_panels + [ 87 FieldPanel('title_sv', classname="full title"), 88 FieldPanel('deadline'), 89 StreamFieldPanel('form_en'), 90 StreamFieldPanel('form_sv'), 91 ] 92 93 edit_handler = TabbedInterface([ 94 ObjectList(content_panels, heading=_('Common')), 95 ObjectList([StreamFieldPanel('results_en')], heading=_('English')), 96 ObjectList([StreamFieldPanel('results_sv')], heading=_('Swedish')), 97 ObjectList( 98 Page.promote_panels + Page.settings_panels, heading=_('Settings') 99 ), 100 ]) 101 102 # Search index configuration 103 search_fields = Page.search_fields + [ 104 index.SearchField('title_sv'), 105 index.FilterField('results_en'), 106 index.FilterField('results_sv'), 107 index.FilterField('deadline'), 108 ] 109 110 # Parent page / subpage type rules 111 parent_page_types = ['google.GoogleFormIndex'] 112 subpage_types = [] 113 114 115 class GoogleDriveBlock(blocks.StructBlock): 116 folder_id = blocks.CharBlock() 117 view = blocks.ChoiceBlock(choices=[ 118 ('list', _('List')), 119 ('grid', _('Grid')), 120 ]) 121 height = blocks.IntegerBlock() 122 123 class Meta: 124 label = _('Google Drive') 125 icon = 'fa-folder-open' 126 template = 'google/blocks/drive.html' 127 [end of src/google/models.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/google/models.py b/src/google/models.py --- a/src/google/models.py +++ b/src/google/models.py @@ -21,7 +21,7 @@ label = _('Google Form') icon = 'fa-check-square-o' template = 'google/blocks/form.html' - group = _('Meta') + group = _('Embed') class GoogleFormIndex(Page): @@ -124,3 +124,4 @@ label = _('Google Drive') icon = 'fa-folder-open' template = 'google/blocks/drive.html' + group = _('Embed')
{"golden_diff": "diff --git a/src/google/models.py b/src/google/models.py\n--- a/src/google/models.py\n+++ b/src/google/models.py\n@@ -21,7 +21,7 @@\n label = _('Google Form')\n icon = 'fa-check-square-o'\n template = 'google/blocks/form.html'\n- group = _('Meta')\n+ group = _('Embed')\n \n \n class GoogleFormIndex(Page):\n@@ -124,3 +124,4 @@\n label = _('Google Drive')\n icon = 'fa-folder-open'\n template = 'google/blocks/drive.html'\n+ group = _('Embed')\n", "issue": "Centre drive embeds\n<!-- Do you want to ask a question? Are you looking for support? The system administrator can help you: [email protected] -->\r\n\r\n### Description\r\n\r\nCenter drive embeds. Currently it looks a bit weird.\r\n\r\n![image](https://user-images.githubusercontent.com/29704138/28264214-3dd2eb48-6aea-11e7-9944-fb199c8d4ecc.png)\r\n\r\n\r\n### Steps to Reproduce\r\n\r\n1. [First Step]\r\n2. [Second Step]\r\n3. [and so on...]\r\n\r\n<!-- Please select the appropriate \"topic category\"/blue and \"issue type\"/yellow label -->\r\n\n", "before_files": [{"content": "from datetime import date\n\nfrom django.db import models\nfrom django.utils.translation import ugettext_lazy as _\nfrom wagtail.wagtailadmin.edit_handlers import FieldPanel, StreamFieldPanel, \\\n TabbedInterface, ObjectList\nfrom wagtail.wagtailcore import blocks\nfrom wagtail.wagtailcore.fields import StreamField, RichTextField\nfrom wagtail.wagtailcore.models import Page\nfrom wagtail.wagtailsearch import index\n\nfrom blocks.models import WAGTAIL_STATIC_BLOCKTYPES\nfrom utils.translation import TranslatedField\n\n\nclass GoogleFormBlock(blocks.StructBlock):\n form_id = blocks.CharBlock()\n height = blocks.IntegerBlock()\n\n class Meta:\n label = _('Google Form')\n icon = 'fa-check-square-o'\n template = 'google/blocks/form.html'\n group = _('Meta')\n\n\nclass GoogleFormIndex(Page):\n title_sv = models.CharField(max_length=255)\n translated_title = TranslatedField('title', 'title_sv')\n\n description_en = RichTextField(\n verbose_name=_('English description'),\n blank=True,\n )\n description_sv = RichTextField(\n verbose_name=_('Swedish description'),\n blank=True,\n )\n description = TranslatedField('description_en', 'description_sv')\n\n # Editor panels configuration\n content_panels = Page.content_panels + [\n FieldPanel('title_sv', classname=\"full title\"),\n FieldPanel('description_en'),\n FieldPanel('description_sv'),\n ]\n\n # Sub-page type rules\n subpage_types = ['google.GoogleFormPage']\n\n def get_context(self, request, **kwargs):\n context = super(GoogleFormIndex, self).get_context(request, **kwargs)\n\n # Add extra variables and return the updated context\n context['google_forms'] = GoogleFormPage.objects.child_of(self).live()\\\n .order_by('-deadline')\n return context\n\n\nclass GoogleFormPage(Page):\n title_sv = models.CharField(max_length=255)\n translated_title = TranslatedField('title', 'title_sv')\n\n # TODO: Limit to one form!\n form_en = StreamField([('google_form', GoogleFormBlock())])\n form_sv = StreamField([('google_form', GoogleFormBlock())])\n form = TranslatedField('form_en', 'form_sv')\n\n deadline = models.DateField(verbose_name=_('Form deadline'))\n\n results_en = StreamField(\n WAGTAIL_STATIC_BLOCKTYPES,\n blank=True,\n )\n results_sv = StreamField(\n WAGTAIL_STATIC_BLOCKTYPES,\n blank=True,\n )\n results = TranslatedField('results_en', 'results_sv')\n\n @property\n def is_past_due(self) -> bool:\n return date.today() > self.deadline\n\n # Editor panels configuration\n content_panels = Page.content_panels + [\n 
FieldPanel('title_sv', classname=\"full title\"),\n FieldPanel('deadline'),\n StreamFieldPanel('form_en'),\n StreamFieldPanel('form_sv'),\n ]\n\n edit_handler = TabbedInterface([\n ObjectList(content_panels, heading=_('Common')),\n ObjectList([StreamFieldPanel('results_en')], heading=_('English')),\n ObjectList([StreamFieldPanel('results_sv')], heading=_('Swedish')),\n ObjectList(\n Page.promote_panels + Page.settings_panels, heading=_('Settings')\n ),\n ])\n\n # Search index configuration\n search_fields = Page.search_fields + [\n index.SearchField('title_sv'),\n index.FilterField('results_en'),\n index.FilterField('results_sv'),\n index.FilterField('deadline'),\n ]\n\n # Parent page / subpage type rules\n parent_page_types = ['google.GoogleFormIndex']\n subpage_types = []\n\n\nclass GoogleDriveBlock(blocks.StructBlock):\n folder_id = blocks.CharBlock()\n view = blocks.ChoiceBlock(choices=[\n ('list', _('List')),\n ('grid', _('Grid')),\n ])\n height = blocks.IntegerBlock()\n\n class Meta:\n label = _('Google Drive')\n icon = 'fa-folder-open'\n template = 'google/blocks/drive.html'\n", "path": "src/google/models.py"}]}
1,835
135
gh_patches_debug_41589
rasdani/github-patches
git_diff
getsentry__sentry-python-851
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Django 3.1 async views do not work When using sentry versions greater than 0.16.3, (tested on 0.18.0), Django 3.1 aysnc views do not work. ``` log.py 224 ERROR Internal Server Error: /async_ok Traceback (most recent call last): File "/Users/williamchu/dev/sentry-python/.tox/py3.8-django-3.1/lib/python3.8/site-packages/django/core/handlers/exception.py", line 47, in inner response = get_response(request) File "/Users/williamchu/dev/sentry-python/.tox/py3.8-django-3.1/lib/python3.8/site-packages/django/core/handlers/base.py", line 186, in _get_response self.check_response(response, callback) File "/Users/williamchu/dev/sentry-python/.tox/py3.8-django-3.1/lib/python3.8/site-packages/django/core/handlers/base.py", line 312, in check_response raise ValueError( ValueError: The view tests.integrations.django.myapp.views.async_ok didn't return an HttpResponse object. It returned an unawaited coroutine instead. You may need to add an 'await' into your view. ``` I have made a branch with a test case to demonstrate this: https://github.com/uptickmetachu/sentry-python/tree/django3.1-test-async-view </issue> <code> [start of sentry_sdk/integrations/django/views.py] 1 from sentry_sdk.hub import Hub 2 from sentry_sdk._types import MYPY 3 from sentry_sdk import _functools 4 5 if MYPY: 6 from typing import Any 7 8 9 def patch_views(): 10 # type: () -> None 11 12 from django.core.handlers.base import BaseHandler 13 from sentry_sdk.integrations.django import DjangoIntegration 14 15 old_make_view_atomic = BaseHandler.make_view_atomic 16 17 @_functools.wraps(old_make_view_atomic) 18 def sentry_patched_make_view_atomic(self, *args, **kwargs): 19 # type: (Any, *Any, **Any) -> Any 20 callback = old_make_view_atomic(self, *args, **kwargs) 21 22 # XXX: The wrapper function is created for every request. Find more 23 # efficient way to wrap views (or build a cache?) 24 25 hub = Hub.current 26 integration = hub.get_integration(DjangoIntegration) 27 28 if integration is not None and integration.middleware_spans: 29 30 @_functools.wraps(callback) 31 def sentry_wrapped_callback(request, *args, **kwargs): 32 # type: (Any, *Any, **Any) -> Any 33 with hub.start_span( 34 op="django.view", description=request.resolver_match.view_name 35 ): 36 return callback(request, *args, **kwargs) 37 38 else: 39 sentry_wrapped_callback = callback 40 41 return sentry_wrapped_callback 42 43 BaseHandler.make_view_atomic = sentry_patched_make_view_atomic 44 [end of sentry_sdk/integrations/django/views.py] [start of sentry_sdk/integrations/django/asgi.py] 1 """ 2 Instrumentation for Django 3.0 3 4 Since this file contains `async def` it is conditionally imported in 5 `sentry_sdk.integrations.django` (depending on the existence of 6 `django.core.handlers.asgi`. 
7 """ 8 9 from sentry_sdk import Hub 10 from sentry_sdk._types import MYPY 11 12 from sentry_sdk.integrations.django import DjangoIntegration 13 from sentry_sdk.integrations.asgi import SentryAsgiMiddleware 14 15 if MYPY: 16 from typing import Any 17 from typing import Union 18 19 from django.http.response import HttpResponse 20 21 22 def patch_django_asgi_handler_impl(cls): 23 # type: (Any) -> None 24 old_app = cls.__call__ 25 26 async def sentry_patched_asgi_handler(self, scope, receive, send): 27 # type: (Any, Any, Any, Any) -> Any 28 if Hub.current.get_integration(DjangoIntegration) is None: 29 return await old_app(self, scope, receive, send) 30 31 middleware = SentryAsgiMiddleware( 32 old_app.__get__(self, cls), unsafe_context_data=True 33 )._run_asgi3 34 return await middleware(scope, receive, send) 35 36 cls.__call__ = sentry_patched_asgi_handler 37 38 39 def patch_get_response_async(cls, _before_get_response): 40 # type: (Any, Any) -> None 41 old_get_response_async = cls.get_response_async 42 43 async def sentry_patched_get_response_async(self, request): 44 # type: (Any, Any) -> Union[HttpResponse, BaseException] 45 _before_get_response(request) 46 return await old_get_response_async(self, request) 47 48 cls.get_response_async = sentry_patched_get_response_async 49 50 51 def patch_channels_asgi_handler_impl(cls): 52 # type: (Any) -> None 53 old_app = cls.__call__ 54 55 async def sentry_patched_asgi_handler(self, receive, send): 56 # type: (Any, Any, Any) -> Any 57 if Hub.current.get_integration(DjangoIntegration) is None: 58 return await old_app(self, receive, send) 59 60 middleware = SentryAsgiMiddleware( 61 lambda _scope: old_app.__get__(self, cls), unsafe_context_data=True 62 ) 63 64 return await middleware(self.scope)(receive, send) 65 66 cls.__call__ = sentry_patched_asgi_handler 67 [end of sentry_sdk/integrations/django/asgi.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -6,10 +6,9 @@ `django.core.handlers.asgi`. """ -from sentry_sdk import Hub +from sentry_sdk import Hub, _functools from sentry_sdk._types import MYPY -from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.integrations.asgi import SentryAsgiMiddleware if MYPY: @@ -21,6 +20,9 @@ def patch_django_asgi_handler_impl(cls): # type: (Any) -> None + + from sentry_sdk.integrations.django import DjangoIntegration + old_app = cls.__call__ async def sentry_patched_asgi_handler(self, scope, receive, send): @@ -50,6 +52,9 @@ def patch_channels_asgi_handler_impl(cls): # type: (Any) -> None + + from sentry_sdk.integrations.django import DjangoIntegration + old_app = cls.__call__ async def sentry_patched_asgi_handler(self, receive, send): @@ -64,3 +69,17 @@ return await middleware(self.scope)(receive, send) cls.__call__ = sentry_patched_asgi_handler + + +def wrap_async_view(hub, callback): + # type: (Hub, Any) -> Any + @_functools.wraps(callback) + async def sentry_wrapped_callback(request, *args, **kwargs): + # type: (Any, *Any, **Any) -> Any + + with hub.start_span( + op="django.view", description=request.resolver_match.view_name + ): + return await callback(request, *args, **kwargs) + + return sentry_wrapped_callback diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -6,6 +6,18 @@ from typing import Any +try: + from asyncio import iscoroutinefunction +except ImportError: + iscoroutinefunction = None # type: ignore + + +try: + from sentry_sdk.integrations.django.asgi import wrap_async_view +except (ImportError, SyntaxError): + wrap_async_view = None # type: ignore + + def patch_views(): # type: () -> None @@ -27,13 +39,14 @@ if integration is not None and integration.middleware_spans: - @_functools.wraps(callback) - def sentry_wrapped_callback(request, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any - with hub.start_span( - op="django.view", description=request.resolver_match.view_name - ): - return callback(request, *args, **kwargs) + if ( + iscoroutinefunction is not None + and wrap_async_view is not None + and iscoroutinefunction(callback) + ): + sentry_wrapped_callback = wrap_async_view(hub, callback) + else: + sentry_wrapped_callback = _wrap_sync_view(hub, callback) else: sentry_wrapped_callback = callback @@ -41,3 +54,16 @@ return sentry_wrapped_callback BaseHandler.make_view_atomic = sentry_patched_make_view_atomic + + +def _wrap_sync_view(hub, callback): + # type: (Hub, Any) -> Any + @_functools.wraps(callback) + def sentry_wrapped_callback(request, *args, **kwargs): + # type: (Any, *Any, **Any) -> Any + with hub.start_span( + op="django.view", description=request.resolver_match.view_name + ): + return callback(request, *args, **kwargs) + + return sentry_wrapped_callback
{"golden_diff": "diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py\n--- a/sentry_sdk/integrations/django/asgi.py\n+++ b/sentry_sdk/integrations/django/asgi.py\n@@ -6,10 +6,9 @@\n `django.core.handlers.asgi`.\n \"\"\"\n \n-from sentry_sdk import Hub\n+from sentry_sdk import Hub, _functools\n from sentry_sdk._types import MYPY\n \n-from sentry_sdk.integrations.django import DjangoIntegration\n from sentry_sdk.integrations.asgi import SentryAsgiMiddleware\n \n if MYPY:\n@@ -21,6 +20,9 @@\n \n def patch_django_asgi_handler_impl(cls):\n # type: (Any) -> None\n+\n+ from sentry_sdk.integrations.django import DjangoIntegration\n+\n old_app = cls.__call__\n \n async def sentry_patched_asgi_handler(self, scope, receive, send):\n@@ -50,6 +52,9 @@\n \n def patch_channels_asgi_handler_impl(cls):\n # type: (Any) -> None\n+\n+ from sentry_sdk.integrations.django import DjangoIntegration\n+\n old_app = cls.__call__\n \n async def sentry_patched_asgi_handler(self, receive, send):\n@@ -64,3 +69,17 @@\n return await middleware(self.scope)(receive, send)\n \n cls.__call__ = sentry_patched_asgi_handler\n+\n+\n+def wrap_async_view(hub, callback):\n+ # type: (Hub, Any) -> Any\n+ @_functools.wraps(callback)\n+ async def sentry_wrapped_callback(request, *args, **kwargs):\n+ # type: (Any, *Any, **Any) -> Any\n+\n+ with hub.start_span(\n+ op=\"django.view\", description=request.resolver_match.view_name\n+ ):\n+ return await callback(request, *args, **kwargs)\n+\n+ return sentry_wrapped_callback\ndiff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py\n--- a/sentry_sdk/integrations/django/views.py\n+++ b/sentry_sdk/integrations/django/views.py\n@@ -6,6 +6,18 @@\n from typing import Any\n \n \n+try:\n+ from asyncio import iscoroutinefunction\n+except ImportError:\n+ iscoroutinefunction = None # type: ignore\n+\n+\n+try:\n+ from sentry_sdk.integrations.django.asgi import wrap_async_view\n+except (ImportError, SyntaxError):\n+ wrap_async_view = None # type: ignore\n+\n+\n def patch_views():\n # type: () -> None\n \n@@ -27,13 +39,14 @@\n \n if integration is not None and integration.middleware_spans:\n \n- @_functools.wraps(callback)\n- def sentry_wrapped_callback(request, *args, **kwargs):\n- # type: (Any, *Any, **Any) -> Any\n- with hub.start_span(\n- op=\"django.view\", description=request.resolver_match.view_name\n- ):\n- return callback(request, *args, **kwargs)\n+ if (\n+ iscoroutinefunction is not None\n+ and wrap_async_view is not None\n+ and iscoroutinefunction(callback)\n+ ):\n+ sentry_wrapped_callback = wrap_async_view(hub, callback)\n+ else:\n+ sentry_wrapped_callback = _wrap_sync_view(hub, callback)\n \n else:\n sentry_wrapped_callback = callback\n@@ -41,3 +54,16 @@\n return sentry_wrapped_callback\n \n BaseHandler.make_view_atomic = sentry_patched_make_view_atomic\n+\n+\n+def _wrap_sync_view(hub, callback):\n+ # type: (Hub, Any) -> Any\n+ @_functools.wraps(callback)\n+ def sentry_wrapped_callback(request, *args, **kwargs):\n+ # type: (Any, *Any, **Any) -> Any\n+ with hub.start_span(\n+ op=\"django.view\", description=request.resolver_match.view_name\n+ ):\n+ return callback(request, *args, **kwargs)\n+\n+ return sentry_wrapped_callback\n", "issue": "Django 3.1 async views do not work\nWhen using sentry versions greater than 0.16.3, (tested on 0.18.0), Django 3.1 aysnc views do not work.\r\n\r\n```\r\nlog.py 224 ERROR Internal Server Error: /async_ok\r\nTraceback (most recent call last):\r\n File 
\"/Users/williamchu/dev/sentry-python/.tox/py3.8-django-3.1/lib/python3.8/site-packages/django/core/handlers/exception.py\", line 47, in inner\r\n response = get_response(request)\r\n File \"/Users/williamchu/dev/sentry-python/.tox/py3.8-django-3.1/lib/python3.8/site-packages/django/core/handlers/base.py\", line 186, in _get_response\r\n self.check_response(response, callback)\r\n File \"/Users/williamchu/dev/sentry-python/.tox/py3.8-django-3.1/lib/python3.8/site-packages/django/core/handlers/base.py\", line 312, in check_response\r\n raise ValueError(\r\nValueError: The view tests.integrations.django.myapp.views.async_ok didn't return an HttpResponse object. It returned an unawaited coroutine instead. You may need to add an 'await' into your view.\r\n```\r\n\r\nI have made a branch with a test case to demonstrate this: https://github.com/uptickmetachu/sentry-python/tree/django3.1-test-async-view\r\n\r\n\n", "before_files": [{"content": "from sentry_sdk.hub import Hub\nfrom sentry_sdk._types import MYPY\nfrom sentry_sdk import _functools\n\nif MYPY:\n from typing import Any\n\n\ndef patch_views():\n # type: () -> None\n\n from django.core.handlers.base import BaseHandler\n from sentry_sdk.integrations.django import DjangoIntegration\n\n old_make_view_atomic = BaseHandler.make_view_atomic\n\n @_functools.wraps(old_make_view_atomic)\n def sentry_patched_make_view_atomic(self, *args, **kwargs):\n # type: (Any, *Any, **Any) -> Any\n callback = old_make_view_atomic(self, *args, **kwargs)\n\n # XXX: The wrapper function is created for every request. Find more\n # efficient way to wrap views (or build a cache?)\n\n hub = Hub.current\n integration = hub.get_integration(DjangoIntegration)\n\n if integration is not None and integration.middleware_spans:\n\n @_functools.wraps(callback)\n def sentry_wrapped_callback(request, *args, **kwargs):\n # type: (Any, *Any, **Any) -> Any\n with hub.start_span(\n op=\"django.view\", description=request.resolver_match.view_name\n ):\n return callback(request, *args, **kwargs)\n\n else:\n sentry_wrapped_callback = callback\n\n return sentry_wrapped_callback\n\n BaseHandler.make_view_atomic = sentry_patched_make_view_atomic\n", "path": "sentry_sdk/integrations/django/views.py"}, {"content": "\"\"\"\nInstrumentation for Django 3.0\n\nSince this file contains `async def` it is conditionally imported in\n`sentry_sdk.integrations.django` (depending on the existence of\n`django.core.handlers.asgi`.\n\"\"\"\n\nfrom sentry_sdk import Hub\nfrom sentry_sdk._types import MYPY\n\nfrom sentry_sdk.integrations.django import DjangoIntegration\nfrom sentry_sdk.integrations.asgi import SentryAsgiMiddleware\n\nif MYPY:\n from typing import Any\n from typing import Union\n\n from django.http.response import HttpResponse\n\n\ndef patch_django_asgi_handler_impl(cls):\n # type: (Any) -> None\n old_app = cls.__call__\n\n async def sentry_patched_asgi_handler(self, scope, receive, send):\n # type: (Any, Any, Any, Any) -> Any\n if Hub.current.get_integration(DjangoIntegration) is None:\n return await old_app(self, scope, receive, send)\n\n middleware = SentryAsgiMiddleware(\n old_app.__get__(self, cls), unsafe_context_data=True\n )._run_asgi3\n return await middleware(scope, receive, send)\n\n cls.__call__ = sentry_patched_asgi_handler\n\n\ndef patch_get_response_async(cls, _before_get_response):\n # type: (Any, Any) -> None\n old_get_response_async = cls.get_response_async\n\n async def sentry_patched_get_response_async(self, request):\n # type: (Any, Any) -> Union[HttpResponse, 
BaseException]\n _before_get_response(request)\n return await old_get_response_async(self, request)\n\n cls.get_response_async = sentry_patched_get_response_async\n\n\ndef patch_channels_asgi_handler_impl(cls):\n # type: (Any) -> None\n old_app = cls.__call__\n\n async def sentry_patched_asgi_handler(self, receive, send):\n # type: (Any, Any, Any) -> Any\n if Hub.current.get_integration(DjangoIntegration) is None:\n return await old_app(self, receive, send)\n\n middleware = SentryAsgiMiddleware(\n lambda _scope: old_app.__get__(self, cls), unsafe_context_data=True\n )\n\n return await middleware(self.scope)(receive, send)\n\n cls.__call__ = sentry_patched_asgi_handler\n", "path": "sentry_sdk/integrations/django/asgi.py"}]}
1,958
955
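
The crash in the record above comes from wrapping an async Django view in a plain function: the wrapper hands the framework an unawaited coroutine. The fix dispatches on `asyncio.iscoroutinefunction` and uses an async wrapper for async callbacks. The sketch below shows just that dispatch, with a stand-in `span` timer instead of the real `hub.start_span`; the helper names are assumptions, not the sentry_sdk API.

```python
import asyncio
import functools
import time
from contextlib import contextmanager


@contextmanager
def span(description):
    # Stand-in for a tracing span: just times the wrapped call.
    start = time.monotonic()
    try:
        yield
    finally:
        print(f"{description} took {time.monotonic() - start:.6f}s")


def wrap_view(callback):
    # Async views need an async wrapper, otherwise the caller receives an
    # unawaited coroutine (the ValueError shown in the issue above).
    if asyncio.iscoroutinefunction(callback):
        @functools.wraps(callback)
        async def wrapped_async(*args, **kwargs):
            with span(callback.__name__):
                return await callback(*args, **kwargs)
        return wrapped_async

    @functools.wraps(callback)
    def wrapped_sync(*args, **kwargs):
        with span(callback.__name__):
            return callback(*args, **kwargs)
    return wrapped_sync


def sync_view():
    return "ok"


async def async_view():
    await asyncio.sleep(0)
    return "ok"


print(wrap_view(sync_view)())                # runs immediately
print(asyncio.run(wrap_view(async_view)()))  # asyncio.run awaits the wrapped coroutine
```
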
gh_patches_debug_797
rasdani/github-patches
git_diff
pre-commit__pre-commit-167
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> npmrc causes npm to install to home directory instead of nodeenv Here is what happened when I tried to get eslint installed: ``` $ pre-commit run --all-files eslint..............................................................................................................................................................................................................................................................................................................Failed hookid: eslint xargs: eslint: No such file or directory ``` Moving .npmrc to nope.npmrc fixed the issue. </issue> <code> [start of setup.py] 1 from setuptools import find_packages 2 from setuptools import setup 3 4 5 setup( 6 name='pre_commit', 7 description=( 8 'A framework for managing and maintaining multi-language pre-commit ' 9 'hooks.' 10 ), 11 url='https://github.com/pre-commit/pre-commit', 12 version='0.2.9', 13 14 author='Anthony Sottile', 15 author_email='[email protected]', 16 17 platforms='linux', 18 classifiers=[ 19 'License :: OSI Approved :: MIT License', 20 'Programming Language :: Python :: 2', 21 'Programming Language :: Python :: 2.6', 22 'Programming Language :: Python :: 2.7', 23 'Programming Language :: Python :: 3', 24 'Programming Language :: Python :: 3.3', 25 'Programming Language :: Python :: 3.4', 26 'Programming Language :: Python :: Implementation :: CPython', 27 'Programming Language :: Python :: Implementation :: PyPy', 28 ], 29 30 packages=find_packages('.', exclude=('tests*', 'testing*')), 31 package_data={ 32 'pre_commit': [ 33 'resources/pre-commit-hook', 34 'resources/rbenv.tar.gz', 35 'resources/ruby-build.tar.gz', 36 'resources/ruby-download.tar.gz', 37 ] 38 }, 39 install_requires=[ 40 'argparse', 41 'aspy.yaml', 42 'cached-property', 43 'jsonschema', 44 'nodeenv>=0.9.4', 45 'ordereddict', 46 'plumbum', 47 'pyyaml', 48 'simplejson', 49 'virtualenv', 50 ], 51 entry_points={ 52 'console_scripts': [ 53 'pre-commit = pre_commit.main:main', 54 'validate-config = pre_commit.clientlib.validate_config:run', 55 'validate-manifest = pre_commit.clientlib.validate_manifest:run', 56 ], 57 }, 58 ) 59 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -41,7 +41,7 @@ 'aspy.yaml', 'cached-property', 'jsonschema', - 'nodeenv>=0.9.4', + 'nodeenv>=0.11.1', 'ordereddict', 'plumbum', 'pyyaml',
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -41,7 +41,7 @@\n 'aspy.yaml',\n 'cached-property',\n 'jsonschema',\n- 'nodeenv>=0.9.4',\n+ 'nodeenv>=0.11.1',\n 'ordereddict',\n 'plumbum',\n 'pyyaml',\n", "issue": "npmrc causes npm to install to home directory instead of nodeenv\nHere is what happened when I tried to get eslint installed: \n\n```\n$ pre-commit run --all-files\neslint..............................................................................................................................................................................................................................................................................................................Failed\nhookid: eslint\n\nxargs: eslint: No such file or directory\n```\n\nMoving .npmrc to nope.npmrc fixed the issue.\n\n", "before_files": [{"content": "from setuptools import find_packages\nfrom setuptools import setup\n\n\nsetup(\n name='pre_commit',\n description=(\n 'A framework for managing and maintaining multi-language pre-commit '\n 'hooks.'\n ),\n url='https://github.com/pre-commit/pre-commit',\n version='0.2.9',\n\n author='Anthony Sottile',\n author_email='[email protected]',\n\n platforms='linux',\n classifiers=[\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy',\n ],\n\n packages=find_packages('.', exclude=('tests*', 'testing*')),\n package_data={\n 'pre_commit': [\n 'resources/pre-commit-hook',\n 'resources/rbenv.tar.gz',\n 'resources/ruby-build.tar.gz',\n 'resources/ruby-download.tar.gz',\n ]\n },\n install_requires=[\n 'argparse',\n 'aspy.yaml',\n 'cached-property',\n 'jsonschema',\n 'nodeenv>=0.9.4',\n 'ordereddict',\n 'plumbum',\n 'pyyaml',\n 'simplejson',\n 'virtualenv',\n ],\n entry_points={\n 'console_scripts': [\n 'pre-commit = pre_commit.main:main',\n 'validate-config = pre_commit.clientlib.validate_config:run',\n 'validate-manifest = pre_commit.clientlib.validate_manifest:run',\n ],\n },\n)\n", "path": "setup.py"}]}
1,097
90
gh_patches_debug_20356
rasdani/github-patches
git_diff
cloud-custodian__cloud-custodian-5615
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> tools/c7n-org - azure subscription generation includes disabled subscriptions per report on gitter. ngibbondaimler - We used azuresubs.py from c7n-org to generate a list of our subscriptions, however it's picking up disabled subscriptions and c7n-org throws an exception when it tries to read from a disabled sub to apply policy. Is there a suggested workaround for this? Stefan Gordon - I believe the return from the subscription API list call includes a state attribute, something like "state": "Enabled" - So for your scenario perhaps you can just add a check on that value at https://github.com/cloud-custodian/cloud-custodian/blob/master/tools/c7n_org/scripts/azuresubs.py#L34 Additionally if you can file an issue with the error you are getting in c7n-org I would say that we should update it to handle this error properly. Generating a list without those is an easy workaround but it shouldn't fail on them. </issue> <code> [start of tools/c7n_org/scripts/azuresubs.py] 1 # Copyright 2018 Capital One Services, LLC 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 import click 16 from c7n_azure.session import Session 17 from c7n.utils import yaml_dump 18 from azure.mgmt.resource.subscriptions import SubscriptionClient 19 20 21 @click.command() 22 @click.option( 23 '-f', '--output', type=click.File('w'), 24 help="File to store the generated config (default stdout)") 25 def main(output): 26 """ 27 Generate a c7n-org subscriptions config file 28 """ 29 30 client = SubscriptionClient(Session().get_credentials()) 31 subs = [sub.serialize(True) for sub in client.subscriptions.list()] 32 results = [] 33 for sub in subs: 34 sub_info = { 35 'subscription_id': sub['subscriptionId'], 36 'name': sub['displayName'] 37 } 38 results.append(sub_info) 39 40 print(yaml_dump({'subscriptions': results}), file=output) 41 42 43 if __name__ == '__main__': 44 main() 45 [end of tools/c7n_org/scripts/azuresubs.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/tools/c7n_org/scripts/azuresubs.py b/tools/c7n_org/scripts/azuresubs.py --- a/tools/c7n_org/scripts/azuresubs.py +++ b/tools/c7n_org/scripts/azuresubs.py @@ -22,7 +22,12 @@ @click.option( '-f', '--output', type=click.File('w'), help="File to store the generated config (default stdout)") -def main(output): [email protected]( + '-s', '--state', multiple=True, type=click.Choice( + ['Enabled', 'Warned', 'PastDue', 'Disabled', 'Deleted']), + default=('Enabled',), + help="File to store the generated config (default stdout)") +def main(output, state): """ Generate a c7n-org subscriptions config file """ @@ -31,6 +36,8 @@ subs = [sub.serialize(True) for sub in client.subscriptions.list()] results = [] for sub in subs: + if state and sub['state'] not in state: + continue sub_info = { 'subscription_id': sub['subscriptionId'], 'name': sub['displayName']
{"golden_diff": "diff --git a/tools/c7n_org/scripts/azuresubs.py b/tools/c7n_org/scripts/azuresubs.py\n--- a/tools/c7n_org/scripts/azuresubs.py\n+++ b/tools/c7n_org/scripts/azuresubs.py\n@@ -22,7 +22,12 @@\n @click.option(\n '-f', '--output', type=click.File('w'),\n help=\"File to store the generated config (default stdout)\")\n-def main(output):\[email protected](\n+ '-s', '--state', multiple=True, type=click.Choice(\n+ ['Enabled', 'Warned', 'PastDue', 'Disabled', 'Deleted']),\n+ default=('Enabled',),\n+ help=\"File to store the generated config (default stdout)\")\n+def main(output, state):\n \"\"\"\n Generate a c7n-org subscriptions config file\n \"\"\"\n@@ -31,6 +36,8 @@\n subs = [sub.serialize(True) for sub in client.subscriptions.list()]\n results = []\n for sub in subs:\n+ if state and sub['state'] not in state:\n+ continue\n sub_info = {\n 'subscription_id': sub['subscriptionId'],\n 'name': sub['displayName']\n", "issue": "tools/c7n-org - azure subscription generation includes disabled subscriptions\n\r\nper report on gitter.\r\n\r\nngibbondaimler - We used azuresubs.py from c7n-org to generate a list of our subscriptions, however it's picking up disabled subscriptions and c7n-org throws an exception when it tries to read from a disabled sub to apply policy. Is there a suggested workaround for this?\r\n\r\n\r\nStefan Gordon -\r\nI believe the return from the subscription API list call includes a state attribute, something like \"state\": \"Enabled\" - So for your scenario perhaps you can just add a check on that value at https://github.com/cloud-custodian/cloud-custodian/blob/master/tools/c7n_org/scripts/azuresubs.py#L34\r\nAdditionally if you can file an issue with the error you are getting in c7n-org I would say that we should update it to handle this error properly. Generating a list without those is an easy workaround but it shouldn't fail on them.\r\n\n", "before_files": [{"content": "# Copyright 2018 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport click\nfrom c7n_azure.session import Session\nfrom c7n.utils import yaml_dump\nfrom azure.mgmt.resource.subscriptions import SubscriptionClient\n\n\[email protected]()\[email protected](\n '-f', '--output', type=click.File('w'),\n help=\"File to store the generated config (default stdout)\")\ndef main(output):\n \"\"\"\n Generate a c7n-org subscriptions config file\n \"\"\"\n\n client = SubscriptionClient(Session().get_credentials())\n subs = [sub.serialize(True) for sub in client.subscriptions.list()]\n results = []\n for sub in subs:\n sub_info = {\n 'subscription_id': sub['subscriptionId'],\n 'name': sub['displayName']\n }\n results.append(sub_info)\n\n print(yaml_dump({'subscriptions': results}), file=output)\n\n\nif __name__ == '__main__':\n main()\n", "path": "tools/c7n_org/scripts/azuresubs.py"}]}
1,152
266
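
The change in the record above filters the Azure subscription list by its `state` attribute before writing the c7n-org config, keeping only `Enabled` subscriptions by default. The sketch below shows just that filtering step on hand-written sample records; in the real script the records come from `SubscriptionClient.subscriptions.list()` as shown in the code above.

```python
# Filter serialized subscription records by state before emitting the
# c7n-org config entries. The sample data here is fabricated for illustration.
ALLOWED_STATES = {"Enabled"}

subscriptions = [
    {"subscriptionId": "1111", "displayName": "prod", "state": "Enabled"},
    {"subscriptionId": "2222", "displayName": "old-poc", "state": "Disabled"},
    {"subscriptionId": "3333", "displayName": "billing-hold", "state": "PastDue"},
]


def to_config(subs, allowed_states=ALLOWED_STATES):
    results = []
    for sub in subs:
        # Skip anything not in the allowed set (Disabled, PastDue, ...).
        if allowed_states and sub["state"] not in allowed_states:
            continue
        results.append(
            {"subscription_id": sub["subscriptionId"], "name": sub["displayName"]}
        )
    return {"subscriptions": results}


print(to_config(subscriptions))
# {'subscriptions': [{'subscription_id': '1111', 'name': 'prod'}]}
```
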
gh_patches_debug_33390
rasdani/github-patches
git_diff
kivy__kivy-1947
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> TextInput crashes while using clipboard bubble After opening clipboard bubble, keyboard doesn't close anymore. Moreover, when closing application, it gives: ``` I/python ( 4932): [INFO ] [Clipboard ] Provider: dummy(['clipboard_android'] ignored) I/python ( 4932): [INFO ] [Base ] Leaving application in progress... I/python ( 4932): Python for android ended. W/dalvikvm( 4932): threadid=10: thread exiting with uncaught exception (group=0x4001d560) E/AndroidRuntime( 4932): FATAL EXCEPTION: Thread-11 E/AndroidRuntime( 4932): java.lang.NoClassDefFoundError: android.content.ClipData E/AndroidRuntime( 4932): at org.renpy.android.SDLSurfaceView.nativeInit(Native Method) E/AndroidRuntime( 4932): at org.renpy.android.SDLSurfaceView.run(SDLSurfaceView.java:725) E/AndroidRuntime( 4932): at java.lang.Thread.run(Thread.java:1019) E/AndroidRuntime( 4932): Caused by: java.lang.ClassNotFoundException: android.content.ClipData in loader dalvik.system.PathClassLoader[/data/app/org.emanuele.LyricsDL-2.apk] E/AndroidRuntime( 4932): at dalvik.system.PathClassLoader.findClass(PathClassLoader.java:240) E/AndroidRuntime( 4932): at java.lang.ClassLoader.loadClass(ClassLoader.java:551) E/AndroidRuntime( 4932): at java.lang.ClassLoader.loadClass(ClassLoader.java:511) E/AndroidRuntime( 4932): ... 3 more ``` If specifing "use_bubble: False" it works correctly, but clipboard is obviously disabled. android sdk 14 kivy 1.8.0 ## <bountysource-plugin> Want to back this issue? **[Post a bounty on it!](https://www.bountysource.com/issues/1436926-textinput-crashes-while-using-clipboard-bubble?utm_campaign=plugin&utm_content=tracker%2F42681&utm_medium=issues&utm_source=github)** We accept bounties via [Bountysource](https://www.bountysource.com/?utm_campaign=plugin&utm_content=tracker%2F42681&utm_medium=issues&utm_source=github). </bountysource-plugin> </issue> <code> [start of kivy/core/clipboard/clipboard_android.py] 1 ''' 2 Clipboard Android 3 ================= 4 5 Android implementation of Clipboard provider, using Pyjnius. 
6 ''' 7 8 __all__ = ('ClipboardAndroid', ) 9 10 from kivy.core.clipboard import ClipboardBase 11 from jnius import autoclass 12 from android.runnable import run_on_ui_thread 13 14 AndroidString = autoclass('java.lang.String') 15 PythonActivity = autoclass('org.renpy.android.PythonActivity') 16 Context = autoclass('android.content.Context') 17 ClipData = autoclass('android.content.ClipData') 18 ClipDescription = autoclass('android.content.ClipDescription') 19 20 21 class ClipboardAndroid(ClipboardBase): 22 23 def __init__(self): 24 super(ClipboardAndroid, self).__init__() 25 self._clipboard = None 26 self._data = dict() 27 self._data['text/plain'] = None 28 self._data['application/data'] = None 29 PythonActivity._clipboard = None 30 31 def get(self, mimetype='text/plain'): 32 return self._get(mimetype) 33 34 def put(self, data, mimetype='text/plain'): 35 self._set(data, mimetype) 36 37 def get_types(self): 38 return list(self._data.keys()) 39 40 @run_on_ui_thread 41 def _initialize_clipboard(self): 42 PythonActivity._clipboard = PythonActivity.getSystemService( 43 Context.CLIPBOARD_SERVICE) 44 45 def _get_clipboard(f): 46 def called(*args, **kargs): 47 self = args[0] 48 if not PythonActivity._clipboard: 49 self._initialize_clipboard() 50 import time 51 while not PythonActivity._clipboard: 52 time.sleep(.01) 53 return f(*args, **kargs) 54 return called 55 56 @_get_clipboard 57 def _get(self, mimetype='text/plain'): 58 clippy = PythonActivity._clipboard 59 primary_clip = clippy.getPrimaryClip() 60 if primary_clip and clippy.getPrimaryClipDescription().hasMimeType( 61 ClipDescription.MIMETYPE_TEXT_PLAIN): 62 data = primary_clip.getItemAt(0).getText().toString() 63 else: 64 # TODO: non text data types Not yet implemented 65 data = '' 66 return data 67 68 @_get_clipboard 69 def _set(self, data, mimetype): 70 clippy = PythonActivity._clipboard 71 new_clip = ClipData.newPlainText(AndroidString(""), 72 AndroidString(data)) 73 # put text data onto clipboard 74 clippy.setPrimaryClip(new_clip) 75 [end of kivy/core/clipboard/clipboard_android.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/kivy/core/clipboard/clipboard_android.py b/kivy/core/clipboard/clipboard_android.py --- a/kivy/core/clipboard/clipboard_android.py +++ b/kivy/core/clipboard/clipboard_android.py @@ -14,8 +14,8 @@ AndroidString = autoclass('java.lang.String') PythonActivity = autoclass('org.renpy.android.PythonActivity') Context = autoclass('android.content.Context') -ClipData = autoclass('android.content.ClipData') -ClipDescription = autoclass('android.content.ClipDescription') +VER = autoclass('android.os.Build$VERSION') +sdk = VER.SDK_INT class ClipboardAndroid(ClipboardBase): @@ -56,19 +56,29 @@ @_get_clipboard def _get(self, mimetype='text/plain'): clippy = PythonActivity._clipboard - primary_clip = clippy.getPrimaryClip() - if primary_clip and clippy.getPrimaryClipDescription().hasMimeType( - ClipDescription.MIMETYPE_TEXT_PLAIN): - data = primary_clip.getItemAt(0).getText().toString() + if sdk < 11: + data = clippy.getText().toString() else: - # TODO: non text data types Not yet implemented - data = '' + ClipDescription = autoclass('android.content.ClipDescription') + primary_clip = clippy.getPrimaryClip() + if primary_clip and clippy.getPrimaryClipDescription().hasMimeType( + ClipDescription.MIMETYPE_TEXT_PLAIN): + data = primary_clip.getItemAt(0).getText().toString() + else: + # TODO: non text data types Not yet implemented + data = '' return data @_get_clipboard def _set(self, data, mimetype): clippy = PythonActivity._clipboard - new_clip = ClipData.newPlainText(AndroidString(""), + + if sdk < 11: + #versions previous to honeycomb + clippy.setText(AndroidString(data)) + else: + ClipData = autoclass('android.content.ClipData') + new_clip = ClipData.newPlainText(AndroidString(""), AndroidString(data)) - # put text data onto clipboard - clippy.setPrimaryClip(new_clip) + # put text data onto clipboard + clippy.setPrimaryClip(new_clip)
{"golden_diff": "diff --git a/kivy/core/clipboard/clipboard_android.py b/kivy/core/clipboard/clipboard_android.py\n--- a/kivy/core/clipboard/clipboard_android.py\n+++ b/kivy/core/clipboard/clipboard_android.py\n@@ -14,8 +14,8 @@\n AndroidString = autoclass('java.lang.String')\n PythonActivity = autoclass('org.renpy.android.PythonActivity')\n Context = autoclass('android.content.Context')\n-ClipData = autoclass('android.content.ClipData')\n-ClipDescription = autoclass('android.content.ClipDescription')\n+VER = autoclass('android.os.Build$VERSION')\n+sdk = VER.SDK_INT\n \n \n class ClipboardAndroid(ClipboardBase):\n@@ -56,19 +56,29 @@\n @_get_clipboard\n def _get(self, mimetype='text/plain'):\n clippy = PythonActivity._clipboard\n- primary_clip = clippy.getPrimaryClip()\n- if primary_clip and clippy.getPrimaryClipDescription().hasMimeType(\n- ClipDescription.MIMETYPE_TEXT_PLAIN):\n- data = primary_clip.getItemAt(0).getText().toString()\n+ if sdk < 11:\n+ data = clippy.getText().toString()\n else:\n- # TODO: non text data types Not yet implemented\n- data = ''\n+ ClipDescription = autoclass('android.content.ClipDescription')\n+ primary_clip = clippy.getPrimaryClip()\n+ if primary_clip and clippy.getPrimaryClipDescription().hasMimeType(\n+ ClipDescription.MIMETYPE_TEXT_PLAIN):\n+ data = primary_clip.getItemAt(0).getText().toString()\n+ else:\n+ # TODO: non text data types Not yet implemented\n+ data = ''\n return data\n \n @_get_clipboard\n def _set(self, data, mimetype):\n clippy = PythonActivity._clipboard\n- new_clip = ClipData.newPlainText(AndroidString(\"\"),\n+\n+ if sdk < 11:\n+ #versions previous to honeycomb\n+ clippy.setText(AndroidString(data))\n+ else:\n+ ClipData = autoclass('android.content.ClipData')\n+ new_clip = ClipData.newPlainText(AndroidString(\"\"),\n AndroidString(data))\n- # put text data onto clipboard\n- clippy.setPrimaryClip(new_clip)\n+ # put text data onto clipboard\n+ clippy.setPrimaryClip(new_clip)\n", "issue": "TextInput crashes while using clipboard bubble\nAfter opening clipboard bubble, keyboard doesn't close anymore.\nMoreover, when closing application, it gives:\n\n```\nI/python ( 4932): [INFO ] [Clipboard ] Provider: dummy(['clipboard_android'] ignored)\nI/python ( 4932): [INFO ] [Base ] Leaving application in progress...\nI/python ( 4932): Python for android ended.\nW/dalvikvm( 4932): threadid=10: thread exiting with uncaught exception (group=0x4001d560)\nE/AndroidRuntime( 4932): FATAL EXCEPTION: Thread-11\nE/AndroidRuntime( 4932): java.lang.NoClassDefFoundError: android.content.ClipData\nE/AndroidRuntime( 4932): at org.renpy.android.SDLSurfaceView.nativeInit(Native Method)\nE/AndroidRuntime( 4932): at org.renpy.android.SDLSurfaceView.run(SDLSurfaceView.java:725)\nE/AndroidRuntime( 4932): at java.lang.Thread.run(Thread.java:1019)\nE/AndroidRuntime( 4932): Caused by: java.lang.ClassNotFoundException: android.content.ClipData in loader dalvik.system.PathClassLoader[/data/app/org.emanuele.LyricsDL-2.apk]\nE/AndroidRuntime( 4932): at dalvik.system.PathClassLoader.findClass(PathClassLoader.java:240)\nE/AndroidRuntime( 4932): at java.lang.ClassLoader.loadClass(ClassLoader.java:551)\nE/AndroidRuntime( 4932): at java.lang.ClassLoader.loadClass(ClassLoader.java:511)\nE/AndroidRuntime( 4932): ... 3 more\n```\n\nIf specifing \"use_bubble: False\" it works correctly, but clipboard is obviously disabled.\n\nandroid sdk 14\nkivy 1.8.0\n## <bountysource-plugin>\n\nWant to back this issue? 
**[Post a bounty on it!](https://www.bountysource.com/issues/1436926-textinput-crashes-while-using-clipboard-bubble?utm_campaign=plugin&utm_content=tracker%2F42681&utm_medium=issues&utm_source=github)** We accept bounties via [Bountysource](https://www.bountysource.com/?utm_campaign=plugin&utm_content=tracker%2F42681&utm_medium=issues&utm_source=github).\n</bountysource-plugin>\n\n", "before_files": [{"content": "'''\nClipboard Android\n=================\n\nAndroid implementation of Clipboard provider, using Pyjnius.\n'''\n\n__all__ = ('ClipboardAndroid', )\n\nfrom kivy.core.clipboard import ClipboardBase\nfrom jnius import autoclass\nfrom android.runnable import run_on_ui_thread\n\nAndroidString = autoclass('java.lang.String')\nPythonActivity = autoclass('org.renpy.android.PythonActivity')\nContext = autoclass('android.content.Context')\nClipData = autoclass('android.content.ClipData')\nClipDescription = autoclass('android.content.ClipDescription')\n\n\nclass ClipboardAndroid(ClipboardBase):\n\n def __init__(self):\n super(ClipboardAndroid, self).__init__()\n self._clipboard = None\n self._data = dict()\n self._data['text/plain'] = None\n self._data['application/data'] = None\n PythonActivity._clipboard = None\n\n def get(self, mimetype='text/plain'):\n return self._get(mimetype)\n\n def put(self, data, mimetype='text/plain'):\n self._set(data, mimetype)\n\n def get_types(self):\n return list(self._data.keys())\n\n @run_on_ui_thread\n def _initialize_clipboard(self):\n PythonActivity._clipboard = PythonActivity.getSystemService(\n Context.CLIPBOARD_SERVICE)\n\n def _get_clipboard(f):\n def called(*args, **kargs):\n self = args[0]\n if not PythonActivity._clipboard:\n self._initialize_clipboard()\n import time\n while not PythonActivity._clipboard:\n time.sleep(.01)\n return f(*args, **kargs)\n return called\n\n @_get_clipboard\n def _get(self, mimetype='text/plain'):\n clippy = PythonActivity._clipboard\n primary_clip = clippy.getPrimaryClip()\n if primary_clip and clippy.getPrimaryClipDescription().hasMimeType(\n ClipDescription.MIMETYPE_TEXT_PLAIN):\n data = primary_clip.getItemAt(0).getText().toString()\n else:\n # TODO: non text data types Not yet implemented\n data = ''\n return data\n\n @_get_clipboard\n def _set(self, data, mimetype):\n clippy = PythonActivity._clipboard\n new_clip = ClipData.newPlainText(AndroidString(\"\"),\n AndroidString(data))\n # put text data onto clipboard\n clippy.setPrimaryClip(new_clip)\n", "path": "kivy/core/clipboard/clipboard_android.py"}]}
1,777
515
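
The golden diff in the record above avoids the `NoClassDefFoundError` by checking `android.os.Build$VERSION.SDK_INT` first and only resolving `android.content.ClipData` lazily, inside the method, on API 11+. A minimal sketch of that guard pattern with pyjnius — it assumes a python-for-android runtime where these Java classes are reachable, and the helper name is illustrative, not Kivy API:

```python
from jnius import autoclass

AndroidString = autoclass('java.lang.String')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
Context = autoclass('android.content.Context')

# android.content.ClipData only exists on API level 11+ (Honeycomb), so read
# the SDK level once and never reference the class on older devices.
SDK_INT = autoclass('android.os.Build$VERSION').SDK_INT


def copy_plain_text(text):
    """Put plain text on the Android clipboard on both old and new API levels."""
    clippy = PythonActivity.getSystemService(Context.CLIPBOARD_SERVICE)
    if SDK_INT < 11:
        # Pre-Honeycomb ClipboardManager only offers setText()/getText().
        clippy.setText(AndroidString(text))
    else:
        # Resolve ClipData lazily so it is never class-loaded on old devices.
        ClipData = autoclass('android.content.ClipData')
        clip = ClipData.newPlainText(AndroidString(''), AndroidString(text))
        clippy.setPrimaryClip(clip)
```

The key detail is that the `autoclass('android.content.ClipData')` lookup sits behind the version check; the real Kivy provider additionally fetches the clipboard service on the UI thread, which this sketch skips.
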
gh_patches_debug_9553
rasdani/github-patches
git_diff
mathesar-foundation__mathesar-3064
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Not really possible to override settings ## Description I want to override the mathsar settings in order to allow LDAP login using django-auth-ldap. I changed the `config/settings/production.py` file that mentions: `# Override default settings ` and added the needed configuration. This worked fine however that file is under version control so if it is changed on the origin I'll get a conflict and would need to also merge my changes. The usual way to implement this functionality is to add a *non tracked* `local.py` file that would contain any extra configuration for each environment (either dev or production) and import *that* file from the corresponding file. I.e the production.py would be changed to: ```python # Override default settings try: from .local import * except ImportError: pass ``` This way, if the local.py file is there it will be used to override the config but if it isnt' there it will be ignored. ## Expected behavior Being able to override django settings for my environment *without* keeping a fork. ## To Reproduce Change the production.py file and you'll see that it's version controlled so it can't be easily changed! ## Environment Not needed ## Additional context I'd be happy to provide a PR implementing the functionality described here, i.e allow an untracked local.py file to override django settings for each *user/environment*. </issue> <code> [start of config/settings/development.py] 1 from config.settings.common_settings import * # noqa 2 3 # Override default settings 4 [end of config/settings/development.py] [start of config/settings/production.py] 1 from config.settings.common_settings import * # noqa 2 3 # Override default settings 4 [end of config/settings/production.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/config/settings/development.py b/config/settings/development.py --- a/config/settings/development.py +++ b/config/settings/development.py @@ -1,3 +1,10 @@ from config.settings.common_settings import * # noqa # Override default settings + + +# Use a local.py module for settings that shouldn't be version tracked +try: + from .local import * # noqa +except ImportError: + pass diff --git a/config/settings/production.py b/config/settings/production.py --- a/config/settings/production.py +++ b/config/settings/production.py @@ -1,3 +1,10 @@ from config.settings.common_settings import * # noqa # Override default settings + + +# Use a local.py module for settings that shouldn't be version tracked +try: + from .local import * # noqa +except ImportError: + pass
{"golden_diff": "diff --git a/config/settings/development.py b/config/settings/development.py\n--- a/config/settings/development.py\n+++ b/config/settings/development.py\n@@ -1,3 +1,10 @@\n from config.settings.common_settings import * # noqa\n \n # Override default settings\n+\n+\n+# Use a local.py module for settings that shouldn't be version tracked\n+try:\n+ from .local import * # noqa\n+except ImportError:\n+ pass\ndiff --git a/config/settings/production.py b/config/settings/production.py\n--- a/config/settings/production.py\n+++ b/config/settings/production.py\n@@ -1,3 +1,10 @@\n from config.settings.common_settings import * # noqa\n \n # Override default settings\n+\n+\n+# Use a local.py module for settings that shouldn't be version tracked\n+try:\n+ from .local import * # noqa \n+except ImportError:\n+ pass\n", "issue": "Not really possible to override settings\n## Description\r\nI want to override the mathsar settings in order to allow LDAP login using django-auth-ldap. I changed the `config/settings/production.py` file that mentions: `# Override default settings ` and added the needed configuration. \r\n\r\nThis worked fine however that file is under version control so if it is changed on the origin I'll get a conflict and would need to also merge my changes. The usual way to implement this functionality is to add a *non tracked* `local.py` file that would contain any extra configuration for each environment (either dev or production) and import *that* file from the corresponding file. I.e the production.py would be changed to:\r\n\r\n```python\r\n# Override default settings \r\n\r\ntry:\r\n from .local import *\r\nexcept ImportError:\r\n pass\r\n```\r\n\r\nThis way, if the local.py file is there it will be used to override the config but if it isnt' there it will be ignored. \r\n\r\n## Expected behavior\r\nBeing able to override django settings for my environment *without* keeping a fork.\r\n\r\n## To Reproduce\r\nChange the production.py file and you'll see that it's version controlled so it can't be easily changed!\r\n\r\n## Environment\r\nNot needed\r\n\r\n## Additional context\r\nI'd be happy to provide a PR implementing the functionality described here, i.e allow an untracked local.py file to override django settings for each *user/environment*.\n", "before_files": [{"content": "from config.settings.common_settings import * # noqa\n\n# Override default settings\n", "path": "config/settings/development.py"}, {"content": "from config.settings.common_settings import * # noqa\n\n# Override default settings\n", "path": "config/settings/production.py"}]}
886
193
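
The diff in this record is the entire fix: both settings modules gain an optional import of an untracked `local.py`, wrapped in `try/except ImportError` so a checkout without the file behaves exactly as before. A sketch of the deployment-side half that pairs with it — the values are hypothetical examples (the LDAP backend is the use case named in the issue), not Mathesar defaults:

```python
# config/settings/local.py -- intentionally untracked; one copy per deployment.
# Because production.py/development.py import it last, names set here win
# over anything pulled in from common_settings.
DEBUG = False
ALLOWED_HOSTS = ["mathesar.example.org"]           # hypothetical hostname
AUTHENTICATION_BACKENDS = [
    "django_auth_ldap.backend.LDAPBackend",        # enables LDAP login
    "django.contrib.auth.backends.ModelBackend",   # keep local accounts working
]
```
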
gh_patches_debug_4675
rasdani/github-patches
git_diff
pypa__pip-5931
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> pip uses deprecated SafeConfigParser * Pip version: 9.0.1 * Python version: 3.6.1 * Operating system: Mac OS X 10.12.4 ### Description: With `error::DeprecationWarning` in `PYTHONWARNINGS`: ``` pip uninstall -y faker /Users/davidchudzicki/.cache/hypothesis-build-runtimes/.tox/py36-full/lib/python3.6/site-packages/pip/pep425tags.py:260: DeprecationWarning: the imp module is deprecated in favour of importlib; see the module's documentation for alternative uses import imp Exception: Traceback (most recent call last): File "/Users/davidchudzicki/.cache/hypothesis-build-runtimes/.tox/py36-full/lib/python3.6/site-packages/pip/basecommand.py", line 215, in main status = self.run(options, args) File "/Users/davidchudzicki/.cache/hypothesis-build-runtimes/.tox/py36-full/lib/python3.6/site-packages/pip/commands/uninstall.py", line 76, in run requirement_set.uninstall(auto_confirm=options.yes) File "/Users/davidchudzicki/.cache/hypothesis-build-runtimes/.tox/py36-full/lib/python3.6/site-packages/pip/req/req_set.py", line 346, in uninstall req.uninstall(auto_confirm=auto_confirm) File "/Users/davidchudzicki/.cache/hypothesis-build-runtimes/.tox/py36-full/lib/python3.6/site-packages/pip/req/req_install.py", line 732, in uninstall config = configparser.SafeConfigParser(**options) File "/Users/davidchudzicki/.cache/hypothesis-build-runtimes/versions/python3.6/lib/python3.6/configparser.py", line 1212, in __init__ DeprecationWarning, stacklevel=2 DeprecationWarning: The SafeConfigParser class has been renamed to ConfigParser in Python 3.2. This alias will be removed in future versions. Use ConfigParser directly instead. ``` </issue> <code> [start of src/pip/_internal/vcs/mercurial.py] 1 from __future__ import absolute_import 2 3 import logging 4 import os 5 6 from pip._vendor.six.moves import configparser 7 8 from pip._internal.download import path_to_url 9 from pip._internal.utils.misc import display_path, make_vcs_requirement_url 10 from pip._internal.utils.temp_dir import TempDirectory 11 from pip._internal.vcs import VersionControl, vcs 12 13 logger = logging.getLogger(__name__) 14 15 16 class Mercurial(VersionControl): 17 name = 'hg' 18 dirname = '.hg' 19 repo_name = 'clone' 20 schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http') 21 22 def get_base_rev_args(self, rev): 23 return [rev] 24 25 def export(self, location): 26 """Export the Hg repository at the url to the destination location""" 27 with TempDirectory(kind="export") as temp_dir: 28 self.unpack(temp_dir.path) 29 30 self.run_command( 31 ['archive', location], show_stdout=False, cwd=temp_dir.path 32 ) 33 34 def fetch_new(self, dest, url, rev_options): 35 rev_display = rev_options.to_display() 36 logger.info( 37 'Cloning hg %s%s to %s', 38 url, 39 rev_display, 40 display_path(dest), 41 ) 42 self.run_command(['clone', '--noupdate', '-q', url, dest]) 43 cmd_args = ['update', '-q'] + rev_options.to_args() 44 self.run_command(cmd_args, cwd=dest) 45 46 def switch(self, dest, url, rev_options): 47 repo_config = os.path.join(dest, self.dirname, 'hgrc') 48 config = configparser.SafeConfigParser() 49 try: 50 config.read(repo_config) 51 config.set('paths', 'default', url) 52 with open(repo_config, 'w') as config_file: 53 config.write(config_file) 54 except (OSError, configparser.NoSectionError) as exc: 55 logger.warning( 56 'Could not switch Mercurial repository to %s: %s', url, exc, 57 ) 58 else: 59 cmd_args = ['update', '-q'] + 
rev_options.to_args() 60 self.run_command(cmd_args, cwd=dest) 61 62 def update(self, dest, url, rev_options): 63 self.run_command(['pull', '-q'], cwd=dest) 64 cmd_args = ['update', '-q'] + rev_options.to_args() 65 self.run_command(cmd_args, cwd=dest) 66 67 def get_url(self, location): 68 url = self.run_command( 69 ['showconfig', 'paths.default'], 70 show_stdout=False, cwd=location).strip() 71 if self._is_local_repository(url): 72 url = path_to_url(url) 73 return url.strip() 74 75 def get_revision(self, location): 76 current_revision = self.run_command( 77 ['parents', '--template={rev}'], 78 show_stdout=False, cwd=location).strip() 79 return current_revision 80 81 def get_revision_hash(self, location): 82 current_rev_hash = self.run_command( 83 ['parents', '--template={node}'], 84 show_stdout=False, cwd=location).strip() 85 return current_rev_hash 86 87 def get_src_requirement(self, dist, location): 88 repo = self.get_url(location) 89 if not repo.lower().startswith('hg:'): 90 repo = 'hg+' + repo 91 current_rev_hash = self.get_revision_hash(location) 92 egg_project_name = dist.egg_name().split('-', 1)[0] 93 return make_vcs_requirement_url(repo, current_rev_hash, 94 egg_project_name) 95 96 def is_commit_id_equal(self, dest, name): 97 """Always assume the versions don't match""" 98 return False 99 100 101 vcs.register(Mercurial) 102 [end of src/pip/_internal/vcs/mercurial.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py --- a/src/pip/_internal/vcs/mercurial.py +++ b/src/pip/_internal/vcs/mercurial.py @@ -45,7 +45,7 @@ def switch(self, dest, url, rev_options): repo_config = os.path.join(dest, self.dirname, 'hgrc') - config = configparser.SafeConfigParser() + config = configparser.RawConfigParser() try: config.read(repo_config) config.set('paths', 'default', url)
{"golden_diff": "diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py\n--- a/src/pip/_internal/vcs/mercurial.py\n+++ b/src/pip/_internal/vcs/mercurial.py\n@@ -45,7 +45,7 @@\n \n def switch(self, dest, url, rev_options):\n repo_config = os.path.join(dest, self.dirname, 'hgrc')\n- config = configparser.SafeConfigParser()\n+ config = configparser.RawConfigParser()\n try:\n config.read(repo_config)\n config.set('paths', 'default', url)\n", "issue": "pip uses deprecated SafeConfigParser\n* Pip version: 9.0.1\r\n* Python version: 3.6.1\r\n* Operating system: Mac OS X 10.12.4\r\n\r\n### Description:\r\n\r\nWith `error::DeprecationWarning` in `PYTHONWARNINGS`:\r\n\r\n```\r\npip uninstall -y faker\r\n/Users/davidchudzicki/.cache/hypothesis-build-runtimes/.tox/py36-full/lib/python3.6/site-packages/pip/pep425tags.py:260: DeprecationWarning: the imp module is deprecated in favour of importlib; see the module's documentation for alternative uses\r\n import imp\r\nException:\r\nTraceback (most recent call last):\r\n File \"/Users/davidchudzicki/.cache/hypothesis-build-runtimes/.tox/py36-full/lib/python3.6/site-packages/pip/basecommand.py\", line 215, in main\r\n status = self.run(options, args)\r\n File \"/Users/davidchudzicki/.cache/hypothesis-build-runtimes/.tox/py36-full/lib/python3.6/site-packages/pip/commands/uninstall.py\", line 76, in run\r\n requirement_set.uninstall(auto_confirm=options.yes)\r\n File \"/Users/davidchudzicki/.cache/hypothesis-build-runtimes/.tox/py36-full/lib/python3.6/site-packages/pip/req/req_set.py\", line 346, in uninstall\r\n req.uninstall(auto_confirm=auto_confirm)\r\n File \"/Users/davidchudzicki/.cache/hypothesis-build-runtimes/.tox/py36-full/lib/python3.6/site-packages/pip/req/req_install.py\", line 732, in uninstall\r\n config = configparser.SafeConfigParser(**options)\r\n File \"/Users/davidchudzicki/.cache/hypothesis-build-runtimes/versions/python3.6/lib/python3.6/configparser.py\", line 1212, in __init__\r\n DeprecationWarning, stacklevel=2\r\nDeprecationWarning: The SafeConfigParser class has been renamed to ConfigParser in Python 3.2. This alias will be removed in future versions. 
Use ConfigParser directly instead.\r\n```\n", "before_files": [{"content": "from __future__ import absolute_import\n\nimport logging\nimport os\n\nfrom pip._vendor.six.moves import configparser\n\nfrom pip._internal.download import path_to_url\nfrom pip._internal.utils.misc import display_path, make_vcs_requirement_url\nfrom pip._internal.utils.temp_dir import TempDirectory\nfrom pip._internal.vcs import VersionControl, vcs\n\nlogger = logging.getLogger(__name__)\n\n\nclass Mercurial(VersionControl):\n name = 'hg'\n dirname = '.hg'\n repo_name = 'clone'\n schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')\n\n def get_base_rev_args(self, rev):\n return [rev]\n\n def export(self, location):\n \"\"\"Export the Hg repository at the url to the destination location\"\"\"\n with TempDirectory(kind=\"export\") as temp_dir:\n self.unpack(temp_dir.path)\n\n self.run_command(\n ['archive', location], show_stdout=False, cwd=temp_dir.path\n )\n\n def fetch_new(self, dest, url, rev_options):\n rev_display = rev_options.to_display()\n logger.info(\n 'Cloning hg %s%s to %s',\n url,\n rev_display,\n display_path(dest),\n )\n self.run_command(['clone', '--noupdate', '-q', url, dest])\n cmd_args = ['update', '-q'] + rev_options.to_args()\n self.run_command(cmd_args, cwd=dest)\n\n def switch(self, dest, url, rev_options):\n repo_config = os.path.join(dest, self.dirname, 'hgrc')\n config = configparser.SafeConfigParser()\n try:\n config.read(repo_config)\n config.set('paths', 'default', url)\n with open(repo_config, 'w') as config_file:\n config.write(config_file)\n except (OSError, configparser.NoSectionError) as exc:\n logger.warning(\n 'Could not switch Mercurial repository to %s: %s', url, exc,\n )\n else:\n cmd_args = ['update', '-q'] + rev_options.to_args()\n self.run_command(cmd_args, cwd=dest)\n\n def update(self, dest, url, rev_options):\n self.run_command(['pull', '-q'], cwd=dest)\n cmd_args = ['update', '-q'] + rev_options.to_args()\n self.run_command(cmd_args, cwd=dest)\n\n def get_url(self, location):\n url = self.run_command(\n ['showconfig', 'paths.default'],\n show_stdout=False, cwd=location).strip()\n if self._is_local_repository(url):\n url = path_to_url(url)\n return url.strip()\n\n def get_revision(self, location):\n current_revision = self.run_command(\n ['parents', '--template={rev}'],\n show_stdout=False, cwd=location).strip()\n return current_revision\n\n def get_revision_hash(self, location):\n current_rev_hash = self.run_command(\n ['parents', '--template={node}'],\n show_stdout=False, cwd=location).strip()\n return current_rev_hash\n\n def get_src_requirement(self, dist, location):\n repo = self.get_url(location)\n if not repo.lower().startswith('hg:'):\n repo = 'hg+' + repo\n current_rev_hash = self.get_revision_hash(location)\n egg_project_name = dist.egg_name().split('-', 1)[0]\n return make_vcs_requirement_url(repo, current_rev_hash,\n egg_project_name)\n\n def is_commit_id_equal(self, dest, name):\n \"\"\"Always assume the versions don't match\"\"\"\n return False\n\n\nvcs.register(Mercurial)\n", "path": "src/pip/_internal/vcs/mercurial.py"}]}
2,040
141
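
The change above is a one-class swap: the deprecated `configparser.SafeConfigParser` alias — which warns, and therefore crashes under `PYTHONWARNINGS=error::DeprecationWarning` — becomes `RawConfigParser`. The same hgrc-rewriting step in isolation with the non-deprecated class; the function name is illustrative, not pip's API:

```python
import configparser


def point_hgrc_at(repo_config, url):
    """Rewrite the [paths] default entry of a Mercurial hgrc file."""
    # RawConfigParser does no '%' interpolation, so percent-encoded characters
    # in repository URLs pass through untouched.
    config = configparser.RawConfigParser()
    config.read(repo_config)
    if not config.has_section('paths'):
        config.add_section('paths')
    config.set('paths', 'default', url)
    with open(repo_config, 'w') as config_file:
        config.write(config_file)
```
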
gh_patches_debug_3922
rasdani/github-patches
git_diff
sql-machine-learning__elasticdl-773
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Hide TensorBoard service REST call logs Currently these logs appear on master pod's log which is not necessary at user level: ``` I0624 15:50:54.834580 140272641951488 _internal.py:122] ::ffff:10.138.0.35 - - [24/Jun/2019 15:50:54] "GET /data/plugin/scalars/tags HTTP/1.1" 200 - W0624 15:50:55.150964 140556554397440 servicer.py:195] Task result for outdated version 30 dropped I0624 15:50:57.245738 140272633558784 _internal.py:122] ::ffff:10.36.1.1 - - [24/Jun/2019 15:50:57] "GET /data/environment HTTP/1.1" 200 - I0624 15:50:57.250612 140271752836864 _internal.py:122] ::ffff:10.138.0.35 - - [24/Jun/2019 15:50:57] "GET /data/experiments HTTP/1.1" 200 - I0624 15:50:57.252254 140272105903872 _internal.py:122] ::ffff:10.36.1.1 - - [24/Jun/2019 15:50:57] "GET /data/runs HTTP/1.1" 200 - I0624 15:50:57.255696 140272641951488 _internal.py:122] ::ffff:10.138.0.35 - - [24/Jun/2019 15:50:57] "GET /data/plugins_listing HTTP/1.1" 200 - I0624 15:50:57.430979 140272641951488 _internal.py:122] ::ffff:10.138.0.35 - - [24/Jun/2019 15:50:57] "GET /data/plugin/scalars/tags HTTP/1.1" 200 - ``` </issue> <code> [start of elasticdl/python/elasticdl/master/tensorboard_service.py] 1 import datetime 2 import subprocess 3 import time 4 5 import tensorflow as tf 6 7 import numpy as np 8 9 10 class TensorboardService(object): 11 """Tensorboard Service implementation""" 12 13 def __init__(self, tensorboard_log_dir): 14 """ 15 Arguments: 16 tensorboard_log_dir: The log directory for Tensorboard. 17 """ 18 _current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S") 19 self._tensorboard_log_dir = tensorboard_log_dir + _current_time 20 self._initialize_summary_writer() 21 self.tb_process = None 22 23 def _initialize_summary_writer(self): 24 self.summary_writer = tf.summary.create_file_writer( 25 self._tensorboard_log_dir 26 ) 27 28 def write_dict_to_summary(self, dictionary, version): 29 with self.summary_writer.as_default(): 30 for k, v in dictionary.items(): 31 if isinstance(v, np.ndarray) and len(v) == 1: 32 v = v[0] 33 tf.summary.scalar(k, v, step=version) 34 35 def start(self): 36 self.tb_process = subprocess.Popen( 37 ["tensorboard --logdir " + self._tensorboard_log_dir], shell=True 38 ) 39 40 def keep_running(self): 41 while self.tb_process.poll() is None: 42 time.sleep(10) 43 [end of elasticdl/python/elasticdl/master/tensorboard_service.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/elasticdl/python/elasticdl/master/tensorboard_service.py b/elasticdl/python/elasticdl/master/tensorboard_service.py --- a/elasticdl/python/elasticdl/master/tensorboard_service.py +++ b/elasticdl/python/elasticdl/master/tensorboard_service.py @@ -34,7 +34,10 @@ def start(self): self.tb_process = subprocess.Popen( - ["tensorboard --logdir " + self._tensorboard_log_dir], shell=True + ["tensorboard --logdir " + self._tensorboard_log_dir], + shell=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.STDOUT, ) def keep_running(self):
{"golden_diff": "diff --git a/elasticdl/python/elasticdl/master/tensorboard_service.py b/elasticdl/python/elasticdl/master/tensorboard_service.py\n--- a/elasticdl/python/elasticdl/master/tensorboard_service.py\n+++ b/elasticdl/python/elasticdl/master/tensorboard_service.py\n@@ -34,7 +34,10 @@\n \n def start(self):\n self.tb_process = subprocess.Popen(\n- [\"tensorboard --logdir \" + self._tensorboard_log_dir], shell=True\n+ [\"tensorboard --logdir \" + self._tensorboard_log_dir],\n+ shell=True,\n+ stdout=subprocess.DEVNULL,\n+ stderr=subprocess.STDOUT,\n )\n \n def keep_running(self):\n", "issue": "Hide TensorBoard service REST call logs\nCurrently these logs appear on master pod's log which is not necessary at user level:\r\n```\r\nI0624 15:50:54.834580 140272641951488 _internal.py:122] ::ffff:10.138.0.35 - - [24/Jun/2019 15:50:54] \"GET /data/plugin/scalars/tags HTTP/1.1\" 200 -\r\nW0624 15:50:55.150964 140556554397440 servicer.py:195] Task result for outdated version 30 dropped\r\nI0624 15:50:57.245738 140272633558784 _internal.py:122] ::ffff:10.36.1.1 - - [24/Jun/2019 15:50:57] \"GET /data/environment HTTP/1.1\" 200 -\r\nI0624 15:50:57.250612 140271752836864 _internal.py:122] ::ffff:10.138.0.35 - - [24/Jun/2019 15:50:57] \"GET /data/experiments HTTP/1.1\" 200 -\r\nI0624 15:50:57.252254 140272105903872 _internal.py:122] ::ffff:10.36.1.1 - - [24/Jun/2019 15:50:57] \"GET /data/runs HTTP/1.1\" 200 -\r\nI0624 15:50:57.255696 140272641951488 _internal.py:122] ::ffff:10.138.0.35 - - [24/Jun/2019 15:50:57] \"GET /data/plugins_listing HTTP/1.1\" 200 -\r\nI0624 15:50:57.430979 140272641951488 _internal.py:122] ::ffff:10.138.0.35 - - [24/Jun/2019 15:50:57] \"GET /data/plugin/scalars/tags HTTP/1.1\" 200 -\r\n```\n", "before_files": [{"content": "import datetime\nimport subprocess\nimport time\n\nimport tensorflow as tf\n\nimport numpy as np\n\n\nclass TensorboardService(object):\n \"\"\"Tensorboard Service implementation\"\"\"\n\n def __init__(self, tensorboard_log_dir):\n \"\"\"\n Arguments:\n tensorboard_log_dir: The log directory for Tensorboard.\n \"\"\"\n _current_time = datetime.datetime.now().strftime(\"%Y%m%d-%H%M%S\")\n self._tensorboard_log_dir = tensorboard_log_dir + _current_time\n self._initialize_summary_writer()\n self.tb_process = None\n\n def _initialize_summary_writer(self):\n self.summary_writer = tf.summary.create_file_writer(\n self._tensorboard_log_dir\n )\n\n def write_dict_to_summary(self, dictionary, version):\n with self.summary_writer.as_default():\n for k, v in dictionary.items():\n if isinstance(v, np.ndarray) and len(v) == 1:\n v = v[0]\n tf.summary.scalar(k, v, step=version)\n\n def start(self):\n self.tb_process = subprocess.Popen(\n [\"tensorboard --logdir \" + self._tensorboard_log_dir], shell=True\n )\n\n def keep_running(self):\n while self.tb_process.poll() is None:\n time.sleep(10)\n", "path": "elasticdl/python/elasticdl/master/tensorboard_service.py"}]}
1,577
159
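
The fix recorded above silences TensorBoard's per-request access logging by pointing the child process's output at `DEVNULL`. The same idea in isolation — assuming the `tensorboard` executable is on `PATH`; an argument list is used here instead of the `shell=True` string the project passes:

```python
import subprocess


def start_quiet_tensorboard(logdir):
    """Launch TensorBoard without its HTTP access log reaching our stdout."""
    return subprocess.Popen(
        ["tensorboard", "--logdir", logdir],
        stdout=subprocess.DEVNULL,   # discard the per-request log lines
        stderr=subprocess.STDOUT,    # fold stderr into the same discarded stream
    )
```

Redirecting at the `Popen` level leaves the master's own logging untouched, which is simpler than trying to reconfigure log levels inside the TensorBoard process.
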
gh_patches_debug_32860
rasdani/github-patches
git_diff
qutebrowser__qutebrowser-3702
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Crash with invalid dictionary filenames Looks like this doesn't get caught properly on init: ``` 16:39:44 DEBUG init app:_init_modules:468 Initializing websettings... 16:39:45 ERROR misc crashsignal:exception_hook:216 Uncaught exception Traceback (most recent call last): File "/bin/qutebrowser", line 11, in <module> load_entry_point('qutebrowser==1.1.1', 'gui_scripts', 'qutebrowser')() File "/usr/lib/python3.6/site-packages/qutebrowser/qutebrowser.py", line 188, in main return app.run(args) File "/usr/lib/python3.6/site-packages/qutebrowser/app.py", line 137, in run init(args, crash_handler) File "/usr/lib/python3.6/site-packages/qutebrowser/app.py", line 163, in init _init_modules(args, crash_handler) File "/usr/lib/python3.6/site-packages/qutebrowser/app.py", line 469, in _init_modules websettings.init(args) File "/usr/lib/python3.6/site-packages/qutebrowser/config/websettings.py", line 215, in init webenginesettings.init(args) File "/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/webenginesettings.py", line 297, in init websettings.init_mappings(MAPPINGS) File "/usr/lib/python3.6/site-packages/qutebrowser/config/websettings.py", line 198, in init_mappings mapping.set(value) File "/usr/lib/python3.6/site-packages/qutebrowser/config/websettings.py", line 72, in set self._set(value, settings=settings) File "/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/webenginesettings.py", line 154, in _set filenames = [self._find_installed(code) for code in value] File "/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/webenginesettings.py", line 154, in <listcomp> filenames = [self._find_installed(code) for code in value] File "/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/webenginesettings.py", line 143, in _find_installed local_filename = spell.local_filename(code) File "/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/spell.py", line 64, in local_filename all_installed = local_files(code) File "/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/spell.py", line 51, in local_files for matching_dict in sorted(matching_dicts, key=version, reverse=True): File "/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/spell.py", line 36, in version .format(filename)) ValueError: the given dictionary file name is malformed: /usr/share/qt/qtwebengine_dictionaries/en-US.bdic ``` https://crashes.qutebrowser.org/view/63cd0d83 cc @elshize </issue> <code> [start of qutebrowser/browser/webengine/spell.py] 1 # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: 2 3 # Copyright 2017-2018 Michal Siedlaczek <[email protected]> 4 5 # This file is part of qutebrowser. 6 # 7 # qutebrowser is free software: you can redistribute it and/or modify 8 # it under the terms of the GNU General Public License as published by 9 # the Free Software Foundation, either version 3 of the License, or 10 # (at your option) any later version. 11 # 12 # qutebrowser is distributed in the hope that it will be useful, 13 # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 # GNU General Public License for more details. 16 # 17 # You should have received a copy of the GNU General Public License 18 # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. 
19 20 """Installing and configuring spell-checking for QtWebEngine.""" 21 22 import glob 23 import os 24 import re 25 26 from PyQt5.QtCore import QLibraryInfo 27 from qutebrowser.utils import log 28 29 30 def version(filename): 31 """Extract the version number from the dictionary file name.""" 32 version_re = re.compile(r".+-(?P<version>[0-9]+-[0-9]+?)\.bdic") 33 match = version_re.fullmatch(filename) 34 if match is None: 35 raise ValueError('the given dictionary file name is malformed: {}' 36 .format(filename)) 37 return tuple(int(n) for n in match.group('version').split('-')) 38 39 40 def dictionary_dir(): 41 """Return the path (str) to the QtWebEngine's dictionaries directory.""" 42 datapath = QLibraryInfo.location(QLibraryInfo.DataPath) 43 return os.path.join(datapath, 'qtwebengine_dictionaries') 44 45 46 def local_files(code): 47 """Return all installed dictionaries for the given code.""" 48 pathname = os.path.join(dictionary_dir(), '{}*.bdic'.format(code)) 49 matching_dicts = glob.glob(pathname) 50 files = [] 51 for matching_dict in sorted(matching_dicts, key=version, reverse=True): 52 filename = os.path.basename(matching_dict) 53 log.config.debug('Found file for dict {}: {}'.format(code, filename)) 54 files.append(filename) 55 return files 56 57 58 def local_filename(code): 59 """Return the newest installed dictionary for the given code. 60 61 Return the filename of the installed dictionary with the highest version 62 number or None if the dictionary is not installed. 63 """ 64 all_installed = local_files(code) 65 return os.path.splitext(all_installed[0])[0] if all_installed else None 66 [end of qutebrowser/browser/webengine/spell.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/qutebrowser/browser/webengine/spell.py b/qutebrowser/browser/webengine/spell.py --- a/qutebrowser/browser/webengine/spell.py +++ b/qutebrowser/browser/webengine/spell.py @@ -24,16 +24,18 @@ import re from PyQt5.QtCore import QLibraryInfo -from qutebrowser.utils import log +from qutebrowser.utils import log, message + +dict_version_re = re.compile(r".+-(?P<version>[0-9]+-[0-9]+?)\.bdic") def version(filename): """Extract the version number from the dictionary file name.""" - version_re = re.compile(r".+-(?P<version>[0-9]+-[0-9]+?)\.bdic") - match = version_re.fullmatch(filename) + match = dict_version_re.match(filename) if match is None: - raise ValueError('the given dictionary file name is malformed: {}' - .format(filename)) + message.warning( + "Found a dictionary with a malformed name: {}".format(filename)) + return None return tuple(int(n) for n in match.group('version').split('-')) @@ -44,15 +46,23 @@ def local_files(code): - """Return all installed dictionaries for the given code.""" + """Return all installed dictionaries for the given code. + + The returned dictionaries are sorted by version, therefore the latest will + be the first element. The list will be empty if no dictionaries are found. + """ pathname = os.path.join(dictionary_dir(), '{}*.bdic'.format(code)) matching_dicts = glob.glob(pathname) - files = [] - for matching_dict in sorted(matching_dicts, key=version, reverse=True): - filename = os.path.basename(matching_dict) - log.config.debug('Found file for dict {}: {}'.format(code, filename)) - files.append(filename) - return files + versioned_dicts = [] + for matching_dict in matching_dicts: + parsed_version = version(matching_dict) + if parsed_version is not None: + filename = os.path.basename(matching_dict) + log.config.debug('Found file for dict {}: {}' + .format(code, filename)) + versioned_dicts.append((parsed_version, filename)) + return [filename for version, filename + in sorted(versioned_dicts, reverse=True)] def local_filename(code):
{"golden_diff": "diff --git a/qutebrowser/browser/webengine/spell.py b/qutebrowser/browser/webengine/spell.py\n--- a/qutebrowser/browser/webengine/spell.py\n+++ b/qutebrowser/browser/webengine/spell.py\n@@ -24,16 +24,18 @@\n import re\n \n from PyQt5.QtCore import QLibraryInfo\n-from qutebrowser.utils import log\n+from qutebrowser.utils import log, message\n+\n+dict_version_re = re.compile(r\".+-(?P<version>[0-9]+-[0-9]+?)\\.bdic\")\n \n \n def version(filename):\n \"\"\"Extract the version number from the dictionary file name.\"\"\"\n- version_re = re.compile(r\".+-(?P<version>[0-9]+-[0-9]+?)\\.bdic\")\n- match = version_re.fullmatch(filename)\n+ match = dict_version_re.match(filename)\n if match is None:\n- raise ValueError('the given dictionary file name is malformed: {}'\n- .format(filename))\n+ message.warning(\n+ \"Found a dictionary with a malformed name: {}\".format(filename))\n+ return None\n return tuple(int(n) for n in match.group('version').split('-'))\n \n \n@@ -44,15 +46,23 @@\n \n \n def local_files(code):\n- \"\"\"Return all installed dictionaries for the given code.\"\"\"\n+ \"\"\"Return all installed dictionaries for the given code.\n+\n+ The returned dictionaries are sorted by version, therefore the latest will\n+ be the first element. The list will be empty if no dictionaries are found.\n+ \"\"\"\n pathname = os.path.join(dictionary_dir(), '{}*.bdic'.format(code))\n matching_dicts = glob.glob(pathname)\n- files = []\n- for matching_dict in sorted(matching_dicts, key=version, reverse=True):\n- filename = os.path.basename(matching_dict)\n- log.config.debug('Found file for dict {}: {}'.format(code, filename))\n- files.append(filename)\n- return files\n+ versioned_dicts = []\n+ for matching_dict in matching_dicts:\n+ parsed_version = version(matching_dict)\n+ if parsed_version is not None:\n+ filename = os.path.basename(matching_dict)\n+ log.config.debug('Found file for dict {}: {}'\n+ .format(code, filename))\n+ versioned_dicts.append((parsed_version, filename))\n+ return [filename for version, filename\n+ in sorted(versioned_dicts, reverse=True)]\n \n \n def local_filename(code):\n", "issue": "Crash with invalid dictionary filenames\nLooks like this doesn't get caught properly on init:\r\n\r\n```\r\n16:39:44 DEBUG init app:_init_modules:468 Initializing websettings...\r\n16:39:45 ERROR misc crashsignal:exception_hook:216 Uncaught exception\r\nTraceback (most recent call last):\r\n File \"/bin/qutebrowser\", line 11, in <module>\r\n load_entry_point('qutebrowser==1.1.1', 'gui_scripts', 'qutebrowser')()\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/qutebrowser.py\", line 188, in main\r\n return app.run(args)\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/app.py\", line 137, in run\r\n init(args, crash_handler)\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/app.py\", line 163, in init\r\n _init_modules(args, crash_handler)\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/app.py\", line 469, in _init_modules\r\n websettings.init(args)\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/config/websettings.py\", line 215, in init\r\n webenginesettings.init(args)\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/webenginesettings.py\", line 297, in init\r\n websettings.init_mappings(MAPPINGS)\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/config/websettings.py\", line 198, in init_mappings\r\n mapping.set(value)\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/config/websettings.py\", line 72, in 
set\r\n self._set(value, settings=settings)\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/webenginesettings.py\", line 154, in _set\r\n filenames = [self._find_installed(code) for code in value]\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/webenginesettings.py\", line 154, in <listcomp>\r\n filenames = [self._find_installed(code) for code in value]\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/webenginesettings.py\", line 143, in _find_installed\r\n local_filename = spell.local_filename(code)\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/spell.py\", line 64, in local_filename\r\n all_installed = local_files(code)\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/spell.py\", line 51, in local_files\r\n for matching_dict in sorted(matching_dicts, key=version, reverse=True):\r\n File \"/usr/lib/python3.6/site-packages/qutebrowser/browser/webengine/spell.py\", line 36, in version\r\n .format(filename))\r\nValueError: the given dictionary file name is malformed: /usr/share/qt/qtwebengine_dictionaries/en-US.bdic\r\n```\r\n\r\nhttps://crashes.qutebrowser.org/view/63cd0d83\r\n\r\ncc @elshize \n", "before_files": [{"content": "# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:\n\n# Copyright 2017-2018 Michal Siedlaczek <[email protected]>\n\n# This file is part of qutebrowser.\n#\n# qutebrowser is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# qutebrowser is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with qutebrowser. 
If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"Installing and configuring spell-checking for QtWebEngine.\"\"\"\n\nimport glob\nimport os\nimport re\n\nfrom PyQt5.QtCore import QLibraryInfo\nfrom qutebrowser.utils import log\n\n\ndef version(filename):\n \"\"\"Extract the version number from the dictionary file name.\"\"\"\n version_re = re.compile(r\".+-(?P<version>[0-9]+-[0-9]+?)\\.bdic\")\n match = version_re.fullmatch(filename)\n if match is None:\n raise ValueError('the given dictionary file name is malformed: {}'\n .format(filename))\n return tuple(int(n) for n in match.group('version').split('-'))\n\n\ndef dictionary_dir():\n \"\"\"Return the path (str) to the QtWebEngine's dictionaries directory.\"\"\"\n datapath = QLibraryInfo.location(QLibraryInfo.DataPath)\n return os.path.join(datapath, 'qtwebengine_dictionaries')\n\n\ndef local_files(code):\n \"\"\"Return all installed dictionaries for the given code.\"\"\"\n pathname = os.path.join(dictionary_dir(), '{}*.bdic'.format(code))\n matching_dicts = glob.glob(pathname)\n files = []\n for matching_dict in sorted(matching_dicts, key=version, reverse=True):\n filename = os.path.basename(matching_dict)\n log.config.debug('Found file for dict {}: {}'.format(code, filename))\n files.append(filename)\n return files\n\n\ndef local_filename(code):\n \"\"\"Return the newest installed dictionary for the given code.\n\n Return the filename of the installed dictionary with the highest version\n number or None if the dictionary is not installed.\n \"\"\"\n all_installed = local_files(code)\n return os.path.splitext(all_installed[0])[0] if all_installed else None\n", "path": "qutebrowser/browser/webengine/spell.py"}]}
1,950
538
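
The golden diff above stops `version()` from raising on unversioned files such as a system-provided `en-US.bdic`, warning and skipping them instead, and sorts only the names that do parse. A compact sketch of that warn-and-skip strategy as plain functions — not qutebrowser's actual module layout:

```python
import logging
import re

log = logging.getLogger(__name__)
DICT_VERSION_RE = re.compile(r".+-(?P<version>[0-9]+-[0-9]+)\.bdic")


def parse_version(filename):
    """Return the version tuple encoded in a .bdic file name, or None."""
    match = DICT_VERSION_RE.match(filename)
    if match is None:
        # e.g. /usr/share/qt/qtwebengine_dictionaries/en-US.bdic carries no
        # version suffix: warn and skip rather than crash at startup.
        log.warning("Dictionary with a malformed name: %s", filename)
        return None
    return tuple(int(n) for n in match.group('version').split('-'))


def newest_dictionary(filenames):
    """Pick the highest-versioned file, silently ignoring unversioned ones."""
    versioned = []
    for name in filenames:
        parsed = parse_version(name)
        if parsed is not None:
            versioned.append((parsed, name))
    return max(versioned)[1] if versioned else None
```
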
gh_patches_debug_4226
rasdani/github-patches
git_diff
mlflow__mlflow-3598
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Unpin sqlalchemy Hi, Currently mlflow pins sqlalchemy to `<= 1.3.13`. I wanted to use this package on a projects that requires a more updated version of sqlalchemy. Would it be possible to unpit sqlalchemy or to change the constraint to disallow specific version of it? (like `!=1.3.14` etc?) Thanks </issue> <code> [start of setup.py] 1 import os 2 from importlib.machinery import SourceFileLoader 3 from setuptools import setup, find_packages 4 5 version = ( 6 SourceFileLoader("mlflow.version", os.path.join("mlflow", "version.py")).load_module().VERSION 7 ) 8 9 10 # Get a list of all files in the JS directory to include in our module 11 def package_files(directory): 12 paths = [] 13 for (path, _, filenames) in os.walk(directory): 14 for filename in filenames: 15 paths.append(os.path.join("..", path, filename)) 16 return paths 17 18 19 # Prints out a set of paths (relative to the mlflow/ directory) of files in mlflow/server/js/build 20 # to include in the wheel, e.g. "../mlflow/server/js/build/index.html" 21 js_files = package_files("mlflow/server/js/build") 22 models_container_server_files = package_files("mlflow/models/container") 23 alembic_files = [ 24 "../mlflow/store/db_migrations/alembic.ini", 25 "../mlflow/temporary_db_migrations_for_pre_1_users/alembic.ini", 26 ] 27 28 setup( 29 name="mlflow", 30 version=version, 31 packages=find_packages(exclude=["tests", "tests.*"]), 32 package_data={"mlflow": js_files + models_container_server_files + alembic_files}, 33 install_requires=[ 34 "alembic<=1.4.1", 35 # Required 36 "azure-storage-blob", 37 "click>=7.0", 38 "cloudpickle", 39 "databricks-cli>=0.8.7", 40 "requests>=2.17.3", 41 "six>=1.10.0", 42 'waitress; platform_system == "Windows"', 43 'gunicorn; platform_system != "Windows"', 44 "Flask", 45 "numpy", 46 "pandas", 47 "python-dateutil", 48 "protobuf>=3.6.0", 49 "gitpython>=2.1.0", 50 "pyyaml", 51 "querystring_parser", 52 "docker>=4.0.0", 53 "entrypoints", 54 # Pin sqlparse for: https://github.com/mlflow/mlflow/issues/3433 55 "sqlparse>=0.3.1", 56 # Required to run the MLflow server against SQL-backed storage 57 "sqlalchemy<=1.3.13", 58 "gorilla", 59 "prometheus-flask-exporter", 60 ], 61 extras_require={ 62 "extras": [ 63 "scikit-learn", 64 # Required to log artifacts and models to HDFS artifact locations 65 "pyarrow", 66 # Required to log artifacts and models to AWS S3 artifact locations 67 "boto3", 68 "mleap", 69 # Required to log artifacts and models to GCS artifact locations 70 "google-cloud-storage", 71 "azureml-core>=1.2.0", 72 # Required to log artifacts to SFTP artifact locations 73 "pysftp", 74 # Required by the mlflow.projects module, when running projects against 75 # a remote Kubernetes cluster 76 "kubernetes", 77 ], 78 "sqlserver": ["mlflow-dbstore",], 79 "aliyun-oss": ["aliyunstoreplugin",], 80 }, 81 entry_points=""" 82 [console_scripts] 83 mlflow=mlflow.cli:cli 84 """, 85 zip_safe=False, 86 author="Databricks", 87 description="MLflow: A Platform for ML Development and Productionization", 88 long_description=open("README.rst").read(), 89 license="Apache License 2.0", 90 classifiers=["Intended Audience :: Developers", "Programming Language :: Python :: 3.6",], 91 keywords="ml ai databricks", 92 url="https://mlflow.org/", 93 python_requires=">=3.5", 94 project_urls={ 95 "Bug Tracker": "https://github.com/mlflow/mlflow/issues", 96 "Documentation": "https://mlflow.org/docs/latest/index.html", 97 "Source Code": 
"https://github.com/mlflow/mlflow", 98 }, 99 ) 100 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -54,7 +54,7 @@ # Pin sqlparse for: https://github.com/mlflow/mlflow/issues/3433 "sqlparse>=0.3.1", # Required to run the MLflow server against SQL-backed storage - "sqlalchemy<=1.3.13", + "sqlalchemy", "gorilla", "prometheus-flask-exporter", ],
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -54,7 +54,7 @@\n # Pin sqlparse for: https://github.com/mlflow/mlflow/issues/3433\n \"sqlparse>=0.3.1\",\n # Required to run the MLflow server against SQL-backed storage\n- \"sqlalchemy<=1.3.13\",\n+ \"sqlalchemy\",\n \"gorilla\",\n \"prometheus-flask-exporter\",\n ],\n", "issue": "Unpin sqlalchemy\nHi,\r\n\r\nCurrently mlflow pins sqlalchemy to `<= 1.3.13`. I wanted to use this package on a projects that requires a more updated version of sqlalchemy.\r\nWould it be possible to unpit sqlalchemy or to change the constraint to disallow specific version of it? (like `!=1.3.14` etc?)\r\n\r\nThanks\n", "before_files": [{"content": "import os\nfrom importlib.machinery import SourceFileLoader\nfrom setuptools import setup, find_packages\n\nversion = (\n SourceFileLoader(\"mlflow.version\", os.path.join(\"mlflow\", \"version.py\")).load_module().VERSION\n)\n\n\n# Get a list of all files in the JS directory to include in our module\ndef package_files(directory):\n paths = []\n for (path, _, filenames) in os.walk(directory):\n for filename in filenames:\n paths.append(os.path.join(\"..\", path, filename))\n return paths\n\n\n# Prints out a set of paths (relative to the mlflow/ directory) of files in mlflow/server/js/build\n# to include in the wheel, e.g. \"../mlflow/server/js/build/index.html\"\njs_files = package_files(\"mlflow/server/js/build\")\nmodels_container_server_files = package_files(\"mlflow/models/container\")\nalembic_files = [\n \"../mlflow/store/db_migrations/alembic.ini\",\n \"../mlflow/temporary_db_migrations_for_pre_1_users/alembic.ini\",\n]\n\nsetup(\n name=\"mlflow\",\n version=version,\n packages=find_packages(exclude=[\"tests\", \"tests.*\"]),\n package_data={\"mlflow\": js_files + models_container_server_files + alembic_files},\n install_requires=[\n \"alembic<=1.4.1\",\n # Required\n \"azure-storage-blob\",\n \"click>=7.0\",\n \"cloudpickle\",\n \"databricks-cli>=0.8.7\",\n \"requests>=2.17.3\",\n \"six>=1.10.0\",\n 'waitress; platform_system == \"Windows\"',\n 'gunicorn; platform_system != \"Windows\"',\n \"Flask\",\n \"numpy\",\n \"pandas\",\n \"python-dateutil\",\n \"protobuf>=3.6.0\",\n \"gitpython>=2.1.0\",\n \"pyyaml\",\n \"querystring_parser\",\n \"docker>=4.0.0\",\n \"entrypoints\",\n # Pin sqlparse for: https://github.com/mlflow/mlflow/issues/3433\n \"sqlparse>=0.3.1\",\n # Required to run the MLflow server against SQL-backed storage\n \"sqlalchemy<=1.3.13\",\n \"gorilla\",\n \"prometheus-flask-exporter\",\n ],\n extras_require={\n \"extras\": [\n \"scikit-learn\",\n # Required to log artifacts and models to HDFS artifact locations\n \"pyarrow\",\n # Required to log artifacts and models to AWS S3 artifact locations\n \"boto3\",\n \"mleap\",\n # Required to log artifacts and models to GCS artifact locations\n \"google-cloud-storage\",\n \"azureml-core>=1.2.0\",\n # Required to log artifacts to SFTP artifact locations\n \"pysftp\",\n # Required by the mlflow.projects module, when running projects against\n # a remote Kubernetes cluster\n \"kubernetes\",\n ],\n \"sqlserver\": [\"mlflow-dbstore\",],\n \"aliyun-oss\": [\"aliyunstoreplugin\",],\n },\n entry_points=\"\"\"\n [console_scripts]\n mlflow=mlflow.cli:cli\n \"\"\",\n zip_safe=False,\n author=\"Databricks\",\n description=\"MLflow: A Platform for ML Development and Productionization\",\n long_description=open(\"README.rst\").read(),\n license=\"Apache License 2.0\",\n classifiers=[\"Intended Audience :: Developers\", \"Programming 
Language :: Python :: 3.6\",],\n keywords=\"ml ai databricks\",\n url=\"https://mlflow.org/\",\n python_requires=\">=3.5\",\n project_urls={\n \"Bug Tracker\": \"https://github.com/mlflow/mlflow/issues\",\n \"Documentation\": \"https://mlflow.org/docs/latest/index.html\",\n \"Source Code\": \"https://github.com/mlflow/mlflow\",\n },\n)\n", "path": "setup.py"}]}
1,651
115
gh_patches_debug_5864
rasdani/github-patches
git_diff
pyca__cryptography-1575
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Build automation fixes for 8th release When triggering the wheel build the release automation does not appropriately wait for the build to complete but instead grabs the previous build. The previous attempted fix of adding a `sleep(3)` did not work around this issue. </issue> <code> [start of tasks.py] 1 # This file is dual licensed under the terms of the Apache License, Version 2 # 2.0, and the BSD License. See the LICENSE file in the root of this repository 3 # for complete details. 4 5 from __future__ import absolute_import, division, print_function 6 7 import getpass 8 import os 9 import time 10 11 import invoke 12 13 import requests 14 15 16 JENKINS_URL = "https://jenkins.cryptography.io/job/cryptography-wheel-builder" 17 18 19 def wait_for_build_completed(session): 20 # Wait 3 seconds before actually checking if the build is complete, to 21 # ensure that it had time to really start. 22 time.sleep(3) 23 while True: 24 response = session.get( 25 "{0}/lastBuild/api/json/".format(JENKINS_URL), 26 headers={ 27 "Accept": "application/json", 28 } 29 ) 30 response.raise_for_status() 31 if not response.json()["building"]: 32 assert response.json()["result"] == "SUCCESS" 33 break 34 time.sleep(0.1) 35 36 37 def download_artifacts(session): 38 response = session.get( 39 "{0}/lastBuild/api/json/".format(JENKINS_URL), 40 headers={ 41 "Accept": "application/json" 42 } 43 ) 44 response.raise_for_status() 45 assert not response.json()["building"] 46 assert response.json()["result"] == "SUCCESS" 47 48 paths = [] 49 50 for run in response.json()["runs"]: 51 response = session.get( 52 run["url"] + "api/json/", 53 headers={ 54 "Accept": "application/json", 55 } 56 ) 57 response.raise_for_status() 58 for artifact in response.json()["artifacts"]: 59 response = session.get( 60 "{0}artifact/{1}".format(run["url"], artifact["relativePath"]) 61 ) 62 out_path = os.path.join( 63 os.path.dirname(__file__), 64 "dist", 65 artifact["fileName"], 66 ) 67 with open(out_path, "wb") as f: 68 f.write(response.content) 69 paths.append(out_path) 70 return paths 71 72 73 @invoke.task 74 def release(version): 75 """ 76 ``version`` should be a string like '0.4' or '1.0'. 77 """ 78 invoke.run("git tag -s {0} -m '{0} release'".format(version)) 79 invoke.run("git push --tags") 80 81 invoke.run("python setup.py sdist") 82 invoke.run("cd vectors/ && python setup.py sdist bdist_wheel") 83 84 invoke.run( 85 "twine upload -s dist/cryptography-{0}* " 86 "vectors/dist/cryptography_vectors-{0}*".format(version) 87 ) 88 89 session = requests.Session() 90 91 # This tells the CDN to delete the cached response for the URL. We do this 92 # so that the Jenkins builders will see the new sdist immediately when they 93 # go to build the wheels. 
94 response = session.request( 95 "PURGE", "https://pypi.python.org/simple/cryptography/" 96 ) 97 response.raise_for_status() 98 99 username = getpass.getpass("Input the GitHub/Jenkins username: ") 100 token = getpass.getpass("Input the Jenkins token: ") 101 response = session.post( 102 "{0}/build".format(JENKINS_URL), 103 auth=requests.auth.HTTPBasicAuth( 104 username, token 105 ), 106 params={ 107 "cause": "Building wheels for {0}".format(version) 108 } 109 ) 110 response.raise_for_status() 111 wait_for_build_completed(session) 112 paths = download_artifacts(session) 113 invoke.run("twine upload {0}".format(" ".join(paths))) 114 [end of tasks.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/tasks.py b/tasks.py --- a/tasks.py +++ b/tasks.py @@ -17,9 +17,9 @@ def wait_for_build_completed(session): - # Wait 3 seconds before actually checking if the build is complete, to + # Wait 20 seconds before actually checking if the build is complete, to # ensure that it had time to really start. - time.sleep(3) + time.sleep(20) while True: response = session.get( "{0}/lastBuild/api/json/".format(JENKINS_URL),
{"golden_diff": "diff --git a/tasks.py b/tasks.py\n--- a/tasks.py\n+++ b/tasks.py\n@@ -17,9 +17,9 @@\n \n \n def wait_for_build_completed(session):\n- # Wait 3 seconds before actually checking if the build is complete, to\n+ # Wait 20 seconds before actually checking if the build is complete, to\n # ensure that it had time to really start.\n- time.sleep(3)\n+ time.sleep(20)\n while True:\n response = session.get(\n \"{0}/lastBuild/api/json/\".format(JENKINS_URL),\n", "issue": "Build automation fixes for 8th release\nWhen triggering the wheel build the release automation does not appropriately wait for the build to complete but instead grabs the previous build. The previous attempted fix of adding a `sleep(3)` did not work around this issue.\n\n", "before_files": [{"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport getpass\nimport os\nimport time\n\nimport invoke\n\nimport requests\n\n\nJENKINS_URL = \"https://jenkins.cryptography.io/job/cryptography-wheel-builder\"\n\n\ndef wait_for_build_completed(session):\n # Wait 3 seconds before actually checking if the build is complete, to\n # ensure that it had time to really start.\n time.sleep(3)\n while True:\n response = session.get(\n \"{0}/lastBuild/api/json/\".format(JENKINS_URL),\n headers={\n \"Accept\": \"application/json\",\n }\n )\n response.raise_for_status()\n if not response.json()[\"building\"]:\n assert response.json()[\"result\"] == \"SUCCESS\"\n break\n time.sleep(0.1)\n\n\ndef download_artifacts(session):\n response = session.get(\n \"{0}/lastBuild/api/json/\".format(JENKINS_URL),\n headers={\n \"Accept\": \"application/json\"\n }\n )\n response.raise_for_status()\n assert not response.json()[\"building\"]\n assert response.json()[\"result\"] == \"SUCCESS\"\n\n paths = []\n\n for run in response.json()[\"runs\"]:\n response = session.get(\n run[\"url\"] + \"api/json/\",\n headers={\n \"Accept\": \"application/json\",\n }\n )\n response.raise_for_status()\n for artifact in response.json()[\"artifacts\"]:\n response = session.get(\n \"{0}artifact/{1}\".format(run[\"url\"], artifact[\"relativePath\"])\n )\n out_path = os.path.join(\n os.path.dirname(__file__),\n \"dist\",\n artifact[\"fileName\"],\n )\n with open(out_path, \"wb\") as f:\n f.write(response.content)\n paths.append(out_path)\n return paths\n\n\[email protected]\ndef release(version):\n \"\"\"\n ``version`` should be a string like '0.4' or '1.0'.\n \"\"\"\n invoke.run(\"git tag -s {0} -m '{0} release'\".format(version))\n invoke.run(\"git push --tags\")\n\n invoke.run(\"python setup.py sdist\")\n invoke.run(\"cd vectors/ && python setup.py sdist bdist_wheel\")\n\n invoke.run(\n \"twine upload -s dist/cryptography-{0}* \"\n \"vectors/dist/cryptography_vectors-{0}*\".format(version)\n )\n\n session = requests.Session()\n\n # This tells the CDN to delete the cached response for the URL. 
We do this\n # so that the Jenkins builders will see the new sdist immediately when they\n # go to build the wheels.\n response = session.request(\n \"PURGE\", \"https://pypi.python.org/simple/cryptography/\"\n )\n response.raise_for_status()\n\n username = getpass.getpass(\"Input the GitHub/Jenkins username: \")\n token = getpass.getpass(\"Input the Jenkins token: \")\n response = session.post(\n \"{0}/build\".format(JENKINS_URL),\n auth=requests.auth.HTTPBasicAuth(\n username, token\n ),\n params={\n \"cause\": \"Building wheels for {0}\".format(version)\n }\n )\n response.raise_for_status()\n wait_for_build_completed(session)\n paths = download_artifacts(session)\n invoke.run(\"twine upload {0}\".format(\" \".join(paths)))\n", "path": "tasks.py"}]}
1,584
131
gh_patches_debug_4530
rasdani/github-patches
git_diff
ivy-llc__ivy-16060
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> cross </issue> <code> [start of ivy/functional/frontends/paddle/tensor/linalg.py] 1 # global 2 import ivy 3 from ivy.func_wrapper import with_unsupported_dtypes, with_supported_dtypes 4 from ivy.functional.frontends.paddle import promote_types_of_paddle_inputs 5 from ivy.functional.frontends.paddle.func_wrapper import ( 6 to_ivy_arrays_and_back, 7 ) 8 9 10 # matmul 11 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 12 @to_ivy_arrays_and_back 13 def matmul(x, y, transpose_x=False, transpose_y=False, name=None): 14 x, y = promote_types_of_paddle_inputs(x, y) 15 return ivy.matmul(x, y, transpose_a=transpose_x, transpose_b=transpose_y) 16 17 18 # norm 19 @with_supported_dtypes({"2.4.2 and below": ("float32", "float64")}, "paddle") 20 @to_ivy_arrays_and_back 21 def norm(x, p="fro", axis=None, keepdim=False, name=None): 22 if axis is None and p is not None: 23 if p == "fro": 24 p = 2 25 ret = ivy.vector_norm(x.flatten(), ord=p, axis=-1) 26 if keepdim: 27 ret = ret.reshape([1] * len(x.shape)) 28 if len(ret.shape) == 0: 29 return ivy.array([ret]) 30 return ret 31 32 if isinstance(axis, tuple): 33 axis = list(axis) 34 if isinstance(axis, list) and len(axis) == 1: 35 axis = axis[0] 36 37 if isinstance(axis, int): 38 if p == "fro": 39 p = 2 40 if p in [0, 1, 2, ivy.inf, -ivy.inf]: 41 ret = ivy.vector_norm(x, ord=p, axis=axis, keepdims=keepdim) 42 elif isinstance(p, (int, float)): 43 ret = ivy.pow( 44 ivy.sum(ivy.pow(ivy.abs(x), p), axis=axis, keepdims=keepdim), 45 float(1.0 / p), 46 ) 47 48 elif isinstance(axis, list) and len(axis) == 2: 49 if p == 0: 50 raise ValueError 51 elif p == 1: 52 ret = ivy.sum(ivy.abs(x), axis=axis, keepdims=keepdim) 53 elif p == 2 or p == "fro": 54 ret = ivy.matrix_norm(x, ord="fro", axis=axis, keepdims=keepdim) 55 elif p == ivy.inf: 56 ret = ivy.max(ivy.abs(x), axis=axis, keepdims=keepdim) 57 elif p == -ivy.inf: 58 ret = ivy.min(ivy.abs(x), axis=axis, keepdims=keepdim) 59 elif isinstance(p, (int, float)) and p > 0: 60 ret = ivy.pow( 61 ivy.sum(ivy.pow(ivy.abs(x), p), axis=axis, keepdims=keepdim), 62 float(1.0 / p), 63 ) 64 else: 65 raise ValueError 66 67 else: 68 raise ValueError 69 70 if len(ret.shape) == 0: 71 ret = ivy.array( 72 [ret] 73 ) # this is done so as to match shape of output from paddle 74 return ret 75 76 77 # eig 78 @to_ivy_arrays_and_back 79 def eig(x, name=None): 80 return ivy.eig(x) 81 82 83 # eigvals 84 @to_ivy_arrays_and_back 85 def eigvals(x, name=None): 86 return ivy.eigvals(x) 87 88 89 # eigvalsh 90 @to_ivy_arrays_and_back 91 def eigvalsh(x, UPLO="L", name=None): 92 return ivy.eigvalsh(x, UPLO=UPLO) 93 94 95 # eigh 96 @to_ivy_arrays_and_back 97 def eigh(x, UPLO="L", name=None): 98 return ivy.eigh(x, UPLO=UPLO) 99 100 101 # pinv 102 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 103 @to_ivy_arrays_and_back 104 def pinv(x, rcond=1e-15, hermitian=False, name=None): 105 # TODO: Add hermitian functionality 106 return ivy.pinv(x, rtol=rcond) 107 [end of ivy/functional/frontends/paddle/tensor/linalg.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ivy/functional/frontends/paddle/tensor/linalg.py b/ivy/functional/frontends/paddle/tensor/linalg.py --- a/ivy/functional/frontends/paddle/tensor/linalg.py +++ b/ivy/functional/frontends/paddle/tensor/linalg.py @@ -7,6 +7,15 @@ ) +@with_supported_dtypes( + {"2.4.2 and below": ("float32", "float64", "int32", "int64")}, "paddle" +) +@to_ivy_arrays_and_back +def cross(x, y, /, *, axis=9, name=None): + x, y = promote_types_of_paddle_inputs(x, y) + return ivy.cross(x, y, axis=axis) + + # matmul @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") @to_ivy_arrays_and_back
{"golden_diff": "diff --git a/ivy/functional/frontends/paddle/tensor/linalg.py b/ivy/functional/frontends/paddle/tensor/linalg.py\n--- a/ivy/functional/frontends/paddle/tensor/linalg.py\n+++ b/ivy/functional/frontends/paddle/tensor/linalg.py\n@@ -7,6 +7,15 @@\n )\n \n \n+@with_supported_dtypes(\n+ {\"2.4.2 and below\": (\"float32\", \"float64\", \"int32\", \"int64\")}, \"paddle\"\n+)\n+@to_ivy_arrays_and_back\n+def cross(x, y, /, *, axis=9, name=None):\n+ x, y = promote_types_of_paddle_inputs(x, y)\n+ return ivy.cross(x, y, axis=axis)\n+\n+\n # matmul\n @with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n @to_ivy_arrays_and_back\n", "issue": "cross\n\n", "before_files": [{"content": "# global\nimport ivy\nfrom ivy.func_wrapper import with_unsupported_dtypes, with_supported_dtypes\nfrom ivy.functional.frontends.paddle import promote_types_of_paddle_inputs\nfrom ivy.functional.frontends.paddle.func_wrapper import (\n to_ivy_arrays_and_back,\n)\n\n\n# matmul\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef matmul(x, y, transpose_x=False, transpose_y=False, name=None):\n x, y = promote_types_of_paddle_inputs(x, y)\n return ivy.matmul(x, y, transpose_a=transpose_x, transpose_b=transpose_y)\n\n\n# norm\n@with_supported_dtypes({\"2.4.2 and below\": (\"float32\", \"float64\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef norm(x, p=\"fro\", axis=None, keepdim=False, name=None):\n if axis is None and p is not None:\n if p == \"fro\":\n p = 2\n ret = ivy.vector_norm(x.flatten(), ord=p, axis=-1)\n if keepdim:\n ret = ret.reshape([1] * len(x.shape))\n if len(ret.shape) == 0:\n return ivy.array([ret])\n return ret\n\n if isinstance(axis, tuple):\n axis = list(axis)\n if isinstance(axis, list) and len(axis) == 1:\n axis = axis[0]\n\n if isinstance(axis, int):\n if p == \"fro\":\n p = 2\n if p in [0, 1, 2, ivy.inf, -ivy.inf]:\n ret = ivy.vector_norm(x, ord=p, axis=axis, keepdims=keepdim)\n elif isinstance(p, (int, float)):\n ret = ivy.pow(\n ivy.sum(ivy.pow(ivy.abs(x), p), axis=axis, keepdims=keepdim),\n float(1.0 / p),\n )\n\n elif isinstance(axis, list) and len(axis) == 2:\n if p == 0:\n raise ValueError\n elif p == 1:\n ret = ivy.sum(ivy.abs(x), axis=axis, keepdims=keepdim)\n elif p == 2 or p == \"fro\":\n ret = ivy.matrix_norm(x, ord=\"fro\", axis=axis, keepdims=keepdim)\n elif p == ivy.inf:\n ret = ivy.max(ivy.abs(x), axis=axis, keepdims=keepdim)\n elif p == -ivy.inf:\n ret = ivy.min(ivy.abs(x), axis=axis, keepdims=keepdim)\n elif isinstance(p, (int, float)) and p > 0:\n ret = ivy.pow(\n ivy.sum(ivy.pow(ivy.abs(x), p), axis=axis, keepdims=keepdim),\n float(1.0 / p),\n )\n else:\n raise ValueError\n\n else:\n raise ValueError\n\n if len(ret.shape) == 0:\n ret = ivy.array(\n [ret]\n ) # this is done so as to match shape of output from paddle\n return ret\n\n\n# eig\n@to_ivy_arrays_and_back\ndef eig(x, name=None):\n return ivy.eig(x)\n\n\n# eigvals\n@to_ivy_arrays_and_back\ndef eigvals(x, name=None):\n return ivy.eigvals(x)\n\n\n# eigvalsh\n@to_ivy_arrays_and_back\ndef eigvalsh(x, UPLO=\"L\", name=None):\n return ivy.eigvalsh(x, UPLO=UPLO)\n\n\n# eigh\n@to_ivy_arrays_and_back\ndef eigh(x, UPLO=\"L\", name=None):\n return ivy.eigh(x, UPLO=UPLO)\n\n\n# pinv\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef pinv(x, rcond=1e-15, hermitian=False, name=None):\n # TODO: Add hermitian functionality\n return ivy.pinv(x, rtol=rcond)\n", 
"path": "ivy/functional/frontends/paddle/tensor/linalg.py"}]}
1,724
224
gh_patches_debug_22157
rasdani/github-patches
git_diff
lutris__lutris-1197
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Change "Import Games" to something more clear (like "Configure library importing") I personally feel like the current name for that menu is confusing, misleading and does't represent its actual purpose. I personally think something like "Configure library importing" will describe the menu much better, but if you disagree, any suggestions are appreciated. </issue> <code> [start of lutris/gui/sync.py] 1 import gi 2 gi.require_version('Gtk', '3.0') 3 from gi.repository import Gtk, Gio 4 5 from lutris.gui.widgets.utils import get_runner_icon 6 from lutris.gui.dialogs import NoticeDialog 7 from lutris.services import get_services 8 from lutris.settings import read_setting, write_setting 9 from lutris.util.jobs import AsyncCall 10 11 12 class ServiceSyncRow(Gtk.HBox): 13 14 def __init__(self, service): 15 super(ServiceSyncRow, self).__init__() 16 self.set_spacing(20) 17 18 self.identifier = service.__name__.split('.')[-1] 19 name = service.NAME 20 21 icon = get_runner_icon(self.identifier) 22 self.pack_start(icon, False, False, 0) 23 24 label = Gtk.Label(xalign=0) 25 label.set_markup("<b>{}</b>".format(name)) 26 self.pack_start(label, True, True, 0) 27 28 actions = Gtk.VBox() 29 self.pack_start(actions, False, False, 0) 30 31 sync_switch = Gtk.Switch() 32 sync_switch.set_tooltip_text("Sync when Lutris starts") 33 sync_switch.props.valign = Gtk.Align.CENTER 34 sync_switch.connect('notify::active', self.on_switch_changed) 35 if read_setting('sync_at_startup', self.identifier) == 'True': 36 sync_switch.set_state(True) 37 actions.pack_start(sync_switch, False, False, 0) 38 39 sync_button = Gtk.Button("Sync") 40 sync_button.set_tooltip_text("Sync now") 41 sync_button.connect('clicked', self.on_sync_button_clicked, service.sync_with_lutris) 42 actions.pack_start(sync_button, False, False, 0) 43 44 def on_sync_button_clicked(self, button, sync_method): 45 AsyncCall(sync_method, callback=self.on_service_synced) 46 47 def on_service_synced(self, caller, data): 48 parent = self.get_toplevel() 49 if not isinstance(parent, Gtk.Window): 50 # The sync dialog may have closed 51 parent = Gio.Application.get_default().props.active_window 52 NoticeDialog("Games synced", parent=parent) 53 54 def on_switch_changed(self, switch, data): 55 state = switch.get_active() 56 write_setting('sync_at_startup', state, self.identifier) 57 58 59 class SyncServiceDialog(Gtk.Dialog): 60 61 def __init__(self, parent=None): 62 Gtk.Dialog.__init__(self, title="Import local games", parent=parent) 63 self.connect("delete-event", lambda *x: self.destroy()) 64 self.set_border_width(10) 65 self.set_size_request(512, 0) 66 67 box_outer = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=6) 68 self.get_content_area().add(box_outer) 69 70 description_label = Gtk.Label() 71 description_label.set_markup("You can import games from local game sources, \n" 72 "you can also choose to sync everytime Lutris starts") 73 box_outer.pack_start(description_label, False, False, 5) 74 75 separator = Gtk.Separator() 76 box_outer.pack_start(separator, False, False, 0) 77 78 for service in get_services(): 79 sync_row = ServiceSyncRow(service) 80 box_outer.pack_start(sync_row, False, True, 0) 81 box_outer.show_all() 82 [end of lutris/gui/sync.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/lutris/gui/sync.py b/lutris/gui/sync.py --- a/lutris/gui/sync.py +++ b/lutris/gui/sync.py @@ -59,7 +59,7 @@ class SyncServiceDialog(Gtk.Dialog): def __init__(self, parent=None): - Gtk.Dialog.__init__(self, title="Import local games", parent=parent) + Gtk.Dialog.__init__(self, title="Configure local game import", parent=parent) self.connect("delete-event", lambda *x: self.destroy()) self.set_border_width(10) self.set_size_request(512, 0) @@ -68,8 +68,8 @@ self.get_content_area().add(box_outer) description_label = Gtk.Label() - description_label.set_markup("You can import games from local game sources, \n" - "you can also choose to sync everytime Lutris starts") + description_label.set_markup("You can choose which local game sources will get synced each\n" + "time Lutris starts, or launch an immediate import of games.") box_outer.pack_start(description_label, False, False, 5) separator = Gtk.Separator()
{"golden_diff": "diff --git a/lutris/gui/sync.py b/lutris/gui/sync.py\n--- a/lutris/gui/sync.py\n+++ b/lutris/gui/sync.py\n@@ -59,7 +59,7 @@\n class SyncServiceDialog(Gtk.Dialog):\n \n def __init__(self, parent=None):\n- Gtk.Dialog.__init__(self, title=\"Import local games\", parent=parent)\n+ Gtk.Dialog.__init__(self, title=\"Configure local game import\", parent=parent)\n self.connect(\"delete-event\", lambda *x: self.destroy())\n self.set_border_width(10)\n self.set_size_request(512, 0)\n@@ -68,8 +68,8 @@\n self.get_content_area().add(box_outer)\n \n description_label = Gtk.Label()\n- description_label.set_markup(\"You can import games from local game sources, \\n\"\n- \"you can also choose to sync everytime Lutris starts\")\n+ description_label.set_markup(\"You can choose which local game sources will get synced each\\n\"\n+ \"time Lutris starts, or launch an immediate import of games.\")\n box_outer.pack_start(description_label, False, False, 5)\n \n separator = Gtk.Separator()\n", "issue": "Change \"Import Games\" to something more clear (like \"Configure library importing\")\nI personally feel like the current name for that menu is confusing, misleading and does't represent its actual purpose. I personally think something like \"Configure library importing\" will describe the menu much better, but if you disagree, any suggestions are appreciated.\n", "before_files": [{"content": "import gi\ngi.require_version('Gtk', '3.0')\nfrom gi.repository import Gtk, Gio\n\nfrom lutris.gui.widgets.utils import get_runner_icon\nfrom lutris.gui.dialogs import NoticeDialog\nfrom lutris.services import get_services\nfrom lutris.settings import read_setting, write_setting\nfrom lutris.util.jobs import AsyncCall\n\n\nclass ServiceSyncRow(Gtk.HBox):\n\n def __init__(self, service):\n super(ServiceSyncRow, self).__init__()\n self.set_spacing(20)\n\n self.identifier = service.__name__.split('.')[-1]\n name = service.NAME\n\n icon = get_runner_icon(self.identifier)\n self.pack_start(icon, False, False, 0)\n\n label = Gtk.Label(xalign=0)\n label.set_markup(\"<b>{}</b>\".format(name))\n self.pack_start(label, True, True, 0)\n\n actions = Gtk.VBox()\n self.pack_start(actions, False, False, 0)\n\n sync_switch = Gtk.Switch()\n sync_switch.set_tooltip_text(\"Sync when Lutris starts\")\n sync_switch.props.valign = Gtk.Align.CENTER\n sync_switch.connect('notify::active', self.on_switch_changed)\n if read_setting('sync_at_startup', self.identifier) == 'True':\n sync_switch.set_state(True)\n actions.pack_start(sync_switch, False, False, 0)\n\n sync_button = Gtk.Button(\"Sync\")\n sync_button.set_tooltip_text(\"Sync now\")\n sync_button.connect('clicked', self.on_sync_button_clicked, service.sync_with_lutris)\n actions.pack_start(sync_button, False, False, 0)\n\n def on_sync_button_clicked(self, button, sync_method):\n AsyncCall(sync_method, callback=self.on_service_synced)\n\n def on_service_synced(self, caller, data):\n parent = self.get_toplevel()\n if not isinstance(parent, Gtk.Window):\n # The sync dialog may have closed\n parent = Gio.Application.get_default().props.active_window\n NoticeDialog(\"Games synced\", parent=parent)\n\n def on_switch_changed(self, switch, data):\n state = switch.get_active()\n write_setting('sync_at_startup', state, self.identifier)\n\n\nclass SyncServiceDialog(Gtk.Dialog):\n\n def __init__(self, parent=None):\n Gtk.Dialog.__init__(self, title=\"Import local games\", parent=parent)\n self.connect(\"delete-event\", lambda *x: self.destroy())\n self.set_border_width(10)\n 
self.set_size_request(512, 0)\n\n box_outer = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=6)\n self.get_content_area().add(box_outer)\n\n description_label = Gtk.Label()\n description_label.set_markup(\"You can import games from local game sources, \\n\"\n \"you can also choose to sync everytime Lutris starts\")\n box_outer.pack_start(description_label, False, False, 5)\n\n separator = Gtk.Separator()\n box_outer.pack_start(separator, False, False, 0)\n\n for service in get_services():\n sync_row = ServiceSyncRow(service)\n box_outer.pack_start(sync_row, False, True, 0)\n box_outer.show_all()\n", "path": "lutris/gui/sync.py"}]}
1,446
270
gh_patches_debug_38060
rasdani/github-patches
git_diff
alltheplaces__alltheplaces-5102
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Fix Costa Coffee (US) which has switched from Amasty to a JSON API In the latest weekly run, Costa Coffee (costacoffee_us) is now broken with the Amasty "amlocator" AJAX endpoint no longer existing. In it's place appears to be a JSON API that returns store details (locations and addresses): https://us.costacoffee.com/api/cf/?locale=en-US&include=2&content_type=storeLocatorStore&limit=500&fields.location[near]=33.77804102,-84.38068933 This appears to be a fairly easy fix to rewrite the spider to use the new JSON API (with a higher limit than 500). </issue> <code> [start of locations/spiders/costacoffee_us.py] 1 import json 2 import re 3 4 import scrapy 5 from scrapy import Selector 6 7 from locations.categories import Categories, apply_category 8 from locations.items import Feature 9 10 11 class CostaCoffeeUSSpider(scrapy.Spider): 12 name = "costacoffee_us" 13 item_attributes = {"brand": "Costa Coffee", "brand_wikidata": "Q608845"} 14 allowed_domains = ["us.costacoffee.com"] 15 start_urls = ["https://us.costacoffee.com/amlocator/index/ajax"] 16 17 def parse(self, response): 18 script = response.xpath('//script[contains(text(), "amLocator")]/text()').extract_first() 19 20 start = script.index("jsonLocations: ") + len("jsonLocations: ") 21 stop = script.index("imageLocations") 22 23 locations = script[start:stop].strip().strip(",") 24 items = json.loads(locations)["items"] 25 26 for store in items: 27 item = Feature() 28 item["ref"] = store["id"] 29 item["lat"] = store["lat"] 30 item["lon"] = store["lng"] 31 32 html = Selector(text=store["popup_html"]) 33 34 item["name"] = html.xpath('//*[@class="amlocator-title"]/text()').get() 35 36 for line in html.xpath('//div[@class="amlocator-info-popup"]/text()').getall(): 37 line = line.strip() 38 if m := re.match(r"City: (.*)", line): 39 item["city"] = m.group(1) 40 elif m := re.match(r"Zip: (.*)", line): 41 item["postcode"] = m.group(1) 42 elif m := re.match(r"Address: (.*)", line): 43 item["street_address"] = m.group(1) 44 elif m := re.match(r"State: (.*)", line): 45 item["state"] = m.group(1) 46 47 apply_category(Categories.COFFEE_SHOP, item) 48 49 yield item 50 [end of locations/spiders/costacoffee_us.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/locations/spiders/costacoffee_us.py b/locations/spiders/costacoffee_us.py --- a/locations/spiders/costacoffee_us.py +++ b/locations/spiders/costacoffee_us.py @@ -1,49 +1,38 @@ -import json -import re - -import scrapy -from scrapy import Selector +from scrapy import Spider +from scrapy.http import JsonRequest from locations.categories import Categories, apply_category -from locations.items import Feature +from locations.dict_parser import DictParser +from locations.hours import DAYS_FULL, OpeningHours -class CostaCoffeeUSSpider(scrapy.Spider): +class CostaCoffeeUSSpider(Spider): name = "costacoffee_us" item_attributes = {"brand": "Costa Coffee", "brand_wikidata": "Q608845"} allowed_domains = ["us.costacoffee.com"] - start_urls = ["https://us.costacoffee.com/amlocator/index/ajax"] - - def parse(self, response): - script = response.xpath('//script[contains(text(), "amLocator")]/text()').extract_first() - - start = script.index("jsonLocations: ") + len("jsonLocations: ") - stop = script.index("imageLocations") - - locations = script[start:stop].strip().strip(",") - items = json.loads(locations)["items"] - - for store in items: - item = Feature() - item["ref"] = store["id"] - item["lat"] = store["lat"] - item["lon"] = store["lng"] + start_urls = ["https://us.costacoffee.com/api/cf/?content_type=storeLocatorStore"] + page_size = 1000 - html = Selector(text=store["popup_html"]) - - item["name"] = html.xpath('//*[@class="amlocator-title"]/text()').get() - - for line in html.xpath('//div[@class="amlocator-info-popup"]/text()').getall(): - line = line.strip() - if m := re.match(r"City: (.*)", line): - item["city"] = m.group(1) - elif m := re.match(r"Zip: (.*)", line): - item["postcode"] = m.group(1) - elif m := re.match(r"Address: (.*)", line): - item["street_address"] = m.group(1) - elif m := re.match(r"State: (.*)", line): - item["state"] = m.group(1) + def start_requests(self): + for url in self.start_urls: + yield JsonRequest(url=f"{url}&limit={self.page_size}") + def parse(self, response): + for location in response.json()["items"]: + item = DictParser.parse(location["fields"]) + item["ref"] = location["sys"]["id"] + item["addr_full"] = location["fields"]["storeAddress"] + item["opening_hours"] = OpeningHours() + for day_name in [s.lower() for s in DAYS_FULL]: + open_time = location["fields"].get(f"{day_name}Opening") + close_time = location["fields"].get(f"{day_name}Closing") + if open_time and "24 HOURS" in open_time.upper(): + item["opening_hours"].add_range(day_name, "00:00", "24:00") + elif open_time and close_time: + item["opening_hours"].add_range(day_name, open_time, close_time) apply_category(Categories.COFFEE_SHOP, item) - yield item + + offset = response.json()["skip"] + if offset + response.json()["limit"] < response.json()["total"]: + yield JsonRequest(url=f"{response.request.url}&limit={self.page_size}&offset={offset}")
{"golden_diff": "diff --git a/locations/spiders/costacoffee_us.py b/locations/spiders/costacoffee_us.py\n--- a/locations/spiders/costacoffee_us.py\n+++ b/locations/spiders/costacoffee_us.py\n@@ -1,49 +1,38 @@\n-import json\n-import re\n-\n-import scrapy\n-from scrapy import Selector\n+from scrapy import Spider\n+from scrapy.http import JsonRequest\n \n from locations.categories import Categories, apply_category\n-from locations.items import Feature\n+from locations.dict_parser import DictParser\n+from locations.hours import DAYS_FULL, OpeningHours\n \n \n-class CostaCoffeeUSSpider(scrapy.Spider):\n+class CostaCoffeeUSSpider(Spider):\n name = \"costacoffee_us\"\n item_attributes = {\"brand\": \"Costa Coffee\", \"brand_wikidata\": \"Q608845\"}\n allowed_domains = [\"us.costacoffee.com\"]\n- start_urls = [\"https://us.costacoffee.com/amlocator/index/ajax\"]\n-\n- def parse(self, response):\n- script = response.xpath('//script[contains(text(), \"amLocator\")]/text()').extract_first()\n-\n- start = script.index(\"jsonLocations: \") + len(\"jsonLocations: \")\n- stop = script.index(\"imageLocations\")\n-\n- locations = script[start:stop].strip().strip(\",\")\n- items = json.loads(locations)[\"items\"]\n-\n- for store in items:\n- item = Feature()\n- item[\"ref\"] = store[\"id\"]\n- item[\"lat\"] = store[\"lat\"]\n- item[\"lon\"] = store[\"lng\"]\n+ start_urls = [\"https://us.costacoffee.com/api/cf/?content_type=storeLocatorStore\"]\n+ page_size = 1000\n \n- html = Selector(text=store[\"popup_html\"])\n-\n- item[\"name\"] = html.xpath('//*[@class=\"amlocator-title\"]/text()').get()\n-\n- for line in html.xpath('//div[@class=\"amlocator-info-popup\"]/text()').getall():\n- line = line.strip()\n- if m := re.match(r\"City: (.*)\", line):\n- item[\"city\"] = m.group(1)\n- elif m := re.match(r\"Zip: (.*)\", line):\n- item[\"postcode\"] = m.group(1)\n- elif m := re.match(r\"Address: (.*)\", line):\n- item[\"street_address\"] = m.group(1)\n- elif m := re.match(r\"State: (.*)\", line):\n- item[\"state\"] = m.group(1)\n+ def start_requests(self):\n+ for url in self.start_urls:\n+ yield JsonRequest(url=f\"{url}&limit={self.page_size}\")\n \n+ def parse(self, response):\n+ for location in response.json()[\"items\"]:\n+ item = DictParser.parse(location[\"fields\"])\n+ item[\"ref\"] = location[\"sys\"][\"id\"]\n+ item[\"addr_full\"] = location[\"fields\"][\"storeAddress\"]\n+ item[\"opening_hours\"] = OpeningHours()\n+ for day_name in [s.lower() for s in DAYS_FULL]:\n+ open_time = location[\"fields\"].get(f\"{day_name}Opening\")\n+ close_time = location[\"fields\"].get(f\"{day_name}Closing\")\n+ if open_time and \"24 HOURS\" in open_time.upper():\n+ item[\"opening_hours\"].add_range(day_name, \"00:00\", \"24:00\")\n+ elif open_time and close_time:\n+ item[\"opening_hours\"].add_range(day_name, open_time, close_time)\n apply_category(Categories.COFFEE_SHOP, item)\n-\n yield item\n+\n+ offset = response.json()[\"skip\"]\n+ if offset + response.json()[\"limit\"] < response.json()[\"total\"]:\n+ yield JsonRequest(url=f\"{response.request.url}&limit={self.page_size}&offset={offset}\")\n", "issue": "Fix Costa Coffee (US) which has switched from Amasty to a JSON API\nIn the latest weekly run, Costa Coffee (costacoffee_us) is now broken with the Amasty \"amlocator\" AJAX endpoint no longer existing. 
In it's place appears to be a JSON API that returns store details (locations and addresses):\r\n\r\nhttps://us.costacoffee.com/api/cf/?locale=en-US&include=2&content_type=storeLocatorStore&limit=500&fields.location[near]=33.77804102,-84.38068933\r\n\r\nThis appears to be a fairly easy fix to rewrite the spider to use the new JSON API (with a higher limit than 500).\n", "before_files": [{"content": "import json\nimport re\n\nimport scrapy\nfrom scrapy import Selector\n\nfrom locations.categories import Categories, apply_category\nfrom locations.items import Feature\n\n\nclass CostaCoffeeUSSpider(scrapy.Spider):\n name = \"costacoffee_us\"\n item_attributes = {\"brand\": \"Costa Coffee\", \"brand_wikidata\": \"Q608845\"}\n allowed_domains = [\"us.costacoffee.com\"]\n start_urls = [\"https://us.costacoffee.com/amlocator/index/ajax\"]\n\n def parse(self, response):\n script = response.xpath('//script[contains(text(), \"amLocator\")]/text()').extract_first()\n\n start = script.index(\"jsonLocations: \") + len(\"jsonLocations: \")\n stop = script.index(\"imageLocations\")\n\n locations = script[start:stop].strip().strip(\",\")\n items = json.loads(locations)[\"items\"]\n\n for store in items:\n item = Feature()\n item[\"ref\"] = store[\"id\"]\n item[\"lat\"] = store[\"lat\"]\n item[\"lon\"] = store[\"lng\"]\n\n html = Selector(text=store[\"popup_html\"])\n\n item[\"name\"] = html.xpath('//*[@class=\"amlocator-title\"]/text()').get()\n\n for line in html.xpath('//div[@class=\"amlocator-info-popup\"]/text()').getall():\n line = line.strip()\n if m := re.match(r\"City: (.*)\", line):\n item[\"city\"] = m.group(1)\n elif m := re.match(r\"Zip: (.*)\", line):\n item[\"postcode\"] = m.group(1)\n elif m := re.match(r\"Address: (.*)\", line):\n item[\"street_address\"] = m.group(1)\n elif m := re.match(r\"State: (.*)\", line):\n item[\"state\"] = m.group(1)\n\n apply_category(Categories.COFFEE_SHOP, item)\n\n yield item\n", "path": "locations/spiders/costacoffee_us.py"}]}
1,207
847
gh_patches_debug_21453
rasdani/github-patches
git_diff
mozmeao__basket-1036
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Add content-type header for Acoustic Transact requests Transact customers using Oauth for submissions should add header: `Content-Type : text/plain` or `Content-Type : text/xml` </issue> <code> [start of basket/news/backends/acoustic.py] 1 import logging 2 3 from django.conf import settings 4 from django.utils.encoding import force_bytes 5 6 from lxml import etree 7 from requests import ConnectionError 8 from silverpop.api import Silverpop, SilverpopResponseException 9 10 logger = logging.getLogger(__name__) 11 XML_HEADER = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>' 12 13 14 def process_response(resp): 15 logger.debug("Response: %s" % resp.text) 16 response = etree.fromstring(resp.text.encode("utf-8")) 17 failure = response.find(".//FAILURES/FAILURE") 18 if failure: 19 raise SilverpopResponseException(failure.attrib["description"]) 20 21 fault = response.find(".//Fault/FaultString") 22 if fault: 23 raise SilverpopResponseException(fault.text) 24 25 return response 26 27 28 def process_tx_response(resp): 29 logger.debug("Response: %s" % resp.text) 30 response = etree.fromstring(resp.text.encode("utf-8")) 31 errors = response.findall(".//ERROR_STRING") 32 if errors: 33 for e in errors: 34 if e.text: 35 raise SilverpopResponseException(e.text) 36 37 return response 38 39 40 def xml_tag(tag, value=None, cdata=False, **attrs): 41 xmlt = etree.Element(tag, attrs) 42 if value: 43 if cdata: 44 xmlt.text = etree.CDATA(value) 45 else: 46 xmlt.text = value 47 48 return xmlt 49 50 51 def transact_xml(to, campaign_id, fields=None, bcc=None, save_to_db=False): 52 fields = fields or {} 53 bcc = bcc or [] 54 if isinstance(bcc, str): 55 bcc = [bcc] 56 57 root = xml_tag("XTMAILING") 58 root.append(xml_tag("CAMPAIGN_ID", campaign_id)) 59 if "transaction_id" in fields: 60 root.append(xml_tag("TRANSACTION_ID", fields["transaction_id"])) 61 62 root.append(xml_tag("SEND_AS_BATCH", "false")) 63 root.append(xml_tag("NO_RETRY_ON_FAILURE", "false")) 64 if fields and save_to_db: 65 save_cols_tag = xml_tag("SAVE_COLUMNS") 66 root.append(save_cols_tag) 67 for name in fields: 68 save_cols_tag.append(xml_tag("COLUMN_NAME", name)) 69 70 recipient_tag = xml_tag("RECIPIENT") 71 root.append(recipient_tag) 72 recipient_tag.append(xml_tag("EMAIL", to)) 73 for addr in bcc: 74 recipient_tag.append(xml_tag("BCC", addr)) 75 recipient_tag.append(xml_tag("BODY_TYPE", "HTML")) 76 for name, value in fields.items(): 77 p_tag = xml_tag("PERSONALIZATION") 78 p_tag.append(xml_tag("TAG_NAME", name)) 79 p_tag.append(xml_tag("VALUE", value)) 80 recipient_tag.append(p_tag) 81 82 return XML_HEADER + etree.tostring(root, encoding="unicode") 83 84 85 class Acoustic(Silverpop): 86 def _call(self, xml): 87 logger.debug("Request: %s" % xml) 88 try: 89 response = self.session.post( 90 self.api_endpoint, 91 data=force_bytes(xml), 92 timeout=10, 93 ) 94 except ConnectionError: 95 # try one more time 96 response = self.session.post( 97 self.api_endpoint, 98 data=force_bytes(xml), 99 timeout=10, 100 ) 101 102 return process_response(response) 103 104 105 class AcousticTransact(Silverpop): 106 api_xt_endpoint = "https://transact-campaign-us-%s.goacoustic.com/XTMail" 107 108 def __init__(self, client_id, client_secret, refresh_token, server_number): 109 self.api_xt_endpoint = self.api_xt_endpoint % server_number 110 super().__init__(client_id, client_secret, refresh_token, server_number) 111 112 def _call_xt(self, xml): 113 
logger.debug("Request: %s" % xml) 114 response = self.session.post( 115 self.api_xt_endpoint, 116 data=force_bytes(xml), 117 timeout=10, 118 ) 119 return process_tx_response(response) 120 121 def send_mail(self, to, campaign_id, fields=None, bcc=None, save_to_db=False): 122 self._call_xt(transact_xml(to, campaign_id, fields, bcc, save_to_db)) 123 124 125 acoustic = Acoustic( 126 client_id=settings.ACOUSTIC_CLIENT_ID, 127 client_secret=settings.ACOUSTIC_CLIENT_SECRET, 128 refresh_token=settings.ACOUSTIC_REFRESH_TOKEN, 129 server_number=settings.ACOUSTIC_SERVER_NUMBER, 130 ) 131 acoustic_tx = AcousticTransact( 132 client_id=settings.ACOUSTIC_TX_CLIENT_ID, 133 client_secret=settings.ACOUSTIC_TX_CLIENT_SECRET, 134 refresh_token=settings.ACOUSTIC_TX_REFRESH_TOKEN, 135 server_number=settings.ACOUSTIC_TX_SERVER_NUMBER, 136 ) 137 [end of basket/news/backends/acoustic.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/basket/news/backends/acoustic.py b/basket/news/backends/acoustic.py --- a/basket/news/backends/acoustic.py +++ b/basket/news/backends/acoustic.py @@ -90,6 +90,7 @@ self.api_endpoint, data=force_bytes(xml), timeout=10, + headers={"Content-Type": "text/xml"}, ) except ConnectionError: # try one more time @@ -97,6 +98,7 @@ self.api_endpoint, data=force_bytes(xml), timeout=10, + headers={"Content-Type": "text/xml"}, ) return process_response(response) @@ -115,6 +117,7 @@ self.api_xt_endpoint, data=force_bytes(xml), timeout=10, + headers={"Content-Type": "text/xml"}, ) return process_tx_response(response)
{"golden_diff": "diff --git a/basket/news/backends/acoustic.py b/basket/news/backends/acoustic.py\n--- a/basket/news/backends/acoustic.py\n+++ b/basket/news/backends/acoustic.py\n@@ -90,6 +90,7 @@\n self.api_endpoint,\n data=force_bytes(xml),\n timeout=10,\n+ headers={\"Content-Type\": \"text/xml\"},\n )\n except ConnectionError:\n # try one more time\n@@ -97,6 +98,7 @@\n self.api_endpoint,\n data=force_bytes(xml),\n timeout=10,\n+ headers={\"Content-Type\": \"text/xml\"},\n )\n \n return process_response(response)\n@@ -115,6 +117,7 @@\n self.api_xt_endpoint,\n data=force_bytes(xml),\n timeout=10,\n+ headers={\"Content-Type\": \"text/xml\"},\n )\n return process_tx_response(response)\n", "issue": "Add content-type header for Acoustic Transact requests\nTransact customers using Oauth for submissions should add header: \r\n\r\n`Content-Type : text/plain` or \r\n`Content-Type : text/xml`\r\n\n", "before_files": [{"content": "import logging\n\nfrom django.conf import settings\nfrom django.utils.encoding import force_bytes\n\nfrom lxml import etree\nfrom requests import ConnectionError\nfrom silverpop.api import Silverpop, SilverpopResponseException\n\nlogger = logging.getLogger(__name__)\nXML_HEADER = '<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>'\n\n\ndef process_response(resp):\n logger.debug(\"Response: %s\" % resp.text)\n response = etree.fromstring(resp.text.encode(\"utf-8\"))\n failure = response.find(\".//FAILURES/FAILURE\")\n if failure:\n raise SilverpopResponseException(failure.attrib[\"description\"])\n\n fault = response.find(\".//Fault/FaultString\")\n if fault:\n raise SilverpopResponseException(fault.text)\n\n return response\n\n\ndef process_tx_response(resp):\n logger.debug(\"Response: %s\" % resp.text)\n response = etree.fromstring(resp.text.encode(\"utf-8\"))\n errors = response.findall(\".//ERROR_STRING\")\n if errors:\n for e in errors:\n if e.text:\n raise SilverpopResponseException(e.text)\n\n return response\n\n\ndef xml_tag(tag, value=None, cdata=False, **attrs):\n xmlt = etree.Element(tag, attrs)\n if value:\n if cdata:\n xmlt.text = etree.CDATA(value)\n else:\n xmlt.text = value\n\n return xmlt\n\n\ndef transact_xml(to, campaign_id, fields=None, bcc=None, save_to_db=False):\n fields = fields or {}\n bcc = bcc or []\n if isinstance(bcc, str):\n bcc = [bcc]\n\n root = xml_tag(\"XTMAILING\")\n root.append(xml_tag(\"CAMPAIGN_ID\", campaign_id))\n if \"transaction_id\" in fields:\n root.append(xml_tag(\"TRANSACTION_ID\", fields[\"transaction_id\"]))\n\n root.append(xml_tag(\"SEND_AS_BATCH\", \"false\"))\n root.append(xml_tag(\"NO_RETRY_ON_FAILURE\", \"false\"))\n if fields and save_to_db:\n save_cols_tag = xml_tag(\"SAVE_COLUMNS\")\n root.append(save_cols_tag)\n for name in fields:\n save_cols_tag.append(xml_tag(\"COLUMN_NAME\", name))\n\n recipient_tag = xml_tag(\"RECIPIENT\")\n root.append(recipient_tag)\n recipient_tag.append(xml_tag(\"EMAIL\", to))\n for addr in bcc:\n recipient_tag.append(xml_tag(\"BCC\", addr))\n recipient_tag.append(xml_tag(\"BODY_TYPE\", \"HTML\"))\n for name, value in fields.items():\n p_tag = xml_tag(\"PERSONALIZATION\")\n p_tag.append(xml_tag(\"TAG_NAME\", name))\n p_tag.append(xml_tag(\"VALUE\", value))\n recipient_tag.append(p_tag)\n\n return XML_HEADER + etree.tostring(root, encoding=\"unicode\")\n\n\nclass Acoustic(Silverpop):\n def _call(self, xml):\n logger.debug(\"Request: %s\" % xml)\n try:\n response = self.session.post(\n self.api_endpoint,\n data=force_bytes(xml),\n timeout=10,\n )\n except ConnectionError:\n # try 
one more time\n response = self.session.post(\n self.api_endpoint,\n data=force_bytes(xml),\n timeout=10,\n )\n\n return process_response(response)\n\n\nclass AcousticTransact(Silverpop):\n api_xt_endpoint = \"https://transact-campaign-us-%s.goacoustic.com/XTMail\"\n\n def __init__(self, client_id, client_secret, refresh_token, server_number):\n self.api_xt_endpoint = self.api_xt_endpoint % server_number\n super().__init__(client_id, client_secret, refresh_token, server_number)\n\n def _call_xt(self, xml):\n logger.debug(\"Request: %s\" % xml)\n response = self.session.post(\n self.api_xt_endpoint,\n data=force_bytes(xml),\n timeout=10,\n )\n return process_tx_response(response)\n\n def send_mail(self, to, campaign_id, fields=None, bcc=None, save_to_db=False):\n self._call_xt(transact_xml(to, campaign_id, fields, bcc, save_to_db))\n\n\nacoustic = Acoustic(\n client_id=settings.ACOUSTIC_CLIENT_ID,\n client_secret=settings.ACOUSTIC_CLIENT_SECRET,\n refresh_token=settings.ACOUSTIC_REFRESH_TOKEN,\n server_number=settings.ACOUSTIC_SERVER_NUMBER,\n)\nacoustic_tx = AcousticTransact(\n client_id=settings.ACOUSTIC_TX_CLIENT_ID,\n client_secret=settings.ACOUSTIC_TX_CLIENT_SECRET,\n refresh_token=settings.ACOUSTIC_TX_REFRESH_TOKEN,\n server_number=settings.ACOUSTIC_TX_SERVER_NUMBER,\n)\n", "path": "basket/news/backends/acoustic.py"}]}
1,893
203
gh_patches_debug_17418
rasdani/github-patches
git_diff
zulip__zulip-3596
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Wikipedia bot crashes when the query contains multiple word delimit by underscore. The bot is under `contrib_bots/bots`. Some error handling is needed. </issue> <code> [start of contrib_bots/bots/wikipedia/wikipedia.py] 1 from __future__ import absolute_import 2 from __future__ import print_function 3 import requests 4 import logging 5 6 # See readme.md for instructions on running this code. 7 8 class WikipediaHandler(object): 9 ''' 10 This plugin facilitates searching Wikipedia for a 11 specific key term and returns the top article from the 12 search. It looks for messages starting with '@wikipedia' 13 or '@wiki'. 14 15 In this example, we write all Wikipedia searches into 16 the same stream that it was called from, but this code 17 could be adapted to write Wikipedia searches to some 18 kind of external issue tracker as well. 19 ''' 20 21 def usage(self): 22 return ''' 23 This plugin will allow users to directly search 24 Wikipedia for a specific key term and get the top 25 article that is returned from the search. Users 26 should preface searches with "@wikipedia" or 27 "@wiki". 28 ''' 29 30 def triage_message(self, message, client): 31 original_content = message['content'] 32 33 # This next line of code is defensive, as we 34 # never want to get into an infinite loop of posting Wikipedia 35 # searches for own Wikipedia searches! 36 if message['sender_full_name'] == 'wikipedia-bot': 37 return False 38 is_wikipedia = (original_content.startswith('@wiki') or 39 original_content.startswith('@wikipedia')) 40 41 return is_wikipedia 42 43 def handle_message(self, message, client, state_handler): 44 query = message['content'] 45 46 for prefix in ['@wikipedia', '@wiki']: 47 if query.startswith(prefix): 48 query = query[len(prefix)+1:] 49 break 50 51 query_wiki_link = ('https://en.wikipedia.org/w/api.php?action=query&' 52 'list=search&srsearch=%s&format=json' % (query,)) 53 try: 54 data = requests.get(query_wiki_link) 55 except requests.exceptions.RequestException: 56 logging.error('broken link') 57 return 58 59 if data.status_code != 200: 60 logging.error('unsuccessful data') 61 return 62 63 search_string = data.json()['query']['search'][0]['title'].replace(' ', '_') 64 url = 'https://wikipedia.org/wiki/' + search_string 65 new_content = 'For search term "' + query 66 if len(data.json()['query']['search']) == 0: 67 new_content = 'I am sorry. The search term you provided is not found :slightly_frowning_face:' 68 else: 69 new_content = new_content + '", ' + url 70 71 client.send_message(dict( 72 type=message['type'], 73 to=message['display_recipient'], 74 subject=message['subject'], 75 content=new_content, 76 )) 77 78 handler_class = WikipediaHandler 79 [end of contrib_bots/bots/wikipedia/wikipedia.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/contrib_bots/bots/wikipedia/wikipedia.py b/contrib_bots/bots/wikipedia/wikipedia.py --- a/contrib_bots/bots/wikipedia/wikipedia.py +++ b/contrib_bots/bots/wikipedia/wikipedia.py @@ -60,12 +60,12 @@ logging.error('unsuccessful data') return - search_string = data.json()['query']['search'][0]['title'].replace(' ', '_') - url = 'https://wikipedia.org/wiki/' + search_string new_content = 'For search term "' + query if len(data.json()['query']['search']) == 0: new_content = 'I am sorry. The search term you provided is not found :slightly_frowning_face:' else: + search_string = data.json()['query']['search'][0]['title'].replace(' ', '_') + url = 'https://en.wikipedia.org/wiki/' + search_string new_content = new_content + '", ' + url client.send_message(dict(
{"golden_diff": "diff --git a/contrib_bots/bots/wikipedia/wikipedia.py b/contrib_bots/bots/wikipedia/wikipedia.py\n--- a/contrib_bots/bots/wikipedia/wikipedia.py\n+++ b/contrib_bots/bots/wikipedia/wikipedia.py\n@@ -60,12 +60,12 @@\n logging.error('unsuccessful data')\n return\n \n- search_string = data.json()['query']['search'][0]['title'].replace(' ', '_')\n- url = 'https://wikipedia.org/wiki/' + search_string\n new_content = 'For search term \"' + query\n if len(data.json()['query']['search']) == 0:\n new_content = 'I am sorry. The search term you provided is not found :slightly_frowning_face:'\n else:\n+ search_string = data.json()['query']['search'][0]['title'].replace(' ', '_')\n+ url = 'https://en.wikipedia.org/wiki/' + search_string\n new_content = new_content + '\", ' + url\n \n client.send_message(dict(\n", "issue": "Wikipedia bot crashes when the query contains multiple word delimit by underscore.\nThe bot is under `contrib_bots/bots`. Some error handling is needed.\n", "before_files": [{"content": "from __future__ import absolute_import\nfrom __future__ import print_function\nimport requests\nimport logging\n\n# See readme.md for instructions on running this code.\n\nclass WikipediaHandler(object):\n '''\n This plugin facilitates searching Wikipedia for a\n specific key term and returns the top article from the\n search. It looks for messages starting with '@wikipedia'\n or '@wiki'.\n\n In this example, we write all Wikipedia searches into\n the same stream that it was called from, but this code\n could be adapted to write Wikipedia searches to some\n kind of external issue tracker as well.\n '''\n\n def usage(self):\n return '''\n This plugin will allow users to directly search\n Wikipedia for a specific key term and get the top\n article that is returned from the search. Users\n should preface searches with \"@wikipedia\" or\n \"@wiki\".\n '''\n\n def triage_message(self, message, client):\n original_content = message['content']\n\n # This next line of code is defensive, as we\n # never want to get into an infinite loop of posting Wikipedia\n # searches for own Wikipedia searches!\n if message['sender_full_name'] == 'wikipedia-bot':\n return False\n is_wikipedia = (original_content.startswith('@wiki') or\n original_content.startswith('@wikipedia'))\n\n return is_wikipedia\n\n def handle_message(self, message, client, state_handler):\n query = message['content']\n\n for prefix in ['@wikipedia', '@wiki']:\n if query.startswith(prefix):\n query = query[len(prefix)+1:]\n break\n\n query_wiki_link = ('https://en.wikipedia.org/w/api.php?action=query&'\n 'list=search&srsearch=%s&format=json' % (query,))\n try:\n data = requests.get(query_wiki_link)\n except requests.exceptions.RequestException:\n logging.error('broken link')\n return\n\n if data.status_code != 200:\n logging.error('unsuccessful data')\n return\n\n search_string = data.json()['query']['search'][0]['title'].replace(' ', '_')\n url = 'https://wikipedia.org/wiki/' + search_string\n new_content = 'For search term \"' + query\n if len(data.json()['query']['search']) == 0:\n new_content = 'I am sorry. The search term you provided is not found :slightly_frowning_face:'\n else:\n new_content = new_content + '\", ' + url\n\n client.send_message(dict(\n type=message['type'],\n to=message['display_recipient'],\n subject=message['subject'],\n content=new_content,\n ))\n\nhandler_class = WikipediaHandler\n", "path": "contrib_bots/bots/wikipedia/wikipedia.py"}]}
1,305
227
gh_patches_debug_24325
rasdani/github-patches
git_diff
xonsh__xonsh-3002
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> cmd && othercmd throws an exception ``` ➤ ls &&Exception in thread Thread-35: Traceback (most recent call last): File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner self.run() File "/usr/lib/python3.6/threading.py", line 864, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.6/site-packages/prompt_toolkit/interface.py", line 860, in run completions = list(buffer.completer.get_completions(document, complete_event)) File "/usr/lib/python3.6/site-packages/xonsh/ptk/completer.py", line 49, in get_completions self.ctx) File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 124, in complete out = func(prefix, line, begidx, endidx, ctx) File "/usr/lib/python3.6/site-packages/xonsh/completers/__amalgam__.py", line 831, in complete_skipper ctx) File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 124, in complete out = func(prefix, line, begidx, endidx, ctx) File "/usr/lib/python3.6/site-packages/xonsh/completers/__amalgam__.py", line 1427, in complete_base complete_command(prefix, line, start, end, ctx)) TypeError: unsupported operand type(s) for |: 'tuple' and 'set' ``` ``` ➤ y -Q | egrep 'toolkit|xon' community/python-prompt_toolkit 1.0.14-1 local/xonsh 0.5.9-1 ``` Let me know if you need anything else. </issue> <code> [start of xonsh/completers/base.py] 1 """Base completer for xonsh.""" 2 import collections.abc as cabc 3 4 from xonsh.completers.path import complete_path 5 from xonsh.completers.python import complete_python 6 from xonsh.completers.commands import complete_command 7 8 9 def complete_base(prefix, line, start, end, ctx): 10 """If the line is empty, complete based on valid commands, python names, 11 and paths. If we are completing the first argument, complete based on 12 valid commands and python names. 13 """ 14 if line.strip() == "": 15 out = complete_python(prefix, line, start, end, ctx) | complete_command( 16 prefix, line, start, end, ctx 17 ) 18 paths = complete_path(prefix, line, start, end, ctx, False) 19 return (out | paths[0]), paths[1] 20 elif prefix == line: 21 python_comps = complete_python(prefix, line, start, end, ctx) 22 if isinstance(python_comps, cabc.Sequence): 23 return ( 24 python_comps[0] | complete_command(prefix, line, start, end, ctx), 25 python_comps[1], 26 ) 27 else: 28 return python_comps | complete_command(prefix, line, start, end, ctx) 29 return set() 30 [end of xonsh/completers/base.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/xonsh/completers/base.py b/xonsh/completers/base.py
--- a/xonsh/completers/base.py
+++ b/xonsh/completers/base.py
@@ -11,19 +11,21 @@
     and paths. If we are completing the first argument, complete based on
     valid commands and python names.
     """
+    # get and unpack python completions
+    python_comps = complete_python(prefix, line, start, end, ctx)
+    if isinstance(python_comps, cabc.Sequence):
+        python_comps, python_comps_len = python_comps
+    else:
+        python_comps_len = None
+    # add command completions
+    out = python_comps | complete_command(prefix, line, start, end, ctx)
+    # add paths, if needed
     if line.strip() == "":
-        out = complete_python(prefix, line, start, end, ctx) | complete_command(
-            prefix, line, start, end, ctx
-        )
         paths = complete_path(prefix, line, start, end, ctx, False)
         return (out | paths[0]), paths[1]
     elif prefix == line:
-        python_comps = complete_python(prefix, line, start, end, ctx)
-        if isinstance(python_comps, cabc.Sequence):
-            return (
-                python_comps[0] | complete_command(prefix, line, start, end, ctx),
-                python_comps[1],
-            )
+        if python_comps_len is None:
+            return out
         else:
-            return python_comps | complete_command(prefix, line, start, end, ctx)
+            return out, python_comps_len
     return set()
{"golden_diff": "diff --git a/xonsh/completers/base.py b/xonsh/completers/base.py\n--- a/xonsh/completers/base.py\n+++ b/xonsh/completers/base.py\n@@ -11,19 +11,21 @@\n and paths. If we are completing the first argument, complete based on\n valid commands and python names.\n \"\"\"\n+ # get and unpack python completions\n+ python_comps = complete_python(prefix, line, start, end, ctx)\n+ if isinstance(python_comps, cabc.Sequence):\n+ python_comps, python_comps_len = python_comps\n+ else:\n+ python_comps_len = None\n+ # add command completions\n+ out = python_comps | complete_command(prefix, line, start, end, ctx)\n+ # add paths, if needed\n if line.strip() == \"\":\n- out = complete_python(prefix, line, start, end, ctx) | complete_command(\n- prefix, line, start, end, ctx\n- )\n paths = complete_path(prefix, line, start, end, ctx, False)\n return (out | paths[0]), paths[1]\n elif prefix == line:\n- python_comps = complete_python(prefix, line, start, end, ctx)\n- if isinstance(python_comps, cabc.Sequence):\n- return (\n- python_comps[0] | complete_command(prefix, line, start, end, ctx),\n- python_comps[1],\n- )\n+ if python_comps_len is None:\n+ return out\n else:\n- return python_comps | complete_command(prefix, line, start, end, ctx)\n+ return out, python_comps_len\n return set()\n", "issue": "cmd && othercmd throws an exception\n```\r\n\u27a4 ls &&Exception in thread Thread-35:\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python3.6/threading.py\", line 916, in _bootstrap_inner\r\n self.run()\r\n File \"/usr/lib/python3.6/threading.py\", line 864, in run\r\n self._target(*self._args, **self._kwargs)\r\n File \"/usr/lib/python3.6/site-packages/prompt_toolkit/interface.py\", line 860, in run\r\n completions = list(buffer.completer.get_completions(document, complete_event))\r\n File \"/usr/lib/python3.6/site-packages/xonsh/ptk/completer.py\", line 49, in get_completions\r\n self.ctx)\r\n File \"/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 124, in complete\r\n out = func(prefix, line, begidx, endidx, ctx)\r\n File \"/usr/lib/python3.6/site-packages/xonsh/completers/__amalgam__.py\", line 831, in complete_skipper\r\n ctx)\r\n File \"/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 124, in complete\r\n out = func(prefix, line, begidx, endidx, ctx)\r\n File \"/usr/lib/python3.6/site-packages/xonsh/completers/__amalgam__.py\", line 1427, in complete_base\r\n complete_command(prefix, line, start, end, ctx))\r\nTypeError: unsupported operand type(s) for |: 'tuple' and 'set'\r\n```\r\n\r\n```\r\n\u27a4 y -Q | egrep 'toolkit|xon'\r\ncommunity/python-prompt_toolkit 1.0.14-1\r\nlocal/xonsh 0.5.9-1\r\n```\r\n\r\nLet me know if you need anything else.\n", "before_files": [{"content": "\"\"\"Base completer for xonsh.\"\"\"\nimport collections.abc as cabc\n\nfrom xonsh.completers.path import complete_path\nfrom xonsh.completers.python import complete_python\nfrom xonsh.completers.commands import complete_command\n\n\ndef complete_base(prefix, line, start, end, ctx):\n \"\"\"If the line is empty, complete based on valid commands, python names,\n and paths. 
If we are completing the first argument, complete based on\n valid commands and python names.\n \"\"\"\n if line.strip() == \"\":\n out = complete_python(prefix, line, start, end, ctx) | complete_command(\n prefix, line, start, end, ctx\n )\n paths = complete_path(prefix, line, start, end, ctx, False)\n return (out | paths[0]), paths[1]\n elif prefix == line:\n python_comps = complete_python(prefix, line, start, end, ctx)\n if isinstance(python_comps, cabc.Sequence):\n return (\n python_comps[0] | complete_command(prefix, line, start, end, ctx),\n python_comps[1],\n )\n else:\n return python_comps | complete_command(prefix, line, start, end, ctx)\n return set()\n", "path": "xonsh/completers/base.py"}]}
1,293
387
gh_patches_debug_22442
rasdani/github-patches
git_diff
getsentry__sentry-24461
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> I have an issue when I import the export json file ## Important Details On-Premise w/ Docker, version 9.1.2 ## Description I have two servers with a sentry. There are several projects on the first server, and I would like to copy these projects to the clear second server. I use the export/import commands. The export command works fine. However, when I run the import command, I get an error. ## Steps to Reproduce 1. Run command 'sentry export sentry_export.json' on the first server 2. Run command 'sentry import sentry_export.json' on the second server 3. Get an error Good items to include here include: `Traceback (most recent call last): File "/usr/local/bin/sentry", line 8, in <module> sys.exit(main()) File "/usr/local/lib/python2.7/site-packages/sentry/runner/__init__.py", line 162, in main cli(prog_name=get_prog(), obj={}, max_content_width=100) File "/usr/local/lib/python2.7/site-packages/click/core.py", line 722, in __call__ return self.main(*args, **kwargs) File "/usr/local/lib/python2.7/site-packages/click/core.py", line 697, in main rv = self.invoke(ctx) File "/usr/local/lib/python2.7/site-packages/click/core.py", line 1066, in invoke return _process_result(sub_ctx.command.invoke(sub_ctx)) File "/usr/local/lib/python2.7/site-packages/click/core.py", line 895, in invoke return ctx.invoke(self.callback, **ctx.params) File "/usr/local/lib/python2.7/site-packages/click/core.py", line 535, in invoke return callback(*args, **kwargs) File "/usr/local/lib/python2.7/site-packages/click/decorators.py", line 17, in new_func return f(get_current_context(), *args, **kwargs) File "/usr/local/lib/python2.7/site-packages/sentry/runner/decorators.py", line 36, in inner return ctx.invoke(f, *args, **kwargs) File "/usr/local/lib/python2.7/site-packages/click/core.py", line 535, in invoke return callback(*args, **kwargs) File "/usr/local/lib/python2.7/site-packages/sentry/runner/commands/backup.py", line 21, in import_ for obj in serializers.deserialize("json", src, stream=True, use_natural_keys=True): File "/usr/local/lib/python2.7/site-packages/django/core/serializers/json.py", line 76, in Deserializer six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2]) File "/usr/local/lib/python2.7/site-packages/django/core/serializers/json.py", line 70, in Deserializer for obj in PythonDeserializer(objects, **options): File "/usr/local/lib/python2.7/site-packages/django/core/serializers/python.py", line 140, in Deserializer data[field.name] = field.to_python(field_value) File "/usr/local/lib/python2.7/site-packages/sentry/db/models/fields/array.py", line 56, in to_python value = json.loads(value) File "/usr/local/lib/python2.7/site-packages/sentry/utils/json.py", line 111, in loads return _default_decoder.decode(value) File "/usr/local/lib/python2.7/site-packages/simplejson/decoder.py", line 370, in decode obj, end = self.raw_decode(s) File "/usr/local/lib/python2.7/site-packages/simplejson/decoder.py", line 400, in raw_decode return self.scan_once(s, idx=_w(s, idx).end()) django.core.serializers.base.DeserializationError: Expecting value: line 1 column 2 (char 1)` ### What you expected to happen Import works fine too ### Possible Solution [If you have an idea on how this could be solved include that detail here.] 
</issue> <code> [start of src/sentry/db/models/fields/array.py] 1 from django.db import models 2 3 from sentry.db.models.utils import Creator 4 from sentry.utils import json 5 from sentry.utils.compat import map 6 7 8 # Adapted from django-pgfields 9 # https://github.com/lukesneeringer/django-pgfields/blob/master/django_pg/models/fields/array.py 10 class ArrayField(models.Field): 11 def __init__(self, of=models.TextField, **kwargs): 12 # Arrays in PostgreSQL are arrays of a particular type. 13 # Save the subtype in our field class. 14 if isinstance(of, type): 15 of = of() 16 self.of = of 17 18 # Set "null" to True. Arrays don't have nulls, but null=True 19 # in the ORM amounts to nothing in SQL (whereas null=False 20 # corresponds to `NOT NULL`) 21 kwargs["null"] = True 22 23 super().__init__(**kwargs) 24 25 def contribute_to_class(self, cls, name): 26 """ 27 Add a descriptor for backwards compatibility 28 with previous Django behavior. 29 """ 30 super().contribute_to_class(cls, name) 31 setattr(cls, name, Creator(self)) 32 33 def db_type(self, connection): 34 return f"{self.of.db_type(connection)}[]" 35 36 def get_internal_type(self): 37 return "TextField" 38 39 def to_python(self, value): 40 if not value: 41 value = [] 42 if isinstance(value, str): 43 value = json.loads(value) 44 return map(self.of.to_python, value) 45 [end of src/sentry/db/models/fields/array.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/sentry/db/models/fields/array.py b/src/sentry/db/models/fields/array.py
--- a/src/sentry/db/models/fields/array.py
+++ b/src/sentry/db/models/fields/array.py
@@ -1,3 +1,5 @@
+import ast
+
 from django.db import models
 
 from sentry.db.models.utils import Creator
@@ -36,9 +38,26 @@
     def get_internal_type(self):
        return "TextField"
 
+    def get_prep_value(self, value):
+        """Iterate over each item in the array, and run it
+        through the `get_prep_value` of this array's type.
+        """
+        # If no valid value was given, return an empty list.
+        if not value:
+            return []
+
+        # Appropriately coerce each individual value within
+        # our array.
+        return [self.of.get_prep_value(item) for item in value]
+
     def to_python(self, value):
         if not value:
             value = []
         if isinstance(value, str):
-            value = json.loads(value)
+            try:
+                value = json.loads(value)
+            except json.JSONDecodeError:
+                # This is to accomodate the erronous exports pre 21.4.0
+                # See getsentry/sentry#23843 for more details
+                value = ast.literal_eval(value)
         return map(self.of.to_python, value)
{"golden_diff": "diff --git a/src/sentry/db/models/fields/array.py b/src/sentry/db/models/fields/array.py\n--- a/src/sentry/db/models/fields/array.py\n+++ b/src/sentry/db/models/fields/array.py\n@@ -1,3 +1,5 @@\n+import ast\n+\n from django.db import models\n \n from sentry.db.models.utils import Creator\n@@ -36,9 +38,26 @@\n def get_internal_type(self):\n return \"TextField\"\n \n+ def get_prep_value(self, value):\n+ \"\"\"Iterate over each item in the array, and run it\n+ through the `get_prep_value` of this array's type.\n+ \"\"\"\n+ # If no valid value was given, return an empty list.\n+ if not value:\n+ return []\n+\n+ # Appropriately coerce each individual value within\n+ # our array.\n+ return [self.of.get_prep_value(item) for item in value]\n+\n def to_python(self, value):\n if not value:\n value = []\n if isinstance(value, str):\n- value = json.loads(value)\n+ try:\n+ value = json.loads(value)\n+ except json.JSONDecodeError:\n+ # This is to accomodate the erronous exports pre 21.4.0\n+ # See getsentry/sentry#23843 for more details\n+ value = ast.literal_eval(value)\n return map(self.of.to_python, value)\n", "issue": "I have an issue when I import the export json file\n## Important Details\r\n\r\nOn-Premise w/ Docker, version 9.1.2\r\n\r\n## Description\r\n\r\nI have two servers with a sentry. There are several projects on the first server, and I would like to copy these projects to the clear second server.\r\nI use the export/import commands. The export command works fine. However, when I run the import command, I get an error.\r\n\r\n## Steps to Reproduce\r\n\r\n1. Run command 'sentry export sentry_export.json' on the first server\r\n2. Run command 'sentry import sentry_export.json' on the second server\r\n3. Get an error\r\n\r\nGood items to include here include:\r\n\r\n`Traceback (most recent call last):\r\n File \"/usr/local/bin/sentry\", line 8, in <module>\r\n sys.exit(main())\r\n File \"/usr/local/lib/python2.7/site-packages/sentry/runner/__init__.py\", line 162, in main\r\n cli(prog_name=get_prog(), obj={}, max_content_width=100)\r\n File \"/usr/local/lib/python2.7/site-packages/click/core.py\", line 722, in __call__\r\n return self.main(*args, **kwargs)\r\n File \"/usr/local/lib/python2.7/site-packages/click/core.py\", line 697, in main\r\n rv = self.invoke(ctx)\r\n File \"/usr/local/lib/python2.7/site-packages/click/core.py\", line 1066, in invoke\r\n return _process_result(sub_ctx.command.invoke(sub_ctx))\r\n File \"/usr/local/lib/python2.7/site-packages/click/core.py\", line 895, in invoke\r\n return ctx.invoke(self.callback, **ctx.params)\r\n File \"/usr/local/lib/python2.7/site-packages/click/core.py\", line 535, in invoke\r\n return callback(*args, **kwargs)\r\n File \"/usr/local/lib/python2.7/site-packages/click/decorators.py\", line 17, in new_func\r\n return f(get_current_context(), *args, **kwargs)\r\n File \"/usr/local/lib/python2.7/site-packages/sentry/runner/decorators.py\", line 36, in inner\r\n return ctx.invoke(f, *args, **kwargs)\r\n File \"/usr/local/lib/python2.7/site-packages/click/core.py\", line 535, in invoke\r\n return callback(*args, **kwargs)\r\n File \"/usr/local/lib/python2.7/site-packages/sentry/runner/commands/backup.py\", line 21, in import_\r\n for obj in serializers.deserialize(\"json\", src, stream=True, use_natural_keys=True):\r\n File \"/usr/local/lib/python2.7/site-packages/django/core/serializers/json.py\", line 76, in Deserializer\r\n six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2])\r\n File 
\"/usr/local/lib/python2.7/site-packages/django/core/serializers/json.py\", line 70, in Deserializer\r\n for obj in PythonDeserializer(objects, **options):\r\n File \"/usr/local/lib/python2.7/site-packages/django/core/serializers/python.py\", line 140, in Deserializer\r\n data[field.name] = field.to_python(field_value)\r\n File \"/usr/local/lib/python2.7/site-packages/sentry/db/models/fields/array.py\", line 56, in to_python\r\n value = json.loads(value)\r\n File \"/usr/local/lib/python2.7/site-packages/sentry/utils/json.py\", line 111, in loads\r\n return _default_decoder.decode(value)\r\n File \"/usr/local/lib/python2.7/site-packages/simplejson/decoder.py\", line 370, in decode\r\n obj, end = self.raw_decode(s)\r\n File \"/usr/local/lib/python2.7/site-packages/simplejson/decoder.py\", line 400, in raw_decode\r\n return self.scan_once(s, idx=_w(s, idx).end())\r\ndjango.core.serializers.base.DeserializationError: Expecting value: line 1 column 2 (char 1)`\r\n\r\n### What you expected to happen\r\n\r\nImport works fine too\r\n\r\n### Possible Solution\r\n\r\n[If you have an idea on how this could be solved include that detail here.]\r\n\n", "before_files": [{"content": "from django.db import models\n\nfrom sentry.db.models.utils import Creator\nfrom sentry.utils import json\nfrom sentry.utils.compat import map\n\n\n# Adapted from django-pgfields\n# https://github.com/lukesneeringer/django-pgfields/blob/master/django_pg/models/fields/array.py\nclass ArrayField(models.Field):\n def __init__(self, of=models.TextField, **kwargs):\n # Arrays in PostgreSQL are arrays of a particular type.\n # Save the subtype in our field class.\n if isinstance(of, type):\n of = of()\n self.of = of\n\n # Set \"null\" to True. Arrays don't have nulls, but null=True\n # in the ORM amounts to nothing in SQL (whereas null=False\n # corresponds to `NOT NULL`)\n kwargs[\"null\"] = True\n\n super().__init__(**kwargs)\n\n def contribute_to_class(self, cls, name):\n \"\"\"\n Add a descriptor for backwards compatibility\n with previous Django behavior.\n \"\"\"\n super().contribute_to_class(cls, name)\n setattr(cls, name, Creator(self))\n\n def db_type(self, connection):\n return f\"{self.of.db_type(connection)}[]\"\n\n def get_internal_type(self):\n return \"TextField\"\n\n def to_python(self, value):\n if not value:\n value = []\n if isinstance(value, str):\n value = json.loads(value)\n return map(self.of.to_python, value)\n", "path": "src/sentry/db/models/fields/array.py"}]}
1,864
320
gh_patches_debug_40098
rasdani/github-patches
git_diff
pytorch__vision-6458
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Port `transforms.LinearTransformation` to `prototype.transforms` cc @vfdev-5 @datumbox @bjuncek </issue> <code> [start of torchvision/prototype/transforms/_misc.py] 1 import functools 2 from typing import Any, Callable, Dict, List, Sequence, Type, Union 3 4 import torch 5 from torchvision.prototype.transforms import functional as F, Transform 6 from torchvision.transforms.transforms import _setup_size 7 8 9 class Identity(Transform): 10 def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any: 11 return inpt 12 13 14 class Lambda(Transform): 15 def __init__(self, fn: Callable[[Any], Any], *types: Type): 16 super().__init__() 17 self.fn = fn 18 self.types = types 19 20 def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any: 21 if type(inpt) in self.types: 22 return self.fn(inpt) 23 else: 24 return inpt 25 26 def extra_repr(self) -> str: 27 extras = [] 28 name = getattr(self.fn, "__name__", None) 29 if name: 30 extras.append(name) 31 extras.append(f"types={[type.__name__ for type in self.types]}") 32 return ", ".join(extras) 33 34 35 class Normalize(Transform): 36 def __init__(self, mean: List[float], std: List[float]): 37 super().__init__() 38 self.mean = mean 39 self.std = std 40 41 def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any: 42 return F.normalize(inpt, mean=self.mean, std=self.std) 43 44 45 class GaussianBlur(Transform): 46 def __init__( 47 self, kernel_size: Union[int, Sequence[int]], sigma: Union[float, Sequence[float]] = (0.1, 2.0) 48 ) -> None: 49 super().__init__() 50 self.kernel_size = _setup_size(kernel_size, "Kernel size should be a tuple/list of two integers") 51 for ks in self.kernel_size: 52 if ks <= 0 or ks % 2 == 0: 53 raise ValueError("Kernel size value should be an odd and positive number.") 54 55 if isinstance(sigma, float): 56 if sigma <= 0: 57 raise ValueError("If sigma is a single number, it must be positive.") 58 sigma = (sigma, sigma) 59 elif isinstance(sigma, Sequence) and len(sigma) == 2: 60 if not 0.0 < sigma[0] <= sigma[1]: 61 raise ValueError("sigma values should be positive and of the form (min, max).") 62 else: 63 raise TypeError("sigma should be a single float or a list/tuple with length 2 floats.") 64 65 self.sigma = sigma 66 67 def _get_params(self, sample: Any) -> Dict[str, Any]: 68 sigma = torch.empty(1).uniform_(self.sigma[0], self.sigma[1]).item() 69 return dict(sigma=[sigma, sigma]) 70 71 def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any: 72 return F.gaussian_blur(inpt, **params) 73 74 75 class ToDtype(Lambda): 76 def __init__(self, dtype: torch.dtype, *types: Type) -> None: 77 self.dtype = dtype 78 super().__init__(functools.partial(torch.Tensor.to, dtype=dtype), *types) 79 80 def extra_repr(self) -> str: 81 return ", ".join([f"dtype={self.dtype}", f"types={[type.__name__ for type in self.types]}"]) 82 [end of torchvision/prototype/transforms/_misc.py] [start of torchvision/prototype/transforms/__init__.py] 1 from . 
import functional # usort: skip 2 3 from ._transform import Transform # usort: skip 4 5 from ._augment import RandomCutmix, RandomErasing, RandomMixup 6 from ._auto_augment import AugMix, AutoAugment, AutoAugmentPolicy, RandAugment, TrivialAugmentWide 7 from ._color import ( 8 ColorJitter, 9 RandomAdjustSharpness, 10 RandomAutocontrast, 11 RandomEqualize, 12 RandomInvert, 13 RandomPhotometricDistort, 14 RandomPosterize, 15 RandomSolarize, 16 ) 17 from ._container import Compose, RandomApply, RandomChoice, RandomOrder 18 from ._geometry import ( 19 BatchMultiCrop, 20 CenterCrop, 21 ElasticTransform, 22 FiveCrop, 23 FixedSizeCrop, 24 Pad, 25 RandomAffine, 26 RandomCrop, 27 RandomHorizontalFlip, 28 RandomIoUCrop, 29 RandomPerspective, 30 RandomResizedCrop, 31 RandomRotation, 32 RandomShortestSize, 33 RandomVerticalFlip, 34 RandomZoomOut, 35 Resize, 36 ScaleJitter, 37 TenCrop, 38 ) 39 from ._meta import ConvertBoundingBoxFormat, ConvertColorSpace, ConvertImageDtype 40 from ._misc import GaussianBlur, Identity, Lambda, Normalize, ToDtype 41 from ._type_conversion import DecodeImage, LabelToOneHot, ToImagePIL, ToImageTensor 42 43 from ._deprecated import Grayscale, RandomGrayscale, ToTensor, ToPILImage, PILToTensor # usort: skip 44 [end of torchvision/prototype/transforms/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/torchvision/prototype/transforms/__init__.py b/torchvision/prototype/transforms/__init__.py
--- a/torchvision/prototype/transforms/__init__.py
+++ b/torchvision/prototype/transforms/__init__.py
@@ -37,7 +37,7 @@
     TenCrop,
 )
 from ._meta import ConvertBoundingBoxFormat, ConvertColorSpace, ConvertImageDtype
-from ._misc import GaussianBlur, Identity, Lambda, Normalize, ToDtype
+from ._misc import GaussianBlur, Identity, Lambda, LinearTransformation, Normalize, ToDtype
 from ._type_conversion import DecodeImage, LabelToOneHot, ToImagePIL, ToImageTensor
 
 from ._deprecated import Grayscale, RandomGrayscale, ToTensor, ToPILImage, PILToTensor  # usort: skip
diff --git a/torchvision/prototype/transforms/_misc.py b/torchvision/prototype/transforms/_misc.py
--- a/torchvision/prototype/transforms/_misc.py
+++ b/torchvision/prototype/transforms/_misc.py
@@ -1,7 +1,10 @@
 import functools
 from typing import Any, Callable, Dict, List, Sequence, Type, Union
 
+import PIL.Image
+
 import torch
+from torchvision.prototype import features
 from torchvision.prototype.transforms import functional as F, Transform
 from torchvision.transforms.transforms import _setup_size
 
@@ -32,6 +35,59 @@
         return ", ".join(extras)
 
 
+class LinearTransformation(Transform):
+    def __init__(self, transformation_matrix: torch.Tensor, mean_vector: torch.Tensor):
+        super().__init__()
+        if transformation_matrix.size(0) != transformation_matrix.size(1):
+            raise ValueError(
+                "transformation_matrix should be square. Got "
+                f"{tuple(transformation_matrix.size())} rectangular matrix."
+            )
+
+        if mean_vector.size(0) != transformation_matrix.size(0):
+            raise ValueError(
+                f"mean_vector should have the same length {mean_vector.size(0)}"
+                f" as any one of the dimensions of the transformation_matrix [{tuple(transformation_matrix.size())}]"
+            )
+
+        if transformation_matrix.device != mean_vector.device:
+            raise ValueError(
+                f"Input tensors should be on the same device. Got {transformation_matrix.device} and {mean_vector.device}"
+            )
+
+        self.transformation_matrix = transformation_matrix
+        self.mean_vector = mean_vector
+
+    def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:
+
+        if isinstance(inpt, features._Feature) and not isinstance(inpt, features.Image):
+            return inpt
+        elif isinstance(inpt, PIL.Image.Image):
+            raise TypeError("Unsupported input type")
+
+        # Image instance after linear transformation is not Image anymore due to unknown data range
+        # Thus we will return Tensor for input Image
+
+        shape = inpt.shape
+        n = shape[-3] * shape[-2] * shape[-1]
+        if n != self.transformation_matrix.shape[0]:
+            raise ValueError(
+                "Input tensor and transformation matrix have incompatible shape."
+                + f"[{shape[-3]} x {shape[-2]} x {shape[-1]}] != "
+                + f"{self.transformation_matrix.shape[0]}"
+            )
+
+        if inpt.device.type != self.mean_vector.device.type:
+            raise ValueError(
+                "Input tensor should be on the same device as transformation matrix and mean vector. "
+                f"Got {inpt.device} vs {self.mean_vector.device}"
+            )
+
+        flat_tensor = inpt.view(-1, n) - self.mean_vector
+        transformed_tensor = torch.mm(flat_tensor, self.transformation_matrix)
+        return transformed_tensor.view(shape)
+
+
 class Normalize(Transform):
     def __init__(self, mean: List[float], std: List[float]):
         super().__init__()
{"golden_diff": "diff --git a/torchvision/prototype/transforms/__init__.py b/torchvision/prototype/transforms/__init__.py\n--- a/torchvision/prototype/transforms/__init__.py\n+++ b/torchvision/prototype/transforms/__init__.py\n@@ -37,7 +37,7 @@\n TenCrop,\n )\n from ._meta import ConvertBoundingBoxFormat, ConvertColorSpace, ConvertImageDtype\n-from ._misc import GaussianBlur, Identity, Lambda, Normalize, ToDtype\n+from ._misc import GaussianBlur, Identity, Lambda, LinearTransformation, Normalize, ToDtype\n from ._type_conversion import DecodeImage, LabelToOneHot, ToImagePIL, ToImageTensor\n \n from ._deprecated import Grayscale, RandomGrayscale, ToTensor, ToPILImage, PILToTensor # usort: skip\ndiff --git a/torchvision/prototype/transforms/_misc.py b/torchvision/prototype/transforms/_misc.py\n--- a/torchvision/prototype/transforms/_misc.py\n+++ b/torchvision/prototype/transforms/_misc.py\n@@ -1,7 +1,10 @@\n import functools\n from typing import Any, Callable, Dict, List, Sequence, Type, Union\n \n+import PIL.Image\n+\n import torch\n+from torchvision.prototype import features\n from torchvision.prototype.transforms import functional as F, Transform\n from torchvision.transforms.transforms import _setup_size\n \n@@ -32,6 +35,59 @@\n return \", \".join(extras)\n \n \n+class LinearTransformation(Transform):\n+ def __init__(self, transformation_matrix: torch.Tensor, mean_vector: torch.Tensor):\n+ super().__init__()\n+ if transformation_matrix.size(0) != transformation_matrix.size(1):\n+ raise ValueError(\n+ \"transformation_matrix should be square. Got \"\n+ f\"{tuple(transformation_matrix.size())} rectangular matrix.\"\n+ )\n+\n+ if mean_vector.size(0) != transformation_matrix.size(0):\n+ raise ValueError(\n+ f\"mean_vector should have the same length {mean_vector.size(0)}\"\n+ f\" as any one of the dimensions of the transformation_matrix [{tuple(transformation_matrix.size())}]\"\n+ )\n+\n+ if transformation_matrix.device != mean_vector.device:\n+ raise ValueError(\n+ f\"Input tensors should be on the same device. Got {transformation_matrix.device} and {mean_vector.device}\"\n+ )\n+\n+ self.transformation_matrix = transformation_matrix\n+ self.mean_vector = mean_vector\n+\n+ def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:\n+\n+ if isinstance(inpt, features._Feature) and not isinstance(inpt, features.Image):\n+ return inpt\n+ elif isinstance(inpt, PIL.Image.Image):\n+ raise TypeError(\"Unsupported input type\")\n+\n+ # Image instance after linear transformation is not Image anymore due to unknown data range\n+ # Thus we will return Tensor for input Image\n+\n+ shape = inpt.shape\n+ n = shape[-3] * shape[-2] * shape[-1]\n+ if n != self.transformation_matrix.shape[0]:\n+ raise ValueError(\n+ \"Input tensor and transformation matrix have incompatible shape.\"\n+ + f\"[{shape[-3]} x {shape[-2]} x {shape[-1]}] != \"\n+ + f\"{self.transformation_matrix.shape[0]}\"\n+ )\n+\n+ if inpt.device.type != self.mean_vector.device.type:\n+ raise ValueError(\n+ \"Input tensor should be on the same device as transformation matrix and mean vector. 
\"\n+ f\"Got {inpt.device} vs {self.mean_vector.device}\"\n+ )\n+\n+ flat_tensor = inpt.view(-1, n) - self.mean_vector\n+ transformed_tensor = torch.mm(flat_tensor, self.transformation_matrix)\n+ return transformed_tensor.view(shape)\n+\n+\n class Normalize(Transform):\n def __init__(self, mean: List[float], std: List[float]):\n super().__init__()\n", "issue": "Port `transforms.LinearTransformation` to `prototype.transforms`\ncc @vfdev-5 @datumbox @bjuncek\n", "before_files": [{"content": "import functools\nfrom typing import Any, Callable, Dict, List, Sequence, Type, Union\n\nimport torch\nfrom torchvision.prototype.transforms import functional as F, Transform\nfrom torchvision.transforms.transforms import _setup_size\n\n\nclass Identity(Transform):\n def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:\n return inpt\n\n\nclass Lambda(Transform):\n def __init__(self, fn: Callable[[Any], Any], *types: Type):\n super().__init__()\n self.fn = fn\n self.types = types\n\n def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:\n if type(inpt) in self.types:\n return self.fn(inpt)\n else:\n return inpt\n\n def extra_repr(self) -> str:\n extras = []\n name = getattr(self.fn, \"__name__\", None)\n if name:\n extras.append(name)\n extras.append(f\"types={[type.__name__ for type in self.types]}\")\n return \", \".join(extras)\n\n\nclass Normalize(Transform):\n def __init__(self, mean: List[float], std: List[float]):\n super().__init__()\n self.mean = mean\n self.std = std\n\n def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:\n return F.normalize(inpt, mean=self.mean, std=self.std)\n\n\nclass GaussianBlur(Transform):\n def __init__(\n self, kernel_size: Union[int, Sequence[int]], sigma: Union[float, Sequence[float]] = (0.1, 2.0)\n ) -> None:\n super().__init__()\n self.kernel_size = _setup_size(kernel_size, \"Kernel size should be a tuple/list of two integers\")\n for ks in self.kernel_size:\n if ks <= 0 or ks % 2 == 0:\n raise ValueError(\"Kernel size value should be an odd and positive number.\")\n\n if isinstance(sigma, float):\n if sigma <= 0:\n raise ValueError(\"If sigma is a single number, it must be positive.\")\n sigma = (sigma, sigma)\n elif isinstance(sigma, Sequence) and len(sigma) == 2:\n if not 0.0 < sigma[0] <= sigma[1]:\n raise ValueError(\"sigma values should be positive and of the form (min, max).\")\n else:\n raise TypeError(\"sigma should be a single float or a list/tuple with length 2 floats.\")\n\n self.sigma = sigma\n\n def _get_params(self, sample: Any) -> Dict[str, Any]:\n sigma = torch.empty(1).uniform_(self.sigma[0], self.sigma[1]).item()\n return dict(sigma=[sigma, sigma])\n\n def _transform(self, inpt: Any, params: Dict[str, Any]) -> Any:\n return F.gaussian_blur(inpt, **params)\n\n\nclass ToDtype(Lambda):\n def __init__(self, dtype: torch.dtype, *types: Type) -> None:\n self.dtype = dtype\n super().__init__(functools.partial(torch.Tensor.to, dtype=dtype), *types)\n\n def extra_repr(self) -> str:\n return \", \".join([f\"dtype={self.dtype}\", f\"types={[type.__name__ for type in self.types]}\"])\n", "path": "torchvision/prototype/transforms/_misc.py"}, {"content": "from . 
import functional # usort: skip\n\nfrom ._transform import Transform # usort: skip\n\nfrom ._augment import RandomCutmix, RandomErasing, RandomMixup\nfrom ._auto_augment import AugMix, AutoAugment, AutoAugmentPolicy, RandAugment, TrivialAugmentWide\nfrom ._color import (\n ColorJitter,\n RandomAdjustSharpness,\n RandomAutocontrast,\n RandomEqualize,\n RandomInvert,\n RandomPhotometricDistort,\n RandomPosterize,\n RandomSolarize,\n)\nfrom ._container import Compose, RandomApply, RandomChoice, RandomOrder\nfrom ._geometry import (\n BatchMultiCrop,\n CenterCrop,\n ElasticTransform,\n FiveCrop,\n FixedSizeCrop,\n Pad,\n RandomAffine,\n RandomCrop,\n RandomHorizontalFlip,\n RandomIoUCrop,\n RandomPerspective,\n RandomResizedCrop,\n RandomRotation,\n RandomShortestSize,\n RandomVerticalFlip,\n RandomZoomOut,\n Resize,\n ScaleJitter,\n TenCrop,\n)\nfrom ._meta import ConvertBoundingBoxFormat, ConvertColorSpace, ConvertImageDtype\nfrom ._misc import GaussianBlur, Identity, Lambda, Normalize, ToDtype\nfrom ._type_conversion import DecodeImage, LabelToOneHot, ToImagePIL, ToImageTensor\n\nfrom ._deprecated import Grayscale, RandomGrayscale, ToTensor, ToPILImage, PILToTensor # usort: skip\n", "path": "torchvision/prototype/transforms/__init__.py"}]}
1,857
863
gh_patches_debug_13037
rasdani/github-patches
git_diff
psf__black-2739
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Custom cache directory to be thread/process safe **Is your feature request related to a problem? Please describe.** The cache files are not thread/process safe. I've checked multiple projects simultaneously in different processes but get permission errors when one process is writing to the cache while another is reading from it. On linux the fix is pretty easy: set a different temp directory for each process using the ``XDG_CACHE_HOME`` environment variable but there is no equivalent for Windows (there's somewhat an equivalent but you need a rather [specific setup](https://github.com/platformdirs/platformdirs/blob/main/src/platformdirs/windows.py#L157-L165) to use the environment variable). **Describe the solution you'd like** The solution that will work for my use case (and I think is the least amount of changes) is to create a custom environment variable that overrides using platformdirs to get the cache directory. Think this could look like: ```python CACHE_DIR = Path(os.environ.get("BLACK_CACHE_DIR", user_cache_dir("black", version=__version__))) if not CACHE_DIR.exists(): raise RuntimeError(f"{CACHE_DIR} does not exist") ``` **Describe alternatives you've considered** 1. Add the command line option to turn off cacheing (as this ticket as #248 asked for) (guess is this more work) 2. Add command line option to set the cache directory (guess is this is more work but not a lot) 3. Make the cache dir thread/process safe (guess is this is more work) </issue> <code> [start of src/black/cache.py] 1 """Caching of formatted files with feature-based invalidation.""" 2 3 import os 4 import pickle 5 from pathlib import Path 6 import tempfile 7 from typing import Dict, Iterable, Set, Tuple 8 9 from platformdirs import user_cache_dir 10 11 from black.mode import Mode 12 13 from _black_version import version as __version__ 14 15 16 # types 17 Timestamp = float 18 FileSize = int 19 CacheInfo = Tuple[Timestamp, FileSize] 20 Cache = Dict[str, CacheInfo] 21 22 23 CACHE_DIR = Path(user_cache_dir("black", version=__version__)) 24 25 26 def read_cache(mode: Mode) -> Cache: 27 """Read the cache if it exists and is well formed. 28 29 If it is not well formed, the call to write_cache later should resolve the issue. 30 """ 31 cache_file = get_cache_file(mode) 32 if not cache_file.exists(): 33 return {} 34 35 with cache_file.open("rb") as fobj: 36 try: 37 cache: Cache = pickle.load(fobj) 38 except (pickle.UnpicklingError, ValueError, IndexError): 39 return {} 40 41 return cache 42 43 44 def get_cache_file(mode: Mode) -> Path: 45 return CACHE_DIR / f"cache.{mode.get_cache_key()}.pickle" 46 47 48 def get_cache_info(path: Path) -> CacheInfo: 49 """Return the information used to check if a file is already formatted or not.""" 50 stat = path.stat() 51 return stat.st_mtime, stat.st_size 52 53 54 def filter_cached(cache: Cache, sources: Iterable[Path]) -> Tuple[Set[Path], Set[Path]]: 55 """Split an iterable of paths in `sources` into two sets. 56 57 The first contains paths of files that modified on disk or are not in the 58 cache. The other contains paths to non-modified files. 
59 """ 60 todo, done = set(), set() 61 for src in sources: 62 res_src = src.resolve() 63 if cache.get(str(res_src)) != get_cache_info(res_src): 64 todo.add(src) 65 else: 66 done.add(src) 67 return todo, done 68 69 70 def write_cache(cache: Cache, sources: Iterable[Path], mode: Mode) -> None: 71 """Update the cache file.""" 72 cache_file = get_cache_file(mode) 73 try: 74 CACHE_DIR.mkdir(parents=True, exist_ok=True) 75 new_cache = { 76 **cache, 77 **{str(src.resolve()): get_cache_info(src) for src in sources}, 78 } 79 with tempfile.NamedTemporaryFile(dir=str(cache_file.parent), delete=False) as f: 80 pickle.dump(new_cache, f, protocol=4) 81 os.replace(f.name, cache_file) 82 except OSError: 83 pass 84 [end of src/black/cache.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/black/cache.py b/src/black/cache.py
--- a/src/black/cache.py
+++ b/src/black/cache.py
@@ -20,7 +20,23 @@
 Cache = Dict[str, CacheInfo]
 
 
-CACHE_DIR = Path(user_cache_dir("black", version=__version__))
+def get_cache_dir() -> Path:
+    """Get the cache directory used by black.
+
+    Users can customize this directory on all systems using `BLACK_CACHE_DIR`
+    environment variable. By default, the cache directory is the user cache directory
+    under the black application.
+
+    This result is immediately set to a constant `black.cache.CACHE_DIR` as to avoid
+    repeated calls.
+    """
+    # NOTE: Function mostly exists as a clean way to test getting the cache directory.
+    default_cache_dir = user_cache_dir("black", version=__version__)
+    cache_dir = Path(os.environ.get("BLACK_CACHE_DIR", default_cache_dir))
+    return cache_dir
+
+
+CACHE_DIR = get_cache_dir()
 
 
 def read_cache(mode: Mode) -> Cache:
{"golden_diff": "diff --git a/src/black/cache.py b/src/black/cache.py\n--- a/src/black/cache.py\n+++ b/src/black/cache.py\n@@ -20,7 +20,23 @@\n Cache = Dict[str, CacheInfo]\n \n \n-CACHE_DIR = Path(user_cache_dir(\"black\", version=__version__))\n+def get_cache_dir() -> Path:\n+ \"\"\"Get the cache directory used by black.\n+\n+ Users can customize this directory on all systems using `BLACK_CACHE_DIR`\n+ environment variable. By default, the cache directory is the user cache directory\n+ under the black application.\n+\n+ This result is immediately set to a constant `black.cache.CACHE_DIR` as to avoid\n+ repeated calls.\n+ \"\"\"\n+ # NOTE: Function mostly exists as a clean way to test getting the cache directory.\n+ default_cache_dir = user_cache_dir(\"black\", version=__version__)\n+ cache_dir = Path(os.environ.get(\"BLACK_CACHE_DIR\", default_cache_dir))\n+ return cache_dir\n+\n+\n+CACHE_DIR = get_cache_dir()\n \n \n def read_cache(mode: Mode) -> Cache:\n", "issue": "Custom cache directory to be thread/process safe\n**Is your feature request related to a problem? Please describe.**\r\n\r\nThe cache files are not thread/process safe. I've checked multiple projects simultaneously in different processes but get permission errors when one process is writing to the cache while another is reading from it. On linux the fix is pretty easy: set a different temp directory for each process using the ``XDG_CACHE_HOME`` environment variable but there is no equivalent for Windows (there's somewhat an equivalent but you need a rather [specific setup](https://github.com/platformdirs/platformdirs/blob/main/src/platformdirs/windows.py#L157-L165) to use the environment variable).\r\n\r\n**Describe the solution you'd like**\r\n\r\nThe solution that will work for my use case (and I think is the least amount of changes) is to create a custom environment variable that overrides using platformdirs to get the cache directory. Think this could look like:\r\n```python\r\nCACHE_DIR = Path(os.environ.get(\"BLACK_CACHE_DIR\", user_cache_dir(\"black\", version=__version__)))\r\nif not CACHE_DIR.exists():\r\n raise RuntimeError(f\"{CACHE_DIR} does not exist\")\r\n```\r\n\r\n**Describe alternatives you've considered**\r\n\r\n1. Add the command line option to turn off cacheing (as this ticket as #248 asked for) (guess is this more work)\r\n2. Add command line option to set the cache directory (guess is this is more work but not a lot)\r\n3. 
Make the cache dir thread/process safe (guess is this is more work)\n", "before_files": [{"content": "\"\"\"Caching of formatted files with feature-based invalidation.\"\"\"\n\nimport os\nimport pickle\nfrom pathlib import Path\nimport tempfile\nfrom typing import Dict, Iterable, Set, Tuple\n\nfrom platformdirs import user_cache_dir\n\nfrom black.mode import Mode\n\nfrom _black_version import version as __version__\n\n\n# types\nTimestamp = float\nFileSize = int\nCacheInfo = Tuple[Timestamp, FileSize]\nCache = Dict[str, CacheInfo]\n\n\nCACHE_DIR = Path(user_cache_dir(\"black\", version=__version__))\n\n\ndef read_cache(mode: Mode) -> Cache:\n \"\"\"Read the cache if it exists and is well formed.\n\n If it is not well formed, the call to write_cache later should resolve the issue.\n \"\"\"\n cache_file = get_cache_file(mode)\n if not cache_file.exists():\n return {}\n\n with cache_file.open(\"rb\") as fobj:\n try:\n cache: Cache = pickle.load(fobj)\n except (pickle.UnpicklingError, ValueError, IndexError):\n return {}\n\n return cache\n\n\ndef get_cache_file(mode: Mode) -> Path:\n return CACHE_DIR / f\"cache.{mode.get_cache_key()}.pickle\"\n\n\ndef get_cache_info(path: Path) -> CacheInfo:\n \"\"\"Return the information used to check if a file is already formatted or not.\"\"\"\n stat = path.stat()\n return stat.st_mtime, stat.st_size\n\n\ndef filter_cached(cache: Cache, sources: Iterable[Path]) -> Tuple[Set[Path], Set[Path]]:\n \"\"\"Split an iterable of paths in `sources` into two sets.\n\n The first contains paths of files that modified on disk or are not in the\n cache. The other contains paths to non-modified files.\n \"\"\"\n todo, done = set(), set()\n for src in sources:\n res_src = src.resolve()\n if cache.get(str(res_src)) != get_cache_info(res_src):\n todo.add(src)\n else:\n done.add(src)\n return todo, done\n\n\ndef write_cache(cache: Cache, sources: Iterable[Path], mode: Mode) -> None:\n \"\"\"Update the cache file.\"\"\"\n cache_file = get_cache_file(mode)\n try:\n CACHE_DIR.mkdir(parents=True, exist_ok=True)\n new_cache = {\n **cache,\n **{str(src.resolve()): get_cache_info(src) for src in sources},\n }\n with tempfile.NamedTemporaryFile(dir=str(cache_file.parent), delete=False) as f:\n pickle.dump(new_cache, f, protocol=4)\n os.replace(f.name, cache_file)\n except OSError:\n pass\n", "path": "src/black/cache.py"}]}
1,573
237
gh_patches_debug_1081
rasdani/github-patches
git_diff
mlcommons__GaNDLF-315
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Add an easy way to verify installation **Is your feature request related to a problem? Please describe.** Currently, we are asking users to run specific commands to verify installation, which can be cumbursome. **Describe the solution you'd like** It would be great if this could put in a script (and extended/updated as needed). **Describe alternatives you've considered** N.A. **Additional context** N.A. </issue> <code> [start of setup.py] 1 #!/usr/bin/env python 2 3 """The setup script.""" 4 5 6 import os 7 from setuptools import setup, find_packages 8 from setuptools.command.install import install 9 from setuptools.command.develop import develop 10 from setuptools.command.egg_info import egg_info 11 12 with open("README.md") as readme_file: 13 readme = readme_file.read() 14 15 16 def git_submodule_update(): 17 ## submodule update 18 os.system("git submodule update --init --recursive") 19 20 21 class CustomInstallCommand(install): 22 def run(self): 23 install.run(self) 24 git_submodule_update() 25 26 27 class CustomDevelopCommand(develop): 28 def run(self): 29 develop.run(self) 30 git_submodule_update() 31 32 33 class CustomEggInfoCommand(egg_info): 34 def run(self): 35 egg_info.run(self) 36 git_submodule_update() 37 38 39 # read version.py 40 import sys, re 41 42 try: 43 filepath = "GANDLF/version.py" 44 version_file = open(filepath) 45 (__version__,) = re.findall('__version__ = "(.*)"', version_file.read()) 46 47 except Exception as error: 48 __version__ = "0.0.1" 49 sys.stderr.write("Warning: Could not open '%s' due %s\n" % (filepath, error)) 50 51 requirements = [ 52 "black", 53 "numpy==1.21.0", 54 "scipy", 55 "SimpleITK==2.1.0", 56 "torch>=1.7", 57 "torchvision", 58 "tqdm", 59 "torchio==0.18.57", 60 "pandas", 61 "pylint", 62 "scikit-learn==0.23.1", 63 "pickle5==0.0.11", 64 "setuptools", 65 "seaborn", 66 "pyyaml", 67 "openslide-python", 68 "scikit-image", 69 "matplotlib", 70 "requests>=2.25.0", 71 "pyvips", 72 "pytest", 73 "coverage", 74 "pytest-cov", 75 "psutil", 76 "medcam", 77 "opencv-python", 78 "torchmetrics", 79 "OpenPatchMiner==0.1.6", 80 "pydicom", 81 ] 82 83 setup( 84 name="GANDLF", 85 version=__version__, 86 author="Jose Agraz, Vinayak Ahluwalia, Bhakti Baheti, Spyridon Bakas, Ujjwal Baid, Megh Bhalerao, Brandon Edwards, Karol Gotkowski, Caleb Grenko, Orhun Güley, Ibrahim Ethem Hamamci, Sarthak Pati, Micah Sheller, Juliia Skobleva, Siddhesh Thakur, Spiros Thermos", # alphabetical order 87 author_email="[email protected]", 88 python_requires=">=3.6", 89 packages=find_packages(), 90 cmdclass={ # this ensures git_submodule_update is called during install 91 "install": CustomInstallCommand, 92 "develop": CustomDevelopCommand, 93 "egg_info": CustomEggInfoCommand, 94 }, 95 scripts=[ 96 "gandlf_run", 97 "gandlf_constructCSV", 98 "gandlf_collectStats", 99 "gandlf_patchMiner", 100 "gandlf_preprocess", 101 "gandlf_anonymizer", 102 ], 103 classifiers=[ 104 "Development Status :: 3 - Alpha", 105 "Intended Audience :: Science/Research", 106 "License :: OSI Approved :: BSD License", 107 "Natural Language :: English", 108 "Operating System :: OS Independent", 109 "Programming Language :: Python :: 3.7", 110 "Programming Language :: Python :: 3.8", 111 "Programming Language :: Python :: 3.9", 112 "Topic :: Scientific/Engineering :: Medical Science Apps", 113 ], 114 description=( 115 "PyTorch-based framework that handles segmentation/regression/classification using various DL 
architectures for medical imaging." 116 ), 117 install_requires=requirements, 118 license="BSD-3-Clause License", 119 long_description=readme, 120 long_description_content_type="text/markdown", 121 include_package_data=True, 122 keywords="semantic, segmentation, regression, classification, data-augmentation, medical-imaging", 123 zip_safe=False, 124 ) 125 126 ## windows vips installation 127 if os.name == "nt": # proceed for windows 128 from pathlib import Path 129 130 # download and extract if main dll is absent 131 if not Path("./vips/vips-dev-8.10/bin/libvips-42.dll").exists(): 132 print("Downloading and extracting VIPS for Windows") 133 url = "https://github.com/libvips/libvips/releases/download/v8.10.2/vips-dev-w64-all-8.10.2.zip" 134 zip_to_extract = "./vips.zip" 135 import urllib.request, zipfile 136 137 urllib.request.urlretrieve(url, zip_to_extract) 138 z = zipfile.ZipFile(zip_to_extract) 139 z.extractall("./vips") 140 z.close() 141 os.remove(zip_to_extract) 142 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -99,6 +99,7 @@
         "gandlf_patchMiner",
         "gandlf_preprocess",
         "gandlf_anonymizer",
+        "gandlf_verifyInstall",
     ],
     classifiers=[
         "Development Status :: 3 - Alpha",
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -99,6 +99,7 @@\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n+ \"gandlf_verifyInstall\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n", "issue": "Add an easy way to verify installation\n**Is your feature request related to a problem? Please describe.**\r\nCurrently, we are asking users to run specific commands to verify installation, which can be cumbursome.\r\n\r\n**Describe the solution you'd like**\r\nIt would be great if this could put in a script (and extended/updated as needed).\r\n\r\n**Describe alternatives you've considered**\r\nN.A.\r\n\r\n**Additional context**\r\nN.A.\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\nwith open(\"README.md\") as readme_file:\n readme = readme_file.read()\n\n\ndef git_submodule_update():\n ## submodule update\n os.system(\"git submodule update --init --recursive\")\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n git_submodule_update()\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n git_submodule_update()\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n git_submodule_update()\n\n\n# read version.py\nimport sys, re\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (filepath, error))\n\nrequirements = [\n \"black\",\n \"numpy==1.21.0\",\n \"scipy\",\n \"SimpleITK==2.1.0\",\n \"torch>=1.7\",\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.18.57\",\n \"pandas\",\n \"pylint\",\n \"scikit-learn==0.23.1\",\n \"pickle5==0.0.11\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"openslide-python\",\n \"scikit-image\",\n \"matplotlib\",\n \"requests>=2.25.0\",\n \"pyvips\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics\",\n \"OpenPatchMiner==0.1.6\",\n \"pydicom\",\n]\n\nsetup(\n name=\"GANDLF\",\n version=__version__,\n author=\"Jose Agraz, Vinayak Ahluwalia, Bhakti Baheti, Spyridon Bakas, Ujjwal Baid, Megh Bhalerao, Brandon Edwards, Karol Gotkowski, Caleb Grenko, Orhun G\u00fcley, Ibrahim Ethem Hamamci, Sarthak Pati, Micah Sheller, Juliia Skobleva, Siddhesh Thakur, Spiros Thermos\", # alphabetical order\n author_email=\"[email protected]\",\n python_requires=\">=3.6\",\n packages=find_packages(),\n cmdclass={ # this ensures git_submodule_update is called during install\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: BSD License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 
3.9\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"BSD-3-Clause License\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging\",\n zip_safe=False,\n)\n\n## windows vips installation\nif os.name == \"nt\": # proceed for windows\n from pathlib import Path\n\n # download and extract if main dll is absent\n if not Path(\"./vips/vips-dev-8.10/bin/libvips-42.dll\").exists():\n print(\"Downloading and extracting VIPS for Windows\")\n url = \"https://github.com/libvips/libvips/releases/download/v8.10.2/vips-dev-w64-all-8.10.2.zip\"\n zip_to_extract = \"./vips.zip\"\n import urllib.request, zipfile\n\n urllib.request.urlretrieve(url, zip_to_extract)\n z = zipfile.ZipFile(zip_to_extract)\n z.extractall(\"./vips\")\n z.close()\n os.remove(zip_to_extract)\n", "path": "setup.py"}]}
2,013
83
gh_patches_debug_27670
rasdani/github-patches
git_diff
crytic__slither-211
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Bug in function-id printer Hi there! There is an issue connected to `function-id` printer that I faced: when you create a dynamic array with `public` visibility modifier compiler automatically generates a getter-function with `uint256` input parameter. However, Slither thinks that the getter has no input parameters. Thus, the wrong function signature is printed in the output. Here is a small example. ``` pragma solidity 0.5.7; contract Example { uint256[] public example; } ``` Slither outputs the following table: ``` +-----------+------------+ | Name | ID | +-----------+------------+ | example() | 0x54353f2f | +-----------+------------+ ``` However, the real `example()` function's signature is `0x477e4a02` in the example. </issue> <code> [start of slither/printers/summary/function_ids.py] 1 """ 2 Module printing summary of the contract 3 """ 4 import collections 5 from prettytable import PrettyTable 6 from slither.printers.abstract_printer import AbstractPrinter 7 from slither.utils.colors import blue, green, magenta 8 from slither.utils.function import get_function_id 9 10 class FunctionIds(AbstractPrinter): 11 12 ARGUMENT = 'function-id' 13 HELP = 'Print the keccack256 signature of the functions' 14 15 WIKI = 'https://github.com/trailofbits/slither/wiki/Printer-documentation#function-id' 16 17 def output(self, _filename): 18 """ 19 _filename is not used 20 Args: 21 _filename(string) 22 """ 23 24 txt = '' 25 for contract in self.slither.contracts_derived: 26 txt += '\n{}:\n'.format(contract.name) 27 table = PrettyTable(['Name', 'ID']) 28 for function in contract.functions: 29 if function.visibility in ['public', 'external']: 30 table.add_row([function.full_name, hex(get_function_id(function.full_name))]) 31 for variable in contract.state_variables: 32 if variable.visibility in ['public']: 33 table.add_row([variable.name+'()', hex(get_function_id(variable.name+'()'))]) 34 txt += str(table) + '\n' 35 36 self.info(txt) 37 [end of slither/printers/summary/function_ids.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/slither/printers/summary/function_ids.py b/slither/printers/summary/function_ids.py
--- a/slither/printers/summary/function_ids.py
+++ b/slither/printers/summary/function_ids.py
@@ -3,6 +3,8 @@
 """
 import collections
 from prettytable import PrettyTable
+
+from slither.core.solidity_types import ArrayType, MappingType
 from slither.printers.abstract_printer import AbstractPrinter
 from slither.utils.colors import blue, green, magenta
 from slither.utils.function import get_function_id
@@ -30,7 +32,18 @@
                     table.add_row([function.full_name, hex(get_function_id(function.full_name))])
             for variable in contract.state_variables:
                 if variable.visibility in ['public']:
-                    table.add_row([variable.name+'()', hex(get_function_id(variable.name+'()'))])
+                    variable_getter_args = ""
+                    if type(variable.type) is ArrayType:
+                        length = 0
+                        v = variable
+                        while type(v.type) is ArrayType:
+                            length += 1
+                            v = v.type
+                        variable_getter_args = ','.join(["uint256"]*length)
+                    elif type(variable.type) is MappingType:
+                        variable_getter_args = variable.type.type_from
+
+                    table.add_row([f"{variable.name}({variable_getter_args})", hex(get_function_id(f"{variable.name}({variable_getter_args})"))])
             txt += str(table) + '\n'
 
         self.info(txt)
{"golden_diff": "diff --git a/slither/printers/summary/function_ids.py b/slither/printers/summary/function_ids.py\n--- a/slither/printers/summary/function_ids.py\n+++ b/slither/printers/summary/function_ids.py\n@@ -3,6 +3,8 @@\n \"\"\"\n import collections\n from prettytable import PrettyTable\n+\n+from slither.core.solidity_types import ArrayType, MappingType\n from slither.printers.abstract_printer import AbstractPrinter\n from slither.utils.colors import blue, green, magenta\n from slither.utils.function import get_function_id\n@@ -30,7 +32,18 @@\n table.add_row([function.full_name, hex(get_function_id(function.full_name))])\n for variable in contract.state_variables:\n if variable.visibility in ['public']:\n- table.add_row([variable.name+'()', hex(get_function_id(variable.name+'()'))])\n+ variable_getter_args = \"\"\n+ if type(variable.type) is ArrayType:\n+ length = 0\n+ v = variable\n+ while type(v.type) is ArrayType:\n+ length += 1\n+ v = v.type\n+ variable_getter_args = ','.join([\"uint256\"]*length)\n+ elif type(variable.type) is MappingType:\n+ variable_getter_args = variable.type.type_from\n+\n+ table.add_row([f\"{variable.name}({variable_getter_args})\", hex(get_function_id(f\"{variable.name}({variable_getter_args})\"))])\n txt += str(table) + '\\n'\n \n self.info(txt)\n", "issue": "Bug in function-id printer\nHi there! There is an issue connected to `function-id` printer that I faced:\r\n\r\nwhen you create a dynamic array with `public` visibility modifier compiler automatically generates a getter-function with `uint256` input parameter. However, Slither thinks that the getter has no input parameters. Thus, the wrong function signature is printed in the output.\r\n\r\nHere is a small example.\r\n\r\n```\r\npragma solidity 0.5.7;\r\n\r\ncontract Example {\r\n uint256[] public example;\r\n}\r\n```\r\n\r\nSlither outputs the following table: \r\n\r\n```\r\n+-----------+------------+\r\n| Name | ID |\r\n+-----------+------------+\r\n| example() | 0x54353f2f |\r\n+-----------+------------+\r\n```\r\n\r\nHowever, the real `example()` function's signature is `0x477e4a02` in the example.\n", "before_files": [{"content": "\"\"\"\n Module printing summary of the contract\n\"\"\"\nimport collections\nfrom prettytable import PrettyTable\nfrom slither.printers.abstract_printer import AbstractPrinter\nfrom slither.utils.colors import blue, green, magenta\nfrom slither.utils.function import get_function_id\n\nclass FunctionIds(AbstractPrinter):\n\n ARGUMENT = 'function-id'\n HELP = 'Print the keccack256 signature of the functions'\n\n WIKI = 'https://github.com/trailofbits/slither/wiki/Printer-documentation#function-id'\n\n def output(self, _filename):\n \"\"\"\n _filename is not used\n Args:\n _filename(string)\n \"\"\"\n\n txt = ''\n for contract in self.slither.contracts_derived:\n txt += '\\n{}:\\n'.format(contract.name)\n table = PrettyTable(['Name', 'ID'])\n for function in contract.functions:\n if function.visibility in ['public', 'external']:\n table.add_row([function.full_name, hex(get_function_id(function.full_name))])\n for variable in contract.state_variables:\n if variable.visibility in ['public']:\n table.add_row([variable.name+'()', hex(get_function_id(variable.name+'()'))])\n txt += str(table) + '\\n'\n\n self.info(txt)\n", "path": "slither/printers/summary/function_ids.py"}]}
1,069
337
gh_patches_debug_5271
rasdani/github-patches
git_diff
safe-global__safe-config-service-145
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Increase max limit for the chains endpoint </issue> <code> [start of src/chains/views.py] 1 from rest_framework import filters 2 from rest_framework.generics import ListAPIView, RetrieveAPIView 3 from rest_framework.pagination import LimitOffsetPagination 4 5 from .models import Chain 6 from .serializers import ChainSerializer 7 8 9 class ChainsListView(ListAPIView): 10 serializer_class = ChainSerializer 11 pagination_class = LimitOffsetPagination 12 pagination_class.max_limit = 10 13 pagination_class.default_limit = 10 14 queryset = Chain.objects.all() 15 filter_backends = [filters.OrderingFilter] 16 ordering_fields = ["relevance", "name"] 17 ordering = [ 18 "relevance", 19 "name", 20 ] 21 22 23 class ChainsDetailView(RetrieveAPIView): 24 serializer_class = ChainSerializer 25 queryset = Chain.objects.all() 26 [end of src/chains/views.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/chains/views.py b/src/chains/views.py
--- a/src/chains/views.py
+++ b/src/chains/views.py
@@ -9,7 +9,7 @@
 class ChainsListView(ListAPIView):
     serializer_class = ChainSerializer
     pagination_class = LimitOffsetPagination
-    pagination_class.max_limit = 10
+    pagination_class.max_limit = 100
     pagination_class.default_limit = 10
     queryset = Chain.objects.all()
     filter_backends = [filters.OrderingFilter]
{"golden_diff": "diff --git a/src/chains/views.py b/src/chains/views.py\n--- a/src/chains/views.py\n+++ b/src/chains/views.py\n@@ -9,7 +9,7 @@\n class ChainsListView(ListAPIView):\n serializer_class = ChainSerializer\n pagination_class = LimitOffsetPagination\n- pagination_class.max_limit = 10\n+ pagination_class.max_limit = 100\n pagination_class.default_limit = 10\n queryset = Chain.objects.all()\n filter_backends = [filters.OrderingFilter]\n", "issue": "Increase max limit for the chains endpoint\n\n", "before_files": [{"content": "from rest_framework import filters\nfrom rest_framework.generics import ListAPIView, RetrieveAPIView\nfrom rest_framework.pagination import LimitOffsetPagination\n\nfrom .models import Chain\nfrom .serializers import ChainSerializer\n\n\nclass ChainsListView(ListAPIView):\n serializer_class = ChainSerializer\n pagination_class = LimitOffsetPagination\n pagination_class.max_limit = 10\n pagination_class.default_limit = 10\n queryset = Chain.objects.all()\n filter_backends = [filters.OrderingFilter]\n ordering_fields = [\"relevance\", \"name\"]\n ordering = [\n \"relevance\",\n \"name\",\n ]\n\n\nclass ChainsDetailView(RetrieveAPIView):\n serializer_class = ChainSerializer\n queryset = Chain.objects.all()\n", "path": "src/chains/views.py"}]}
740
115
gh_patches_debug_24394
rasdani/github-patches
git_diff
pulp__pulpcore-4182
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Reclaim space for repository fails with Cannot delete some instances of model 'Artifact' because they are referenced through protected foreign keys: 'ContentArtifact.artifact'." **Version** 3.16, but probably all versions **Describe the bug** - Reclaim space for repository fails with the following error. ~~~ Task paused with error: "("Cannot delete some instances of model 'Artifact' because they are referenced through protected foreign keys: 'ContentArtifact.artifact'.", {<ContentArtifact: pk=452959ad-c045-4e85-bf9f-6651ba37f57d>})" ~~~ **To Reproduce** See BZ **Additional context** https://bugzilla.redhat.com/show_bug.cgi?id=2169322 </issue> <code> [start of pulpcore/app/tasks/reclaim_space.py] 1 from pulpcore.app.models import ( 2 Artifact, 3 Content, 4 ContentArtifact, 5 ProgressReport, 6 PublishedMetadata, 7 Repository, 8 RepositoryVersion, 9 ) 10 from pulpcore.app.util import get_domain 11 12 13 def reclaim_space(repo_pks, keeplist_rv_pks=None, force=False): 14 """ 15 This task frees-up disk space by removing Artifact files from the filesystem for Content 16 exclusive to the list of provided repos. 17 18 Note: content marked as `proctected` will be excluded from the reclaim disk space. 19 20 Kwargs: 21 repo_pks (list): A list of repo pks the disk reclaim space is performed on. 22 keeplist_rv_pks (list): A list of repo version pks that will be excluded from the reclaim 23 disk space. 24 force (bool): If True, uploaded content will be taken into account. 25 26 """ 27 reclaimed_repos = Repository.objects.filter(pk__in=repo_pks) 28 for repo in reclaimed_repos: 29 repo.invalidate_cache(everything=True) 30 31 domain = get_domain() 32 rest_of_repos = Repository.objects.filter(pulp_domain=domain).exclude(pk__in=repo_pks) 33 c_keep_qs = Content.objects.filter(repositories__in=rest_of_repos) 34 c_reclaim_qs = Content.objects.filter(repositories__in=repo_pks) 35 c_reclaim_qs = c_reclaim_qs.exclude( 36 pk__in=c_keep_qs, pulp_type=PublishedMetadata.get_pulp_type() 37 ) 38 39 if keeplist_rv_pks: 40 rv_qs = RepositoryVersion.objects.filter(pk__in=keeplist_rv_pks) 41 rv_content = Content.objects.none() 42 for rv in rv_qs.iterator(): 43 rv_content |= rv.content 44 c_reclaim_qs = c_reclaim_qs.exclude(pk__in=rv_content) 45 46 content_distinct = c_reclaim_qs.distinct("pulp_type") 47 unprotected = [] 48 for content in content_distinct: 49 if not content.cast().PROTECTED_FROM_RECLAIM: 50 unprotected.append(content.pulp_type) 51 52 ca_qs = ContentArtifact.objects.select_related("content", "artifact").filter( 53 content__in=c_reclaim_qs.values("pk"), artifact__isnull=False 54 ) 55 if not force: 56 ca_qs = ca_qs.filter(remoteartifact__isnull=False) 57 artifact_pks = set() 58 ca_to_update = [] 59 for ca in ca_qs.iterator(): 60 if ca.content.pulp_type in unprotected: 61 artifact_pks.add(ca.artifact.pk) 62 ca.artifact = None 63 ca_to_update.append(ca) 64 65 ContentArtifact.objects.bulk_update(objs=ca_to_update, fields=["artifact"], batch_size=1000) 66 artifacts_to_delete = Artifact.objects.filter(pk__in=artifact_pks) 67 progress_bar = ProgressReport( 68 message="Reclaim disk space", 69 total=artifacts_to_delete.count(), 70 code="reclaim-space.artifact", 71 done=0, 72 state="running", 73 ) 74 progress_bar.save() 75 76 counter = 0 77 interval = 100 78 for artifact in artifacts_to_delete.iterator(): 79 # we need to manually call delete() because it cleans up the file on the filesystem 80 artifact.delete() 
81 progress_bar.done += 1 82 counter += 1 83 84 if counter >= interval: 85 progress_bar.save() 86 counter = 0 87 88 progress_bar.state = "completed" 89 progress_bar.save() 90 [end of pulpcore/app/tasks/reclaim_space.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pulpcore/app/tasks/reclaim_space.py b/pulpcore/app/tasks/reclaim_space.py
--- a/pulpcore/app/tasks/reclaim_space.py
+++ b/pulpcore/app/tasks/reclaim_space.py
@@ -1,3 +1,7 @@
+from logging import getLogger
+
+from django.db.models.deletion import ProtectedError
+
 from pulpcore.app.models import (
     Artifact,
     Content,
@@ -9,6 +13,8 @@
 )
 from pulpcore.app.util import get_domain
 
+log = getLogger(__name__)
+
 
 def reclaim_space(repo_pks, keeplist_rv_pks=None, force=False):
     """
@@ -76,10 +82,16 @@
     counter = 0
     interval = 100
     for artifact in artifacts_to_delete.iterator():
-        # we need to manually call delete() because it cleans up the file on the filesystem
-        artifact.delete()
-        progress_bar.done += 1
-        counter += 1
+        try:
+            # we need to manually call delete() because it cleans up the file on the filesystem
+            artifact.delete()
+        except ProtectedError as e:
+            # Rarely artifact could be shared between to different content units.
+            # Just log and skip the artifact deletion in this case
+            log.info(e)
+        else:
+            progress_bar.done += 1
+            counter += 1
 
         if counter >= interval:
             progress_bar.save()
{"golden_diff": "diff --git a/pulpcore/app/tasks/reclaim_space.py b/pulpcore/app/tasks/reclaim_space.py\n--- a/pulpcore/app/tasks/reclaim_space.py\n+++ b/pulpcore/app/tasks/reclaim_space.py\n@@ -1,3 +1,7 @@\n+from logging import getLogger\n+\n+from django.db.models.deletion import ProtectedError\n+\n from pulpcore.app.models import (\n Artifact,\n Content,\n@@ -9,6 +13,8 @@\n )\n from pulpcore.app.util import get_domain\n \n+log = getLogger(__name__)\n+\n \n def reclaim_space(repo_pks, keeplist_rv_pks=None, force=False):\n \"\"\"\n@@ -76,10 +82,16 @@\n counter = 0\n interval = 100\n for artifact in artifacts_to_delete.iterator():\n- # we need to manually call delete() because it cleans up the file on the filesystem\n- artifact.delete()\n- progress_bar.done += 1\n- counter += 1\n+ try:\n+ # we need to manually call delete() because it cleans up the file on the filesystem\n+ artifact.delete()\n+ except ProtectedError as e:\n+ # Rarely artifact could be shared between to different content units.\n+ # Just log and skip the artifact deletion in this case\n+ log.info(e)\n+ else:\n+ progress_bar.done += 1\n+ counter += 1\n \n if counter >= interval:\n progress_bar.save()\n", "issue": "Reclaim space for repository fails with Cannot delete some instances of model 'Artifact' because they are referenced through protected foreign keys: 'ContentArtifact.artifact'.\"\n**Version**\r\n3.16, but probably all versions\r\n\r\n**Describe the bug**\r\n\r\n- Reclaim space for repository fails with the following error.\r\n\r\n ~~~\r\n Task paused with error: \"(\"Cannot delete some instances of model 'Artifact' because they are referenced through protected foreign keys: 'ContentArtifact.artifact'.\", {<ContentArtifact: pk=452959ad-c045-4e85-bf9f-6651ba37f57d>})\"\r\n ~~~\r\n\r\n**To Reproduce**\r\nSee BZ\r\n\r\n**Additional context**\r\nhttps://bugzilla.redhat.com/show_bug.cgi?id=2169322\r\n\n", "before_files": [{"content": "from pulpcore.app.models import (\n Artifact,\n Content,\n ContentArtifact,\n ProgressReport,\n PublishedMetadata,\n Repository,\n RepositoryVersion,\n)\nfrom pulpcore.app.util import get_domain\n\n\ndef reclaim_space(repo_pks, keeplist_rv_pks=None, force=False):\n \"\"\"\n This task frees-up disk space by removing Artifact files from the filesystem for Content\n exclusive to the list of provided repos.\n\n Note: content marked as `proctected` will be excluded from the reclaim disk space.\n\n Kwargs:\n repo_pks (list): A list of repo pks the disk reclaim space is performed on.\n keeplist_rv_pks (list): A list of repo version pks that will be excluded from the reclaim\n disk space.\n force (bool): If True, uploaded content will be taken into account.\n\n \"\"\"\n reclaimed_repos = Repository.objects.filter(pk__in=repo_pks)\n for repo in reclaimed_repos:\n repo.invalidate_cache(everything=True)\n\n domain = get_domain()\n rest_of_repos = Repository.objects.filter(pulp_domain=domain).exclude(pk__in=repo_pks)\n c_keep_qs = Content.objects.filter(repositories__in=rest_of_repos)\n c_reclaim_qs = Content.objects.filter(repositories__in=repo_pks)\n c_reclaim_qs = c_reclaim_qs.exclude(\n pk__in=c_keep_qs, pulp_type=PublishedMetadata.get_pulp_type()\n )\n\n if keeplist_rv_pks:\n rv_qs = RepositoryVersion.objects.filter(pk__in=keeplist_rv_pks)\n rv_content = Content.objects.none()\n for rv in rv_qs.iterator():\n rv_content |= rv.content\n c_reclaim_qs = c_reclaim_qs.exclude(pk__in=rv_content)\n\n content_distinct = c_reclaim_qs.distinct(\"pulp_type\")\n unprotected = []\n for content in 
content_distinct:\n if not content.cast().PROTECTED_FROM_RECLAIM:\n unprotected.append(content.pulp_type)\n\n ca_qs = ContentArtifact.objects.select_related(\"content\", \"artifact\").filter(\n content__in=c_reclaim_qs.values(\"pk\"), artifact__isnull=False\n )\n if not force:\n ca_qs = ca_qs.filter(remoteartifact__isnull=False)\n artifact_pks = set()\n ca_to_update = []\n for ca in ca_qs.iterator():\n if ca.content.pulp_type in unprotected:\n artifact_pks.add(ca.artifact.pk)\n ca.artifact = None\n ca_to_update.append(ca)\n\n ContentArtifact.objects.bulk_update(objs=ca_to_update, fields=[\"artifact\"], batch_size=1000)\n artifacts_to_delete = Artifact.objects.filter(pk__in=artifact_pks)\n progress_bar = ProgressReport(\n message=\"Reclaim disk space\",\n total=artifacts_to_delete.count(),\n code=\"reclaim-space.artifact\",\n done=0,\n state=\"running\",\n )\n progress_bar.save()\n\n counter = 0\n interval = 100\n for artifact in artifacts_to_delete.iterator():\n # we need to manually call delete() because it cleans up the file on the filesystem\n artifact.delete()\n progress_bar.done += 1\n counter += 1\n\n if counter >= interval:\n progress_bar.save()\n counter = 0\n\n progress_bar.state = \"completed\"\n progress_bar.save()\n", "path": "pulpcore/app/tasks/reclaim_space.py"}]}
1,628
323
gh_patches_debug_5389
rasdani/github-patches
git_diff
vega__altair-989
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> scatter-matrix example don't fit into screen https://altair-viz.github.io/gallery/scatter_matrix.html#scatter-matrix ![image](https://user-images.githubusercontent.com/11561581/42128898-7df8a956-7cbe-11e8-8aa7-65e8621259c0.png) ```python #... ).properties( width=150, # suggested instead of 250 height=150 # suggested instead of 250 ).repeat( #... ``` </issue> <code> [start of altair/vegalite/v2/examples/scatter_matrix.py] 1 """ 2 Scatter Matrix 3 -------------- 4 An example of using a RepeatChart to construct a multi-panel scatter plot 5 with linked panning and zooming. 6 """ 7 # category: scatter plots 8 import altair as alt 9 from vega_datasets import data 10 11 alt.Chart(data.cars.url).mark_circle().encode( 12 alt.X(alt.repeat("column"), type='quantitative'), 13 alt.Y(alt.repeat("row"), type='quantitative'), 14 color='Origin:N' 15 ).properties( 16 width=250, 17 height=250 18 ).repeat( 19 row=['Horsepower', 'Acceleration', 'Miles_per_Gallon'], 20 column=['Miles_per_Gallon', 'Acceleration', 'Horsepower'] 21 ).interactive() 22 [end of altair/vegalite/v2/examples/scatter_matrix.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/altair/vegalite/v2/examples/scatter_matrix.py b/altair/vegalite/v2/examples/scatter_matrix.py
--- a/altair/vegalite/v2/examples/scatter_matrix.py
+++ b/altair/vegalite/v2/examples/scatter_matrix.py
@@ -13,8 +13,8 @@
     alt.Y(alt.repeat("row"), type='quantitative'),
     color='Origin:N'
 ).properties(
-    width=250,
-    height=250
+    width=150,
+    height=150
 ).repeat(
     row=['Horsepower', 'Acceleration', 'Miles_per_Gallon'],
     column=['Miles_per_Gallon', 'Acceleration', 'Horsepower']
{"golden_diff": "diff --git a/altair/vegalite/v2/examples/scatter_matrix.py b/altair/vegalite/v2/examples/scatter_matrix.py\n--- a/altair/vegalite/v2/examples/scatter_matrix.py\n+++ b/altair/vegalite/v2/examples/scatter_matrix.py\n@@ -13,8 +13,8 @@\n alt.Y(alt.repeat(\"row\"), type='quantitative'),\n color='Origin:N'\n ).properties(\n- width=250,\n- height=250\n+ width=150,\n+ height=150\n ).repeat(\n row=['Horsepower', 'Acceleration', 'Miles_per_Gallon'],\n column=['Miles_per_Gallon', 'Acceleration', 'Horsepower']\n", "issue": "scatter-matrix example don't fit into screen\nhttps://altair-viz.github.io/gallery/scatter_matrix.html#scatter-matrix\r\n![image](https://user-images.githubusercontent.com/11561581/42128898-7df8a956-7cbe-11e8-8aa7-65e8621259c0.png)\r\n```python\r\n#...\r\n).properties(\r\n width=150, # suggested instead of 250\r\n height=150 # suggested instead of 250\r\n).repeat(\r\n#...\r\n```\n", "before_files": [{"content": "\"\"\"\nScatter Matrix\n--------------\nAn example of using a RepeatChart to construct a multi-panel scatter plot\nwith linked panning and zooming.\n\"\"\"\n# category: scatter plots\nimport altair as alt\nfrom vega_datasets import data\n\nalt.Chart(data.cars.url).mark_circle().encode(\n alt.X(alt.repeat(\"column\"), type='quantitative'),\n alt.Y(alt.repeat(\"row\"), type='quantitative'),\n color='Origin:N'\n).properties(\n width=250,\n height=250\n).repeat(\n row=['Horsepower', 'Acceleration', 'Miles_per_Gallon'],\n column=['Miles_per_Gallon', 'Acceleration', 'Horsepower']\n).interactive()\n", "path": "altair/vegalite/v2/examples/scatter_matrix.py"}]}
877
173
gh_patches_debug_16489
rasdani/github-patches
git_diff
fossasia__open-event-server-6754
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Error in migrations **Describe the bug** Due to recent merging of PRs https://github.com/fossasia/open-event-server/pull/6744/ https://github.com/fossasia/open-event-server/pull/6748 The PR https://github.com/fossasia/open-event-server/pull/6744 has an invalid migration to remove the 'secret' column from the db which was unrelated to PR. Hence now we have two migration for the same column `secret` from `settings` which drops column on upgrade and creates on downgrade **Additional context** @iamareebjamal - Please chck </issue> <code> [start of migrations/versions/rev-2020-01-17-18:09:31-6ebafb385765_.py] 1 """empty message 2 3 Revision ID: 6ebafb385765 4 Revises: 30a490ad1609 5 Create Date: 2020-01-17 18:09:31.897988 6 7 """ 8 9 from alembic import op 10 import sqlalchemy as sa 11 import sqlalchemy_utils 12 13 14 # revision identifiers, used by Alembic. 15 revision = '6ebafb385765' 16 down_revision = '30a490ad1609' 17 18 19 def upgrade(): 20 # ### commands auto generated by Alembic - please adjust! ### 21 op.drop_column('settings', 'secret') 22 op.add_column('ticket_holders', sa.Column('age_group', sa.String(), nullable=True)) 23 # ### end Alembic commands ### 24 25 26 def downgrade(): 27 # ### commands auto generated by Alembic - please adjust! ### 28 op.drop_column('ticket_holders', 'age_group') 29 op.add_column('settings', sa.Column('secret', sa.VARCHAR(), autoincrement=False, nullable=True)) 30 # ### end Alembic commands ### 31 [end of migrations/versions/rev-2020-01-17-18:09:31-6ebafb385765_.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/migrations/versions/rev-2020-01-17-18:09:31-6ebafb385765_.py b/migrations/versions/rev-2020-01-17-18:09:31-6ebafb385765_.py
--- a/migrations/versions/rev-2020-01-17-18:09:31-6ebafb385765_.py
+++ b/migrations/versions/rev-2020-01-17-18:09:31-6ebafb385765_.py
@@ -18,7 +18,6 @@
 
 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('settings', 'secret')
     op.add_column('ticket_holders', sa.Column('age_group', sa.String(), nullable=True))
     # ### end Alembic commands ###
 
@@ -26,5 +25,4 @@
 def downgrade():
     # ### commands auto generated by Alembic - please adjust! ###
     op.drop_column('ticket_holders', 'age_group')
-    op.add_column('settings', sa.Column('secret', sa.VARCHAR(), autoincrement=False, nullable=True))
     # ### end Alembic commands ###
{"golden_diff": "diff --git a/migrations/versions/rev-2020-01-17-18:09:31-6ebafb385765_.py b/migrations/versions/rev-2020-01-17-18:09:31-6ebafb385765_.py\n--- a/migrations/versions/rev-2020-01-17-18:09:31-6ebafb385765_.py\n+++ b/migrations/versions/rev-2020-01-17-18:09:31-6ebafb385765_.py\n@@ -18,7 +18,6 @@\n \n def upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n- op.drop_column('settings', 'secret')\n op.add_column('ticket_holders', sa.Column('age_group', sa.String(), nullable=True))\n # ### end Alembic commands ###\n \n@@ -26,5 +25,4 @@\n def downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_column('ticket_holders', 'age_group')\n- op.add_column('settings', sa.Column('secret', sa.VARCHAR(), autoincrement=False, nullable=True))\n # ### end Alembic commands ###\n", "issue": "Error in migrations\n**Describe the bug**\r\nDue to recent merging of PRs https://github.com/fossasia/open-event-server/pull/6744/ https://github.com/fossasia/open-event-server/pull/6748\r\n\r\nThe PR https://github.com/fossasia/open-event-server/pull/6744 has an invalid migration to remove the 'secret' column from the db which was unrelated to PR. Hence now we have two migration for the same column `secret` from `settings` which\r\n drops column on upgrade and creates on downgrade\r\n\r\n\r\n**Additional context**\r\n@iamareebjamal - Please chck\r\n\n", "before_files": [{"content": "\"\"\"empty message\n\nRevision ID: 6ebafb385765\nRevises: 30a490ad1609\nCreate Date: 2020-01-17 18:09:31.897988\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlalchemy_utils\n\n\n# revision identifiers, used by Alembic.\nrevision = '6ebafb385765'\ndown_revision = '30a490ad1609'\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_column('settings', 'secret')\n op.add_column('ticket_holders', sa.Column('age_group', sa.String(), nullable=True))\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_column('ticket_holders', 'age_group')\n op.add_column('settings', sa.Column('secret', sa.VARCHAR(), autoincrement=False, nullable=True))\n # ### end Alembic commands ###\n", "path": "migrations/versions/rev-2020-01-17-18:09:31-6ebafb385765_.py"}]}
1,042
314
gh_patches_debug_5180
rasdani/github-patches
git_diff
oppia__oppia-7075
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Add the practice session experience to the topic viewer Add an option in the topic viewer page that can start the practice session, based on the skills that are part of the topic. Add the practice session experience to the topic viewer Add an option in the topic viewer page that can start the practice session, based on the skills that are part of the topic. </issue> <code> [start of core/controllers/topic_viewer.py] 1 # Copyright 2018 The Oppia Authors. All Rights Reserved. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS-IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 """Controllers for the topic viewer page.""" 16 17 from constants import constants 18 from core.controllers import acl_decorators 19 from core.controllers import base 20 from core.domain import story_services 21 from core.domain import topic_services 22 import feconf 23 24 25 class TopicViewerPage(base.BaseHandler): 26 """Renders the topic viewer page.""" 27 28 @acl_decorators.can_access_topic_viewer_page 29 def get(self, _): 30 """Handles GET requests.""" 31 32 if not constants.ENABLE_NEW_STRUCTURE_PLAYERS: 33 raise self.PageNotFoundException 34 35 self.render_template('dist/topic-viewer-page.mainpage.html') 36 37 38 class TopicPageDataHandler(base.BaseHandler): 39 """Manages the data that needs to be displayed to a learner on the topic 40 viewer page. 41 """ 42 GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON 43 44 @acl_decorators.can_access_topic_viewer_page 45 def get(self, topic_name): 46 """Handles GET requests.""" 47 48 if not constants.ENABLE_NEW_STRUCTURE_PLAYERS: 49 raise self.PageNotFoundException 50 51 topic = topic_services.get_topic_by_name(topic_name) 52 53 canonical_story_summaries = [ 54 story_services.get_story_summary_by_id( 55 canonical_story_id) for canonical_story_id 56 in topic.canonical_story_ids] 57 58 additional_story_summaries = [ 59 story_services.get_story_summary_by_id( 60 additional_story_id) for additional_story_id 61 in topic.additional_story_ids] 62 63 canonical_story_dicts = [ 64 summary.to_human_readable_dict() for summary 65 in canonical_story_summaries] 66 67 additional_story_dicts = [ 68 summary.to_human_readable_dict() for summary 69 in additional_story_summaries] 70 71 uncategorized_skill_ids = topic.get_all_uncategorized_skill_ids() 72 subtopics = topic.get_all_subtopics() 73 74 self.values.update({ 75 'topic_name': topic.name, 76 'canonical_story_dicts': canonical_story_dicts, 77 'additional_story_dicts': additional_story_dicts, 78 'uncategorized_skill_ids': uncategorized_skill_ids, 79 'subtopics': subtopics 80 }) 81 self.render_json(self.values) 82 [end of core/controllers/topic_viewer.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/core/controllers/topic_viewer.py b/core/controllers/topic_viewer.py
--- a/core/controllers/topic_viewer.py
+++ b/core/controllers/topic_viewer.py
@@ -73,6 +73,7 @@
 
         self.values.update({
             'topic_name': topic.name,
+            'topic_id': topic.id,
             'canonical_story_dicts': canonical_story_dicts,
             'additional_story_dicts': additional_story_dicts,
             'uncategorized_skill_ids': uncategorized_skill_ids,
{"golden_diff": "diff --git a/core/controllers/topic_viewer.py b/core/controllers/topic_viewer.py\n--- a/core/controllers/topic_viewer.py\n+++ b/core/controllers/topic_viewer.py\n@@ -73,6 +73,7 @@\n \n self.values.update({\n 'topic_name': topic.name,\n+ 'topic_id': topic.id,\n 'canonical_story_dicts': canonical_story_dicts,\n 'additional_story_dicts': additional_story_dicts,\n 'uncategorized_skill_ids': uncategorized_skill_ids,\n", "issue": "Add the practice session experience to the topic viewer\nAdd an option in the topic viewer page that can start the practice session, based on the skills that are part of the topic.\nAdd the practice session experience to the topic viewer\nAdd an option in the topic viewer page that can start the practice session, based on the skills that are part of the topic.\n", "before_files": [{"content": "# Copyright 2018 The Oppia Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS-IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Controllers for the topic viewer page.\"\"\"\n\nfrom constants import constants\nfrom core.controllers import acl_decorators\nfrom core.controllers import base\nfrom core.domain import story_services\nfrom core.domain import topic_services\nimport feconf\n\n\nclass TopicViewerPage(base.BaseHandler):\n \"\"\"Renders the topic viewer page.\"\"\"\n\n @acl_decorators.can_access_topic_viewer_page\n def get(self, _):\n \"\"\"Handles GET requests.\"\"\"\n\n if not constants.ENABLE_NEW_STRUCTURE_PLAYERS:\n raise self.PageNotFoundException\n\n self.render_template('dist/topic-viewer-page.mainpage.html')\n\n\nclass TopicPageDataHandler(base.BaseHandler):\n \"\"\"Manages the data that needs to be displayed to a learner on the topic\n viewer page.\n \"\"\"\n GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON\n\n @acl_decorators.can_access_topic_viewer_page\n def get(self, topic_name):\n \"\"\"Handles GET requests.\"\"\"\n\n if not constants.ENABLE_NEW_STRUCTURE_PLAYERS:\n raise self.PageNotFoundException\n\n topic = topic_services.get_topic_by_name(topic_name)\n\n canonical_story_summaries = [\n story_services.get_story_summary_by_id(\n canonical_story_id) for canonical_story_id\n in topic.canonical_story_ids]\n\n additional_story_summaries = [\n story_services.get_story_summary_by_id(\n additional_story_id) for additional_story_id\n in topic.additional_story_ids]\n\n canonical_story_dicts = [\n summary.to_human_readable_dict() for summary\n in canonical_story_summaries]\n\n additional_story_dicts = [\n summary.to_human_readable_dict() for summary\n in additional_story_summaries]\n\n uncategorized_skill_ids = topic.get_all_uncategorized_skill_ids()\n subtopics = topic.get_all_subtopics()\n\n self.values.update({\n 'topic_name': topic.name,\n 'canonical_story_dicts': canonical_story_dicts,\n 'additional_story_dicts': additional_story_dicts,\n 'uncategorized_skill_ids': uncategorized_skill_ids,\n 'subtopics': subtopics\n })\n self.render_json(self.values)\n", "path": "core/controllers/topic_viewer.py"}]}
1,335
100
gh_patches_debug_42262
rasdani/github-patches
git_diff
open-telemetry__opentelemetry-python-806
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> ext/pyramid: add docs page An entry in the docs is missing for the pyramid instrumentation. </issue> <code> [start of ext/opentelemetry-ext-pyramid/src/opentelemetry/ext/pyramid/__init__.py] 1 # Copyright The OpenTelemetry Authors 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 """ 16 Pyramid instrumentation supporting `pyramid`_, it can be enabled by 17 using ``PyramidInstrumentor``. 18 19 .. _pyramid: https://docs.pylonsproject.org/projects/pyramid/en/latest/ 20 21 Usage 22 ----- 23 There are two methods to instrument Pyramid: 24 25 Method 1 (Instrument all Configurators): 26 ---------------------------------------- 27 .. code:: python 28 29 from pyramid.config import Configurator 30 from opentelemetry.ext.pyramid import PyramidInstrumentor 31 32 PyramidInstrumentor.instrument() 33 34 config = Configurator() 35 36 # use your config as normal 37 config.add_route('index', '/') 38 39 Method 2 (Instrument one Configurator): 40 --------------------------------------- 41 .. code:: python 42 43 from pyramid.config import Configurator 44 from opentelemetry.ext.pyramid import PyramidInstrumentor 45 46 config = Configurator() 47 PyramidInstrumentor().instrument_config(config) 48 49 # use your config as normal 50 config.add_route('index', '/') 51 52 Using ``pyramid.tweens`` settings: 53 ---------------------------------- 54 If you use Method 2 and then set tweens for your application with the ``pyramid.tweens`` setting, 55 you need to add ``opentelemetry.ext.pyramid.trace_tween_factory`` explicity to the list, 56 *as well as* instrumenting the config with `PyramidInstrumentor().instrument_config(config)`. 57 58 For example: 59 .. code:: python 60 settings = { 61 'pyramid.tweens', 'opentelemetry.ext.pyramid.trace_tween_factory\\nyour_tween_no_1\\nyour_tween_no_2', 62 } 63 config = Configurator(settings=settings) 64 PyramidInstrumentor.instrument_config(config) 65 66 # use your config as normal. 67 config.add_route('index', '/') 68 --- 69 """ 70 71 import typing 72 73 from pyramid.config import Configurator 74 from pyramid.path import caller_package 75 from pyramid.settings import aslist 76 from wrapt import ObjectProxy 77 from wrapt import wrap_function_wrapper as _wrap 78 79 from opentelemetry.ext.pyramid.callbacks import ( 80 SETTING_TRACE_ENABLED, 81 TWEEN_NAME, 82 trace_tween_factory, 83 ) 84 from opentelemetry.ext.pyramid.version import __version__ 85 from opentelemetry.instrumentation.instrumentor import BaseInstrumentor 86 from opentelemetry.instrumentation.utils import unwrap 87 from opentelemetry.trace import TracerProvider, get_tracer 88 89 90 def traced_init(wrapped, instance, args, kwargs): 91 settings = kwargs.get("settings", {}) 92 tweens = aslist(settings.get("pyramid.tweens", [])) 93 94 if tweens and TWEEN_NAME not in settings: 95 # pyramid.tweens.EXCVIEW is the name of built-in exception view provided by 96 # pyramid. 
We need our tween to be before it, otherwise unhandled 97 # exceptions will be caught before they reach our tween. 98 tweens = [TWEEN_NAME] + tweens 99 100 settings["pyramid.tweens"] = "\n".join(tweens) 101 102 kwargs["settings"] = settings 103 104 # `caller_package` works by walking a fixed amount of frames up the stack 105 # to find the calling package. So if we let the original `__init__` 106 # function call it, our wrapper will mess things up. 107 if not kwargs.get("package", None): 108 # Get the package for the third frame up from this one. 109 # Default is `level=2` which will give us the package from `wrapt` 110 # instead of the desired package (the caller) 111 kwargs["package"] = caller_package(level=3) 112 113 wrapped(*args, **kwargs) 114 instance.include("opentelemetry.ext.pyramid.callbacks") 115 116 117 class PyramidInstrumentor(BaseInstrumentor): 118 def _instrument(self, **kwargs): 119 """Integrate with Pyramid Python library. 120 https://docs.pylonsproject.org/projects/pyramid/en/latest/ 121 """ 122 _wrap("pyramid.config", "Configurator.__init__", traced_init) 123 124 def _uninstrument(self, **kwargs): 125 """"Disable Pyramid instrumentation""" 126 unwrap(Configurator, "__init__") 127 128 # pylint:disable=no-self-use 129 def instrument_config(self, config): 130 """Enable instrumentation in a Pyramid configurator. 131 132 Args: 133 config: The Configurator to instrument. 134 135 Returns: 136 An instrumented Configurator. 137 """ 138 config.include("opentelemetry.ext.pyramid.callbacks") 139 140 def uninstrument_config(self, config): 141 config.add_settings({SETTING_TRACE_ENABLED: False}) 142 [end of ext/opentelemetry-ext-pyramid/src/opentelemetry/ext/pyramid/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ext/opentelemetry-ext-pyramid/src/opentelemetry/ext/pyramid/__init__.py b/ext/opentelemetry-ext-pyramid/src/opentelemetry/ext/pyramid/__init__.py
--- a/ext/opentelemetry-ext-pyramid/src/opentelemetry/ext/pyramid/__init__.py
+++ b/ext/opentelemetry-ext-pyramid/src/opentelemetry/ext/pyramid/__init__.py
@@ -20,16 +20,17 @@
 
 Usage
 -----
-    There are two methods to instrument Pyramid:
+There are two methods to instrument Pyramid:
 
 Method 1 (Instrument all Configurators):
 ----------------------------------------
+
 .. code:: python
 
     from pyramid.config import Configurator
     from opentelemetry.ext.pyramid import PyramidInstrumentor
 
-    PyramidInstrumentor.instrument()
+    PyramidInstrumentor().instrument()
 
     config = Configurator()
 
@@ -38,6 +39,7 @@
 
 Method 2 (Instrument one Configurator):
 ---------------------------------------
+
 .. code:: python
 
     from pyramid.config import Configurator
@@ -49,22 +51,30 @@
     # use your config as normal
     config.add_route('index', '/')
 
-Using ``pyramid.tweens`` settings:
-----------------------------------
-    If you use Method 2 and then set tweens for your application with the ``pyramid.tweens`` setting,
-    you need to add ``opentelemetry.ext.pyramid.trace_tween_factory`` explicity to the list,
-    *as well as* instrumenting the config with `PyramidInstrumentor().instrument_config(config)`.
+Using ``pyramid.tweens`` setting:
+---------------------------------
+
+If you use Method 2 and then set tweens for your application with the ``pyramid.tweens`` setting,
+you need to add ``opentelemetry.ext.pyramid.trace_tween_factory`` explicity to the list,
+*as well as* instrumenting the config as shown above.
+
+For example:
 
-    For example:
 .. code:: python
+
+    from pyramid.config import Configurator
+    from opentelemetry.ext.pyramid import PyramidInstrumentor
+
     settings = {
         'pyramid.tweens', 'opentelemetry.ext.pyramid.trace_tween_factory\\nyour_tween_no_1\\nyour_tween_no_2',
     }
    config = Configurator(settings=settings)
-    PyramidInstrumentor.instrument_config(config)
+    PyramidInstrumentor().instrument_config(config)
 
     # use your config as normal.
     config.add_route('index', '/')
+
+API
 ---
 """
 
@@ -87,7 +97,7 @@
 from opentelemetry.trace import TracerProvider, get_tracer
 
 
-def traced_init(wrapped, instance, args, kwargs):
+def _traced_init(wrapped, instance, args, kwargs):
     settings = kwargs.get("settings", {})
     tweens = aslist(settings.get("pyramid.tweens", []))
 
@@ -119,7 +129,7 @@
         """Integrate with Pyramid Python library.
         https://docs.pylonsproject.org/projects/pyramid/en/latest/
         """
-        _wrap("pyramid.config", "Configurator.__init__", traced_init)
+        _wrap("pyramid.config", "Configurator.__init__", _traced_init)
 
     def _uninstrument(self, **kwargs):
         """"Disable Pyramid instrumentation"""
@@ -131,9 +141,6 @@
 
         Args:
             config: The Configurator to instrument.
-
-        Returns:
-            An instrumented Configurator.
         """
         config.include("opentelemetry.ext.pyramid.callbacks")
{"golden_diff": "diff --git a/ext/opentelemetry-ext-pyramid/src/opentelemetry/ext/pyramid/__init__.py b/ext/opentelemetry-ext-pyramid/src/opentelemetry/ext/pyramid/__init__.py\n--- a/ext/opentelemetry-ext-pyramid/src/opentelemetry/ext/pyramid/__init__.py\n+++ b/ext/opentelemetry-ext-pyramid/src/opentelemetry/ext/pyramid/__init__.py\n@@ -20,16 +20,17 @@\n \n Usage\n -----\n- There are two methods to instrument Pyramid:\n+There are two methods to instrument Pyramid:\n \n Method 1 (Instrument all Configurators):\n ----------------------------------------\n+\n .. code:: python\n \n from pyramid.config import Configurator\n from opentelemetry.ext.pyramid import PyramidInstrumentor\n \n- PyramidInstrumentor.instrument()\n+ PyramidInstrumentor().instrument()\n \n config = Configurator()\n \n@@ -38,6 +39,7 @@\n \n Method 2 (Instrument one Configurator):\n ---------------------------------------\n+\n .. code:: python\n \n from pyramid.config import Configurator\n@@ -49,22 +51,30 @@\n # use your config as normal\n config.add_route('index', '/')\n \n-Using ``pyramid.tweens`` settings:\n-----------------------------------\n- If you use Method 2 and then set tweens for your application with the ``pyramid.tweens`` setting,\n- you need to add ``opentelemetry.ext.pyramid.trace_tween_factory`` explicity to the list,\n- *as well as* instrumenting the config with `PyramidInstrumentor().instrument_config(config)`.\n+Using ``pyramid.tweens`` setting:\n+---------------------------------\n+\n+If you use Method 2 and then set tweens for your application with the ``pyramid.tweens`` setting,\n+you need to add ``opentelemetry.ext.pyramid.trace_tween_factory`` explicity to the list,\n+*as well as* instrumenting the config as shown above.\n+\n+For example:\n \n- For example:\n .. 
code:: python\n+\n+ from pyramid.config import Configurator\n+ from opentelemetry.ext.pyramid import PyramidInstrumentor\n+\n settings = {\n 'pyramid.tweens', 'opentelemetry.ext.pyramid.trace_tween_factory\\\\nyour_tween_no_1\\\\nyour_tween_no_2',\n }\n config = Configurator(settings=settings)\n- PyramidInstrumentor.instrument_config(config)\n+ PyramidInstrumentor().instrument_config(config)\n \n # use your config as normal.\n config.add_route('index', '/')\n+\n+API\n ---\n \"\"\"\n \n@@ -87,7 +97,7 @@\n from opentelemetry.trace import TracerProvider, get_tracer\n \n \n-def traced_init(wrapped, instance, args, kwargs):\n+def _traced_init(wrapped, instance, args, kwargs):\n settings = kwargs.get(\"settings\", {})\n tweens = aslist(settings.get(\"pyramid.tweens\", []))\n \n@@ -119,7 +129,7 @@\n \"\"\"Integrate with Pyramid Python library.\n https://docs.pylonsproject.org/projects/pyramid/en/latest/\n \"\"\"\n- _wrap(\"pyramid.config\", \"Configurator.__init__\", traced_init)\n+ _wrap(\"pyramid.config\", \"Configurator.__init__\", _traced_init)\n \n def _uninstrument(self, **kwargs):\n \"\"\"\"Disable Pyramid instrumentation\"\"\"\n@@ -131,9 +141,6 @@\n \n Args:\n config: The Configurator to instrument.\n-\n- Returns:\n- An instrumented Configurator.\n \"\"\"\n config.include(\"opentelemetry.ext.pyramid.callbacks\")\n", "issue": "ext/pyramid: add docs page\nAn entry in the docs is missing for the pyramid instrumentation.\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nPyramid instrumentation supporting `pyramid`_, it can be enabled by\nusing ``PyramidInstrumentor``.\n\n.. _pyramid: https://docs.pylonsproject.org/projects/pyramid/en/latest/\n\nUsage\n-----\n There are two methods to instrument Pyramid:\n\nMethod 1 (Instrument all Configurators):\n----------------------------------------\n.. code:: python\n\n from pyramid.config import Configurator\n from opentelemetry.ext.pyramid import PyramidInstrumentor\n\n PyramidInstrumentor.instrument()\n\n config = Configurator()\n\n # use your config as normal\n config.add_route('index', '/')\n\nMethod 2 (Instrument one Configurator):\n---------------------------------------\n.. code:: python\n\n from pyramid.config import Configurator\n from opentelemetry.ext.pyramid import PyramidInstrumentor\n\n config = Configurator()\n PyramidInstrumentor().instrument_config(config)\n\n # use your config as normal\n config.add_route('index', '/')\n\nUsing ``pyramid.tweens`` settings:\n----------------------------------\n If you use Method 2 and then set tweens for your application with the ``pyramid.tweens`` setting,\n you need to add ``opentelemetry.ext.pyramid.trace_tween_factory`` explicity to the list,\n *as well as* instrumenting the config with `PyramidInstrumentor().instrument_config(config)`.\n\n For example:\n.. 
code:: python\n settings = {\n 'pyramid.tweens', 'opentelemetry.ext.pyramid.trace_tween_factory\\\\nyour_tween_no_1\\\\nyour_tween_no_2',\n }\n config = Configurator(settings=settings)\n PyramidInstrumentor.instrument_config(config)\n\n # use your config as normal.\n config.add_route('index', '/')\n---\n\"\"\"\n\nimport typing\n\nfrom pyramid.config import Configurator\nfrom pyramid.path import caller_package\nfrom pyramid.settings import aslist\nfrom wrapt import ObjectProxy\nfrom wrapt import wrap_function_wrapper as _wrap\n\nfrom opentelemetry.ext.pyramid.callbacks import (\n SETTING_TRACE_ENABLED,\n TWEEN_NAME,\n trace_tween_factory,\n)\nfrom opentelemetry.ext.pyramid.version import __version__\nfrom opentelemetry.instrumentation.instrumentor import BaseInstrumentor\nfrom opentelemetry.instrumentation.utils import unwrap\nfrom opentelemetry.trace import TracerProvider, get_tracer\n\n\ndef traced_init(wrapped, instance, args, kwargs):\n settings = kwargs.get(\"settings\", {})\n tweens = aslist(settings.get(\"pyramid.tweens\", []))\n\n if tweens and TWEEN_NAME not in settings:\n # pyramid.tweens.EXCVIEW is the name of built-in exception view provided by\n # pyramid. We need our tween to be before it, otherwise unhandled\n # exceptions will be caught before they reach our tween.\n tweens = [TWEEN_NAME] + tweens\n\n settings[\"pyramid.tweens\"] = \"\\n\".join(tweens)\n\n kwargs[\"settings\"] = settings\n\n # `caller_package` works by walking a fixed amount of frames up the stack\n # to find the calling package. So if we let the original `__init__`\n # function call it, our wrapper will mess things up.\n if not kwargs.get(\"package\", None):\n # Get the package for the third frame up from this one.\n # Default is `level=2` which will give us the package from `wrapt`\n # instead of the desired package (the caller)\n kwargs[\"package\"] = caller_package(level=3)\n\n wrapped(*args, **kwargs)\n instance.include(\"opentelemetry.ext.pyramid.callbacks\")\n\n\nclass PyramidInstrumentor(BaseInstrumentor):\n def _instrument(self, **kwargs):\n \"\"\"Integrate with Pyramid Python library.\n https://docs.pylonsproject.org/projects/pyramid/en/latest/\n \"\"\"\n _wrap(\"pyramid.config\", \"Configurator.__init__\", traced_init)\n\n def _uninstrument(self, **kwargs):\n \"\"\"\"Disable Pyramid instrumentation\"\"\"\n unwrap(Configurator, \"__init__\")\n\n # pylint:disable=no-self-use\n def instrument_config(self, config):\n \"\"\"Enable instrumentation in a Pyramid configurator.\n\n Args:\n config: The Configurator to instrument.\n\n Returns:\n An instrumented Configurator.\n \"\"\"\n config.include(\"opentelemetry.ext.pyramid.callbacks\")\n\n def uninstrument_config(self, config):\n config.add_settings({SETTING_TRACE_ENABLED: False})\n", "path": "ext/opentelemetry-ext-pyramid/src/opentelemetry/ext/pyramid/__init__.py"}]}
1,982
790
gh_patches_debug_18633
rasdani/github-patches
git_diff
microsoft__botbuilder-python-1970
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Version dependency between Botbuilder and Question Answering ### [Github issues](https://github.com/Microsoft/botbuilder-python) should be used for bugs and feature requests. Use [Stack Overflow](https://stackoverflow.com/questions/tagged/botframework) for general "how-to" questions. ## Version Botbuilder 4.14.0 azure-ai-language-questionanswering 1.0.0 ## Describe the bug When building a Python bot using Question Answering and Botbuilder: The package azure-ai-language-questionanswering actual version 1.0.0 requires at least msrest version 0.6.21 but the actual version of Botbuilder (-schema/core/ai/etc) 4.14.2 requires msrest to be 0.6.19 (though I'm using 4.14.0 and the version doesn't match either). There's no way these two packages work together right now because of the msrest version dependency. ## To Reproduce 1. Create a requirements.txt file with these dependencies azure-ai-language-questionanswering==1.0.0 botbuilder-ai==4.14.0 botbuilder-core==4.14.0 botbuilder-dialogs==4.14.0 botbuilder-integration-aiohttp==4.14.0 botbuilder-schema==4.14.0 2. Install dependencies of a bot project with pip install -r requirements.txt ## Expected behavior Upgrade Botbuilder to work with msrest version 0.6.21 OR Decrease Question Answering version dependency of msrest to 0.6.19 or lower </issue> <code> [start of libraries/botframework-connector/setup.py] 1 # Copyright (c) Microsoft Corporation. All rights reserved. 2 # Licensed under the MIT License. 3 4 import os 5 from setuptools import setup 6 7 NAME = "botframework-connector" 8 VERSION = os.environ["packageVersion"] if "packageVersion" in os.environ else "4.15.0" 9 REQUIRES = [ 10 "msrest==0.6.19", 11 "requests>=2.23.0,<2.26", 12 "PyJWT>=1.5.3,<2.0.0", 13 "botbuilder-schema==4.15.0", 14 "msal==1.17.0", 15 ] 16 17 root = os.path.abspath(os.path.dirname(__file__)) 18 19 with open(os.path.join(root, "README.rst"), encoding="utf-8") as f: 20 long_description = f.read() 21 22 setup( 23 name=NAME, 24 version=VERSION, 25 description="Microsoft Bot Framework Bot Builder SDK for Python.", 26 author="Microsoft", 27 url="https://www.github.com/Microsoft/botbuilder-python", 28 keywords=["BotFrameworkConnector", "bots", "ai", "botframework", "botbuilder"], 29 install_requires=REQUIRES, 30 packages=[ 31 "botframework.connector", 32 "botframework.connector.auth", 33 "botframework.connector.async_mixin", 34 "botframework.connector.operations", 35 "botframework.connector.models", 36 "botframework.connector.aio", 37 "botframework.connector.aio.operations_async", 38 "botframework.connector.skills", 39 "botframework.connector.teams", 40 "botframework.connector.teams.operations", 41 "botframework.connector.token_api", 42 "botframework.connector.token_api.aio", 43 "botframework.connector.token_api.aio.operations_async", 44 "botframework.connector.token_api.models", 45 "botframework.connector.token_api.operations", 46 ], 47 include_package_data=True, 48 long_description=long_description, 49 long_description_content_type="text/x-rst", 50 license="MIT", 51 classifiers=[ 52 "Programming Language :: Python :: 3.7", 53 "Intended Audience :: Developers", 54 "License :: OSI Approved :: MIT License", 55 "Operating System :: OS Independent", 56 "Development Status :: 5 - Production/Stable", 57 "Topic :: Scientific/Engineering :: Artificial Intelligence", 58 ], 59 ) 60 [end of libraries/botframework-connector/setup.py] [start of libraries/botbuilder-schema/setup.py] 1 
# Copyright (c) Microsoft Corporation. All rights reserved. 2 # Licensed under the MIT License. 3 4 import os 5 from setuptools import setup 6 7 NAME = "botbuilder-schema" 8 VERSION = os.environ["packageVersion"] if "packageVersion" in os.environ else "4.15.0" 9 REQUIRES = ["msrest==0.6.19"] 10 11 root = os.path.abspath(os.path.dirname(__file__)) 12 13 with open(os.path.join(root, "README.rst"), encoding="utf-8") as f: 14 long_description = f.read() 15 16 setup( 17 name=NAME, 18 version=VERSION, 19 description="BotBuilder Schema", 20 author="Microsoft", 21 url="https://github.com/Microsoft/botbuilder-python", 22 keywords=["BotBuilderSchema", "bots", "ai", "botframework", "botbuilder"], 23 long_description=long_description, 24 long_description_content_type="text/x-rst", 25 license="MIT", 26 install_requires=REQUIRES, 27 packages=[ 28 "botbuilder.schema", 29 "botbuilder.schema.teams", 30 ], 31 include_package_data=True, 32 classifiers=[ 33 "Programming Language :: Python :: 3.7", 34 "Intended Audience :: Developers", 35 "License :: OSI Approved :: MIT License", 36 "Operating System :: OS Independent", 37 "Development Status :: 5 - Production/Stable", 38 "Topic :: Scientific/Engineering :: Artificial Intelligence", 39 ], 40 ) 41 [end of libraries/botbuilder-schema/setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/libraries/botbuilder-schema/setup.py b/libraries/botbuilder-schema/setup.py --- a/libraries/botbuilder-schema/setup.py +++ b/libraries/botbuilder-schema/setup.py @@ -6,7 +6,7 @@ NAME = "botbuilder-schema" VERSION = os.environ["packageVersion"] if "packageVersion" in os.environ else "4.15.0" -REQUIRES = ["msrest==0.6.19"] +REQUIRES = ["msrest==0.6.*"] root = os.path.abspath(os.path.dirname(__file__)) diff --git a/libraries/botframework-connector/setup.py b/libraries/botframework-connector/setup.py --- a/libraries/botframework-connector/setup.py +++ b/libraries/botframework-connector/setup.py @@ -7,11 +7,11 @@ NAME = "botframework-connector" VERSION = os.environ["packageVersion"] if "packageVersion" in os.environ else "4.15.0" REQUIRES = [ - "msrest==0.6.19", + "msrest==0.6.*", "requests>=2.23.0,<2.26", "PyJWT>=1.5.3,<2.0.0", "botbuilder-schema==4.15.0", - "msal==1.17.0", + "msal==1.*", ] root = os.path.abspath(os.path.dirname(__file__))
{"golden_diff": "diff --git a/libraries/botbuilder-schema/setup.py b/libraries/botbuilder-schema/setup.py\n--- a/libraries/botbuilder-schema/setup.py\n+++ b/libraries/botbuilder-schema/setup.py\n@@ -6,7 +6,7 @@\n \r\n NAME = \"botbuilder-schema\"\r\n VERSION = os.environ[\"packageVersion\"] if \"packageVersion\" in os.environ else \"4.15.0\"\r\n-REQUIRES = [\"msrest==0.6.19\"]\r\n+REQUIRES = [\"msrest==0.6.*\"]\r\n \r\n root = os.path.abspath(os.path.dirname(__file__))\r\n \r\ndiff --git a/libraries/botframework-connector/setup.py b/libraries/botframework-connector/setup.py\n--- a/libraries/botframework-connector/setup.py\n+++ b/libraries/botframework-connector/setup.py\n@@ -7,11 +7,11 @@\n NAME = \"botframework-connector\"\n VERSION = os.environ[\"packageVersion\"] if \"packageVersion\" in os.environ else \"4.15.0\"\n REQUIRES = [\n- \"msrest==0.6.19\",\n+ \"msrest==0.6.*\",\n \"requests>=2.23.0,<2.26\",\n \"PyJWT>=1.5.3,<2.0.0\",\n \"botbuilder-schema==4.15.0\",\n- \"msal==1.17.0\",\n+ \"msal==1.*\",\n ]\n \n root = os.path.abspath(os.path.dirname(__file__))\n", "issue": "Version dependency between Botbuilder and Question Answering\n### [Github issues](https://github.com/Microsoft/botbuilder-python) should be used for bugs and feature requests. Use [Stack Overflow](https://stackoverflow.com/questions/tagged/botframework) for general \"how-to\" questions. \r\n\r\n## Version\r\nBotbuilder 4.14.0\r\nazure-ai-language-questionanswering 1.0.0\r\n\r\n## Describe the bug\r\nWhen building a Python bot using Question Answering and Botbuilder:\r\nThe package azure-ai-language-questionanswering actual version 1.0.0 requires at least msrest version 0.6.21 but the actual version of Botbuilder (-schema/core/ai/etc) 4.14.2 requires msrest to be 0.6.19 (though I'm using 4.14.0 and the version doesn't match either).\r\nThere's no way these two packages work together right now because of the msrest version dependency.\r\n\r\n\r\n## To Reproduce\r\n1. Create a requirements.txt file with these dependencies \r\nazure-ai-language-questionanswering==1.0.0\r\nbotbuilder-ai==4.14.0\r\nbotbuilder-core==4.14.0\r\nbotbuilder-dialogs==4.14.0\r\nbotbuilder-integration-aiohttp==4.14.0\r\nbotbuilder-schema==4.14.0\r\n\r\n2. Install dependencies of a bot project with pip install -r requirements.txt\r\n\r\n\r\n## Expected behavior\r\nUpgrade Botbuilder to work with msrest version 0.6.21 OR\r\nDecrease Question Answering version dependency of msrest to 0.6.19 or lower\r\n\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. 
All rights reserved.\n# Licensed under the MIT License.\n\nimport os\nfrom setuptools import setup\n\nNAME = \"botframework-connector\"\nVERSION = os.environ[\"packageVersion\"] if \"packageVersion\" in os.environ else \"4.15.0\"\nREQUIRES = [\n \"msrest==0.6.19\",\n \"requests>=2.23.0,<2.26\",\n \"PyJWT>=1.5.3,<2.0.0\",\n \"botbuilder-schema==4.15.0\",\n \"msal==1.17.0\",\n]\n\nroot = os.path.abspath(os.path.dirname(__file__))\n\nwith open(os.path.join(root, \"README.rst\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nsetup(\n name=NAME,\n version=VERSION,\n description=\"Microsoft Bot Framework Bot Builder SDK for Python.\",\n author=\"Microsoft\",\n url=\"https://www.github.com/Microsoft/botbuilder-python\",\n keywords=[\"BotFrameworkConnector\", \"bots\", \"ai\", \"botframework\", \"botbuilder\"],\n install_requires=REQUIRES,\n packages=[\n \"botframework.connector\",\n \"botframework.connector.auth\",\n \"botframework.connector.async_mixin\",\n \"botframework.connector.operations\",\n \"botframework.connector.models\",\n \"botframework.connector.aio\",\n \"botframework.connector.aio.operations_async\",\n \"botframework.connector.skills\",\n \"botframework.connector.teams\",\n \"botframework.connector.teams.operations\",\n \"botframework.connector.token_api\",\n \"botframework.connector.token_api.aio\",\n \"botframework.connector.token_api.aio.operations_async\",\n \"botframework.connector.token_api.models\",\n \"botframework.connector.token_api.operations\",\n ],\n include_package_data=True,\n long_description=long_description,\n long_description_content_type=\"text/x-rst\",\n license=\"MIT\",\n classifiers=[\n \"Programming Language :: Python :: 3.7\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Development Status :: 5 - Production/Stable\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n)\n", "path": "libraries/botframework-connector/setup.py"}, {"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\r\n# Licensed under the MIT License.\r\n\r\nimport os\r\nfrom setuptools import setup\r\n\r\nNAME = \"botbuilder-schema\"\r\nVERSION = os.environ[\"packageVersion\"] if \"packageVersion\" in os.environ else \"4.15.0\"\r\nREQUIRES = [\"msrest==0.6.19\"]\r\n\r\nroot = os.path.abspath(os.path.dirname(__file__))\r\n\r\nwith open(os.path.join(root, \"README.rst\"), encoding=\"utf-8\") as f:\r\n long_description = f.read()\r\n\r\nsetup(\r\n name=NAME,\r\n version=VERSION,\r\n description=\"BotBuilder Schema\",\r\n author=\"Microsoft\",\r\n url=\"https://github.com/Microsoft/botbuilder-python\",\r\n keywords=[\"BotBuilderSchema\", \"bots\", \"ai\", \"botframework\", \"botbuilder\"],\r\n long_description=long_description,\r\n long_description_content_type=\"text/x-rst\",\r\n license=\"MIT\",\r\n install_requires=REQUIRES,\r\n packages=[\r\n \"botbuilder.schema\",\r\n \"botbuilder.schema.teams\",\r\n ],\r\n include_package_data=True,\r\n classifiers=[\r\n \"Programming Language :: Python :: 3.7\",\r\n \"Intended Audience :: Developers\",\r\n \"License :: OSI Approved :: MIT License\",\r\n \"Operating System :: OS Independent\",\r\n \"Development Status :: 5 - Production/Stable\",\r\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\r\n ],\r\n)\r\n", "path": "libraries/botbuilder-schema/setup.py"}]}
1,888
332
gh_patches_debug_21014
rasdani/github-patches
git_diff
python__peps-3559
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Infra: Allow dismissing the historical note banner These notes take up more than a 1/3 of my screen and make it significantly harder to read the PEP. I can developer console it out of existence on my laptop, but not on my phone. </issue> <code> [start of pep_sphinx_extensions/pep_processor/parsing/pep_banner_directive.py] 1 """Roles to insert custom admonitions pointing readers to canonical content.""" 2 3 from __future__ import annotations 4 5 from docutils import nodes 6 from docutils.parsers import rst 7 8 PYPA_SPEC_BASE_URL = "https://packaging.python.org/en/latest/specifications/" 9 10 11 class PEPBanner(rst.Directive): 12 """Insert a special banner admonition in a PEP document.""" 13 14 has_content = True 15 required_arguments = 0 16 optional_arguments = 1 17 final_argument_whitespace = True 18 option_spec = {} 19 20 admonition_pre_template = "" 21 admonition_pre_text = "" 22 admonition_post_text = "" 23 24 admonition_class = nodes.important 25 css_classes = [] 26 27 28 def run(self) -> list[nodes.admonition]: 29 30 if self.arguments: 31 link_content = self.arguments[0] 32 pre_text = self.admonition_pre_template.format( 33 link_content=link_content) 34 else: 35 pre_text = self.admonition_pre_text 36 37 pre_text_node = nodes.paragraph(pre_text) 38 pre_text_node.line = self.lineno 39 pre_node, pre_msg = self.state.inline_text(pre_text, self.lineno) 40 pre_text_node.extend(pre_node + pre_msg) 41 42 post_text = self.admonition_post_text 43 post_text_node = nodes.paragraph(post_text) 44 post_text_node.line = self.lineno 45 post_node, post_msg = self.state.inline_text(post_text, self.lineno) 46 post_text_node.extend(post_node + post_msg) 47 48 source_lines = [pre_text] + list(self.content or []) + [post_text] 49 admonition_node = self.admonition_class( 50 "\n".join(source_lines), classes=["pep-banner"] + self.css_classes) 51 52 admonition_node.append(pre_text_node) 53 if self.content: 54 self.state.nested_parse( 55 self.content, self.content_offset, admonition_node) 56 admonition_node.append(post_text_node) 57 58 return [admonition_node] 59 60 61 class CanonicalDocBanner(PEPBanner): 62 """Insert an admonition pointing readers to a PEP's canonical docs.""" 63 64 admonition_pre_template = ( 65 "This PEP is a historical document. " 66 "The up-to-date, canonical documentation can now be found " 67 "at {link_content}." 68 ) 69 admonition_pre_text = ( 70 "This PEP is a historical document. " 71 "The up-to-date, canonical documentation can now be found elsewhere." 72 ) 73 admonition_post_text = ( 74 "See :pep:`1` for how to propose changes." 75 ) 76 77 css_classes = ["canonical-doc", "sticky-banner"] 78 79 80 81 class CanonicalPyPASpecBanner(PEPBanner): 82 """Insert a specialized admonition for PyPA packaging specifications.""" 83 84 admonition_pre_template = ( 85 "This PEP is a historical document. " 86 "The up-to-date, canonical spec, {link_content}, is maintained on " 87 f"the `PyPA specs page <{PYPA_SPEC_BASE_URL}>`__." 88 ) 89 admonition_pre_text = ( 90 "This PEP is a historical document. " 91 "The up-to-date, canonical specifications are maintained on " 92 f"the `PyPA specs page <{PYPA_SPEC_BASE_URL}>`__." 93 ) 94 admonition_post_text = ( 95 "See the `PyPA specification update process " 96 "<https://www.pypa.io/en/latest/specifications/#handling-fixes-and-other-minor-updates>`__ " 97 "for how to propose changes." 
98 ) 99 admonition_class = nodes.attention 100 101 css_classes = ["canonical-pypa-spec", "sticky-banner"] 102 [end of pep_sphinx_extensions/pep_processor/parsing/pep_banner_directive.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pep_sphinx_extensions/pep_processor/parsing/pep_banner_directive.py b/pep_sphinx_extensions/pep_processor/parsing/pep_banner_directive.py --- a/pep_sphinx_extensions/pep_processor/parsing/pep_banner_directive.py +++ b/pep_sphinx_extensions/pep_processor/parsing/pep_banner_directive.py @@ -34,6 +34,9 @@ else: pre_text = self.admonition_pre_text + close_button_node = nodes.paragraph('', '', nodes.Text('×'), classes=['close-button']) + close_button_node['classes'].append('close-button') + pre_text_node = nodes.paragraph(pre_text) pre_text_node.line = self.lineno pre_node, pre_msg = self.state.inline_text(pre_text, self.lineno) @@ -50,6 +53,7 @@ "\n".join(source_lines), classes=["pep-banner"] + self.css_classes) admonition_node.append(pre_text_node) + admonition_node.append(close_button_node) if self.content: self.state.nested_parse( self.content, self.content_offset, admonition_node)
{"golden_diff": "diff --git a/pep_sphinx_extensions/pep_processor/parsing/pep_banner_directive.py b/pep_sphinx_extensions/pep_processor/parsing/pep_banner_directive.py\n--- a/pep_sphinx_extensions/pep_processor/parsing/pep_banner_directive.py\n+++ b/pep_sphinx_extensions/pep_processor/parsing/pep_banner_directive.py\n@@ -34,6 +34,9 @@\n else:\n pre_text = self.admonition_pre_text\n \n+ close_button_node = nodes.paragraph('', '', nodes.Text('\u00d7'), classes=['close-button'])\n+ close_button_node['classes'].append('close-button')\n+\n pre_text_node = nodes.paragraph(pre_text)\n pre_text_node.line = self.lineno\n pre_node, pre_msg = self.state.inline_text(pre_text, self.lineno)\n@@ -50,6 +53,7 @@\n \"\\n\".join(source_lines), classes=[\"pep-banner\"] + self.css_classes)\n \n admonition_node.append(pre_text_node)\n+ admonition_node.append(close_button_node)\n if self.content:\n self.state.nested_parse(\n self.content, self.content_offset, admonition_node)\n", "issue": "Infra: Allow dismissing the historical note banner\nThese notes take up more than a 1/3 of my screen and make it significantly harder to read the PEP. I can developer console it out of existence on my laptop, but not on my phone.\n", "before_files": [{"content": "\"\"\"Roles to insert custom admonitions pointing readers to canonical content.\"\"\"\n\nfrom __future__ import annotations\n\nfrom docutils import nodes\nfrom docutils.parsers import rst\n\nPYPA_SPEC_BASE_URL = \"https://packaging.python.org/en/latest/specifications/\"\n\n\nclass PEPBanner(rst.Directive):\n \"\"\"Insert a special banner admonition in a PEP document.\"\"\"\n\n has_content = True\n required_arguments = 0\n optional_arguments = 1\n final_argument_whitespace = True\n option_spec = {}\n\n admonition_pre_template = \"\"\n admonition_pre_text = \"\"\n admonition_post_text = \"\"\n\n admonition_class = nodes.important\n css_classes = []\n\n\n def run(self) -> list[nodes.admonition]:\n\n if self.arguments:\n link_content = self.arguments[0]\n pre_text = self.admonition_pre_template.format(\n link_content=link_content)\n else:\n pre_text = self.admonition_pre_text\n\n pre_text_node = nodes.paragraph(pre_text)\n pre_text_node.line = self.lineno\n pre_node, pre_msg = self.state.inline_text(pre_text, self.lineno)\n pre_text_node.extend(pre_node + pre_msg)\n\n post_text = self.admonition_post_text\n post_text_node = nodes.paragraph(post_text)\n post_text_node.line = self.lineno\n post_node, post_msg = self.state.inline_text(post_text, self.lineno)\n post_text_node.extend(post_node + post_msg)\n\n source_lines = [pre_text] + list(self.content or []) + [post_text]\n admonition_node = self.admonition_class(\n \"\\n\".join(source_lines), classes=[\"pep-banner\"] + self.css_classes)\n\n admonition_node.append(pre_text_node)\n if self.content:\n self.state.nested_parse(\n self.content, self.content_offset, admonition_node)\n admonition_node.append(post_text_node)\n\n return [admonition_node]\n\n\nclass CanonicalDocBanner(PEPBanner):\n \"\"\"Insert an admonition pointing readers to a PEP's canonical docs.\"\"\"\n\n admonition_pre_template = (\n \"This PEP is a historical document. \"\n \"The up-to-date, canonical documentation can now be found \"\n \"at {link_content}.\"\n )\n admonition_pre_text = (\n \"This PEP is a historical document. 
\"\n \"The up-to-date, canonical documentation can now be found elsewhere.\"\n )\n admonition_post_text = (\n \"See :pep:`1` for how to propose changes.\"\n )\n\n css_classes = [\"canonical-doc\", \"sticky-banner\"]\n\n\n\nclass CanonicalPyPASpecBanner(PEPBanner):\n \"\"\"Insert a specialized admonition for PyPA packaging specifications.\"\"\"\n\n admonition_pre_template = (\n \"This PEP is a historical document. \"\n \"The up-to-date, canonical spec, {link_content}, is maintained on \"\n f\"the `PyPA specs page <{PYPA_SPEC_BASE_URL}>`__.\"\n )\n admonition_pre_text = (\n \"This PEP is a historical document. \"\n \"The up-to-date, canonical specifications are maintained on \"\n f\"the `PyPA specs page <{PYPA_SPEC_BASE_URL}>`__.\"\n )\n admonition_post_text = (\n \"See the `PyPA specification update process \"\n \"<https://www.pypa.io/en/latest/specifications/#handling-fixes-and-other-minor-updates>`__ \"\n \"for how to propose changes.\"\n )\n admonition_class = nodes.attention\n\n css_classes = [\"canonical-pypa-spec\", \"sticky-banner\"]\n", "path": "pep_sphinx_extensions/pep_processor/parsing/pep_banner_directive.py"}]}
1,599
264
gh_patches_debug_13591
rasdani/github-patches
git_diff
freqtrade__freqtrade-4189
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> StoplossGuard should include stoploss_on_exchange as well The document is here: https://www.freqtrade.io/en/latest/configuration/#stoploss-guard But it should include stoploss_on_exchange as well. </issue> <code> [start of freqtrade/plugins/protections/stoploss_guard.py] 1 2 import logging 3 from datetime import datetime, timedelta 4 from typing import Any, Dict 5 6 from freqtrade.persistence import Trade 7 from freqtrade.plugins.protections import IProtection, ProtectionReturn 8 from freqtrade.strategy.interface import SellType 9 10 11 logger = logging.getLogger(__name__) 12 13 14 class StoplossGuard(IProtection): 15 16 has_global_stop: bool = True 17 has_local_stop: bool = True 18 19 def __init__(self, config: Dict[str, Any], protection_config: Dict[str, Any]) -> None: 20 super().__init__(config, protection_config) 21 22 self._trade_limit = protection_config.get('trade_limit', 10) 23 self._disable_global_stop = protection_config.get('only_per_pair', False) 24 25 def short_desc(self) -> str: 26 """ 27 Short method description - used for startup-messages 28 """ 29 return (f"{self.name} - Frequent Stoploss Guard, {self._trade_limit} stoplosses " 30 f"within {self.lookback_period_str}.") 31 32 def _reason(self) -> str: 33 """ 34 LockReason to use 35 """ 36 return (f'{self._trade_limit} stoplosses in {self._lookback_period} min, ' 37 f'locking for {self._stop_duration} min.') 38 39 def _stoploss_guard(self, date_now: datetime, pair: str = None) -> ProtectionReturn: 40 """ 41 Evaluate recent trades 42 """ 43 look_back_until = date_now - timedelta(minutes=self._lookback_period) 44 # filters = [ 45 # Trade.is_open.is_(False), 46 # Trade.close_date > look_back_until, 47 # or_(Trade.sell_reason == SellType.STOP_LOSS.value, 48 # and_(Trade.sell_reason == SellType.TRAILING_STOP_LOSS.value, 49 # Trade.close_profit < 0)) 50 # ] 51 # if pair: 52 # filters.append(Trade.pair == pair) 53 # trades = Trade.get_trades(filters).all() 54 55 trades1 = Trade.get_trades_proxy(pair=pair, is_open=False, close_date=look_back_until) 56 trades = [trade for trade in trades1 if str(trade.sell_reason) == SellType.STOP_LOSS.value 57 or (str(trade.sell_reason) == SellType.TRAILING_STOP_LOSS.value 58 and trade.close_profit < 0)] 59 60 if len(trades) > self._trade_limit: 61 self.log_once(f"Trading stopped due to {self._trade_limit} " 62 f"stoplosses within {self._lookback_period} minutes.", logger.info) 63 until = self.calculate_lock_end(trades, self._stop_duration) 64 return True, until, self._reason() 65 66 return False, None, None 67 68 def global_stop(self, date_now: datetime) -> ProtectionReturn: 69 """ 70 Stops trading (position entering) for all pairs 71 This must evaluate to true for the whole period of the "cooldown period". 72 :return: Tuple of [bool, until, reason]. 73 If true, all pairs will be locked with <reason> until <until> 74 """ 75 if self._disable_global_stop: 76 return False, None, None 77 return self._stoploss_guard(date_now, None) 78 79 def stop_per_pair(self, pair: str, date_now: datetime) -> ProtectionReturn: 80 """ 81 Stops trading (position entering) for this pair 82 This must evaluate to true for the whole period of the "cooldown period". 83 :return: Tuple of [bool, until, reason]. 
84 If true, this pair will be locked with <reason> until <until> 85 """ 86 return self._stoploss_guard(date_now, pair) 87 [end of freqtrade/plugins/protections/stoploss_guard.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/freqtrade/plugins/protections/stoploss_guard.py b/freqtrade/plugins/protections/stoploss_guard.py --- a/freqtrade/plugins/protections/stoploss_guard.py +++ b/freqtrade/plugins/protections/stoploss_guard.py @@ -53,8 +53,9 @@ # trades = Trade.get_trades(filters).all() trades1 = Trade.get_trades_proxy(pair=pair, is_open=False, close_date=look_back_until) - trades = [trade for trade in trades1 if str(trade.sell_reason) == SellType.STOP_LOSS.value - or (str(trade.sell_reason) == SellType.TRAILING_STOP_LOSS.value + trades = [trade for trade in trades1 if (str(trade.sell_reason) in ( + SellType.TRAILING_STOP_LOSS.value, SellType.STOP_LOSS.value, + SellType.STOPLOSS_ON_EXCHANGE.value) and trade.close_profit < 0)] if len(trades) > self._trade_limit:
{"golden_diff": "diff --git a/freqtrade/plugins/protections/stoploss_guard.py b/freqtrade/plugins/protections/stoploss_guard.py\n--- a/freqtrade/plugins/protections/stoploss_guard.py\n+++ b/freqtrade/plugins/protections/stoploss_guard.py\n@@ -53,8 +53,9 @@\n # trades = Trade.get_trades(filters).all()\n \n trades1 = Trade.get_trades_proxy(pair=pair, is_open=False, close_date=look_back_until)\n- trades = [trade for trade in trades1 if str(trade.sell_reason) == SellType.STOP_LOSS.value\n- or (str(trade.sell_reason) == SellType.TRAILING_STOP_LOSS.value\n+ trades = [trade for trade in trades1 if (str(trade.sell_reason) in (\n+ SellType.TRAILING_STOP_LOSS.value, SellType.STOP_LOSS.value,\n+ SellType.STOPLOSS_ON_EXCHANGE.value)\n and trade.close_profit < 0)]\n \n if len(trades) > self._trade_limit:\n", "issue": "StoplossGuard should include stoploss_on_exchange as well\nThe document is here: https://www.freqtrade.io/en/latest/configuration/#stoploss-guard\r\n\r\nBut it should include stoploss_on_exchange as well.\n", "before_files": [{"content": "\nimport logging\nfrom datetime import datetime, timedelta\nfrom typing import Any, Dict\n\nfrom freqtrade.persistence import Trade\nfrom freqtrade.plugins.protections import IProtection, ProtectionReturn\nfrom freqtrade.strategy.interface import SellType\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass StoplossGuard(IProtection):\n\n has_global_stop: bool = True\n has_local_stop: bool = True\n\n def __init__(self, config: Dict[str, Any], protection_config: Dict[str, Any]) -> None:\n super().__init__(config, protection_config)\n\n self._trade_limit = protection_config.get('trade_limit', 10)\n self._disable_global_stop = protection_config.get('only_per_pair', False)\n\n def short_desc(self) -> str:\n \"\"\"\n Short method description - used for startup-messages\n \"\"\"\n return (f\"{self.name} - Frequent Stoploss Guard, {self._trade_limit} stoplosses \"\n f\"within {self.lookback_period_str}.\")\n\n def _reason(self) -> str:\n \"\"\"\n LockReason to use\n \"\"\"\n return (f'{self._trade_limit} stoplosses in {self._lookback_period} min, '\n f'locking for {self._stop_duration} min.')\n\n def _stoploss_guard(self, date_now: datetime, pair: str = None) -> ProtectionReturn:\n \"\"\"\n Evaluate recent trades\n \"\"\"\n look_back_until = date_now - timedelta(minutes=self._lookback_period)\n # filters = [\n # Trade.is_open.is_(False),\n # Trade.close_date > look_back_until,\n # or_(Trade.sell_reason == SellType.STOP_LOSS.value,\n # and_(Trade.sell_reason == SellType.TRAILING_STOP_LOSS.value,\n # Trade.close_profit < 0))\n # ]\n # if pair:\n # filters.append(Trade.pair == pair)\n # trades = Trade.get_trades(filters).all()\n\n trades1 = Trade.get_trades_proxy(pair=pair, is_open=False, close_date=look_back_until)\n trades = [trade for trade in trades1 if str(trade.sell_reason) == SellType.STOP_LOSS.value\n or (str(trade.sell_reason) == SellType.TRAILING_STOP_LOSS.value\n and trade.close_profit < 0)]\n\n if len(trades) > self._trade_limit:\n self.log_once(f\"Trading stopped due to {self._trade_limit} \"\n f\"stoplosses within {self._lookback_period} minutes.\", logger.info)\n until = self.calculate_lock_end(trades, self._stop_duration)\n return True, until, self._reason()\n\n return False, None, None\n\n def global_stop(self, date_now: datetime) -> ProtectionReturn:\n \"\"\"\n Stops trading (position entering) for all pairs\n This must evaluate to true for the whole period of the \"cooldown period\".\n :return: Tuple of [bool, until, reason].\n If true, all 
pairs will be locked with <reason> until <until>\n \"\"\"\n if self._disable_global_stop:\n return False, None, None\n return self._stoploss_guard(date_now, None)\n\n def stop_per_pair(self, pair: str, date_now: datetime) -> ProtectionReturn:\n \"\"\"\n Stops trading (position entering) for this pair\n This must evaluate to true for the whole period of the \"cooldown period\".\n :return: Tuple of [bool, until, reason].\n If true, this pair will be locked with <reason> until <until>\n \"\"\"\n return self._stoploss_guard(date_now, pair)\n", "path": "freqtrade/plugins/protections/stoploss_guard.py"}]}
1,540
229
gh_patches_debug_17067
rasdani/github-patches
git_diff
keras-team__autokeras-241
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> "numpy.linalg.linalg.LinAlgError: N-th leading minor of the array is not positive definite" exception while training ImageClassifier <!--- **If you are reporting a bug:** * Verify that your issue is not being currently addressed by other issues or pull requests. * Please note that Auto-Keras is only compatible with **Python 3.6**. * Tag the issue with the `bug report` tag. --> ### Bug Description When training ImageClassifier on float inputs of shape 80x92x24 on a binary classification task, this exception is thrown when AutoKeras lists all created models until this point and their respected losses and metrics. Here is a more complete log + stack trace: http://termbin.com/gb3n ### Reproducing Steps * Create a dataset of noise shaped 80x92x24 with ~360 or so samples * Respective binary labels. In my case, some values of the dataset are quite large (they vary between 0 and 1_000_000, but I'm not sure that this is related since AutoKeras performs normalization). * Have it train on a GTX 1070Ti or equivalent in terms of vram for about an hour (can't confirm that it's GPU-related either). * Wait for the exception ### Expected Behavior It should work. ### Setup Details - OS type and version: Ubuntu 18.04 - Python: 3.6 - autokeras: 0.2.14 (installed manually from git) - all dependencies are installed as stated in requirements.txt for this release, unless for: - tensorflow 1.10, compiled locally. </issue> <code> [start of setup.py] 1 from distutils.core import setup 2 3 setup( 4 name='autokeras', 5 packages=['autokeras'], # this must be the same as the name above 6 install_requires=['torch==0.4.1', 'torchvision==0.2.1', 'numpy>=1.14.5', 'keras==2.2.2', 'scikit-learn==0.19.1', 7 'tensorflow>=1.10.0', 'tqdm==4.25.0'], 8 version='0.2.17', 9 description='AutoML for deep learning', 10 author='Haifeng Jin', 11 author_email='[email protected]', 12 url='http://autokeras.com', 13 download_url='https://github.com/jhfjhfj1/autokeras/archive/0.2.17.tar.gz', 14 keywords=['automl'], # arbitrary keywords 15 classifiers=[] 16 ) 17 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -5,12 +5,12 @@ packages=['autokeras'], # this must be the same as the name above install_requires=['torch==0.4.1', 'torchvision==0.2.1', 'numpy>=1.14.5', 'keras==2.2.2', 'scikit-learn==0.19.1', 'tensorflow>=1.10.0', 'tqdm==4.25.0'], - version='0.2.17', + version='0.2.18', description='AutoML for deep learning', author='Haifeng Jin', author_email='[email protected]', url='http://autokeras.com', - download_url='https://github.com/jhfjhfj1/autokeras/archive/0.2.17.tar.gz', + download_url='https://github.com/jhfjhfj1/autokeras/archive/0.2.18.tar.gz', keywords=['automl'], # arbitrary keywords classifiers=[] )
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -5,12 +5,12 @@\n packages=['autokeras'], # this must be the same as the name above\n install_requires=['torch==0.4.1', 'torchvision==0.2.1', 'numpy>=1.14.5', 'keras==2.2.2', 'scikit-learn==0.19.1',\n 'tensorflow>=1.10.0', 'tqdm==4.25.0'],\n- version='0.2.17',\n+ version='0.2.18',\n description='AutoML for deep learning',\n author='Haifeng Jin',\n author_email='[email protected]',\n url='http://autokeras.com',\n- download_url='https://github.com/jhfjhfj1/autokeras/archive/0.2.17.tar.gz',\n+ download_url='https://github.com/jhfjhfj1/autokeras/archive/0.2.18.tar.gz',\n keywords=['automl'], # arbitrary keywords\n classifiers=[]\n )\n", "issue": "\"numpy.linalg.linalg.LinAlgError: N-th leading minor of the array is not positive definite\" exception while training ImageClassifier\n<!---\r\n**If you are reporting a bug:**\r\n* Verify that your issue is not being currently addressed by other issues or pull requests.\r\n* Please note that Auto-Keras is only compatible with **Python 3.6**.\r\n* Tag the issue with the `bug report` tag.\r\n-->\r\n\r\n### Bug Description\r\n\r\nWhen training ImageClassifier on float inputs of shape 80x92x24 on a binary classification task, this exception is thrown when AutoKeras lists all created models until this point and their respected losses and metrics. Here is a more complete log + stack trace: http://termbin.com/gb3n\r\n\r\n### Reproducing Steps\r\n\r\n* Create a dataset of noise shaped 80x92x24 with ~360 or so samples\r\n* Respective binary labels.\r\n\r\nIn my case, some values of the dataset are quite large (they vary between 0 and 1_000_000, but I'm not sure that this is related since AutoKeras performs normalization).\r\n\r\n* Have it train on a GTX 1070Ti or equivalent in terms of vram for about an hour (can't confirm that it's GPU-related either).\r\n* Wait for the exception\r\n\r\n\r\n### Expected Behavior\r\n\r\nIt should work.\r\n\r\n### Setup Details\r\n - OS type and version: Ubuntu 18.04\r\n - Python: 3.6\r\n - autokeras: 0.2.14 (installed manually from git)\r\n- all dependencies are installed as stated in requirements.txt for this release, unless for:\r\n- tensorflow 1.10, compiled locally. \r\n\n", "before_files": [{"content": "from distutils.core import setup\n\nsetup(\n name='autokeras',\n packages=['autokeras'], # this must be the same as the name above\n install_requires=['torch==0.4.1', 'torchvision==0.2.1', 'numpy>=1.14.5', 'keras==2.2.2', 'scikit-learn==0.19.1',\n 'tensorflow>=1.10.0', 'tqdm==4.25.0'],\n version='0.2.17',\n description='AutoML for deep learning',\n author='Haifeng Jin',\n author_email='[email protected]',\n url='http://autokeras.com',\n download_url='https://github.com/jhfjhfj1/autokeras/archive/0.2.17.tar.gz',\n keywords=['automl'], # arbitrary keywords\n classifiers=[]\n)\n", "path": "setup.py"}]}
1,134
267
gh_patches_debug_31843
rasdani/github-patches
git_diff
freedomofpress__securedrop-238
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Overly permissive Database privileges for “securedrop” user The privileges given the MySQL user accessing the SecureDrop database are overly permissive and allow an attacker after a successful SQL injection attack to escalate privileges: Example: https://github.com/freedomofpress/securedrop/search?q= GRANT+ALL+PRIVILEGES&ref=cmdform ``` echo "Setting up MySQL database..." mysql -u root -p"$mysql_root" -e "create database securedrop; GRANT ALL PRIVILEGES ON securedrop.* TO 'securedrop'@'localhost' IDENTIFIED BY '$mysql_securedrop';" ``` It should be made sure that the user is only applied with privileges that are actually needed. In case an attacker manages to spot a SQL injection vulnerability, the user with an unlimited set of permissions would be capable of compromising the machine via FILE and other SQL features (depending on the MySQL version). From what can be seen from the application’s logic, the database user essentially needs read-write access to one particular database - and not “GRANT ALL PRIVILEGES”. Reported as part of the cure53 audit of 0.2 as: SD-01-003 </issue> <code> [start of install_files/source.config.py] 1 import os 2 3 class BaseConfig(object): 4 DEBUG = False 5 TESTING = False 6 SECRET_KEY='SECRET_KEY_VALUE' 7 8 class ProductionConfig(BaseConfig): 9 pass 10 11 class DevelopmentConfig(BaseConfig): 12 DEBUG = True 13 14 class TestingConfig(BaseConfig): 15 TESTING = True 16 # Tests are simpler if CSRF protection is disabled 17 WTF_CSRF_ENABLED = False 18 19 # data directories - should be on secure media 20 STORE_DIR='/var/www/securedrop/store' 21 GPG_KEY_DIR='/var/www/securedrop/keys' 22 23 # fingerprint of the GPG key to encrypt submissions to 24 JOURNALIST_KEY='APP_GPG_KEY_FINGERPRINT' 25 26 SOURCE_TEMPLATES_DIR='/var/www/securedrop/source_templates' 27 JOURNALIST_TEMPLATES_DIR='/var/www/securedrop/journalist_templates' 28 WORD_LIST='/var/www/securedrop/wordlist' 29 30 NOUNS='/var/www/securedrop/dictionaries/nouns.txt' 31 ADJECTIVES='/var/www/securedrop/dictionaries/adjectives.txt' 32 BCRYPT_ID_SALT='BCRYPT_ID_SALT_VALUE' 33 BCRYPT_GPG_SALT='BCRYPT_GPG_SALT_VALUE' 34 # Default to the production configuration 35 FlaskConfig = ProductionConfig 36 37 if os.environ.get('SECUREDROP_ENV') == 'test': 38 FlaskConfig = TestingConfig 39 TEST_DIR='/tmp/securedrop_test' 40 STORE_DIR=os.path.join(TEST_DIR, 'store') 41 GPG_KEY_DIR=os.path.join(TEST_DIR, 'keys') 42 # test_journalist_key.pub 43 JOURNALIST_KEY='65A1B5FF195B56353CC63DFFCC40EF1228271441' 44 45 # Database Configuration 46 47 # Default to using a sqlite database file for development 48 #DATABASE_ENGINE = 'sqlite' 49 #DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite') 50 51 # Uncomment to use mysql (or any other databaes backend supported by 52 # SQLAlchemy). 
Make sure you have the necessary dependencies installed, and run 53 # `python -c "import db; db.create_tables()"` to initialize the database 54 55 DATABASE_ENGINE = 'mysql' 56 DATABASE_HOST = 'localhost' 57 DATABASE_NAME = 'securedrop' 58 DATABASE_USERNAME = 'securedrop' 59 DATABASE_PASSWORD = '' 60 [end of install_files/source.config.py] [start of install_files/document.config.py] 1 import os 2 3 class BaseConfig(object): 4 DEBUG = False 5 TESTING = False 6 SECRET_KEY='SECRET_KEY_VALUE' 7 8 class ProductionConfig(BaseConfig): 9 pass 10 11 class DevelopmentConfig(BaseConfig): 12 DEBUG = True 13 14 class TestingConfig(BaseConfig): 15 TESTING = True 16 # Tests are simpler if CSRF protection is disabled 17 WTF_CSRF_ENABLED = False 18 19 # data directories - should be on secure media 20 STORE_DIR='/var/www/securedrop/store' 21 GPG_KEY_DIR='/var/www/securedrop/keys' 22 23 # fingerprint of the GPG key to encrypt submissions to 24 JOURNALIST_KEY='APP_GPG_KEY_FINGERPRINT' 25 26 SOURCE_TEMPLATES_DIR='/var/www/securedrop/source_templates' 27 JOURNALIST_TEMPLATES_DIR='/var/www/securedrop/journalist_templates' 28 WORD_LIST='/var/www/securedrop/wordlist' 29 NOUNS='/var/www/securedrop/dictionaries/nouns.txt' 30 ADJECTIVES='/var/www/securedrop/dictionaries/adjectives.txt' 31 BCRYPT_ID_SALT='BCRYPT_ID_SALT_VALUE' 32 BCRYPT_GPG_SALT='BCRYPT_GPG_SALT_VALUE' 33 34 # Default to the production configuration 35 FlaskConfig = ProductionConfig 36 SECUREDROP_ROOT=os.path.abspath('/var/www/securedrop') 37 38 if os.environ.get('SECUREDROP_ENV') == 'test': 39 FlaskConfig = TestingConfig 40 TEST_DIR='/tmp/securedrop_test' 41 STORE_DIR=os.path.join(TEST_DIR, 'store') 42 GPG_KEY_DIR=os.path.join(TEST_DIR, 'keys') 43 # test_journalist_key.pub 44 JOURNALIST_KEY='65A1B5FF195B56353CC63DFFCC40EF1228271441' 45 46 # Database Configuration 47 48 # Default to using a sqlite database file for development 49 #DATABASE_ENGINE = 'sqlite' 50 #DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite') 51 52 # Uncomment to use mysql (or any other databaes backend supported by 53 # SQLAlchemy). Make sure you have the necessary dependencies installed, and run 54 # `python -c "import db; db.create_tables()"` to initialize the database 55 56 DATABASE_ENGINE = 'mysql' 57 DATABASE_HOST = 'localhost' 58 DATABASE_NAME = 'securedrop' 59 DATABASE_USERNAME = 'document_mysql' 60 DATABASE_PASSWORD = 'MYSQL_USER_PASS' 61 [end of install_files/document.config.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/install_files/document.config.py b/install_files/document.config.py --- a/install_files/document.config.py +++ b/install_files/document.config.py @@ -46,15 +46,15 @@ # Database Configuration # Default to using a sqlite database file for development -#DATABASE_ENGINE = 'sqlite' -#DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite') +DATABASE_ENGINE = 'sqlite' +DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite') # Uncomment to use mysql (or any other databaes backend supported by # SQLAlchemy). Make sure you have the necessary dependencies installed, and run # `python -c "import db; db.create_tables()"` to initialize the database -DATABASE_ENGINE = 'mysql' -DATABASE_HOST = 'localhost' -DATABASE_NAME = 'securedrop' -DATABASE_USERNAME = 'document_mysql' -DATABASE_PASSWORD = 'MYSQL_USER_PASS' +#DATABASE_ENGINE = 'mysql' +#DATABASE_HOST = 'localhost' +#DATABASE_NAME = 'securedrop' +#DATABASE_USERNAME = 'document_mysql' +#DATABASE_PASSWORD = 'MYSQL_USER_PASS' diff --git a/install_files/source.config.py b/install_files/source.config.py --- a/install_files/source.config.py +++ b/install_files/source.config.py @@ -45,15 +45,16 @@ # Database Configuration # Default to using a sqlite database file for development -#DATABASE_ENGINE = 'sqlite' -#DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite') +DATABASE_ENGINE = 'sqlite' +SECUREDROP_ROOT=os.path.abspath('/var/www/securedrop') +DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite') # Uncomment to use mysql (or any other databaes backend supported by # SQLAlchemy). Make sure you have the necessary dependencies installed, and run # `python -c "import db; db.create_tables()"` to initialize the database -DATABASE_ENGINE = 'mysql' -DATABASE_HOST = 'localhost' -DATABASE_NAME = 'securedrop' -DATABASE_USERNAME = 'securedrop' -DATABASE_PASSWORD = '' +#DATABASE_ENGINE = 'mysql' +#DATABASE_HOST = 'localhost' +#DATABASE_NAME = 'securedrop' +#DATABASE_USERNAME = 'securedrop' +#DATABASE_PASSWORD = ''
{"golden_diff": "diff --git a/install_files/document.config.py b/install_files/document.config.py\n--- a/install_files/document.config.py\n+++ b/install_files/document.config.py\n@@ -46,15 +46,15 @@\n # Database Configuration\n \n # Default to using a sqlite database file for development\n-#DATABASE_ENGINE = 'sqlite'\n-#DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite')\n+DATABASE_ENGINE = 'sqlite'\n+DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite')\n \n # Uncomment to use mysql (or any other databaes backend supported by\n # SQLAlchemy). Make sure you have the necessary dependencies installed, and run\n # `python -c \"import db; db.create_tables()\"` to initialize the database\n \n-DATABASE_ENGINE = 'mysql'\n-DATABASE_HOST = 'localhost'\n-DATABASE_NAME = 'securedrop'\n-DATABASE_USERNAME = 'document_mysql'\n-DATABASE_PASSWORD = 'MYSQL_USER_PASS'\n+#DATABASE_ENGINE = 'mysql'\n+#DATABASE_HOST = 'localhost'\n+#DATABASE_NAME = 'securedrop'\n+#DATABASE_USERNAME = 'document_mysql'\n+#DATABASE_PASSWORD = 'MYSQL_USER_PASS'\ndiff --git a/install_files/source.config.py b/install_files/source.config.py\n--- a/install_files/source.config.py\n+++ b/install_files/source.config.py\n@@ -45,15 +45,16 @@\n # Database Configuration\n \n # Default to using a sqlite database file for development\n-#DATABASE_ENGINE = 'sqlite'\n-#DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite')\n+DATABASE_ENGINE = 'sqlite'\n+SECUREDROP_ROOT=os.path.abspath('/var/www/securedrop')\n+DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite')\n \n # Uncomment to use mysql (or any other databaes backend supported by\n # SQLAlchemy). Make sure you have the necessary dependencies installed, and run\n # `python -c \"import db; db.create_tables()\"` to initialize the database\n \n-DATABASE_ENGINE = 'mysql'\n-DATABASE_HOST = 'localhost'\n-DATABASE_NAME = 'securedrop'\n-DATABASE_USERNAME = 'securedrop'\n-DATABASE_PASSWORD = ''\n+#DATABASE_ENGINE = 'mysql'\n+#DATABASE_HOST = 'localhost'\n+#DATABASE_NAME = 'securedrop'\n+#DATABASE_USERNAME = 'securedrop'\n+#DATABASE_PASSWORD = ''\n", "issue": "Overly permissive Database privileges for \u201csecuredrop\u201d user\nThe privileges given the MySQL user accessing the SecureDrop database\nare overly permissive and allow an attacker after a successful SQL\ninjection attack to escalate privileges:\n\nExample: https://github.com/freedomofpress/securedrop/search?q=\nGRANT+ALL+PRIVILEGES&ref=cmdform\n\n```\necho \"Setting up MySQL database...\"\nmysql -u root -p\"$mysql_root\" -e \"create database securedrop; GRANT ALL\nPRIVILEGES ON securedrop.* TO 'securedrop'@'localhost' IDENTIFIED BY\n'$mysql_securedrop';\"\n```\n\nIt should be made sure that the user is only applied with privileges\nthat are actually needed. In case an attacker manages to spot a SQL\ninjection vulnerability, the user with an unlimited set of permissions\nwould be capable of compromising the machine via FILE and other SQL\nfeatures (depending on the MySQL version). 
From what can be seen from\nthe application\u2019s logic, the database user essentially needs read-write\naccess to one particular database - and not \u201cGRANT ALL PRIVILEGES\u201d.\n\nReported as part of the cure53 audit of 0.2 as: SD-01-003\n\n", "before_files": [{"content": "import os\n\nclass BaseConfig(object):\n DEBUG = False\n TESTING = False\n SECRET_KEY='SECRET_KEY_VALUE'\n\nclass ProductionConfig(BaseConfig):\n pass\n\nclass DevelopmentConfig(BaseConfig):\n DEBUG = True\n\nclass TestingConfig(BaseConfig):\n TESTING = True\n # Tests are simpler if CSRF protection is disabled\n WTF_CSRF_ENABLED = False\n\n# data directories - should be on secure media\nSTORE_DIR='/var/www/securedrop/store'\nGPG_KEY_DIR='/var/www/securedrop/keys'\n\n# fingerprint of the GPG key to encrypt submissions to\nJOURNALIST_KEY='APP_GPG_KEY_FINGERPRINT'\n\nSOURCE_TEMPLATES_DIR='/var/www/securedrop/source_templates'\nJOURNALIST_TEMPLATES_DIR='/var/www/securedrop/journalist_templates'\nWORD_LIST='/var/www/securedrop/wordlist'\n\nNOUNS='/var/www/securedrop/dictionaries/nouns.txt'\nADJECTIVES='/var/www/securedrop/dictionaries/adjectives.txt'\nBCRYPT_ID_SALT='BCRYPT_ID_SALT_VALUE'\nBCRYPT_GPG_SALT='BCRYPT_GPG_SALT_VALUE'\n# Default to the production configuration\nFlaskConfig = ProductionConfig\n\nif os.environ.get('SECUREDROP_ENV') == 'test':\n FlaskConfig = TestingConfig\n TEST_DIR='/tmp/securedrop_test'\n STORE_DIR=os.path.join(TEST_DIR, 'store')\n GPG_KEY_DIR=os.path.join(TEST_DIR, 'keys')\n # test_journalist_key.pub\n JOURNALIST_KEY='65A1B5FF195B56353CC63DFFCC40EF1228271441'\n\n# Database Configuration\n\n# Default to using a sqlite database file for development\n#DATABASE_ENGINE = 'sqlite'\n#DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite')\n\n# Uncomment to use mysql (or any other databaes backend supported by\n# SQLAlchemy). 
Make sure you have the necessary dependencies installed, and run\n# `python -c \"import db; db.create_tables()\"` to initialize the database\n\nDATABASE_ENGINE = 'mysql'\nDATABASE_HOST = 'localhost'\nDATABASE_NAME = 'securedrop'\nDATABASE_USERNAME = 'securedrop'\nDATABASE_PASSWORD = ''\n", "path": "install_files/source.config.py"}, {"content": "import os\n\nclass BaseConfig(object):\n DEBUG = False\n TESTING = False\n SECRET_KEY='SECRET_KEY_VALUE'\n\nclass ProductionConfig(BaseConfig):\n pass\n\nclass DevelopmentConfig(BaseConfig):\n DEBUG = True\n\nclass TestingConfig(BaseConfig):\n TESTING = True\n # Tests are simpler if CSRF protection is disabled\n WTF_CSRF_ENABLED = False\n\n# data directories - should be on secure media\nSTORE_DIR='/var/www/securedrop/store'\nGPG_KEY_DIR='/var/www/securedrop/keys'\n\n# fingerprint of the GPG key to encrypt submissions to\nJOURNALIST_KEY='APP_GPG_KEY_FINGERPRINT'\n\nSOURCE_TEMPLATES_DIR='/var/www/securedrop/source_templates'\nJOURNALIST_TEMPLATES_DIR='/var/www/securedrop/journalist_templates'\nWORD_LIST='/var/www/securedrop/wordlist'\nNOUNS='/var/www/securedrop/dictionaries/nouns.txt'\nADJECTIVES='/var/www/securedrop/dictionaries/adjectives.txt'\nBCRYPT_ID_SALT='BCRYPT_ID_SALT_VALUE'\nBCRYPT_GPG_SALT='BCRYPT_GPG_SALT_VALUE'\n\n# Default to the production configuration\nFlaskConfig = ProductionConfig\nSECUREDROP_ROOT=os.path.abspath('/var/www/securedrop') \n\nif os.environ.get('SECUREDROP_ENV') == 'test':\n FlaskConfig = TestingConfig\n TEST_DIR='/tmp/securedrop_test'\n STORE_DIR=os.path.join(TEST_DIR, 'store')\n GPG_KEY_DIR=os.path.join(TEST_DIR, 'keys')\n # test_journalist_key.pub\n JOURNALIST_KEY='65A1B5FF195B56353CC63DFFCC40EF1228271441'\n\n# Database Configuration\n\n# Default to using a sqlite database file for development\n#DATABASE_ENGINE = 'sqlite'\n#DATABASE_FILE=os.path.join(SECUREDROP_ROOT, 'db.sqlite')\n\n# Uncomment to use mysql (or any other databaes backend supported by\n# SQLAlchemy). Make sure you have the necessary dependencies installed, and run\n# `python -c \"import db; db.create_tables()\"` to initialize the database\n\nDATABASE_ENGINE = 'mysql'\nDATABASE_HOST = 'localhost'\nDATABASE_NAME = 'securedrop'\nDATABASE_USERNAME = 'document_mysql'\nDATABASE_PASSWORD = 'MYSQL_USER_PASS'\n", "path": "install_files/document.config.py"}]}
2,031
482
gh_patches_debug_158
rasdani/github-patches
git_diff
mindsdb__mindsdb-2137
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [Bug]: PIP installation error ### Is there an existing issue for this? - [X] I have searched the existing issues ### Current Behavior Hello, using ```pip install mindsdb``` I've got this error: ``` Traceback (most recent call last): File "C:\Users\lukas\AppData\Local\Programs\Python\Python310\lib\site-packages\pip\_vendor\pep517\in_process\_in_process.py", line 363, in <module> main() File "C:\Users\lukas\AppData\Local\Programs\Python\Python310\lib\site-packages\pip\_vendor\pep517\in_process\_in_process.py", line 345, in main json_out['return_val'] = hook(**hook_input['kwargs']) File "C:\Users\lukas\AppData\Local\Programs\Python\Python310\lib\site-packages\pip\_vendor\pep517\in_process\_in_process.py", line 130, in get_requires_for_build_wheel return hook(config_settings) File "C:\Users\lukas\AppData\Local\Temp\pip-build-env-ke4g80_2\overlay\Lib\site-packages\setuptools\build_meta.py", line 177, in get_requires_for_build_wheel return self._get_build_requires( File "C:\Users\lukas\AppData\Local\Temp\pip-build-env-ke4g80_2\overlay\Lib\site-packages\setuptools\build_meta.py", line 159, in _get_build_requires self.run_setup() File "C:\Users\lukas\AppData\Local\Temp\pip-build-env-ke4g80_2\overlay\Lib\site-packages\setuptools\build_meta.py", line 281, in run_setup super(_BuildMetaLegacyBackend, File "C:\Users\lukas\AppData\Local\Temp\pip-build-env-ke4g80_2\overlay\Lib\site-packages\setuptools\build_meta.py", line 174, in run_setup exec(compile(code, __file__, 'exec'), locals()) File "setup.py", line 10, in <module> long_description = fh.read() File "C:\Users\lukas\AppData\Local\Programs\Python\Python310\lib\encodings\cp1250.py", line 23, in decode return codecs.charmap_decode(input,self.errors,decoding_table)[0] UnicodeDecodeError: 'charmap' codec can't decode byte 0x90 in position 3404: character maps to <undefined> ``` I am using the latest Python 3.10.4 ### Expected Behavior _No response_ ### Steps To Reproduce _No response_ ### Anything else? _No response_ </issue> <code> [start of setup.py] 1 from setuptools import setup, find_packages 2 3 4 about = {} 5 with open("mindsdb/__about__.py") as fp: 6 exec(fp.read(), about) 7 8 9 with open("README.md", "r") as fh: 10 long_description = fh.read() 11 12 13 def install_deps(): 14 """Reads requirements.txt and preprocess it 15 to be feed into setuptools. 16 17 This is the only possible way (we found) 18 how requirements.txt can be reused in setup.py 19 using dependencies from private github repositories. 20 21 Links must be appendend by `-{StringWithAtLeastOneNumber}` 22 or something like that, so e.g. `-9231` works as well as 23 `1.1.0`. This is ignored by the setuptools, but has to be there. 24 25 Warnings: 26 to make pip respect the links, you have to use 27 `--process-dependency-links` switch. So e.g.: 28 `pip install --process-dependency-links {git-url}` 29 30 Returns: 31 list of packages and dependency links. 
32 """ 33 default = open('requirements.txt', 'r').readlines() 34 new_pkgs = [] 35 links = [] 36 for resource in default: 37 if 'git+https' in resource: 38 pkg = resource.split('#')[-1] 39 links.append(resource.strip() + '-9876543210') 40 new_pkgs.append(pkg.replace('egg=', '').rstrip()) 41 else: 42 new_pkgs.append(resource.strip()) 43 return new_pkgs, links 44 45 46 pkgs, new_links = install_deps() 47 48 setup( 49 name=about['__title__'], 50 version=about['__version__'], 51 url=about['__github__'], 52 download_url=about['__pypi__'], 53 license=about['__license__'], 54 author=about['__author__'], 55 author_email=about['__email__'], 56 description=about['__description__'], 57 long_description=long_description, 58 long_description_content_type="text/markdown", 59 packages=find_packages(), 60 install_requires=pkgs, 61 dependency_links=new_links, 62 include_package_data=True, 63 classifiers=[ 64 "Programming Language :: Python :: 3", 65 "Operating System :: OS Independent", 66 ], 67 python_requires=">=3.6" 68 ) 69 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ exec(fp.read(), about) -with open("README.md", "r") as fh: +with open("README.md", "r", encoding="utf8") as fh: long_description = fh.read()
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -6,7 +6,7 @@\n exec(fp.read(), about)\n \n \n-with open(\"README.md\", \"r\") as fh:\n+with open(\"README.md\", \"r\", encoding=\"utf8\") as fh:\n long_description = fh.read()\n", "issue": "[Bug]: PIP installation error\n### Is there an existing issue for this?\n\n- [X] I have searched the existing issues\n\n### Current Behavior\n\nHello,\r\nusing ```pip install mindsdb``` I've got this error:\r\n```\r\nTraceback (most recent call last):\r\n File \"C:\\Users\\lukas\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\pip\\_vendor\\pep517\\in_process\\_in_process.py\", line 363, in <module>\r\n main()\r\n File \"C:\\Users\\lukas\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\pip\\_vendor\\pep517\\in_process\\_in_process.py\", line 345, in main\r\n json_out['return_val'] = hook(**hook_input['kwargs'])\r\n File \"C:\\Users\\lukas\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\pip\\_vendor\\pep517\\in_process\\_in_process.py\", line 130, in get_requires_for_build_wheel\r\n return hook(config_settings)\r\n File \"C:\\Users\\lukas\\AppData\\Local\\Temp\\pip-build-env-ke4g80_2\\overlay\\Lib\\site-packages\\setuptools\\build_meta.py\", line 177, in get_requires_for_build_wheel\r\n return self._get_build_requires(\r\n File \"C:\\Users\\lukas\\AppData\\Local\\Temp\\pip-build-env-ke4g80_2\\overlay\\Lib\\site-packages\\setuptools\\build_meta.py\", line 159, in _get_build_requires\r\n self.run_setup()\r\n File \"C:\\Users\\lukas\\AppData\\Local\\Temp\\pip-build-env-ke4g80_2\\overlay\\Lib\\site-packages\\setuptools\\build_meta.py\", line 281, in run_setup\r\n super(_BuildMetaLegacyBackend,\r\n File \"C:\\Users\\lukas\\AppData\\Local\\Temp\\pip-build-env-ke4g80_2\\overlay\\Lib\\site-packages\\setuptools\\build_meta.py\", line 174, in run_setup\r\n exec(compile(code, __file__, 'exec'), locals())\r\n File \"setup.py\", line 10, in <module>\r\n long_description = fh.read()\r\n File \"C:\\Users\\lukas\\AppData\\Local\\Programs\\Python\\Python310\\lib\\encodings\\cp1250.py\", line 23, in decode\r\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\r\n UnicodeDecodeError: 'charmap' codec can't decode byte 0x90 in position 3404: character maps to <undefined>\r\n```\r\nI am using the latest Python 3.10.4\n\n### Expected Behavior\n\n_No response_\n\n### Steps To Reproduce\n\n_No response_\n\n### Anything else?\n\n_No response_\n", "before_files": [{"content": "from setuptools import setup, find_packages\n\n\nabout = {}\nwith open(\"mindsdb/__about__.py\") as fp:\n exec(fp.read(), about)\n\n\nwith open(\"README.md\", \"r\") as fh:\n long_description = fh.read()\n\n\ndef install_deps():\n \"\"\"Reads requirements.txt and preprocess it\n to be feed into setuptools.\n\n This is the only possible way (we found)\n how requirements.txt can be reused in setup.py\n using dependencies from private github repositories.\n\n Links must be appendend by `-{StringWithAtLeastOneNumber}`\n or something like that, so e.g. `-9231` works as well as\n `1.1.0`. This is ignored by the setuptools, but has to be there.\n\n Warnings:\n to make pip respect the links, you have to use\n `--process-dependency-links` switch. 
So e.g.:\n `pip install --process-dependency-links {git-url}`\n\n Returns:\n list of packages and dependency links.\n \"\"\"\n default = open('requirements.txt', 'r').readlines()\n new_pkgs = []\n links = []\n for resource in default:\n if 'git+https' in resource:\n pkg = resource.split('#')[-1]\n links.append(resource.strip() + '-9876543210')\n new_pkgs.append(pkg.replace('egg=', '').rstrip())\n else:\n new_pkgs.append(resource.strip())\n return new_pkgs, links\n\n\npkgs, new_links = install_deps()\n\nsetup(\n name=about['__title__'],\n version=about['__version__'],\n url=about['__github__'],\n download_url=about['__pypi__'],\n license=about['__license__'],\n author=about['__author__'],\n author_email=about['__email__'],\n description=about['__description__'],\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n packages=find_packages(),\n install_requires=pkgs,\n dependency_links=new_links,\n include_package_data=True,\n classifiers=[\n \"Programming Language :: Python :: 3\",\n \"Operating System :: OS Independent\",\n ],\n python_requires=\">=3.6\"\n)\n", "path": "setup.py"}]}
1,791
76
gh_patches_debug_20224
rasdani/github-patches
git_diff
getsentry__sentry-44899
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [Project Transfer] Request Approval Email Uses Incorrect Customer Subdomain r### Environment SaaS (https://sentry.io/) ### Version n/a ### Link [relevant transaction](https://sentry.sentry.io/discover/javascript:31e672a100fd44f5ab3ac36d4be4dbd3/?field=title&field=event.type&field=project&field=user.display&field=timestamp&field=replayId&homepage=true&name=All+Events&project=11276&query=url%3A%22https%3A%2F%2Fmagiclamp-mobile-website-appli.sentry.io%2F%2A%22&sort=-timestamp&statsPeriod=14d&yAxis=count%28%29) ### DSN n/a ### Steps to Reproduce 1. Org A requests a project transfer - supplies an owner email for Org B 2. Org B owner clicks link in resulting "Request for Project Transfer" email 3. Link directs Org B owner to **org-a**.sentry.io/accept-transfer/?data=<some_hash> ### Expected Result ... 3. Link directs Org B owner to **org-b**.sentry.io/accept-transfer/?data=<some_hash> ### Actual Result Because Org B owner is not authenticated with orb-a.sentry.io they're asked to login, which will result in a variety of issues. Here's [a transaction where this happened](https://sentry.sentry.io/discover/javascript:31e672a100fd44f5ab3ac36d4be4dbd3/) </issue> <code> [start of src/sentry/api/endpoints/project_transfer.py] 1 import logging 2 from urllib.parse import urlencode 3 from uuid import uuid4 4 5 from django.utils import timezone 6 from rest_framework import status 7 from rest_framework.request import Request 8 from rest_framework.response import Response 9 10 from sentry import audit_log, options, roles 11 from sentry.api.base import region_silo_endpoint 12 from sentry.api.bases.project import ProjectEndpoint, ProjectPermission 13 from sentry.api.decorators import sudo_required 14 from sentry.models import OrganizationMember 15 from sentry.utils.email import MessageBuilder 16 from sentry.utils.signing import sign 17 18 delete_logger = logging.getLogger("sentry.deletions.api") 19 20 21 class RelaxedProjectPermission(ProjectPermission): 22 scope_map = {"POST": ["project:admin"]} 23 24 25 @region_silo_endpoint 26 class ProjectTransferEndpoint(ProjectEndpoint): 27 permission_classes = [RelaxedProjectPermission] 28 29 @sudo_required 30 def post(self, request: Request, project) -> Response: 31 """ 32 Transfer a Project 33 ```````````````` 34 35 Schedules a project for transfer to a new organization. 36 37 :pparam string organization_slug: the slug of the organization the 38 project belongs to. 39 :pparam string project_slug: the slug of the project to delete. 40 :param string email: email of new owner. 
must be an organization owner 41 :auth: required 42 """ 43 if project.is_internal_project(): 44 return Response( 45 '{"error": "Cannot transfer projects internally used by Sentry."}', 46 status=status.HTTP_403_FORBIDDEN, 47 ) 48 49 email = request.data.get("email") 50 51 if email is None: 52 return Response(status=status.HTTP_400_BAD_REQUEST) 53 54 if not request.user.is_authenticated: 55 return Response(status=status.HTTP_403_FORBIDDEN) 56 57 try: 58 owner = OrganizationMember.objects.get_members_by_email_and_role( 59 email=email, role=roles.get_top_dog().id 60 )[0] 61 except IndexError: 62 return Response( 63 {"detail": "Could not find an organization owner with that email"}, 64 status=status.HTTP_404_NOT_FOUND, 65 ) 66 67 organization = project.organization 68 transaction_id = uuid4().hex 69 url_data = sign( 70 actor_id=request.user.id, 71 from_organization_id=organization.id, 72 project_id=project.id, 73 user_id=owner.user_id, 74 transaction_id=transaction_id, 75 ) 76 77 context = { 78 "email": email, 79 "from_org": project.organization.name, 80 "project_name": project.slug, 81 "request_time": timezone.now(), 82 "url": organization.absolute_url( 83 "/accept-transfer/", query=urlencode({"data": url_data}) 84 ), 85 "requester": request.user, 86 } 87 MessageBuilder( 88 subject="{}Request for Project Transfer".format(options.get("mail.subject-prefix")), 89 template="sentry/emails/transfer_project.txt", 90 html_template="sentry/emails/transfer_project.html", 91 type="org.confirm_project_transfer_request", 92 context=context, 93 ).send_async([email]) 94 95 self.create_audit_entry( 96 request=request, 97 organization=project.organization, 98 target_object=project.id, 99 event=audit_log.get_event_id("PROJECT_REQUEST_TRANSFER"), 100 data=project.get_audit_log_data(), 101 transaction_id=transaction_id, 102 ) 103 104 return Response(status=status.HTTP_204_NO_CONTENT) 105 [end of src/sentry/api/endpoints/project_transfer.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/sentry/api/endpoints/project_transfer.py b/src/sentry/api/endpoints/project_transfer.py --- a/src/sentry/api/endpoints/project_transfer.py +++ b/src/sentry/api/endpoints/project_transfer.py @@ -13,6 +13,7 @@ from sentry.api.decorators import sudo_required from sentry.models import OrganizationMember from sentry.utils.email import MessageBuilder +from sentry.utils.http import absolute_uri from sentry.utils.signing import sign delete_logger = logging.getLogger("sentry.deletions.api") @@ -79,9 +80,7 @@ "from_org": project.organization.name, "project_name": project.slug, "request_time": timezone.now(), - "url": organization.absolute_url( - "/accept-transfer/", query=urlencode({"data": url_data}) - ), + "url": absolute_uri(f"/accept-transfer/?{urlencode({'data': url_data})}"), "requester": request.user, } MessageBuilder(
{"golden_diff": "diff --git a/src/sentry/api/endpoints/project_transfer.py b/src/sentry/api/endpoints/project_transfer.py\n--- a/src/sentry/api/endpoints/project_transfer.py\n+++ b/src/sentry/api/endpoints/project_transfer.py\n@@ -13,6 +13,7 @@\n from sentry.api.decorators import sudo_required\n from sentry.models import OrganizationMember\n from sentry.utils.email import MessageBuilder\n+from sentry.utils.http import absolute_uri\n from sentry.utils.signing import sign\n \n delete_logger = logging.getLogger(\"sentry.deletions.api\")\n@@ -79,9 +80,7 @@\n \"from_org\": project.organization.name,\n \"project_name\": project.slug,\n \"request_time\": timezone.now(),\n- \"url\": organization.absolute_url(\n- \"/accept-transfer/\", query=urlencode({\"data\": url_data})\n- ),\n+ \"url\": absolute_uri(f\"/accept-transfer/?{urlencode({'data': url_data})}\"),\n \"requester\": request.user,\n }\n MessageBuilder(\n", "issue": "[Project Transfer] Request Approval Email Uses Incorrect Customer Subdomain\nr### Environment\r\n\r\nSaaS (https://sentry.io/)\r\n\r\n### Version\r\n\r\nn/a\r\n\r\n### Link\r\n\r\n[relevant transaction](https://sentry.sentry.io/discover/javascript:31e672a100fd44f5ab3ac36d4be4dbd3/?field=title&field=event.type&field=project&field=user.display&field=timestamp&field=replayId&homepage=true&name=All+Events&project=11276&query=url%3A%22https%3A%2F%2Fmagiclamp-mobile-website-appli.sentry.io%2F%2A%22&sort=-timestamp&statsPeriod=14d&yAxis=count%28%29)\r\n\r\n### DSN\r\n\r\nn/a\r\n\r\n### Steps to Reproduce\r\n\r\n1. Org A requests a project transfer - supplies an owner email for Org B\r\n2. Org B owner clicks link in resulting \"Request for Project Transfer\" email\r\n3. Link directs Org B owner to **org-a**.sentry.io/accept-transfer/?data=<some_hash>\r\n\r\n### Expected Result\r\n\r\n...\r\n3. Link directs Org B owner to **org-b**.sentry.io/accept-transfer/?data=<some_hash>\r\n\r\n### Actual Result\r\n\r\nBecause Org B owner is not authenticated with orb-a.sentry.io they're asked to login, which will result in a variety of issues.\r\n\r\nHere's [a transaction where this happened](https://sentry.sentry.io/discover/javascript:31e672a100fd44f5ab3ac36d4be4dbd3/)\n", "before_files": [{"content": "import logging\nfrom urllib.parse import urlencode\nfrom uuid import uuid4\n\nfrom django.utils import timezone\nfrom rest_framework import status\nfrom rest_framework.request import Request\nfrom rest_framework.response import Response\n\nfrom sentry import audit_log, options, roles\nfrom sentry.api.base import region_silo_endpoint\nfrom sentry.api.bases.project import ProjectEndpoint, ProjectPermission\nfrom sentry.api.decorators import sudo_required\nfrom sentry.models import OrganizationMember\nfrom sentry.utils.email import MessageBuilder\nfrom sentry.utils.signing import sign\n\ndelete_logger = logging.getLogger(\"sentry.deletions.api\")\n\n\nclass RelaxedProjectPermission(ProjectPermission):\n scope_map = {\"POST\": [\"project:admin\"]}\n\n\n@region_silo_endpoint\nclass ProjectTransferEndpoint(ProjectEndpoint):\n permission_classes = [RelaxedProjectPermission]\n\n @sudo_required\n def post(self, request: Request, project) -> Response:\n \"\"\"\n Transfer a Project\n ````````````````\n\n Schedules a project for transfer to a new organization.\n\n :pparam string organization_slug: the slug of the organization the\n project belongs to.\n :pparam string project_slug: the slug of the project to delete.\n :param string email: email of new owner. 
must be an organization owner\n :auth: required\n \"\"\"\n if project.is_internal_project():\n return Response(\n '{\"error\": \"Cannot transfer projects internally used by Sentry.\"}',\n status=status.HTTP_403_FORBIDDEN,\n )\n\n email = request.data.get(\"email\")\n\n if email is None:\n return Response(status=status.HTTP_400_BAD_REQUEST)\n\n if not request.user.is_authenticated:\n return Response(status=status.HTTP_403_FORBIDDEN)\n\n try:\n owner = OrganizationMember.objects.get_members_by_email_and_role(\n email=email, role=roles.get_top_dog().id\n )[0]\n except IndexError:\n return Response(\n {\"detail\": \"Could not find an organization owner with that email\"},\n status=status.HTTP_404_NOT_FOUND,\n )\n\n organization = project.organization\n transaction_id = uuid4().hex\n url_data = sign(\n actor_id=request.user.id,\n from_organization_id=organization.id,\n project_id=project.id,\n user_id=owner.user_id,\n transaction_id=transaction_id,\n )\n\n context = {\n \"email\": email,\n \"from_org\": project.organization.name,\n \"project_name\": project.slug,\n \"request_time\": timezone.now(),\n \"url\": organization.absolute_url(\n \"/accept-transfer/\", query=urlencode({\"data\": url_data})\n ),\n \"requester\": request.user,\n }\n MessageBuilder(\n subject=\"{}Request for Project Transfer\".format(options.get(\"mail.subject-prefix\")),\n template=\"sentry/emails/transfer_project.txt\",\n html_template=\"sentry/emails/transfer_project.html\",\n type=\"org.confirm_project_transfer_request\",\n context=context,\n ).send_async([email])\n\n self.create_audit_entry(\n request=request,\n organization=project.organization,\n target_object=project.id,\n event=audit_log.get_event_id(\"PROJECT_REQUEST_TRANSFER\"),\n data=project.get_audit_log_data(),\n transaction_id=transaction_id,\n )\n\n return Response(status=status.HTTP_204_NO_CONTENT)\n", "path": "src/sentry/api/endpoints/project_transfer.py"}]}
1,829
219
gh_patches_debug_60614
rasdani/github-patches
git_diff
cloudtools__troposphere-1670
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Security Hub tags not formatted correctly Using the following code for a Security Hub resource results in tags that don't follow the structure of Security Hub Cloudformation: ``` hub = tpl.add_resource(Hub( title="ExampleHub", Tags=Tags( Tag("Test", "Test value") ) )) ``` The resulting CF template is as follows: ``` "AWSTemplateFormatVersion": "2010-09-09", "Description": "Security Hub configuration", "Resources": { "ExampleHub": { "Properties": { "Tags": [ { "Key": "Test", "Value": "Test value" } ] }, "Type": "AWS::SecurityHub::Hub" } } } ``` Looking into the CF documentation for security hub, it looks as though the tag JSON is different: ``` { "Description": "Example Hub with Tags", "Resources": { "ExampleHubWithTags": { "Type": "AWS::SecurityHub::Hub", "Properties": { "Tags": { "key1": "value1", "key2": "value2" } } } } } ``` Trying to apply the changeset from the generated TF template results in error `Property validation failure: [Value of property {/Tags} does not match type {Map}]`. </issue> <code> [start of troposphere/securityhub.py] 1 # Copyright (c) 2012-2019, Mark Peek <[email protected]> 2 # All rights reserved. 3 # 4 # See LICENSE file for full license. 5 # 6 # *** Do not modify - this file is autogenerated *** 7 # Resource specification version: 4.1.0 8 9 10 from . import AWSObject 11 from troposphere import Tags 12 13 14 class Hub(AWSObject): 15 resource_type = "AWS::SecurityHub::Hub" 16 17 props = { 18 'Tags': (Tags, False), 19 } 20 [end of troposphere/securityhub.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/troposphere/securityhub.py b/troposphere/securityhub.py --- a/troposphere/securityhub.py +++ b/troposphere/securityhub.py @@ -8,12 +8,12 @@ from . import AWSObject -from troposphere import Tags +from .validators import json_checker class Hub(AWSObject): resource_type = "AWS::SecurityHub::Hub" props = { - 'Tags': (Tags, False), + 'Tags': (json_checker, False), }
{"golden_diff": "diff --git a/troposphere/securityhub.py b/troposphere/securityhub.py\n--- a/troposphere/securityhub.py\n+++ b/troposphere/securityhub.py\n@@ -8,12 +8,12 @@\n \n \n from . import AWSObject\n-from troposphere import Tags\n+from .validators import json_checker\n \n \n class Hub(AWSObject):\n resource_type = \"AWS::SecurityHub::Hub\"\n \n props = {\n- 'Tags': (Tags, False),\n+ 'Tags': (json_checker, False),\n }\n", "issue": "Security Hub tags not formatted correctly\nUsing the following code for a Security Hub resource results in tags that don't follow the structure of Security Hub Cloudformation:\r\n\r\n```\r\nhub = tpl.add_resource(Hub(\r\n title=\"ExampleHub\",\r\n Tags=Tags(\r\n Tag(\"Test\", \"Test value\")\r\n )\r\n ))\r\n```\r\n\r\nThe resulting CF template is as follows:\r\n```\r\n \"AWSTemplateFormatVersion\": \"2010-09-09\",\r\n \"Description\": \"Security Hub configuration\",\r\n \"Resources\": {\r\n \"ExampleHub\": {\r\n \"Properties\": {\r\n \"Tags\": [\r\n {\r\n \"Key\": \"Test\",\r\n \"Value\": \"Test value\"\r\n }\r\n ]\r\n },\r\n \"Type\": \"AWS::SecurityHub::Hub\"\r\n }\r\n }\r\n}\r\n```\r\n\r\nLooking into the CF documentation for security hub, it looks as though the tag JSON is different:\r\n```\r\n{\r\n \"Description\": \"Example Hub with Tags\",\r\n \"Resources\": {\r\n \"ExampleHubWithTags\": {\r\n \"Type\": \"AWS::SecurityHub::Hub\",\r\n \"Properties\": {\r\n \"Tags\": {\r\n \"key1\": \"value1\",\r\n \"key2\": \"value2\"\r\n }\r\n }\r\n }\r\n }\r\n}\r\n```\r\n\r\nTrying to apply the changeset from the generated TF template results in error `Property validation failure: [Value of property {/Tags} does not match type {Map}]`.\n", "before_files": [{"content": "# Copyright (c) 2012-2019, Mark Peek <[email protected]>\n# All rights reserved.\n#\n# See LICENSE file for full license.\n#\n# *** Do not modify - this file is autogenerated ***\n# Resource specification version: 4.1.0\n\n\nfrom . import AWSObject\nfrom troposphere import Tags\n\n\nclass Hub(AWSObject):\n resource_type = \"AWS::SecurityHub::Hub\"\n\n props = {\n 'Tags': (Tags, False),\n }\n", "path": "troposphere/securityhub.py"}]}
980
117
gh_patches_debug_3933
rasdani/github-patches
git_diff
microsoft__ptvsd-1559
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> attach_socket_import tests hang waiting for "process" event (https://github.com/microsoft/ptvsd/tree/dbg_adapter_refactor) When running multiple tests with `start_method` set to `attach_socket_cmdline` or `attach_socket_import`, subsequent tests fail because they cannot reuse the port number. It looks like ptvsd is not getting shut down properly, or the next test starts running before ptvsd exits. Estimate is uncertain, because this needs to be investigated to determine the root cause. Only happens on Python 2.7. </issue> <code> [start of src/ptvsd/__init__.py] 1 # Copyright (c) Microsoft Corporation. All rights reserved. 2 # Licensed under the MIT License. See LICENSE in the project root 3 # for license information. 4 5 from __future__ import absolute_import, print_function, unicode_literals 6 7 """An implementation of the Debug Adapter Protocol (DAP) for Python. 8 9 https://microsoft.github.io/debug-adapter-protocol/ 10 """ 11 12 __all__ = [ 13 "__version__", 14 "attach", 15 "break_into_debugger", 16 "debug_this_thread", 17 "enable_attach", 18 "is_attached", 19 "wait_for_attach", 20 ] 21 22 # Force absolute path on Python 2. 23 from os import path 24 __file__ = path.abspath(__file__) 25 del path 26 27 from ptvsd import _version 28 __version__ = _version.get_versions()["version"] 29 del _version 30 31 from ptvsd.server.attach_server import ( 32 attach, 33 break_into_debugger, 34 debug_this_thread, 35 enable_attach, 36 is_attached, 37 wait_for_attach, 38 ) 39 [end of src/ptvsd/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/ptvsd/__init__.py b/src/ptvsd/__init__.py --- a/src/ptvsd/__init__.py +++ b/src/ptvsd/__init__.py @@ -24,6 +24,14 @@ __file__ = path.abspath(__file__) del path +# Preload encodings that we're going to use to avoid import deadlocks on Python 2. +import codecs +codecs.lookup('ascii') +codecs.lookup('utf8') +codecs.lookup('utf-8') +codecs.lookup('latin1') +codecs.lookup('latin-1') + from ptvsd import _version __version__ = _version.get_versions()["version"] del _version
{"golden_diff": "diff --git a/src/ptvsd/__init__.py b/src/ptvsd/__init__.py\n--- a/src/ptvsd/__init__.py\n+++ b/src/ptvsd/__init__.py\n@@ -24,6 +24,14 @@\n __file__ = path.abspath(__file__)\n del path\n \n+# Preload encodings that we're going to use to avoid import deadlocks on Python 2.\n+import codecs\n+codecs.lookup('ascii')\n+codecs.lookup('utf8')\n+codecs.lookup('utf-8')\n+codecs.lookup('latin1')\n+codecs.lookup('latin-1')\n+\n from ptvsd import _version\n __version__ = _version.get_versions()[\"version\"]\n del _version\n", "issue": "attach_socket_import tests hang waiting for \"process\" event\n(https://github.com/microsoft/ptvsd/tree/dbg_adapter_refactor)\r\n\r\nWhen running multiple tests with `start_method` set to `attach_socket_cmdline` or `attach_socket_import`, subsequent tests fail because they cannot reuse the port number. It looks like ptvsd is not getting shut down properly, or the next test starts running before ptvsd exits. \r\n\r\nEstimate is uncertain, because this needs to be investigated to determine the root cause.\r\n\r\nOnly happens on Python 2.7.\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nfrom __future__ import absolute_import, print_function, unicode_literals\n\n\"\"\"An implementation of the Debug Adapter Protocol (DAP) for Python.\n\nhttps://microsoft.github.io/debug-adapter-protocol/\n\"\"\"\n\n__all__ = [\n \"__version__\",\n \"attach\",\n \"break_into_debugger\",\n \"debug_this_thread\",\n \"enable_attach\",\n \"is_attached\",\n \"wait_for_attach\",\n]\n\n# Force absolute path on Python 2.\nfrom os import path\n__file__ = path.abspath(__file__)\ndel path\n\nfrom ptvsd import _version\n__version__ = _version.get_versions()[\"version\"]\ndel _version\n\nfrom ptvsd.server.attach_server import (\n attach,\n break_into_debugger,\n debug_this_thread,\n enable_attach,\n is_attached,\n wait_for_attach,\n)\n", "path": "src/ptvsd/__init__.py"}]}
939
164
gh_patches_debug_47811
rasdani/github-patches
git_diff
CTFd__CTFd-1921
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Stub issue for ctfcli #13 https://github.com/CTFd/ctfcli/issues/13 This needs to be resolved in CTFd most likely. </issue> <code> [start of CTFd/plugins/dynamic_challenges/__init__.py] 1 from __future__ import division # Use floating point for math calculations 2 3 import math 4 5 from flask import Blueprint 6 7 from CTFd.models import Challenges, Solves, db 8 from CTFd.plugins import register_plugin_assets_directory 9 from CTFd.plugins.challenges import CHALLENGE_CLASSES, BaseChallenge 10 from CTFd.plugins.migrations import upgrade 11 from CTFd.utils.modes import get_model 12 13 14 class DynamicChallenge(Challenges): 15 __mapper_args__ = {"polymorphic_identity": "dynamic"} 16 id = db.Column( 17 db.Integer, db.ForeignKey("challenges.id", ondelete="CASCADE"), primary_key=True 18 ) 19 initial = db.Column(db.Integer, default=0) 20 minimum = db.Column(db.Integer, default=0) 21 decay = db.Column(db.Integer, default=0) 22 23 def __init__(self, *args, **kwargs): 24 super(DynamicChallenge, self).__init__(**kwargs) 25 self.initial = kwargs["value"] 26 27 28 class DynamicValueChallenge(BaseChallenge): 29 id = "dynamic" # Unique identifier used to register challenges 30 name = "dynamic" # Name of a challenge type 31 templates = { # Handlebars templates used for each aspect of challenge editing & viewing 32 "create": "/plugins/dynamic_challenges/assets/create.html", 33 "update": "/plugins/dynamic_challenges/assets/update.html", 34 "view": "/plugins/dynamic_challenges/assets/view.html", 35 } 36 scripts = { # Scripts that are loaded when a template is loaded 37 "create": "/plugins/dynamic_challenges/assets/create.js", 38 "update": "/plugins/dynamic_challenges/assets/update.js", 39 "view": "/plugins/dynamic_challenges/assets/view.js", 40 } 41 # Route at which files are accessible. This must be registered using register_plugin_assets_directory() 42 route = "/plugins/dynamic_challenges/assets/" 43 # Blueprint used to access the static_folder directory. 44 blueprint = Blueprint( 45 "dynamic_challenges", 46 __name__, 47 template_folder="templates", 48 static_folder="assets", 49 ) 50 challenge_model = DynamicChallenge 51 52 @classmethod 53 def calculate_value(cls, challenge): 54 Model = get_model() 55 56 solve_count = ( 57 Solves.query.join(Model, Solves.account_id == Model.id) 58 .filter( 59 Solves.challenge_id == challenge.id, 60 Model.hidden == False, 61 Model.banned == False, 62 ) 63 .count() 64 ) 65 66 # If the solve count is 0 we shouldn't manipulate the solve count to 67 # let the math update back to normal 68 if solve_count != 0: 69 # We subtract -1 to allow the first solver to get max point value 70 solve_count -= 1 71 72 # It is important that this calculation takes into account floats. 73 # Hence this file uses from __future__ import division 74 value = ( 75 ((challenge.minimum - challenge.initial) / (challenge.decay ** 2)) 76 * (solve_count ** 2) 77 ) + challenge.initial 78 79 value = math.ceil(value) 80 81 if value < challenge.minimum: 82 value = challenge.minimum 83 84 challenge.value = value 85 db.session.commit() 86 return challenge 87 88 @classmethod 89 def read(cls, challenge): 90 """ 91 This method is in used to access the data of a challenge in a format processable by the front end. 
92 93 :param challenge: 94 :return: Challenge object, data dictionary to be returned to the user 95 """ 96 challenge = DynamicChallenge.query.filter_by(id=challenge.id).first() 97 data = { 98 "id": challenge.id, 99 "name": challenge.name, 100 "value": challenge.value, 101 "initial": challenge.initial, 102 "decay": challenge.decay, 103 "minimum": challenge.minimum, 104 "description": challenge.description, 105 "category": challenge.category, 106 "state": challenge.state, 107 "max_attempts": challenge.max_attempts, 108 "type": challenge.type, 109 "type_data": { 110 "id": cls.id, 111 "name": cls.name, 112 "templates": cls.templates, 113 "scripts": cls.scripts, 114 }, 115 } 116 return data 117 118 @classmethod 119 def update(cls, challenge, request): 120 """ 121 This method is used to update the information associated with a challenge. This should be kept strictly to the 122 Challenges table and any child tables. 123 124 :param challenge: 125 :param request: 126 :return: 127 """ 128 data = request.form or request.get_json() 129 130 for attr, value in data.items(): 131 # We need to set these to floats so that the next operations don't operate on strings 132 if attr in ("initial", "minimum", "decay"): 133 value = float(value) 134 setattr(challenge, attr, value) 135 136 return DynamicValueChallenge.calculate_value(challenge) 137 138 @classmethod 139 def solve(cls, user, team, challenge, request): 140 super().solve(user, team, challenge, request) 141 142 DynamicValueChallenge.calculate_value(challenge) 143 144 145 def load(app): 146 upgrade() 147 CHALLENGE_CLASSES["dynamic"] = DynamicValueChallenge 148 register_plugin_assets_directory( 149 app, base_path="/plugins/dynamic_challenges/assets/" 150 ) 151 [end of CTFd/plugins/dynamic_challenges/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/CTFd/plugins/dynamic_challenges/__init__.py b/CTFd/plugins/dynamic_challenges/__init__.py --- a/CTFd/plugins/dynamic_challenges/__init__.py +++ b/CTFd/plugins/dynamic_challenges/__init__.py @@ -22,7 +22,7 @@ def __init__(self, *args, **kwargs): super(DynamicChallenge, self).__init__(**kwargs) - self.initial = kwargs["value"] + self.value = kwargs["initial"] class DynamicValueChallenge(BaseChallenge):
{"golden_diff": "diff --git a/CTFd/plugins/dynamic_challenges/__init__.py b/CTFd/plugins/dynamic_challenges/__init__.py\n--- a/CTFd/plugins/dynamic_challenges/__init__.py\n+++ b/CTFd/plugins/dynamic_challenges/__init__.py\n@@ -22,7 +22,7 @@\n \n def __init__(self, *args, **kwargs):\n super(DynamicChallenge, self).__init__(**kwargs)\n- self.initial = kwargs[\"value\"]\n+ self.value = kwargs[\"initial\"]\n \n \n class DynamicValueChallenge(BaseChallenge):\n", "issue": "Stub issue for ctfcli #13\nhttps://github.com/CTFd/ctfcli/issues/13\r\n\r\nThis needs to be resolved in CTFd most likely.\n", "before_files": [{"content": "from __future__ import division # Use floating point for math calculations\n\nimport math\n\nfrom flask import Blueprint\n\nfrom CTFd.models import Challenges, Solves, db\nfrom CTFd.plugins import register_plugin_assets_directory\nfrom CTFd.plugins.challenges import CHALLENGE_CLASSES, BaseChallenge\nfrom CTFd.plugins.migrations import upgrade\nfrom CTFd.utils.modes import get_model\n\n\nclass DynamicChallenge(Challenges):\n __mapper_args__ = {\"polymorphic_identity\": \"dynamic\"}\n id = db.Column(\n db.Integer, db.ForeignKey(\"challenges.id\", ondelete=\"CASCADE\"), primary_key=True\n )\n initial = db.Column(db.Integer, default=0)\n minimum = db.Column(db.Integer, default=0)\n decay = db.Column(db.Integer, default=0)\n\n def __init__(self, *args, **kwargs):\n super(DynamicChallenge, self).__init__(**kwargs)\n self.initial = kwargs[\"value\"]\n\n\nclass DynamicValueChallenge(BaseChallenge):\n id = \"dynamic\" # Unique identifier used to register challenges\n name = \"dynamic\" # Name of a challenge type\n templates = { # Handlebars templates used for each aspect of challenge editing & viewing\n \"create\": \"/plugins/dynamic_challenges/assets/create.html\",\n \"update\": \"/plugins/dynamic_challenges/assets/update.html\",\n \"view\": \"/plugins/dynamic_challenges/assets/view.html\",\n }\n scripts = { # Scripts that are loaded when a template is loaded\n \"create\": \"/plugins/dynamic_challenges/assets/create.js\",\n \"update\": \"/plugins/dynamic_challenges/assets/update.js\",\n \"view\": \"/plugins/dynamic_challenges/assets/view.js\",\n }\n # Route at which files are accessible. 
This must be registered using register_plugin_assets_directory()\n route = \"/plugins/dynamic_challenges/assets/\"\n # Blueprint used to access the static_folder directory.\n blueprint = Blueprint(\n \"dynamic_challenges\",\n __name__,\n template_folder=\"templates\",\n static_folder=\"assets\",\n )\n challenge_model = DynamicChallenge\n\n @classmethod\n def calculate_value(cls, challenge):\n Model = get_model()\n\n solve_count = (\n Solves.query.join(Model, Solves.account_id == Model.id)\n .filter(\n Solves.challenge_id == challenge.id,\n Model.hidden == False,\n Model.banned == False,\n )\n .count()\n )\n\n # If the solve count is 0 we shouldn't manipulate the solve count to\n # let the math update back to normal\n if solve_count != 0:\n # We subtract -1 to allow the first solver to get max point value\n solve_count -= 1\n\n # It is important that this calculation takes into account floats.\n # Hence this file uses from __future__ import division\n value = (\n ((challenge.minimum - challenge.initial) / (challenge.decay ** 2))\n * (solve_count ** 2)\n ) + challenge.initial\n\n value = math.ceil(value)\n\n if value < challenge.minimum:\n value = challenge.minimum\n\n challenge.value = value\n db.session.commit()\n return challenge\n\n @classmethod\n def read(cls, challenge):\n \"\"\"\n This method is in used to access the data of a challenge in a format processable by the front end.\n\n :param challenge:\n :return: Challenge object, data dictionary to be returned to the user\n \"\"\"\n challenge = DynamicChallenge.query.filter_by(id=challenge.id).first()\n data = {\n \"id\": challenge.id,\n \"name\": challenge.name,\n \"value\": challenge.value,\n \"initial\": challenge.initial,\n \"decay\": challenge.decay,\n \"minimum\": challenge.minimum,\n \"description\": challenge.description,\n \"category\": challenge.category,\n \"state\": challenge.state,\n \"max_attempts\": challenge.max_attempts,\n \"type\": challenge.type,\n \"type_data\": {\n \"id\": cls.id,\n \"name\": cls.name,\n \"templates\": cls.templates,\n \"scripts\": cls.scripts,\n },\n }\n return data\n\n @classmethod\n def update(cls, challenge, request):\n \"\"\"\n This method is used to update the information associated with a challenge. This should be kept strictly to the\n Challenges table and any child tables.\n\n :param challenge:\n :param request:\n :return:\n \"\"\"\n data = request.form or request.get_json()\n\n for attr, value in data.items():\n # We need to set these to floats so that the next operations don't operate on strings\n if attr in (\"initial\", \"minimum\", \"decay\"):\n value = float(value)\n setattr(challenge, attr, value)\n\n return DynamicValueChallenge.calculate_value(challenge)\n\n @classmethod\n def solve(cls, user, team, challenge, request):\n super().solve(user, team, challenge, request)\n\n DynamicValueChallenge.calculate_value(challenge)\n\n\ndef load(app):\n upgrade()\n CHALLENGE_CLASSES[\"dynamic\"] = DynamicValueChallenge\n register_plugin_assets_directory(\n app, base_path=\"/plugins/dynamic_challenges/assets/\"\n )\n", "path": "CTFd/plugins/dynamic_challenges/__init__.py"}]}
2,046
126
gh_patches_debug_21182
rasdani/github-patches
git_diff
saleor__saleor-527
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> 302 Status in Shipping Method. **TD;LR** When I enter the "Shipping Address" entire as anonymous or create a new address. It makes me submit it again and again. Using c79bdbb13c399b253e3a26480d01f1cb219b4700 I get this most annoying bug. I can't solve from days. It only happens in the production site, not on the test as well as development. I really don't understand where I am going wrong. What should really happen `Shipping Address(302) -> Shipping Method(200)` Problem: `Shipping Address(302) -> Shipping Method(302) -> Shipping Address(200)`. This mean the users has to submit the form again and again. Cases when `Shipping Method(200)` - 3 to 5 attempts of submitting the form. - 2 to 5 minutes of waiting to submit again. I have even done an attempt using the `old checkout`, when the billing address was the first form, Still result is the same. </issue> <code> [start of saleor/core/__init__.py] 1 from __future__ import unicode_literals 2 3 from babel.numbers import get_territory_currencies 4 from django.conf import settings 5 from django_countries import countries 6 from django_countries.fields import Country 7 from geolite2 import geolite2 8 9 TOKEN_PATTERN = ('(?P<token>[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}' 10 '-[0-9a-z]{12})') 11 12 13 def get_country_by_ip(ip_address): 14 reader = geolite2.reader() 15 geo_data = reader.get(ip_address) 16 geolite2.close() 17 if geo_data and 'country' in geo_data and 'iso_code' in geo_data['country']: 18 country_iso_code = geo_data['country']['iso_code'] 19 if country_iso_code in countries: 20 return Country(country_iso_code) 21 22 23 def get_currency_for_country(country): 24 currencies = get_territory_currencies(country.code) 25 if len(currencies): 26 main_currency = currencies[0] 27 if main_currency in settings.AVAILABLE_CURRENCIES: 28 return main_currency 29 return settings.DEFAULT_CURRENCY 30 [end of saleor/core/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/saleor/core/__init__.py b/saleor/core/__init__.py --- a/saleor/core/__init__.py +++ b/saleor/core/__init__.py @@ -2,6 +2,7 @@ from babel.numbers import get_territory_currencies from django.conf import settings +from django.core.checks import register, Tags, Warning from django_countries import countries from django_countries.fields import Country from geolite2 import geolite2 @@ -27,3 +28,19 @@ if main_currency in settings.AVAILABLE_CURRENCIES: return main_currency return settings.DEFAULT_CURRENCY + + +@register() +def check_session_caching(app_configs, **kwargs): + errors = [] + cached_engines = { + 'django.contrib.sessions.backends.cache', + 'django.contrib.sessions.backends.cached_db'} + if ('locmem' in settings.CACHES['default']['BACKEND'] and + settings.SESSION_ENGINE in cached_engines): + errors.append( + Warning( + 'Session caching cannot work with locmem backend', + 'User sessions need to be globally shared, use a cache server like Redis.', + 'saleor.W001')) + return errors
{"golden_diff": "diff --git a/saleor/core/__init__.py b/saleor/core/__init__.py\n--- a/saleor/core/__init__.py\n+++ b/saleor/core/__init__.py\n@@ -2,6 +2,7 @@\n \n from babel.numbers import get_territory_currencies\n from django.conf import settings\n+from django.core.checks import register, Tags, Warning\n from django_countries import countries\n from django_countries.fields import Country\n from geolite2 import geolite2\n@@ -27,3 +28,19 @@\n if main_currency in settings.AVAILABLE_CURRENCIES:\n return main_currency\n return settings.DEFAULT_CURRENCY\n+\n+\n+@register()\n+def check_session_caching(app_configs, **kwargs):\n+ errors = []\n+ cached_engines = {\n+ 'django.contrib.sessions.backends.cache',\n+ 'django.contrib.sessions.backends.cached_db'}\n+ if ('locmem' in settings.CACHES['default']['BACKEND'] and\n+ settings.SESSION_ENGINE in cached_engines):\n+ errors.append(\n+ Warning(\n+ 'Session caching cannot work with locmem backend',\n+ 'User sessions need to be globally shared, use a cache server like Redis.',\n+ 'saleor.W001'))\n+ return errors\n", "issue": "302 Status in Shipping Method. \n**TD;LR** When I enter the \"Shipping Address\" entire as anonymous or create a new address. It makes me submit it again and again.\n\nUsing c79bdbb13c399b253e3a26480d01f1cb219b4700\n\nI get this most annoying bug. I can't solve from days. It only happens in the production site, not on the test as well as development. I really don't understand where I am going wrong.\n\nWhat should really happen `Shipping Address(302) -> Shipping Method(200)`\n\nProblem:\n`Shipping Address(302) -> Shipping Method(302) -> Shipping Address(200)`.\nThis mean the users has to submit the form again and again.\n\nCases when `Shipping Method(200)` \n- 3 to 5 attempts of submitting the form.\n- 2 to 5 minutes of waiting to submit again.\n\nI have even done an attempt using the `old checkout`, when the billing address was the first form, Still result is the same.\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nfrom babel.numbers import get_territory_currencies\nfrom django.conf import settings\nfrom django_countries import countries\nfrom django_countries.fields import Country\nfrom geolite2 import geolite2\n\nTOKEN_PATTERN = ('(?P<token>[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}'\n '-[0-9a-z]{12})')\n\n\ndef get_country_by_ip(ip_address):\n reader = geolite2.reader()\n geo_data = reader.get(ip_address)\n geolite2.close()\n if geo_data and 'country' in geo_data and 'iso_code' in geo_data['country']:\n country_iso_code = geo_data['country']['iso_code']\n if country_iso_code in countries:\n return Country(country_iso_code)\n\n\ndef get_currency_for_country(country):\n currencies = get_territory_currencies(country.code)\n if len(currencies):\n main_currency = currencies[0]\n if main_currency in settings.AVAILABLE_CURRENCIES:\n return main_currency\n return settings.DEFAULT_CURRENCY\n", "path": "saleor/core/__init__.py"}]}
1,091
285
gh_patches_debug_5118
rasdani/github-patches
git_diff
akvo__akvo-rsr-2254
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Create a 500 page Create a (working) 500 page with a message like: > Oops something went wrong. We have been notified and will take action asap. Please check in later or contact us at xyz. RSR team </issue> <code> [start of akvo/rsr/views/error.py] 1 # -*- coding: utf-8 -*- 2 """ 3 Akvo RSR is covered by the GNU Affero General Public License. 4 See more details in the license.txt file located at the root folder of the 5 Akvo RSR module. For additional details on the GNU license please 6 see < http://www.gnu.org/licenses/agpl.html >. 7 """ 8 9 from django.http import HttpResponse 10 11 12 def server_error(request, template_name='500.html'): 13 HttpResponse("Server Error - 500") 14 [end of akvo/rsr/views/error.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/akvo/rsr/views/error.py b/akvo/rsr/views/error.py --- a/akvo/rsr/views/error.py +++ b/akvo/rsr/views/error.py @@ -6,8 +6,12 @@ see < http://www.gnu.org/licenses/agpl.html >. """ -from django.http import HttpResponse +from django.shortcuts import render_to_response +from django.template import RequestContext def server_error(request, template_name='500.html'): - HttpResponse("Server Error - 500") + response = render_to_response('500.html', {}, context_instance=RequestContext(request)) + response.status_code = 500 + return response +
{"golden_diff": "diff --git a/akvo/rsr/views/error.py b/akvo/rsr/views/error.py\n--- a/akvo/rsr/views/error.py\n+++ b/akvo/rsr/views/error.py\n@@ -6,8 +6,12 @@\n see < http://www.gnu.org/licenses/agpl.html >.\n \"\"\"\n \n-from django.http import HttpResponse\n+from django.shortcuts import render_to_response\n+from django.template import RequestContext\n \n \n def server_error(request, template_name='500.html'):\n- HttpResponse(\"Server Error - 500\")\n+ response = render_to_response('500.html', {}, context_instance=RequestContext(request))\n+ response.status_code = 500\n+ return response\n+\n", "issue": "Create a 500 page\nCreate a (working) 500 page with a message like:\n\n> Oops something went wrong. We have been notified and will take action asap. Please check in later or contact us at xyz. RSR team\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\n Akvo RSR is covered by the GNU Affero General Public License.\n See more details in the license.txt file located at the root folder of the\n Akvo RSR module. For additional details on the GNU license please\n see < http://www.gnu.org/licenses/agpl.html >.\n\"\"\"\n\nfrom django.http import HttpResponse\n\n\ndef server_error(request, template_name='500.html'):\n HttpResponse(\"Server Error - 500\")\n", "path": "akvo/rsr/views/error.py"}]}
718
158
gh_patches_debug_31160
rasdani/github-patches
git_diff
ibis-project__ibis-2308
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Impala pandas_interop.py DataFrameWriter.write_csv() fails with PermissionError on Windows Platforms pandas_interop.py DataFrameWriter.write_csv() fails with PermissionError on Windows Platforms due to how Pythons [tempfile.NamedTemporaryFile()](https://docs.python.org/3/library/tempfile.html#tempfile.NamedTemporaryFile) works. Specifically, if the OS is Windows and the NamedTemporaryFile delete parameter is True, the IO operation will fail, because Python will open the file and then Pandas will attempt to open the file again. See [this SO answer ](https://stackoverflow.com/a/23212515/3744998)for more information. I have replicated the issue manually and verified that setting the delete parameter to False resolves the issue. Perhaps we can add a platform check and explicitly set the delete parameter based on the platform? Then after we [exit the context statements scope](https://github.com/ibis-project/ibis/blob/master/ibis/impala/pandas_interop.py#L80), explicitly delete the temporary file. I imagine it could look something like the following. ``` python import os def write_csv(self, path): if os.name == 'nt': # Should probably think of a better name for this boolean delete = False else: delete = True with tempfile.NamedTemporaryFile(delete=delete) as f: tmp_name = f.name # Write the DataFrame to the temporary file path if options.verbose: util.log( 'Writing DataFrame to temporary file {}'.format(f.name) ) self.df.to_csv( f.name, header=False, index=False, sep=',', quoting=csv.QUOTE_NONE, escapechar='\\', na_rep='#NULL', ) f.seek(0) if options.verbose: util.log('Writing CSV to: {0}'.format(path)) self.hdfs.put(path, f.name) if not delete: os.remove(tmp_name) return path ``` I don't know that much about the Ibis code base, so this is just a first pass. I hope it's helpful. </issue> <code> [start of ibis/impala/pandas_interop.py] 1 # Copyright 2014 Cloudera Inc. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 
14 15 import csv 16 import tempfile 17 18 from posixpath import join as pjoin 19 20 import ibis.common.exceptions as com 21 import ibis.expr.schema as sch 22 import ibis.util as util 23 from ibis.config import options 24 25 26 class DataFrameWriter: 27 28 """ 29 Interface class for writing pandas objects to Impala tables 30 31 Class takes ownership of any temporary data written to HDFS 32 """ 33 34 def __init__(self, client, df, path=None): 35 self.client = client 36 self.hdfs = client.hdfs 37 38 self.df = df 39 40 self.temp_hdfs_dirs = [] 41 42 def write_temp_csv(self): 43 temp_hdfs_dir = pjoin( 44 options.impala.temp_hdfs_path, 'pandas_{}'.format(util.guid()) 45 ) 46 self.hdfs.mkdir(temp_hdfs_dir) 47 48 # Keep track of the temporary HDFS file 49 self.temp_hdfs_dirs.append(temp_hdfs_dir) 50 51 # Write the file to HDFS 52 hdfs_path = pjoin(temp_hdfs_dir, '0.csv') 53 54 self.write_csv(hdfs_path) 55 56 return temp_hdfs_dir 57 58 def write_csv(self, path): 59 with tempfile.NamedTemporaryFile() as f: 60 # Write the DataFrame to the temporary file path 61 if options.verbose: 62 util.log( 63 'Writing DataFrame to temporary file {}'.format(f.name) 64 ) 65 66 self.df.to_csv( 67 f.name, 68 header=False, 69 index=False, 70 sep=',', 71 quoting=csv.QUOTE_NONE, 72 escapechar='\\', 73 na_rep='#NULL', 74 ) 75 f.seek(0) 76 77 if options.verbose: 78 util.log('Writing CSV to: {0}'.format(path)) 79 80 self.hdfs.put(path, f.name) 81 return path 82 83 def get_schema(self): 84 # define a temporary table using delimited data 85 return sch.infer(self.df) 86 87 def delimited_table(self, csv_dir, name=None, database=None): 88 temp_delimited_name = 'ibis_tmp_pandas_{0}'.format(util.guid()) 89 schema = self.get_schema() 90 91 return self.client.delimited_file( 92 csv_dir, 93 schema, 94 name=temp_delimited_name, 95 database=database, 96 delimiter=',', 97 na_rep='#NULL', 98 escapechar='\\\\', 99 external=True, 100 persist=False, 101 ) 102 103 def __del__(self): 104 try: 105 self.cleanup() 106 except com.IbisError: 107 pass 108 109 def cleanup(self): 110 for path in self.temp_hdfs_dirs: 111 self.hdfs.rmdir(path) 112 self.temp_hdfs_dirs = [] 113 self.csv_dir = None 114 115 116 def write_temp_dataframe(client, df): 117 writer = DataFrameWriter(client, df) 118 path = writer.write_temp_csv() 119 return writer, writer.delimited_table(path) 120 [end of ibis/impala/pandas_interop.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ibis/impala/pandas_interop.py b/ibis/impala/pandas_interop.py --- a/ibis/impala/pandas_interop.py +++ b/ibis/impala/pandas_interop.py @@ -13,8 +13,8 @@ # limitations under the License. import csv +import os import tempfile - from posixpath import join as pjoin import ibis.common.exceptions as com @@ -56,15 +56,21 @@ return temp_hdfs_dir def write_csv(self, path): - with tempfile.NamedTemporaryFile() as f: + # Use a temporary dir instead of a temporary file + # to provide Windows support and avoid #2267 + # https://github.com/ibis-project/ibis/issues/2267 + with tempfile.TemporaryDirectory() as f: # Write the DataFrame to the temporary file path + tmp_file_path = os.path.join(f, 'impala_temp_file.csv') if options.verbose: util.log( - 'Writing DataFrame to temporary file {}'.format(f.name) + 'Writing DataFrame to temporary directory {}'.format( + tmp_file_path + ) ) self.df.to_csv( - f.name, + tmp_file_path, header=False, index=False, sep=',', @@ -72,12 +78,11 @@ escapechar='\\', na_rep='#NULL', ) - f.seek(0) if options.verbose: util.log('Writing CSV to: {0}'.format(path)) - self.hdfs.put(path, f.name) + self.hdfs.put(path, tmp_file_path) return path def get_schema(self):
{"golden_diff": "diff --git a/ibis/impala/pandas_interop.py b/ibis/impala/pandas_interop.py\n--- a/ibis/impala/pandas_interop.py\n+++ b/ibis/impala/pandas_interop.py\n@@ -13,8 +13,8 @@\n # limitations under the License.\n \n import csv\n+import os\n import tempfile\n-\n from posixpath import join as pjoin\n \n import ibis.common.exceptions as com\n@@ -56,15 +56,21 @@\n return temp_hdfs_dir\n \n def write_csv(self, path):\n- with tempfile.NamedTemporaryFile() as f:\n+ # Use a temporary dir instead of a temporary file\n+ # to provide Windows support and avoid #2267\n+ # https://github.com/ibis-project/ibis/issues/2267\n+ with tempfile.TemporaryDirectory() as f:\n # Write the DataFrame to the temporary file path\n+ tmp_file_path = os.path.join(f, 'impala_temp_file.csv')\n if options.verbose:\n util.log(\n- 'Writing DataFrame to temporary file {}'.format(f.name)\n+ 'Writing DataFrame to temporary directory {}'.format(\n+ tmp_file_path\n+ )\n )\n \n self.df.to_csv(\n- f.name,\n+ tmp_file_path,\n header=False,\n index=False,\n sep=',',\n@@ -72,12 +78,11 @@\n escapechar='\\\\',\n na_rep='#NULL',\n )\n- f.seek(0)\n \n if options.verbose:\n util.log('Writing CSV to: {0}'.format(path))\n \n- self.hdfs.put(path, f.name)\n+ self.hdfs.put(path, tmp_file_path)\n return path\n \n def get_schema(self):\n", "issue": "Impala pandas_interop.py DataFrameWriter.write_csv() fails with PermissionError on Windows Platforms\npandas_interop.py DataFrameWriter.write_csv() fails with PermissionError on Windows Platforms due to how Pythons [tempfile.NamedTemporaryFile()](https://docs.python.org/3/library/tempfile.html#tempfile.NamedTemporaryFile) works. \r\n\r\nSpecifically, if the OS is Windows and the NamedTemporaryFile delete parameter is True, the IO operation will fail, because Python will open the file and then Pandas will attempt to open the file again. See [this SO answer ](https://stackoverflow.com/a/23212515/3744998)for more information. \r\n\r\nI have replicated the issue manually and verified that setting the delete parameter to False resolves the issue. Perhaps we can add a platform check and explicitly set the delete parameter based on the platform? Then after we [exit the context statements scope](https://github.com/ibis-project/ibis/blob/master/ibis/impala/pandas_interop.py#L80), explicitly delete the temporary file. I imagine it could look something like the following.\r\n\r\n``` python\r\n\r\n import os\r\n\r\n def write_csv(self, path):\r\n if os.name == 'nt':\r\n # Should probably think of a better name for this boolean\r\n delete = False\r\n else:\r\n delete = True\r\n\r\n with tempfile.NamedTemporaryFile(delete=delete) as f:\r\n tmp_name = f.name\r\n # Write the DataFrame to the temporary file path\r\n if options.verbose:\r\n util.log(\r\n 'Writing DataFrame to temporary file {}'.format(f.name)\r\n )\r\n\r\n self.df.to_csv(\r\n f.name,\r\n header=False,\r\n index=False,\r\n sep=',',\r\n quoting=csv.QUOTE_NONE,\r\n escapechar='\\\\',\r\n na_rep='#NULL',\r\n )\r\n f.seek(0)\r\n\r\n if options.verbose:\r\n util.log('Writing CSV to: {0}'.format(path))\r\n\r\n self.hdfs.put(path, f.name)\r\n\r\n if not delete:\r\n os.remove(tmp_name)\r\n\r\n return path\r\n```\r\n\r\n\r\nI don't know that much about the Ibis code base, so this is just a first pass. I hope it's helpful. 
\n", "before_files": [{"content": "# Copyright 2014 Cloudera Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport csv\nimport tempfile\n\nfrom posixpath import join as pjoin\n\nimport ibis.common.exceptions as com\nimport ibis.expr.schema as sch\nimport ibis.util as util\nfrom ibis.config import options\n\n\nclass DataFrameWriter:\n\n \"\"\"\n Interface class for writing pandas objects to Impala tables\n\n Class takes ownership of any temporary data written to HDFS\n \"\"\"\n\n def __init__(self, client, df, path=None):\n self.client = client\n self.hdfs = client.hdfs\n\n self.df = df\n\n self.temp_hdfs_dirs = []\n\n def write_temp_csv(self):\n temp_hdfs_dir = pjoin(\n options.impala.temp_hdfs_path, 'pandas_{}'.format(util.guid())\n )\n self.hdfs.mkdir(temp_hdfs_dir)\n\n # Keep track of the temporary HDFS file\n self.temp_hdfs_dirs.append(temp_hdfs_dir)\n\n # Write the file to HDFS\n hdfs_path = pjoin(temp_hdfs_dir, '0.csv')\n\n self.write_csv(hdfs_path)\n\n return temp_hdfs_dir\n\n def write_csv(self, path):\n with tempfile.NamedTemporaryFile() as f:\n # Write the DataFrame to the temporary file path\n if options.verbose:\n util.log(\n 'Writing DataFrame to temporary file {}'.format(f.name)\n )\n\n self.df.to_csv(\n f.name,\n header=False,\n index=False,\n sep=',',\n quoting=csv.QUOTE_NONE,\n escapechar='\\\\',\n na_rep='#NULL',\n )\n f.seek(0)\n\n if options.verbose:\n util.log('Writing CSV to: {0}'.format(path))\n\n self.hdfs.put(path, f.name)\n return path\n\n def get_schema(self):\n # define a temporary table using delimited data\n return sch.infer(self.df)\n\n def delimited_table(self, csv_dir, name=None, database=None):\n temp_delimited_name = 'ibis_tmp_pandas_{0}'.format(util.guid())\n schema = self.get_schema()\n\n return self.client.delimited_file(\n csv_dir,\n schema,\n name=temp_delimited_name,\n database=database,\n delimiter=',',\n na_rep='#NULL',\n escapechar='\\\\\\\\',\n external=True,\n persist=False,\n )\n\n def __del__(self):\n try:\n self.cleanup()\n except com.IbisError:\n pass\n\n def cleanup(self):\n for path in self.temp_hdfs_dirs:\n self.hdfs.rmdir(path)\n self.temp_hdfs_dirs = []\n self.csv_dir = None\n\n\ndef write_temp_dataframe(client, df):\n writer = DataFrameWriter(client, df)\n path = writer.write_temp_csv()\n return writer, writer.delimited_table(path)\n", "path": "ibis/impala/pandas_interop.py"}]}
2,002
396
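To make the Windows failure mode in the row above concrete: `NamedTemporaryFile(delete=True)` keeps the file handle open, so pandas cannot reopen the same path on Windows; writing into a `TemporaryDirectory` sidesteps that entirely, which is the pattern the golden diff adopts. A self-contained sketch of the idea (the HDFS hand-off is only hinted at, and the helper name is an assumption):

```python
# Sketch: write a DataFrame into a temporary directory rather than an open
# NamedTemporaryFile, so the CSV can be reopened on Windows as well.
import os
import tempfile

import pandas as pd


def write_temp_csv(df: pd.DataFrame) -> None:
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_file_path = os.path.join(tmp_dir, 'temp.csv')
        df.to_csv(tmp_file_path, header=False, index=False)
        # Hand tmp_file_path to the consumer (e.g. an hdfs.put call) here,
        # before the context manager removes the whole directory.
        print(os.path.getsize(tmp_file_path))


write_temp_csv(pd.DataFrame({'a': [1, 2], 'b': [3, 4]}))
```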
gh_patches_debug_4876
rasdani/github-patches
git_diff
akvo__akvo-rsr-1921
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Project editor crash for conditions without a text ## Test plan 1. Open the project editor, and click the 'Show advanced IATI fields' button 2. In the results and indicators section, add a project condition with only a type (no condition text) 3. Save 4. Refresh the page 5. **RESULT:** No error should be shown and the condition should state "No condition specified" ## Issue description Hasn't happened on Live yet, but it did on Test. See http://sentry.support.akvo-ops.org/rsr/test/group/690/. </issue> <code> [start of akvo/rsr/models/project_condition.py] 1 # -*- coding: utf-8 -*- 2 3 # Akvo RSR is covered by the GNU Affero General Public License. 4 # See more details in the license.txt file located at the root folder of the Akvo RSR module. 5 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >. 6 7 8 from django.db import models 9 from django.utils.translation import ugettext_lazy as _ 10 11 from ..fields import ValidXMLCharField 12 13 from akvo.codelists.models import ConditionType 14 from akvo.codelists.store.codelists_v201 import CONDITION_TYPE 15 from akvo.utils import codelist_choices, codelist_value 16 17 18 class ProjectCondition(models.Model): 19 project = models.ForeignKey('Project', verbose_name=_(u'project'), related_name='conditions') 20 text = ValidXMLCharField(_(u'condition'), blank=True, max_length=100, 21 help_text=_(u'(100 characters)')) 22 type = ValidXMLCharField(_(u'condition type'), blank=True, max_length=1, 23 choices=codelist_choices(CONDITION_TYPE)) 24 25 def __unicode__(self): 26 return self.text if self.text else _(u'No condition specified') 27 28 def iati_type(self): 29 return codelist_value(ConditionType, self, 'type') 30 31 class Meta: 32 app_label = 'rsr' 33 verbose_name = _(u'condition') 34 verbose_name_plural = _(u'conditions') 35 [end of akvo/rsr/models/project_condition.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/akvo/rsr/models/project_condition.py b/akvo/rsr/models/project_condition.py --- a/akvo/rsr/models/project_condition.py +++ b/akvo/rsr/models/project_condition.py @@ -23,7 +23,7 @@ choices=codelist_choices(CONDITION_TYPE)) def __unicode__(self): - return self.text if self.text else _(u'No condition specified') + return self.text if self.text else u'%s' % _(u'No condition specified') def iati_type(self): return codelist_value(ConditionType, self, 'type')
{"golden_diff": "diff --git a/akvo/rsr/models/project_condition.py b/akvo/rsr/models/project_condition.py\n--- a/akvo/rsr/models/project_condition.py\n+++ b/akvo/rsr/models/project_condition.py\n@@ -23,7 +23,7 @@\n choices=codelist_choices(CONDITION_TYPE))\n \n def __unicode__(self):\n- return self.text if self.text else _(u'No condition specified')\n+ return self.text if self.text else u'%s' % _(u'No condition specified')\n \n def iati_type(self):\n return codelist_value(ConditionType, self, 'type')\n", "issue": "Project editor crash for conditions without a text\n## Test plan\n1. Open the project editor, and click the 'Show advanced IATI fields' button\n2. In the results and indicators section, add a project condition with only a type (no condition text)\n3. Save\n4. Refresh the page\n5. **RESULT:** No error should be shown and the condition should state \"No condition specified\"\n## Issue description\n\nHasn't happened on Live yet, but it did on Test. See http://sentry.support.akvo-ops.org/rsr/test/group/690/.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\n\nfrom django.db import models\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom ..fields import ValidXMLCharField\n\nfrom akvo.codelists.models import ConditionType\nfrom akvo.codelists.store.codelists_v201 import CONDITION_TYPE\nfrom akvo.utils import codelist_choices, codelist_value\n\n\nclass ProjectCondition(models.Model):\n project = models.ForeignKey('Project', verbose_name=_(u'project'), related_name='conditions')\n text = ValidXMLCharField(_(u'condition'), blank=True, max_length=100,\n help_text=_(u'(100 characters)'))\n type = ValidXMLCharField(_(u'condition type'), blank=True, max_length=1,\n choices=codelist_choices(CONDITION_TYPE))\n\n def __unicode__(self):\n return self.text if self.text else _(u'No condition specified')\n\n def iati_type(self):\n return codelist_value(ConditionType, self, 'type')\n\n class Meta:\n app_label = 'rsr'\n verbose_name = _(u'condition')\n verbose_name_plural = _(u'conditions')\n", "path": "akvo/rsr/models/project_condition.py"}]}
1,039
137
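The crash and the small fix in the row above come down to lazy translation: `ugettext_lazy` returns a proxy object, while `__unicode__` must hand back a real unicode string. Interpolating the proxy into a string literal forces evaluation, which is all the patch does. A hedged sketch of the distinction, trimmed to the relevant method:

```python
# Sketch: force a ugettext_lazy proxy into a real unicode string before
# returning it from __unicode__, as the patch above does.
from django.utils.translation import ugettext_lazy as _


class ProjectCondition(object):
    def __init__(self, text=u''):
        self.text = text

    def __unicode__(self):
        # u'%s' % proxy evaluates the lazy object; returning the proxy itself
        # is the likely source of the reported crash for empty conditions.
        return self.text if self.text else u'%s' % _(u'No condition specified')
```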
gh_patches_debug_3195
rasdani/github-patches
git_diff
hartwork__jawanndenn-225
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> missing rapid-json dependency for pip installing via pip3 does not also install the rapid-json dependency. missing rapid-json dependency for pip installing via pip3 does not also install the rapid-json dependency. </issue> <code> [start of setup.py] 1 #! /usr/bin/env python3 2 # Copyright (C) 2016 Sebastian Pipping <[email protected]> 3 # Licensed under GNU Affero GPL v3 or later 4 5 import os 6 7 from setuptools import find_packages, setup 8 9 from jawanndenn.metadata import APP_NAME, VERSION_STR 10 11 12 def _read(filename): 13 with open(filename) as f: 14 return f.read() 15 16 17 _tests_require = [ 18 'factory-boy>=2.12.0', 19 'parameterized>=0.7.1', 20 ] 21 22 _extras_require = { 23 'tests': _tests_require, 24 } 25 26 27 def _collect_package_data(top_directory): 28 for root, dirs, files in os.walk(os.path.join(top_directory, 'static')): 29 if files: 30 relative_root = os.path.relpath(root, top_directory) 31 yield os.path.join(relative_root, '*') 32 33 34 if __name__ == '__main__': 35 setup( 36 name=APP_NAME, 37 version=VERSION_STR, 38 39 license='AGPLv3+', 40 description='Libre alternative to Doodle', 41 long_description=_read('README.rst'), 42 43 author='Sebastian Pipping', 44 author_email='[email protected]', 45 url='https://github.com/hartwork/jawanndenn', 46 47 python_requires='>=3.6', 48 install_requires=[ 49 'django>=2.2.7', 50 'django-extensions>=2.2.5', 51 'django-ratelimit>=2.0.0', 52 'djangorestframework>=3.11.0', 53 'gunicorn>=20.0.4', 54 'gunicorn-color>=0.1.0', 55 ], 56 extras_require=_extras_require, 57 tests_require=_tests_require, 58 59 packages=find_packages(), 60 package_data={ 61 APP_NAME: list(_collect_package_data(APP_NAME)), 62 }, 63 64 entry_points={ 65 'console_scripts': [ 66 f'{APP_NAME} = {APP_NAME}.__main__:main', 67 ], 68 }, 69 70 classifiers=[ 71 'Development Status :: 4 - Beta', 72 'Framework :: Django', 73 'Framework :: Django :: 2.2', 74 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)', # noqa: E501 75 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', # noqa: E501 76 'Programming Language :: JavaScript', 77 'Programming Language :: Python', 78 'Programming Language :: Python :: 3', 79 'Programming Language :: Python :: 3.6', 80 'Programming Language :: Python :: 3.7', 81 'Programming Language :: Python :: 3.8', 82 'Programming Language :: Python :: 3 :: Only', 83 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 84 'Topic :: Office/Business :: Scheduling', 85 ] 86 ) 87 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -52,6 +52,8 @@ 'djangorestframework>=3.11.0', 'gunicorn>=20.0.4', 'gunicorn-color>=0.1.0', + 'python-dateutil>=2.8.1', + 'python-rapidjson>=1.0', ], extras_require=_extras_require, tests_require=_tests_require,
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -52,6 +52,8 @@\n 'djangorestframework>=3.11.0',\n 'gunicorn>=20.0.4',\n 'gunicorn-color>=0.1.0',\n+ 'python-dateutil>=2.8.1',\n+ 'python-rapidjson>=1.0',\n ],\n extras_require=_extras_require,\n tests_require=_tests_require,\n", "issue": "missing rapid-json dependency for pip\ninstalling via pip3 does not also install the rapid-json dependency.\nmissing rapid-json dependency for pip\ninstalling via pip3 does not also install the rapid-json dependency.\n", "before_files": [{"content": "#! /usr/bin/env python3\n# Copyright (C) 2016 Sebastian Pipping <[email protected]>\n# Licensed under GNU Affero GPL v3 or later\n\nimport os\n\nfrom setuptools import find_packages, setup\n\nfrom jawanndenn.metadata import APP_NAME, VERSION_STR\n\n\ndef _read(filename):\n with open(filename) as f:\n return f.read()\n\n\n_tests_require = [\n 'factory-boy>=2.12.0',\n 'parameterized>=0.7.1',\n]\n\n_extras_require = {\n 'tests': _tests_require,\n}\n\n\ndef _collect_package_data(top_directory):\n for root, dirs, files in os.walk(os.path.join(top_directory, 'static')):\n if files:\n relative_root = os.path.relpath(root, top_directory)\n yield os.path.join(relative_root, '*')\n\n\nif __name__ == '__main__':\n setup(\n name=APP_NAME,\n version=VERSION_STR,\n\n license='AGPLv3+',\n description='Libre alternative to Doodle',\n long_description=_read('README.rst'),\n\n author='Sebastian Pipping',\n author_email='[email protected]',\n url='https://github.com/hartwork/jawanndenn',\n\n python_requires='>=3.6',\n install_requires=[\n 'django>=2.2.7',\n 'django-extensions>=2.2.5',\n 'django-ratelimit>=2.0.0',\n 'djangorestframework>=3.11.0',\n 'gunicorn>=20.0.4',\n 'gunicorn-color>=0.1.0',\n ],\n extras_require=_extras_require,\n tests_require=_tests_require,\n\n packages=find_packages(),\n package_data={\n APP_NAME: list(_collect_package_data(APP_NAME)),\n },\n\n entry_points={\n 'console_scripts': [\n f'{APP_NAME} = {APP_NAME}.__main__:main',\n ],\n },\n\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Framework :: Django',\n 'Framework :: Django :: 2.2',\n 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)', # noqa: E501\n 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', # noqa: E501\n 'Programming Language :: JavaScript',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',\n 'Topic :: Office/Business :: Scheduling',\n ]\n )\n", "path": "setup.py"}]}
1,371
112
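A quick way to sanity-check that a dependency fix like the one above actually lands in the package metadata is to read the installed distribution's declared requirements. Sketch below; the distribution name `jawanndenn` is assumed to match `APP_NAME`, and the API needs Python 3.8+:

```python
# Sketch: list the runtime requirements an installed distribution declares,
# to confirm python-rapidjson (and friends) made it into install_requires.
from importlib.metadata import requires  # stdlib, Python 3.8+

for requirement in requires('jawanndenn') or []:
    print(requirement)
```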
gh_patches_debug_18134
rasdani/github-patches
git_diff
ocadotechnology__aimmo-350
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Functional Test 1 - Effect expiry We want to check that the effect can expire. This would be as follows: Place an avatar on the map in one of the cells. Place a pickup somewhere. Make a move in some direction, towards the pickup. Call `avatar.pickup.effect.onturn()` 10 times until the effect expires. Make sure that the avatar holds *no effect* (not pickup!). </issue> <code> [start of aimmo-game/simulation/effects.py] 1 from abc import ABCMeta, abstractmethod 2 3 import math 4 5 import sys 6 7 8 class _Effect(object): 9 __metaclass__ = ABCMeta 10 11 def __init__(self, avatar): 12 self._avatar = avatar 13 self.is_expired = False 14 15 @abstractmethod 16 def on_turn(self): 17 raise NotImplementedError() 18 19 20 class _TimedEffect(_Effect): 21 __metaclass__ = ABCMeta 22 EFFECT_TIME = 10 23 24 def __init__(self, *args): 25 super(_TimedEffect, self).__init__(*args) 26 self._time_remaining = self.EFFECT_TIME 27 28 def remove(self): 29 try: 30 self._avatar.effects.remove(self) 31 except KeyError as e: 32 raise KeyError("The avatar object does not exist! Cannot remove the effect.") 33 34 def on_turn(self): 35 self._time_remaining -= 1 36 if self._time_remaining <= 0: 37 self.is_expired = True 38 39 40 class InvulnerabilityPickupEffect(_TimedEffect): 41 def __init__(self, *args): 42 super(InvulnerabilityPickupEffect, self).__init__(*args) 43 self._avatar.resistance += 1000 44 45 def remove(self): 46 super(InvulnerabilityPickupEffect, self).remove() 47 self._avatar.resistance -= 1000 48 49 50 class DamageBoostPickupEffect(_TimedEffect): 51 def __init__(self, damage_boost, *args): 52 # For now we have no maximum damage boost constraints, so infinity needs to be casted into a max int. 53 if math.isinf(damage_boost): 54 damage_boost = sys.maxsize 55 56 self._damage_boost = int(round(damage_boost)) 57 super(DamageBoostPickupEffect, self).__init__(*args) 58 self._avatar.attack_strength += self._damage_boost 59 60 def remove(self): 61 super(DamageBoostPickupEffect, self).remove() 62 self._avatar.attack_strength -= self._damage_boost 63 [end of aimmo-game/simulation/effects.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/aimmo-game/simulation/effects.py b/aimmo-game/simulation/effects.py --- a/aimmo-game/simulation/effects.py +++ b/aimmo-game/simulation/effects.py @@ -1,9 +1,10 @@ from abc import ABCMeta, abstractmethod import math - import sys +INVULNERABILITY_RESISTANCE = 1000 + class _Effect(object): __metaclass__ = ABCMeta @@ -40,11 +41,11 @@ class InvulnerabilityPickupEffect(_TimedEffect): def __init__(self, *args): super(InvulnerabilityPickupEffect, self).__init__(*args) - self._avatar.resistance += 1000 + self._avatar.resistance += INVULNERABILITY_RESISTANCE def remove(self): super(InvulnerabilityPickupEffect, self).remove() - self._avatar.resistance -= 1000 + self._avatar.resistance -= INVULNERABILITY_RESISTANCE class DamageBoostPickupEffect(_TimedEffect):
{"golden_diff": "diff --git a/aimmo-game/simulation/effects.py b/aimmo-game/simulation/effects.py\n--- a/aimmo-game/simulation/effects.py\n+++ b/aimmo-game/simulation/effects.py\n@@ -1,9 +1,10 @@\n from abc import ABCMeta, abstractmethod\n \n import math\n-\n import sys\n \n+INVULNERABILITY_RESISTANCE = 1000\n+\n \n class _Effect(object):\n __metaclass__ = ABCMeta\n@@ -40,11 +41,11 @@\n class InvulnerabilityPickupEffect(_TimedEffect):\n def __init__(self, *args):\n super(InvulnerabilityPickupEffect, self).__init__(*args)\n- self._avatar.resistance += 1000\n+ self._avatar.resistance += INVULNERABILITY_RESISTANCE\n \n def remove(self):\n super(InvulnerabilityPickupEffect, self).remove()\n- self._avatar.resistance -= 1000\n+ self._avatar.resistance -= INVULNERABILITY_RESISTANCE\n \n \n class DamageBoostPickupEffect(_TimedEffect):\n", "issue": "Functional Test 1 - Effect expiry \nWe want to check that the effect can expire. This would be as follows:\n\nPlace an avatar on the map in one of the cells.\nPlace a pickup somewhere.\nMake a move in some direction, towards the pickup.\nCall `avatar.pickup.effect.onturn()` 10 times until the effect expires.\nMake sure that the avatar holds *no effect* (not pickup!).\n", "before_files": [{"content": "from abc import ABCMeta, abstractmethod\n\nimport math\n\nimport sys\n\n\nclass _Effect(object):\n __metaclass__ = ABCMeta\n\n def __init__(self, avatar):\n self._avatar = avatar\n self.is_expired = False\n\n @abstractmethod\n def on_turn(self):\n raise NotImplementedError()\n\n\nclass _TimedEffect(_Effect):\n __metaclass__ = ABCMeta\n EFFECT_TIME = 10\n\n def __init__(self, *args):\n super(_TimedEffect, self).__init__(*args)\n self._time_remaining = self.EFFECT_TIME\n\n def remove(self):\n try:\n self._avatar.effects.remove(self)\n except KeyError as e:\n raise KeyError(\"The avatar object does not exist! Cannot remove the effect.\")\n\n def on_turn(self):\n self._time_remaining -= 1\n if self._time_remaining <= 0:\n self.is_expired = True\n\n\nclass InvulnerabilityPickupEffect(_TimedEffect):\n def __init__(self, *args):\n super(InvulnerabilityPickupEffect, self).__init__(*args)\n self._avatar.resistance += 1000\n\n def remove(self):\n super(InvulnerabilityPickupEffect, self).remove()\n self._avatar.resistance -= 1000\n\n\nclass DamageBoostPickupEffect(_TimedEffect):\n def __init__(self, damage_boost, *args):\n # For now we have no maximum damage boost constraints, so infinity needs to be casted into a max int.\n if math.isinf(damage_boost):\n damage_boost = sys.maxsize\n\n self._damage_boost = int(round(damage_boost))\n super(DamageBoostPickupEffect, self).__init__(*args)\n self._avatar.attack_strength += self._damage_boost\n\n def remove(self):\n super(DamageBoostPickupEffect, self).remove()\n self._avatar.attack_strength -= self._damage_boost\n", "path": "aimmo-game/simulation/effects.py"}]}
1,171
246
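The test plan in the issue above maps almost line-for-line onto code. The sketch below is hedged: `place_avatar`, `place_pickup`, `move_avatar` and `prune_expired_effects` are placeholder helpers, not real aimmo APIs; only `on_turn()`, `is_expired` and the `EFFECT_TIME == 10` constant come from the effects module shown in the prompt.

```python
# Sketch of the requested functional test; helper names are placeholders.
def test_effect_expires_after_ten_turns(game):
    avatar = game.place_avatar(location=(0, 0))
    game.place_pickup(location=(1, 0))

    game.move_avatar(avatar, direction='east')   # stepping on the pickup applies its effect
    effect = next(iter(avatar.effects))

    for _ in range(10):                          # _TimedEffect.EFFECT_TIME == 10
        effect.on_turn()
    assert effect.is_expired

    game.prune_expired_effects()                 # placeholder for the turn cleanup
    assert not avatar.effects                    # the avatar holds no effect at all
```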
gh_patches_debug_4004
rasdani/github-patches
git_diff
tobymao__sqlglot-552
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> cast to map with clickhouse dialect ignores case Yet another clickhouse case sensitivity bug :) ClickHouse CLI: ``` albatross :) select cast(map('a', 1, 'b', 2) as Map(String, Int8)); SELECT CAST(map('a', 1, 'b', 2), 'Map(String, Int8)') Query id: bab188fb-69f7-4c0f-9bde-ac524a08f6e8 ┌─CAST(map('a', 1, 'b', 2), 'Map(String, Int8)')─┐ │ {'a':1,'b':2} │ └────────────────────────────────────────────────┘ 1 row in set. Elapsed: 0.001 sec. ``` sqlglot: ``` In [7]: sg.parse_one("cast(map('a', 1, 'b', 2) as Map(String, Int8))", read="clickhouse").sql(dialect="clickhouse") Out[7]: "CAST(map('a', 1, 'b', 2) AS MAP(TEXT, TINYINT))" ``` Perhaps there's a way to disable all case transformations for the clickhouse dialect. </issue> <code> [start of sqlglot/dialects/clickhouse.py] 1 from sqlglot import exp 2 from sqlglot.dialects.dialect import Dialect, inline_array_sql, var_map_sql 3 from sqlglot.generator import Generator 4 from sqlglot.helper import csv 5 from sqlglot.parser import Parser, parse_var_map 6 from sqlglot.tokens import Tokenizer, TokenType 7 8 9 def _lower_func(sql): 10 index = sql.index("(") 11 return sql[:index].lower() + sql[index:] 12 13 14 class ClickHouse(Dialect): 15 normalize_functions = None 16 null_ordering = "nulls_are_last" 17 18 class Tokenizer(Tokenizer): 19 IDENTIFIERS = ['"', "`"] 20 21 KEYWORDS = { 22 **Tokenizer.KEYWORDS, 23 "NULLABLE": TokenType.NULLABLE, 24 "FINAL": TokenType.FINAL, 25 "DATETIME64": TokenType.DATETIME, 26 "INT8": TokenType.TINYINT, 27 "INT16": TokenType.SMALLINT, 28 "INT32": TokenType.INT, 29 "INT64": TokenType.BIGINT, 30 "FLOAT32": TokenType.FLOAT, 31 "FLOAT64": TokenType.DOUBLE, 32 } 33 34 class Parser(Parser): 35 FUNCTIONS = { 36 **Parser.FUNCTIONS, 37 "MAP": parse_var_map, 38 } 39 40 def _parse_table(self, schema=False): 41 this = super()._parse_table(schema) 42 43 if self._match(TokenType.FINAL): 44 this = self.expression(exp.Final, this=this) 45 46 return this 47 48 class Generator(Generator): 49 STRUCT_DELIMITER = ("(", ")") 50 51 TYPE_MAPPING = { 52 **Generator.TYPE_MAPPING, 53 exp.DataType.Type.NULLABLE: "Nullable", 54 exp.DataType.Type.DATETIME: "DateTime64", 55 } 56 57 TRANSFORMS = { 58 **Generator.TRANSFORMS, 59 exp.Array: inline_array_sql, 60 exp.StrPosition: lambda self, e: f"position({csv(self.sql(e, 'this'), self.sql(e, 'substr'), self.sql(e, 'position'))})", 61 exp.Final: lambda self, e: f"{self.sql(e, 'this')} FINAL", 62 exp.Map: lambda self, e: _lower_func(var_map_sql(self, e)), 63 exp.VarMap: lambda self, e: _lower_func(var_map_sql(self, e)), 64 } 65 66 EXPLICIT_UNION = True 67 [end of sqlglot/dialects/clickhouse.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/sqlglot/dialects/clickhouse.py b/sqlglot/dialects/clickhouse.py --- a/sqlglot/dialects/clickhouse.py +++ b/sqlglot/dialects/clickhouse.py @@ -51,6 +51,9 @@ **Generator.TYPE_MAPPING, exp.DataType.Type.NULLABLE: "Nullable", exp.DataType.Type.DATETIME: "DateTime64", + exp.DataType.Type.MAP: "Map", + exp.DataType.Type.ARRAY: "Array", + exp.DataType.Type.STRUCT: "Tuple", } TRANSFORMS = {
{"golden_diff": "diff --git a/sqlglot/dialects/clickhouse.py b/sqlglot/dialects/clickhouse.py\n--- a/sqlglot/dialects/clickhouse.py\n+++ b/sqlglot/dialects/clickhouse.py\n@@ -51,6 +51,9 @@\n **Generator.TYPE_MAPPING,\n exp.DataType.Type.NULLABLE: \"Nullable\",\n exp.DataType.Type.DATETIME: \"DateTime64\",\n+ exp.DataType.Type.MAP: \"Map\",\n+ exp.DataType.Type.ARRAY: \"Array\",\n+ exp.DataType.Type.STRUCT: \"Tuple\",\n }\n \n TRANSFORMS = {\n", "issue": "cast to map with clickhouse dialect ignores case\nYet another clickhouse case sensitivity bug :)\r\n\r\n\r\nClickHouse CLI:\r\n\r\n```\r\nalbatross :) select cast(map('a', 1, 'b', 2) as Map(String, Int8));\r\n\r\nSELECT CAST(map('a', 1, 'b', 2), 'Map(String, Int8)')\r\n\r\nQuery id: bab188fb-69f7-4c0f-9bde-ac524a08f6e8\r\n\r\n\u250c\u2500CAST(map('a', 1, 'b', 2), 'Map(String, Int8)')\u2500\u2510\r\n\u2502 {'a':1,'b':2} \u2502\r\n\u2514\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2518\r\n\r\n1 row in set. Elapsed: 0.001 sec.\r\n```\r\n\r\nsqlglot:\r\n\r\n```\r\nIn [7]: sg.parse_one(\"cast(map('a', 1, 'b', 2) as Map(String, Int8))\", read=\"clickhouse\").sql(dialect=\"clickhouse\")\r\nOut[7]: \"CAST(map('a', 1, 'b', 2) AS MAP(TEXT, TINYINT))\"\r\n```\r\n\r\nPerhaps there's a way to disable all case transformations for the clickhouse dialect.\n", "before_files": [{"content": "from sqlglot import exp\nfrom sqlglot.dialects.dialect import Dialect, inline_array_sql, var_map_sql\nfrom sqlglot.generator import Generator\nfrom sqlglot.helper import csv\nfrom sqlglot.parser import Parser, parse_var_map\nfrom sqlglot.tokens import Tokenizer, TokenType\n\n\ndef _lower_func(sql):\n index = sql.index(\"(\")\n return sql[:index].lower() + sql[index:]\n\n\nclass ClickHouse(Dialect):\n normalize_functions = None\n null_ordering = \"nulls_are_last\"\n\n class Tokenizer(Tokenizer):\n IDENTIFIERS = ['\"', \"`\"]\n\n KEYWORDS = {\n **Tokenizer.KEYWORDS,\n \"NULLABLE\": TokenType.NULLABLE,\n \"FINAL\": TokenType.FINAL,\n \"DATETIME64\": TokenType.DATETIME,\n \"INT8\": TokenType.TINYINT,\n \"INT16\": TokenType.SMALLINT,\n \"INT32\": TokenType.INT,\n \"INT64\": TokenType.BIGINT,\n \"FLOAT32\": TokenType.FLOAT,\n \"FLOAT64\": TokenType.DOUBLE,\n }\n\n class Parser(Parser):\n FUNCTIONS = {\n **Parser.FUNCTIONS,\n \"MAP\": parse_var_map,\n }\n\n def _parse_table(self, schema=False):\n this = super()._parse_table(schema)\n\n if self._match(TokenType.FINAL):\n this = self.expression(exp.Final, this=this)\n\n return this\n\n class Generator(Generator):\n STRUCT_DELIMITER = (\"(\", \")\")\n\n TYPE_MAPPING = {\n **Generator.TYPE_MAPPING,\n exp.DataType.Type.NULLABLE: \"Nullable\",\n exp.DataType.Type.DATETIME: \"DateTime64\",\n }\n\n TRANSFORMS = {\n **Generator.TRANSFORMS,\n exp.Array: inline_array_sql,\n exp.StrPosition: lambda self, e: f\"position({csv(self.sql(e, 'this'), self.sql(e, 'substr'), self.sql(e, 'position'))})\",\n exp.Final: lambda self, e: f\"{self.sql(e, 'this')} FINAL\",\n exp.Map: lambda self, e: _lower_func(var_map_sql(self, e)),\n exp.VarMap: lambda self, e: _lower_func(var_map_sql(self, e)),\n }\n\n EXPLICIT_UNION = True\n", "path": "sqlglot/dialects/clickhouse.py"}]}
1,438
136
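With the three `TYPE_MAPPING` entries from the diff above in place, the ClickHouse spelling of the container types survives a round trip. A small sketch; the exact rendering of the inner types (e.g. `TEXT`/`TINYINT`) is an expectation rather than something verified here:

```python
# Sketch: transpile ClickHouse -> ClickHouse and check the Map spelling survives.
import sqlglot

sql = "cast(map('a', 1, 'b', 2) as Map(String, Int8))"
out = sqlglot.transpile(sql, read="clickhouse", write="clickhouse")[0]
print(out)
assert " AS Map(" in out  # expected after the fix, instead of " AS MAP("
```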
gh_patches_debug_14670
rasdani/github-patches
git_diff
ipython__ipython-3669
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> AttributeError: 'BlockingKernelClient' object has no attribute 'started_channels' I have an app that is centered around IPython running in qt (https://github.com/neuropy/neuropy/blob/master/neuropy/main.py). Yesterday I updated IPython from git, and have had to change a few things since the refactoring about 3 months ago, which split a kernel client off of the kernel manager (see #3011). In the process, I came across the error in the title. This error only happens when I try to create a normal two-process app, but not when creating an inprocess app, ie, it happens with QtKernelManager, but not with QtInProcessKernelManager. I think a BlockingKernelClient happens to be the default for QtKernelClient, which may be due to #3222. Not sure how to fix this in IPython, but to get around it, I set the client_factory to QtKernelClient, and then calling QtKernelManager.client() works fine. My minimalist example is in this gist: https://gist.github.com/mspacek/6018206 Can this be fixed so that setting up the 2-process app doesn't require the extra step of changing the client_factory? </issue> <code> [start of IPython/qt/manager.py] 1 """ Defines a KernelClient that provides signals and slots. 2 """ 3 4 from IPython.external.qt import QtCore 5 6 # Local imports 7 from IPython.utils.traitlets import Bool 8 9 from IPython.kernel import KernelManager 10 from IPython.kernel.restarter import KernelRestarter 11 12 from .kernel_mixins import QtKernelManagerMixin, QtKernelRestarterMixin 13 14 15 class QtKernelRestarter(KernelRestarter, QtKernelRestarterMixin): 16 17 def start(self): 18 if self._timer is None: 19 self._timer = QtCore.QTimer() 20 self._timer.timeout.connect(self.poll) 21 self._timer.start(self.time_to_dead * 1000) 22 23 def stop(self): 24 self._timer.stop() 25 26 def poll(self): 27 super(QtKernelRestarter, self).poll() 28 29 30 class QtKernelManager(KernelManager, QtKernelManagerMixin): 31 """A KernelManager with Qt signals for restart""" 32 33 autorestart = Bool(True, config=True) 34 35 def start_restarter(self): 36 if self.autorestart and self.has_kernel: 37 if self._restarter is None: 38 self._restarter = QtKernelRestarter( 39 kernel_manager=self, 40 parent=self, 41 log=self.log, 42 ) 43 self._restarter.add_callback(self._handle_kernel_restarted) 44 self._restarter.start() 45 46 def stop_restarter(self): 47 if self.autorestart: 48 if self._restarter is not None: 49 self._restarter.stop() 50 51 def _handle_kernel_restarted(self): 52 self.kernel_restarted.emit() 53 [end of IPython/qt/manager.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/IPython/qt/manager.py b/IPython/qt/manager.py --- a/IPython/qt/manager.py +++ b/IPython/qt/manager.py @@ -4,7 +4,7 @@ from IPython.external.qt import QtCore # Local imports -from IPython.utils.traitlets import Bool +from IPython.utils.traitlets import Bool, DottedObjectName from IPython.kernel import KernelManager from IPython.kernel.restarter import KernelRestarter @@ -30,6 +30,7 @@ class QtKernelManager(KernelManager, QtKernelManagerMixin): """A KernelManager with Qt signals for restart""" + client_class = DottedObjectName('IPython.qt.client.QtKernelClient') autorestart = Bool(True, config=True) def start_restarter(self):
{"golden_diff": "diff --git a/IPython/qt/manager.py b/IPython/qt/manager.py\n--- a/IPython/qt/manager.py\n+++ b/IPython/qt/manager.py\n@@ -4,7 +4,7 @@\n from IPython.external.qt import QtCore\n \n # Local imports\n-from IPython.utils.traitlets import Bool\n+from IPython.utils.traitlets import Bool, DottedObjectName\n \n from IPython.kernel import KernelManager\n from IPython.kernel.restarter import KernelRestarter\n@@ -30,6 +30,7 @@\n class QtKernelManager(KernelManager, QtKernelManagerMixin):\n \"\"\"A KernelManager with Qt signals for restart\"\"\"\n \n+ client_class = DottedObjectName('IPython.qt.client.QtKernelClient')\n autorestart = Bool(True, config=True)\n \n def start_restarter(self):\n", "issue": "AttributeError: 'BlockingKernelClient' object has no attribute 'started_channels'\nI have an app that is centered around IPython running in qt (https://github.com/neuropy/neuropy/blob/master/neuropy/main.py). Yesterday I updated IPython from git, and have had to change a few things since the refactoring about 3 months ago, which split a kernel client off of the kernel manager (see #3011). In the process, I came across the error in the title. This error only happens when I try to create a normal two-process app, but not when creating an inprocess app, ie, it happens with QtKernelManager, but not with QtInProcessKernelManager. I think a BlockingKernelClient happens to be the default for QtKernelClient, which may be due to #3222. Not sure how to fix this in IPython, but to get around it, I set the client_factory to QtKernelClient, and then calling QtKernelManager.client() works fine. My minimalist example is in this gist: https://gist.github.com/mspacek/6018206\n\nCan this be fixed so that setting up the 2-process app doesn't require the extra step of changing the client_factory?\n\n", "before_files": [{"content": "\"\"\" Defines a KernelClient that provides signals and slots.\n\"\"\"\n\nfrom IPython.external.qt import QtCore\n\n# Local imports\nfrom IPython.utils.traitlets import Bool\n\nfrom IPython.kernel import KernelManager\nfrom IPython.kernel.restarter import KernelRestarter\n\nfrom .kernel_mixins import QtKernelManagerMixin, QtKernelRestarterMixin\n\n\nclass QtKernelRestarter(KernelRestarter, QtKernelRestarterMixin):\n\n def start(self):\n if self._timer is None:\n self._timer = QtCore.QTimer()\n self._timer.timeout.connect(self.poll)\n self._timer.start(self.time_to_dead * 1000)\n\n def stop(self):\n self._timer.stop()\n\n def poll(self):\n super(QtKernelRestarter, self).poll()\n\n\nclass QtKernelManager(KernelManager, QtKernelManagerMixin):\n \"\"\"A KernelManager with Qt signals for restart\"\"\"\n\n autorestart = Bool(True, config=True)\n\n def start_restarter(self):\n if self.autorestart and self.has_kernel:\n if self._restarter is None:\n self._restarter = QtKernelRestarter(\n kernel_manager=self,\n parent=self,\n log=self.log,\n )\n self._restarter.add_callback(self._handle_kernel_restarted)\n self._restarter.start()\n\n def stop_restarter(self):\n if self.autorestart:\n if self._restarter is not None:\n self._restarter.stop()\n\n def _handle_kernel_restarted(self):\n self.kernel_restarted.emit()\n", "path": "IPython/qt/manager.py"}]}
1,229
178
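For the two-process case described above, the added `client_class` default means `QtKernelManager.client()` hands back a `QtKernelClient` without the manual `client_factory` workaround from the issue, so the Qt `started_channels` signal is available. A hedged usage sketch of that setup:

```python
# Sketch: minimal two-process setup once client_class defaults to QtKernelClient.
from IPython.qt.manager import QtKernelManager

kernel_manager = QtKernelManager()
kernel_manager.start_kernel()

kernel_client = kernel_manager.client()   # a QtKernelClient with Qt signals/slots
kernel_client.start_channels()            # started_channels is emitted here
```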
gh_patches_debug_21565
rasdani/github-patches
git_diff
OCA__server-tools-508
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Fix travis build on 9.0 branch the 9.0 branch of the project is red, with really strange errors about computed fields. There have been such errors since the merge of #469 but for some reason they were neither fatal nor breaking the build. Splitting `database_cleanup` tests in .travis.yml seems to let the other modules go green, but the tests for database_cleanup are red. </issue> <code> [start of database_cleanup/models/purge_wizard.py] 1 # -*- coding: utf-8 -*- 2 # © 2014-2016 Therp BV <http://therp.nl> 3 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). 4 import logging 5 from openerp import _, api, fields, models 6 from openerp.exceptions import AccessDenied 7 8 9 class CleanupPurgeLine(models.AbstractModel): 10 """ Abstract base class for the purge wizard lines """ 11 _name = 'cleanup.purge.line' 12 _order = 'name' 13 14 name = fields.Char('Name', readonly=True) 15 purged = fields.Boolean('Purged', readonly=True) 16 wizard_id = fields.Many2one('cleanup.purge.wizard') 17 18 logger = logging.getLogger('openerp.addons.database_cleanup') 19 20 @api.multi 21 def purge(self): 22 raise NotImplementedError 23 24 @api.model 25 def create(self, values): 26 # make sure the user trying this is actually supposed to do it 27 if not self.env.ref('database_cleanup.menu_database_cleanup')\ 28 .parent_id._filter_visible_menus(): 29 raise AccessDenied 30 return super(CleanupPurgeLine, self).create(values) 31 32 33 class PurgeWizard(models.AbstractModel): 34 """ Abstract base class for the purge wizards """ 35 _name = 'cleanup.purge.wizard' 36 _description = 'Purge stuff' 37 38 @api.model 39 def default_get(self, fields_list): 40 res = super(PurgeWizard, self).default_get(fields_list) 41 if 'purge_line_ids' in fields_list: 42 res['purge_line_ids'] = self.find() 43 return res 44 45 @api.multi 46 def find(self): 47 raise NotImplementedError 48 49 @api.multi 50 def purge_all(self): 51 self.mapped('purge_line_ids').purge() 52 return True 53 54 @api.model 55 def get_wizard_action(self): 56 wizard = self.create({}) 57 return { 58 'type': 'ir.actions.act_window', 59 'name': wizard.display_name, 60 'views': [(False, 'form')], 61 'res_model': self._name, 62 'res_id': wizard.id, 63 'flags': { 64 'action_buttons': False, 65 'sidebar': False, 66 }, 67 } 68 69 @api.multi 70 def select_lines(self): 71 return { 72 'type': 'ir.actions.act_window', 73 'name': _('Select lines to purge'), 74 'views': [(False, 'tree'), (False, 'form')], 75 'res_model': self._fields['purge_line_ids'].comodel_name, 76 'domain': [('wizard_id', 'in', self.ids)], 77 } 78 79 @api.multi 80 def name_get(self): 81 return [ 82 (this.id, self._description) 83 for this in self 84 ] 85 86 @api.model 87 def create(self, values): 88 # make sure the user trying this is actually supposed to do it 89 if not self.env.ref('database_cleanup.menu_database_cleanup')\ 90 .parent_id._filter_visible_menus(): 91 raise AccessDenied 92 return super(PurgeWizard, self).create(values) 93 94 purge_line_ids = fields.One2many('cleanup.purge.line', 'wizard_id') 95 [end of database_cleanup/models/purge_wizard.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/database_cleanup/models/purge_wizard.py b/database_cleanup/models/purge_wizard.py --- a/database_cleanup/models/purge_wizard.py +++ b/database_cleanup/models/purge_wizard.py @@ -24,8 +24,8 @@ @api.model def create(self, values): # make sure the user trying this is actually supposed to do it - if not self.env.ref('database_cleanup.menu_database_cleanup')\ - .parent_id._filter_visible_menus(): + if self.env.ref( + 'base.group_erp_manager') not in self.env.user.groups_id: raise AccessDenied return super(CleanupPurgeLine, self).create(values) @@ -86,8 +86,8 @@ @api.model def create(self, values): # make sure the user trying this is actually supposed to do it - if not self.env.ref('database_cleanup.menu_database_cleanup')\ - .parent_id._filter_visible_menus(): + if self.env.ref( + 'base.group_erp_manager') not in self.env.user.groups_id: raise AccessDenied return super(PurgeWizard, self).create(values)
{"golden_diff": "diff --git a/database_cleanup/models/purge_wizard.py b/database_cleanup/models/purge_wizard.py\n--- a/database_cleanup/models/purge_wizard.py\n+++ b/database_cleanup/models/purge_wizard.py\n@@ -24,8 +24,8 @@\n @api.model\n def create(self, values):\n # make sure the user trying this is actually supposed to do it\n- if not self.env.ref('database_cleanup.menu_database_cleanup')\\\n- .parent_id._filter_visible_menus():\n+ if self.env.ref(\n+ 'base.group_erp_manager') not in self.env.user.groups_id:\n raise AccessDenied\n return super(CleanupPurgeLine, self).create(values)\n \n@@ -86,8 +86,8 @@\n @api.model\n def create(self, values):\n # make sure the user trying this is actually supposed to do it\n- if not self.env.ref('database_cleanup.menu_database_cleanup')\\\n- .parent_id._filter_visible_menus():\n+ if self.env.ref(\n+ 'base.group_erp_manager') not in self.env.user.groups_id:\n raise AccessDenied\n return super(PurgeWizard, self).create(values)\n", "issue": "Fix travis build on 9.0 branch\nthe 9.0 branch of the project is red, with really strange errors about computed fields. There have been such errors since the merge of #469 but for some reason they were neither fatal nor breaking the build. \n\nSplitting `database_cleanup` tests in .travis.yml seems to let the other modules go green, but the tests for database_cleanup are red. \n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# \u00a9 2014-2016 Therp BV <http://therp.nl>\n# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).\nimport logging\nfrom openerp import _, api, fields, models\nfrom openerp.exceptions import AccessDenied\n\n\nclass CleanupPurgeLine(models.AbstractModel):\n \"\"\" Abstract base class for the purge wizard lines \"\"\"\n _name = 'cleanup.purge.line'\n _order = 'name'\n\n name = fields.Char('Name', readonly=True)\n purged = fields.Boolean('Purged', readonly=True)\n wizard_id = fields.Many2one('cleanup.purge.wizard')\n\n logger = logging.getLogger('openerp.addons.database_cleanup')\n\n @api.multi\n def purge(self):\n raise NotImplementedError\n\n @api.model\n def create(self, values):\n # make sure the user trying this is actually supposed to do it\n if not self.env.ref('database_cleanup.menu_database_cleanup')\\\n .parent_id._filter_visible_menus():\n raise AccessDenied\n return super(CleanupPurgeLine, self).create(values)\n\n\nclass PurgeWizard(models.AbstractModel):\n \"\"\" Abstract base class for the purge wizards \"\"\"\n _name = 'cleanup.purge.wizard'\n _description = 'Purge stuff'\n\n @api.model\n def default_get(self, fields_list):\n res = super(PurgeWizard, self).default_get(fields_list)\n if 'purge_line_ids' in fields_list:\n res['purge_line_ids'] = self.find()\n return res\n\n @api.multi\n def find(self):\n raise NotImplementedError\n\n @api.multi\n def purge_all(self):\n self.mapped('purge_line_ids').purge()\n return True\n\n @api.model\n def get_wizard_action(self):\n wizard = self.create({})\n return {\n 'type': 'ir.actions.act_window',\n 'name': wizard.display_name,\n 'views': [(False, 'form')],\n 'res_model': self._name,\n 'res_id': wizard.id,\n 'flags': {\n 'action_buttons': False,\n 'sidebar': False,\n },\n }\n\n @api.multi\n def select_lines(self):\n return {\n 'type': 'ir.actions.act_window',\n 'name': _('Select lines to purge'),\n 'views': [(False, 'tree'), (False, 'form')],\n 'res_model': self._fields['purge_line_ids'].comodel_name,\n 'domain': [('wizard_id', 'in', self.ids)],\n }\n\n @api.multi\n def name_get(self):\n return [\n (this.id, 
self._description)\n for this in self\n ]\n\n @api.model\n def create(self, values):\n # make sure the user trying this is actually supposed to do it\n if not self.env.ref('database_cleanup.menu_database_cleanup')\\\n .parent_id._filter_visible_menus():\n raise AccessDenied\n return super(PurgeWizard, self).create(values)\n\n purge_line_ids = fields.One2many('cleanup.purge.line', 'wizard_id')\n", "path": "database_cleanup/models/purge_wizard.py"}]}
1,502
258
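The membership test in the diff above can also be written with `res.users.has_group()`, which reads a little more directly; the snippet below is a hedged sketch of that equivalent guard, not part of the patch itself:

```python
# Sketch: the same access guard written with has_group(), equivalent to
# comparing the group record against self.env.user.groups_id.
from openerp import api, models
from openerp.exceptions import AccessDenied


class PurgeWizard(models.AbstractModel):
    _inherit = 'cleanup.purge.wizard'

    @api.model
    def create(self, values):
        # Only Settings/Administration users may create purge wizards.
        if not self.env.user.has_group('base.group_erp_manager'):
            raise AccessDenied
        return super(PurgeWizard, self).create(values)
```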
gh_patches_debug_5371
rasdani/github-patches
git_diff
getnikola__nikola-3511
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Incorrect time zone guessing on macOS On macOS Big Sur 11.2, we guess the full path to the zoneinfo file (instead of just the zone name). ``` $ nikola init … Time zone [private/var/db/timezone/tz/2021a.1.0/zoneinfo/Europe/Warsaw]: ``` </issue> <code> [start of nikola/packages/tzlocal/unix.py] 1 """Unix support for tzlocal.""" 2 import os 3 import re 4 5 import dateutil.tz 6 7 _cache_tz = None 8 9 10 def _try_tz_from_env(): 11 tzenv = os.environ.get("TZ") 12 if tzenv and tzenv[0] == ":": 13 tzenv = tzenv[1:] 14 try: 15 if tzenv: 16 dateutil.tz.gettz(tzenv) 17 return tzenv 18 except Exception: 19 pass 20 21 22 def _get_localzone(_root="/"): 23 """Try to find the local timezone configuration. 24 25 The parameter _root makes the function look for files like /etc/localtime 26 beneath the _root directory. This is primarily used by the tests. 27 In normal usage you call the function without parameters. 28 """ 29 tzenv = _try_tz_from_env() 30 if tzenv: 31 return tzenv 32 33 # Are we under Termux on Android? 34 if os.path.exists("/system/bin/getprop"): 35 import subprocess 36 37 androidtz = ( 38 subprocess.check_output(["getprop", "persist.sys.timezone"]) 39 .strip() 40 .decode() 41 ) 42 return androidtz 43 44 # Now look for distribution specific configuration files 45 # that contain the timezone name. 46 for configfile in ("etc/timezone", "var/db/zoneinfo"): 47 tzpath = os.path.join(_root, configfile) 48 try: 49 with open(tzpath, "rb") as tzfile: 50 data = tzfile.read() 51 52 # Issue #3 was that /etc/timezone was a zoneinfo file. 53 # That's a misconfiguration, but we need to handle it gracefully: 54 if data[:5] == b"TZif2": 55 continue 56 57 etctz = data.strip().decode() 58 if not etctz: 59 # Empty file, skip 60 continue 61 for etctz in data.decode().splitlines(): 62 # Get rid of host definitions and comments: 63 if " " in etctz: 64 etctz, dummy = etctz.split(" ", 1) 65 if "#" in etctz: 66 etctz, dummy = etctz.split("#", 1) 67 if not etctz: 68 continue 69 tz = etctz.replace(" ", "_") 70 return tz 71 72 except IOError: 73 # File doesn't exist or is a directory 74 continue 75 76 # CentOS has a ZONE setting in /etc/sysconfig/clock, 77 # OpenSUSE has a TIMEZONE setting in /etc/sysconfig/clock and 78 # Gentoo has a TIMEZONE setting in /etc/conf.d/clock 79 # We look through these files for a timezone: 80 81 zone_re = re.compile(r"\s*ZONE\s*=\s*\"") 82 timezone_re = re.compile(r"\s*TIMEZONE\s*=\s*\"") 83 end_re = re.compile('"') 84 85 for filename in ("etc/sysconfig/clock", "etc/conf.d/clock"): 86 tzpath = os.path.join(_root, filename) 87 try: 88 with open(tzpath, "rt") as tzfile: 89 data = tzfile.readlines() 90 91 for line in data: 92 # Look for the ZONE= setting. 93 match = zone_re.match(line) 94 if match is None: 95 # No ZONE= setting. Look for the TIMEZONE= setting. 
96 match = timezone_re.match(line) 97 if match is not None: 98 # Some setting existed 99 line = line[match.end():] 100 etctz = line[: end_re.search(line).start()] 101 102 # We found a timezone 103 tz = etctz.replace(" ", "_") 104 return tz 105 106 except IOError: 107 # File doesn't exist or is a directory 108 continue 109 110 # systemd distributions use symlinks that include the zone name, 111 # see manpage of localtime(5) and timedatectl(1) 112 tzpath = os.path.join(_root, "etc/localtime") 113 if os.path.exists(tzpath) and os.path.islink(tzpath): 114 tzpath = os.path.realpath(tzpath) 115 start = tzpath.find("/") + 1 116 while start != 0: 117 tzpath = tzpath[start:] 118 try: 119 dateutil.tz.gettz(tzpath) 120 return tzpath 121 except Exception: 122 pass 123 start = tzpath.find("/") + 1 124 125 # Nothing found, return UTC 126 return None 127 128 129 def get_localzone(): 130 """Get the computers configured local timezone, if any.""" 131 global _cache_tz 132 if _cache_tz is None: 133 _cache_tz = _get_localzone() 134 135 return _cache_tz 136 137 138 def reload_localzone(): 139 """Reload the cached localzone. You need to call this if the timezone has changed.""" 140 global _cache_tz 141 _cache_tz = _get_localzone() 142 return _cache_tz 143 [end of nikola/packages/tzlocal/unix.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/nikola/packages/tzlocal/unix.py b/nikola/packages/tzlocal/unix.py --- a/nikola/packages/tzlocal/unix.py +++ b/nikola/packages/tzlocal/unix.py @@ -116,8 +116,9 @@ while start != 0: tzpath = tzpath[start:] try: - dateutil.tz.gettz(tzpath) - return tzpath + tested_tz = dateutil.tz.gettz(tzpath) + if tested_tz: + return tzpath except Exception: pass start = tzpath.find("/") + 1
{"golden_diff": "diff --git a/nikola/packages/tzlocal/unix.py b/nikola/packages/tzlocal/unix.py\n--- a/nikola/packages/tzlocal/unix.py\n+++ b/nikola/packages/tzlocal/unix.py\n@@ -116,8 +116,9 @@\n while start != 0:\n tzpath = tzpath[start:]\n try:\n- dateutil.tz.gettz(tzpath)\n- return tzpath\n+ tested_tz = dateutil.tz.gettz(tzpath)\n+ if tested_tz:\n+ return tzpath\n except Exception:\n pass\n start = tzpath.find(\"/\") + 1\n", "issue": "Incorrect time zone guessing on macOS\nOn macOS Big Sur 11.2, we guess the full path to the zoneinfo file (instead of just the zone name).\r\n\r\n```\r\n$ nikola init\r\n\u2026\r\nTime zone [private/var/db/timezone/tz/2021a.1.0/zoneinfo/Europe/Warsaw]:\r\n```\n", "before_files": [{"content": "\"\"\"Unix support for tzlocal.\"\"\"\nimport os\nimport re\n\nimport dateutil.tz\n\n_cache_tz = None\n\n\ndef _try_tz_from_env():\n tzenv = os.environ.get(\"TZ\")\n if tzenv and tzenv[0] == \":\":\n tzenv = tzenv[1:]\n try:\n if tzenv:\n dateutil.tz.gettz(tzenv)\n return tzenv\n except Exception:\n pass\n\n\ndef _get_localzone(_root=\"/\"):\n \"\"\"Try to find the local timezone configuration.\n\n The parameter _root makes the function look for files like /etc/localtime\n beneath the _root directory. This is primarily used by the tests.\n In normal usage you call the function without parameters.\n \"\"\"\n tzenv = _try_tz_from_env()\n if tzenv:\n return tzenv\n\n # Are we under Termux on Android?\n if os.path.exists(\"/system/bin/getprop\"):\n import subprocess\n\n androidtz = (\n subprocess.check_output([\"getprop\", \"persist.sys.timezone\"])\n .strip()\n .decode()\n )\n return androidtz\n\n # Now look for distribution specific configuration files\n # that contain the timezone name.\n for configfile in (\"etc/timezone\", \"var/db/zoneinfo\"):\n tzpath = os.path.join(_root, configfile)\n try:\n with open(tzpath, \"rb\") as tzfile:\n data = tzfile.read()\n\n # Issue #3 was that /etc/timezone was a zoneinfo file.\n # That's a misconfiguration, but we need to handle it gracefully:\n if data[:5] == b\"TZif2\":\n continue\n\n etctz = data.strip().decode()\n if not etctz:\n # Empty file, skip\n continue\n for etctz in data.decode().splitlines():\n # Get rid of host definitions and comments:\n if \" \" in etctz:\n etctz, dummy = etctz.split(\" \", 1)\n if \"#\" in etctz:\n etctz, dummy = etctz.split(\"#\", 1)\n if not etctz:\n continue\n tz = etctz.replace(\" \", \"_\")\n return tz\n\n except IOError:\n # File doesn't exist or is a directory\n continue\n\n # CentOS has a ZONE setting in /etc/sysconfig/clock,\n # OpenSUSE has a TIMEZONE setting in /etc/sysconfig/clock and\n # Gentoo has a TIMEZONE setting in /etc/conf.d/clock\n # We look through these files for a timezone:\n\n zone_re = re.compile(r\"\\s*ZONE\\s*=\\s*\\\"\")\n timezone_re = re.compile(r\"\\s*TIMEZONE\\s*=\\s*\\\"\")\n end_re = re.compile('\"')\n\n for filename in (\"etc/sysconfig/clock\", \"etc/conf.d/clock\"):\n tzpath = os.path.join(_root, filename)\n try:\n with open(tzpath, \"rt\") as tzfile:\n data = tzfile.readlines()\n\n for line in data:\n # Look for the ZONE= setting.\n match = zone_re.match(line)\n if match is None:\n # No ZONE= setting. 
Look for the TIMEZONE= setting.\n match = timezone_re.match(line)\n if match is not None:\n # Some setting existed\n line = line[match.end():]\n etctz = line[: end_re.search(line).start()]\n\n # We found a timezone\n tz = etctz.replace(\" \", \"_\")\n return tz\n\n except IOError:\n # File doesn't exist or is a directory\n continue\n\n # systemd distributions use symlinks that include the zone name,\n # see manpage of localtime(5) and timedatectl(1)\n tzpath = os.path.join(_root, \"etc/localtime\")\n if os.path.exists(tzpath) and os.path.islink(tzpath):\n tzpath = os.path.realpath(tzpath)\n start = tzpath.find(\"/\") + 1\n while start != 0:\n tzpath = tzpath[start:]\n try:\n dateutil.tz.gettz(tzpath)\n return tzpath\n except Exception:\n pass\n start = tzpath.find(\"/\") + 1\n\n # Nothing found, return UTC\n return None\n\n\ndef get_localzone():\n \"\"\"Get the computers configured local timezone, if any.\"\"\"\n global _cache_tz\n if _cache_tz is None:\n _cache_tz = _get_localzone()\n\n return _cache_tz\n\n\ndef reload_localzone():\n \"\"\"Reload the cached localzone. You need to call this if the timezone has changed.\"\"\"\n global _cache_tz\n _cache_tz = _get_localzone()\n return _cache_tz\n", "path": "nikola/packages/tzlocal/unix.py"}]}
2,011
150
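The nikola fix above accepts a path suffix only when `dateutil.tz.gettz()` actually returns a tzinfo object: the old code returned the first suffix as long as `gettz()` did not raise, even when it returned `None`, which is why the macOS `/etc/localtime` symlink produced a long zoneinfo bundle path instead of a zone name. A stand-alone sketch of the guarded suffix walk (helper name is mine; assumes python-dateutil is installed):

```python
import dateutil.tz


def zone_name_from_symlink(tzpath):
    # Drop leading path components one at a time and return the first
    # suffix that dateutil recognises as a real time zone, if any.
    start = tzpath.find("/") + 1
    while start != 0:
        tzpath = tzpath[start:]
        if dateutil.tz.gettz(tzpath) is not None:
            return tzpath
        start = tzpath.find("/") + 1
    return None


# ".../zoneinfo/Europe/Warsaw" should yield "Europe/Warsaw", not the full path.
```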
gh_patches_debug_26724
rasdani/github-patches
git_diff
ivy-llc__ivy-14028
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> diag_indices_from mask_indices </issue> <code> [start of ivy/functional/frontends/jax/numpy/indexing.py] 1 # local 2 import ivy 3 from ivy.functional.frontends.jax.func_wrapper import ( 4 to_ivy_arrays_and_back, 5 ) 6 7 8 @to_ivy_arrays_and_back 9 def diagonal(a, offset=0, axis1=0, axis2=1): 10 return ivy.diagonal(a, offset=offset, axis1=axis1, axis2=axis2) 11 12 13 @to_ivy_arrays_and_back 14 def diag(v, k=0): 15 return ivy.diag(v, k=k) 16 17 18 @to_ivy_arrays_and_back 19 def diag_indices(n, ndim=2): 20 idx = ivy.arange(n, dtype=int) 21 return (idx,) * ndim 22 23 24 # take_along_axis 25 @to_ivy_arrays_and_back 26 def take_along_axis(arr, indices, axis, mode="fill"): 27 return ivy.take_along_axis(arr, indices, axis, mode=mode) 28 29 30 @to_ivy_arrays_and_back 31 def tril_indices(n_rows, n_cols=None, k=0): 32 return ivy.tril_indices(n_rows, n_cols, k) 33 34 35 @to_ivy_arrays_and_back 36 def triu_indices(n, k=0, m=None): 37 return ivy.triu_indices(n, m, k) 38 39 40 @to_ivy_arrays_and_back 41 def triu_indices_from(arr, k=0): 42 return ivy.triu_indices(arr.shape[-2], arr.shape[-1], k) 43 44 45 def tril_indices_from(arr, k=0): 46 return ivy.tril_indices(arr.shape[-2], arr.shape[-1], k) 47 48 49 # unravel_index 50 @to_ivy_arrays_and_back 51 def unravel_index(indices, shape): 52 ret = [x.astype("int64") for x in ivy.unravel_index(indices, shape)] 53 return tuple(ret) 54 [end of ivy/functional/frontends/jax/numpy/indexing.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ivy/functional/frontends/jax/numpy/indexing.py b/ivy/functional/frontends/jax/numpy/indexing.py --- a/ivy/functional/frontends/jax/numpy/indexing.py +++ b/ivy/functional/frontends/jax/numpy/indexing.py @@ -1,3 +1,6 @@ +# global +import inspect + # local import ivy from ivy.functional.frontends.jax.func_wrapper import ( @@ -42,6 +45,7 @@ return ivy.triu_indices(arr.shape[-2], arr.shape[-1], k) +@to_ivy_arrays_and_back def tril_indices_from(arr, k=0): return ivy.tril_indices(arr.shape[-2], arr.shape[-1], k) @@ -51,3 +55,28 @@ def unravel_index(indices, shape): ret = [x.astype("int64") for x in ivy.unravel_index(indices, shape)] return tuple(ret) + + +@to_ivy_arrays_and_back +def mask_indices(n, mask_func, k=0): + mask_func_obj = inspect.unwrap(mask_func) + mask_func_name = mask_func_obj.__name__ + try: + ivy_mask_func_obj = getattr(ivy.functional.frontends.jax.numpy, mask_func_name) + a = ivy.ones((n, n)) + mask = ivy_mask_func_obj(a, k=k) + indices = ivy.argwhere(mask.ivy_array) + return indices[:, 0], indices[:, 1] + except AttributeError as e: + print(f"Attribute error: {e}") + + +@to_ivy_arrays_and_back +def diag_indices_from(arr): + print(arr) + n = arr.shape[0] + ndim = ivy.get_num_dims(arr) + if not all(arr.shape[i] == n for i in range(ndim)): + raise ValueError("All dimensions of input must be of equal length") + idx = ivy.arange(n, dtype=int) + return (idx,) * ndim
{"golden_diff": "diff --git a/ivy/functional/frontends/jax/numpy/indexing.py b/ivy/functional/frontends/jax/numpy/indexing.py\n--- a/ivy/functional/frontends/jax/numpy/indexing.py\n+++ b/ivy/functional/frontends/jax/numpy/indexing.py\n@@ -1,3 +1,6 @@\n+# global\n+import inspect\n+\n # local\n import ivy\n from ivy.functional.frontends.jax.func_wrapper import (\n@@ -42,6 +45,7 @@\n return ivy.triu_indices(arr.shape[-2], arr.shape[-1], k)\n \n \n+@to_ivy_arrays_and_back\n def tril_indices_from(arr, k=0):\n return ivy.tril_indices(arr.shape[-2], arr.shape[-1], k)\n \n@@ -51,3 +55,28 @@\n def unravel_index(indices, shape):\n ret = [x.astype(\"int64\") for x in ivy.unravel_index(indices, shape)]\n return tuple(ret)\n+\n+\n+@to_ivy_arrays_and_back\n+def mask_indices(n, mask_func, k=0):\n+ mask_func_obj = inspect.unwrap(mask_func)\n+ mask_func_name = mask_func_obj.__name__\n+ try:\n+ ivy_mask_func_obj = getattr(ivy.functional.frontends.jax.numpy, mask_func_name)\n+ a = ivy.ones((n, n))\n+ mask = ivy_mask_func_obj(a, k=k)\n+ indices = ivy.argwhere(mask.ivy_array)\n+ return indices[:, 0], indices[:, 1]\n+ except AttributeError as e:\n+ print(f\"Attribute error: {e}\")\n+\n+\n+@to_ivy_arrays_and_back\n+def diag_indices_from(arr):\n+ print(arr)\n+ n = arr.shape[0]\n+ ndim = ivy.get_num_dims(arr)\n+ if not all(arr.shape[i] == n for i in range(ndim)):\n+ raise ValueError(\"All dimensions of input must be of equal length\")\n+ idx = ivy.arange(n, dtype=int)\n+ return (idx,) * ndim\n", "issue": "diag_indices_from\n\nmask_indices\n\n", "before_files": [{"content": "# local\nimport ivy\nfrom ivy.functional.frontends.jax.func_wrapper import (\n to_ivy_arrays_and_back,\n)\n\n\n@to_ivy_arrays_and_back\ndef diagonal(a, offset=0, axis1=0, axis2=1):\n return ivy.diagonal(a, offset=offset, axis1=axis1, axis2=axis2)\n\n\n@to_ivy_arrays_and_back\ndef diag(v, k=0):\n return ivy.diag(v, k=k)\n\n\n@to_ivy_arrays_and_back\ndef diag_indices(n, ndim=2):\n idx = ivy.arange(n, dtype=int)\n return (idx,) * ndim\n\n\n# take_along_axis\n@to_ivy_arrays_and_back\ndef take_along_axis(arr, indices, axis, mode=\"fill\"):\n return ivy.take_along_axis(arr, indices, axis, mode=mode)\n\n\n@to_ivy_arrays_and_back\ndef tril_indices(n_rows, n_cols=None, k=0):\n return ivy.tril_indices(n_rows, n_cols, k)\n\n\n@to_ivy_arrays_and_back\ndef triu_indices(n, k=0, m=None):\n return ivy.triu_indices(n, m, k)\n\n\n@to_ivy_arrays_and_back\ndef triu_indices_from(arr, k=0):\n return ivy.triu_indices(arr.shape[-2], arr.shape[-1], k)\n\n\ndef tril_indices_from(arr, k=0):\n return ivy.tril_indices(arr.shape[-2], arr.shape[-1], k)\n\n\n# unravel_index\n@to_ivy_arrays_and_back\ndef unravel_index(indices, shape):\n ret = [x.astype(\"int64\") for x in ivy.unravel_index(indices, shape)]\n return tuple(ret)\n", "path": "ivy/functional/frontends/jax/numpy/indexing.py"}]}
1,053
465
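The ivy diff above implements `mask_indices` by building an all-ones square array, applying the mask function, and reading off the surviving coordinates. The same idea expressed directly in NumPy, as a rough sketch (function name is mine, not part of the record):

```python
import numpy as np


def mask_indices_sketch(n, mask_func, k=0):
    a = np.ones((n, n))
    mask = mask_func(a, k)           # e.g. np.triu or np.tril
    rows, cols = np.nonzero(mask)    # coordinates where the mask kept a 1
    return rows, cols


rows, cols = mask_indices_sketch(3, np.triu)   # upper-triangle indices of a 3x3
```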
gh_patches_debug_1501
rasdani/github-patches
git_diff
googleapis__google-cloud-python-6332
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Release 'api_core-1.6.0a2'? or 'api_core-1.5.1'? I'd like to use the changes from https://github.com/googleapis/google-cloud-python/pull/6310 in the library I'm working on. Not sure about the version number for that one, since: - I don't know what else has been released since 1.6.0a1 - I don't know what is intended by 1.6.0a1 in https://github.com/googleapis/google-cloud-python/pull/6267 (is it a pre-release?) </issue> <code> [start of api_core/setup.py] 1 # Copyright 2018 Google LLC 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 import io 16 import os 17 18 import setuptools 19 20 21 # Package metadata. 22 23 name = 'google-api-core' 24 description = 'Google API client core library' 25 version = '1.6.0a1' 26 # Should be one of: 27 # 'Development Status :: 3 - Alpha' 28 # 'Development Status :: 4 - Beta' 29 # 'Development Status :: 5 - Production/Stable' 30 release_status = 'Development Status :: 5 - Production/Stable' 31 dependencies = [ 32 'googleapis-common-protos<2.0dev,>=1.5.3', 33 'protobuf>=3.4.0', 34 'google-auth<2.0.0dev,>=0.4.0', 35 'requests<3.0.0dev,>=2.18.0', 36 'setuptools>=34.0.0', 37 'six>=1.10.0', 38 'pytz', 39 'futures>=3.2.0;python_version<"3.2"' 40 ] 41 extras = { 42 'grpc': 'grpcio>=1.8.2', 43 'grpcio-gcp': 'grpcio-gcp>=0.2.2' 44 } 45 46 47 # Setup boilerplate below this line. 48 49 package_root = os.path.abspath(os.path.dirname(__file__)) 50 51 readme_filename = os.path.join(package_root, 'README.rst') 52 with io.open(readme_filename, encoding='utf-8') as readme_file: 53 readme = readme_file.read() 54 55 # Only include packages under the 'google' namespace. Do not include tests, 56 # benchmarks, etc. 57 packages = [ 58 package for package in setuptools.find_packages() 59 if package.startswith('google')] 60 61 # Determine which namespaces are needed. 
62 namespaces = ['google'] 63 if 'google.cloud' in packages: 64 namespaces.append('google.cloud') 65 66 67 setuptools.setup( 68 name=name, 69 version=version, 70 description=description, 71 long_description=readme, 72 author='Google LLC', 73 author_email='[email protected]', 74 license='Apache 2.0', 75 url='https://github.com/GoogleCloudPlatform/google-cloud-python', 76 classifiers=[ 77 release_status, 78 'Intended Audience :: Developers', 79 'License :: OSI Approved :: Apache Software License', 80 'Programming Language :: Python', 81 'Programming Language :: Python :: 2', 82 'Programming Language :: Python :: 2.7', 83 'Programming Language :: Python :: 3', 84 'Programming Language :: Python :: 3.5', 85 'Programming Language :: Python :: 3.6', 86 'Programming Language :: Python :: 3.7', 87 'Operating System :: OS Independent', 88 'Topic :: Internet', 89 ], 90 platforms='Posix; MacOS X; Windows', 91 packages=packages, 92 namespace_packages=namespaces, 93 install_requires=dependencies, 94 extras_require=extras, 95 include_package_data=True, 96 zip_safe=False, 97 ) 98 [end of api_core/setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/api_core/setup.py b/api_core/setup.py --- a/api_core/setup.py +++ b/api_core/setup.py @@ -22,7 +22,7 @@ name = 'google-api-core' description = 'Google API client core library' -version = '1.6.0a1' +version = '1.5.1' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'
{"golden_diff": "diff --git a/api_core/setup.py b/api_core/setup.py\n--- a/api_core/setup.py\n+++ b/api_core/setup.py\n@@ -22,7 +22,7 @@\n \n name = 'google-api-core'\n description = 'Google API client core library'\n-version = '1.6.0a1'\n+version = '1.5.1'\n # Should be one of:\n # 'Development Status :: 3 - Alpha'\n # 'Development Status :: 4 - Beta'\n", "issue": "Release 'api_core-1.6.0a2'? or 'api_core-1.5.1'?\nI'd like to use the changes from https://github.com/googleapis/google-cloud-python/pull/6310 in the library I'm working on.\r\n\r\nNot sure about the version number for that one, since:\r\n\r\n- I don't know what else has been released since 1.6.0a1\r\n- I don't know what is intended by 1.6.0a1 in https://github.com/googleapis/google-cloud-python/pull/6267 (is it a pre-release?)\n", "before_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport io\nimport os\n\nimport setuptools\n\n\n# Package metadata.\n\nname = 'google-api-core'\ndescription = 'Google API client core library'\nversion = '1.6.0a1'\n# Should be one of:\n# 'Development Status :: 3 - Alpha'\n# 'Development Status :: 4 - Beta'\n# 'Development Status :: 5 - Production/Stable'\nrelease_status = 'Development Status :: 5 - Production/Stable'\ndependencies = [\n 'googleapis-common-protos<2.0dev,>=1.5.3',\n 'protobuf>=3.4.0',\n 'google-auth<2.0.0dev,>=0.4.0',\n 'requests<3.0.0dev,>=2.18.0',\n 'setuptools>=34.0.0',\n 'six>=1.10.0',\n 'pytz',\n 'futures>=3.2.0;python_version<\"3.2\"'\n]\nextras = {\n 'grpc': 'grpcio>=1.8.2',\n 'grpcio-gcp': 'grpcio-gcp>=0.2.2'\n}\n\n\n# Setup boilerplate below this line.\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, 'README.rst')\nwith io.open(readme_filename, encoding='utf-8') as readme_file:\n readme = readme_file.read()\n\n# Only include packages under the 'google' namespace. 
Do not include tests,\n# benchmarks, etc.\npackages = [\n package for package in setuptools.find_packages()\n if package.startswith('google')]\n\n# Determine which namespaces are needed.\nnamespaces = ['google']\nif 'google.cloud' in packages:\n namespaces.append('google.cloud')\n\n\nsetuptools.setup(\n name=name,\n version=version,\n description=description,\n long_description=readme,\n author='Google LLC',\n author_email='[email protected]',\n license='Apache 2.0',\n url='https://github.com/GoogleCloudPlatform/google-cloud-python',\n classifiers=[\n release_status,\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Operating System :: OS Independent',\n 'Topic :: Internet',\n ],\n platforms='Posix; MacOS X; Windows',\n packages=packages,\n namespace_packages=namespaces,\n install_requires=dependencies,\n extras_require=extras,\n include_package_data=True,\n zip_safe=False,\n)\n", "path": "api_core/setup.py"}]}
1,595
105
gh_patches_debug_22057
rasdani/github-patches
git_diff
facebookresearch__hydra-2713
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Hydra ray launcher requirements update # 🚀 Feature Request The requirements of `hydra-ray-launcher` are too restrictive. Please provide wider bounds. In particular, so that is compatible with the latest version of ray and with python 3.10 ``` install_requires=[ "boto3==1.22.6", "hydra-core>=1.1.2", "ray[default]==1.12.0", "aiohttp==3.8.1", "cloudpickle==2.0.0", "pickle5==0.0.11; python_version < '3.8'", ], ``` ## Motivation **Is your feature request related to a problem? Please describe.** There are multiple issues pointing to this problem #2407 , #2336 and many others ## Pitch **Describe the solution you'd like** <!-- A clear and concise description of what you want to happen. --> **Describe alternatives you've considered** <!-- A clear and concise description of any alternative solutions or features you've considered. --> **Are you willing to open a pull request?** (See [CONTRIBUTING](../../CONTRIBUTING.md)) ## Additional context Add any other context or screenshots about the feature request here. </issue> <code> [start of plugins/hydra_ray_launcher/hydra_plugins/hydra_ray_launcher/__init__.py] 1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved 2 3 __version__ = "1.2.0" 4 [end of plugins/hydra_ray_launcher/hydra_plugins/hydra_ray_launcher/__init__.py] [start of plugins/hydra_ray_launcher/setup.py] 1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved 2 # type: ignore 3 from pathlib import Path 4 5 from read_version import read_version 6 from setuptools import find_namespace_packages, setup 7 8 setup( 9 name="hydra-ray-launcher", 10 version=read_version("hydra_plugins/hydra_ray_launcher", "__init__.py"), 11 author="Jieru Hu", 12 author_email="[email protected]", 13 description="Hydra Ray launcher plugin", 14 long_description=(Path(__file__).parent / "README.md").read_text(), 15 long_description_content_type="text/markdown", 16 url="https://github.com/facebookresearch/hydra/", 17 packages=find_namespace_packages(include=["hydra_plugins.*"]), 18 classifiers=[ 19 "License :: OSI Approved :: MIT License", 20 "Programming Language :: Python :: 3.7", 21 "Programming Language :: Python :: 3.8", 22 "Programming Language :: Python :: 3.9", 23 "Operating System :: MacOS", 24 "Operating System :: POSIX :: Linux", 25 ], 26 install_requires=[ 27 "boto3==1.22.6", 28 "hydra-core>=1.1.2", 29 "ray[default]~=1.12", 30 "aiohttp==3.8.1", 31 "cloudpickle==2.0.0", 32 "pickle5==0.0.11; python_version < '3.8'", 33 ], 34 include_package_data=True, 35 ) 36 [end of plugins/hydra_ray_launcher/setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/plugins/hydra_ray_launcher/hydra_plugins/hydra_ray_launcher/__init__.py b/plugins/hydra_ray_launcher/hydra_plugins/hydra_ray_launcher/__init__.py --- a/plugins/hydra_ray_launcher/hydra_plugins/hydra_ray_launcher/__init__.py +++ b/plugins/hydra_ray_launcher/hydra_plugins/hydra_ray_launcher/__init__.py @@ -1,3 +1,3 @@ # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved -__version__ = "1.2.0" +__version__ = "1.2.1" diff --git a/plugins/hydra_ray_launcher/setup.py b/plugins/hydra_ray_launcher/setup.py --- a/plugins/hydra_ray_launcher/setup.py +++ b/plugins/hydra_ray_launcher/setup.py @@ -20,16 +20,18 @@ "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: MacOS", "Operating System :: POSIX :: Linux", ], install_requires=[ - "boto3==1.22.6", + "boto3", "hydra-core>=1.1.2", - "ray[default]~=1.12", - "aiohttp==3.8.1", - "cloudpickle==2.0.0", - "pickle5==0.0.11; python_version < '3.8'", + "ray[default]<3", + "aiohttp<4", + "cloudpickle<3", + "pickle5==0.0.12; python_version < '3.8'", ], include_package_data=True, )
{"golden_diff": "diff --git a/plugins/hydra_ray_launcher/hydra_plugins/hydra_ray_launcher/__init__.py b/plugins/hydra_ray_launcher/hydra_plugins/hydra_ray_launcher/__init__.py\n--- a/plugins/hydra_ray_launcher/hydra_plugins/hydra_ray_launcher/__init__.py\n+++ b/plugins/hydra_ray_launcher/hydra_plugins/hydra_ray_launcher/__init__.py\n@@ -1,3 +1,3 @@\n # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n \n-__version__ = \"1.2.0\"\n+__version__ = \"1.2.1\"\ndiff --git a/plugins/hydra_ray_launcher/setup.py b/plugins/hydra_ray_launcher/setup.py\n--- a/plugins/hydra_ray_launcher/setup.py\n+++ b/plugins/hydra_ray_launcher/setup.py\n@@ -20,16 +20,18 @@\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n+ \"Programming Language :: Python :: 3.10\",\n+ \"Programming Language :: Python :: 3.11\",\n \"Operating System :: MacOS\",\n \"Operating System :: POSIX :: Linux\",\n ],\n install_requires=[\n- \"boto3==1.22.6\",\n+ \"boto3\",\n \"hydra-core>=1.1.2\",\n- \"ray[default]~=1.12\",\n- \"aiohttp==3.8.1\",\n- \"cloudpickle==2.0.0\",\n- \"pickle5==0.0.11; python_version < '3.8'\",\n+ \"ray[default]<3\",\n+ \"aiohttp<4\",\n+ \"cloudpickle<3\",\n+ \"pickle5==0.0.12; python_version < '3.8'\",\n ],\n include_package_data=True,\n )\n", "issue": "Hydra ray launcher requirements update\n# \ud83d\ude80 Feature Request\r\n\r\nThe requirements of `hydra-ray-launcher` are too restrictive. Please provide wider bounds. In particular, so that is compatible with the latest version of ray and with python 3.10\r\n\r\n```\r\ninstall_requires=[\r\n \"boto3==1.22.6\",\r\n \"hydra-core>=1.1.2\",\r\n \"ray[default]==1.12.0\",\r\n \"aiohttp==3.8.1\",\r\n \"cloudpickle==2.0.0\",\r\n \"pickle5==0.0.11; python_version < '3.8'\",\r\n ],\r\n```\r\n\r\n## Motivation\r\n\r\n**Is your feature request related to a problem? Please describe.**\r\n\r\nThere are multiple issues pointing to this problem #2407 , #2336 and many others\r\n\r\n## Pitch\r\n\r\n**Describe the solution you'd like**\r\n<!-- A clear and concise description of what you want to happen. -->\r\n\r\n**Describe alternatives you've considered**\r\n<!-- A clear and concise description of any alternative solutions or features you've considered. -->\r\n\r\n**Are you willing to open a pull request?** (See [CONTRIBUTING](../../CONTRIBUTING.md))\r\n\r\n## Additional context\r\nAdd any other context or screenshots about the feature request here.\r\n\n", "before_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n__version__ = \"1.2.0\"\n", "path": "plugins/hydra_ray_launcher/hydra_plugins/hydra_ray_launcher/__init__.py"}, {"content": "# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved\n# type: ignore\nfrom pathlib import Path\n\nfrom read_version import read_version\nfrom setuptools import find_namespace_packages, setup\n\nsetup(\n name=\"hydra-ray-launcher\",\n version=read_version(\"hydra_plugins/hydra_ray_launcher\", \"__init__.py\"),\n author=\"Jieru Hu\",\n author_email=\"[email protected]\",\n description=\"Hydra Ray launcher plugin\",\n long_description=(Path(__file__).parent / \"README.md\").read_text(),\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/facebookresearch/hydra/\",\n packages=find_namespace_packages(include=[\"hydra_plugins.*\"]),\n classifiers=[\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Operating System :: MacOS\",\n \"Operating System :: POSIX :: Linux\",\n ],\n install_requires=[\n \"boto3==1.22.6\",\n \"hydra-core>=1.1.2\",\n \"ray[default]~=1.12\",\n \"aiohttp==3.8.1\",\n \"cloudpickle==2.0.0\",\n \"pickle5==0.0.11; python_version < '3.8'\",\n ],\n include_package_data=True,\n)\n", "path": "plugins/hydra_ray_launcher/setup.py"}]}
1,281
432
gh_patches_debug_1855
rasdani/github-patches
git_diff
flairNLP__flair-300
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Update torch version to 1.0.0 torch version 1.0.0 is available. Use torch version 1.0.0 in Flair. </issue> <code> [start of setup.py] 1 from setuptools import setup, find_packages 2 3 setup( 4 name='flair', 5 version='0.3.2', 6 description='A very simple framework for state-of-the-art NLP', 7 long_description=open("README.md", encoding='utf-8').read(), 8 long_description_content_type="text/markdown", 9 author='Alan Akbik', 10 author_email='[email protected]', 11 url='https://github.com/zalandoresearch/flair', 12 packages=find_packages(exclude='test'), # same as name 13 license='MIT', 14 install_requires=[ 15 'torch==0.4.1', 16 'gensim==3.4.0', 17 'typing==3.6.4', 18 'tqdm==4.26.0', 19 'segtok==1.5.7', 20 'matplotlib==3.0.0', 21 'mpld3==0.3', 22 'sklearn', 23 'sqlitedict==1.6.0', 24 'deprecated==1.2.4', 25 'hyperopt==0.1.1', 26 'pytorch-pretrained-bert==0.3.0' 27 ], 28 include_package_data=True, 29 python_requires='>=3.6', 30 ) 31 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ packages=find_packages(exclude='test'), # same as name license='MIT', install_requires=[ - 'torch==0.4.1', + 'torch==1.0.0', 'gensim==3.4.0', 'typing==3.6.4', 'tqdm==4.26.0',
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -12,7 +12,7 @@\n packages=find_packages(exclude='test'), # same as name\n license='MIT',\n install_requires=[\n- 'torch==0.4.1',\n+ 'torch==1.0.0',\n 'gensim==3.4.0',\n 'typing==3.6.4',\n 'tqdm==4.26.0',\n", "issue": "Update torch version to 1.0.0\ntorch version 1.0.0 is available. Use torch version 1.0.0 in Flair.\n", "before_files": [{"content": "from setuptools import setup, find_packages\n\nsetup(\n name='flair',\n version='0.3.2',\n description='A very simple framework for state-of-the-art NLP',\n long_description=open(\"README.md\", encoding='utf-8').read(),\n long_description_content_type=\"text/markdown\",\n author='Alan Akbik',\n author_email='[email protected]',\n url='https://github.com/zalandoresearch/flair',\n packages=find_packages(exclude='test'), # same as name\n license='MIT',\n install_requires=[\n 'torch==0.4.1',\n 'gensim==3.4.0',\n 'typing==3.6.4',\n 'tqdm==4.26.0',\n 'segtok==1.5.7',\n 'matplotlib==3.0.0',\n 'mpld3==0.3',\n 'sklearn',\n 'sqlitedict==1.6.0',\n 'deprecated==1.2.4',\n 'hyperopt==0.1.1',\n 'pytorch-pretrained-bert==0.3.0'\n ],\n include_package_data=True,\n python_requires='>=3.6',\n)\n", "path": "setup.py"}]}
892
113
gh_patches_debug_29724
rasdani/github-patches
git_diff
pallets__click-545
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Bash completion for MultiCommands returns subcommands twice until it stops. Python 2.7.10 broken in click >= 6 ok in click (5.1) ``` manage hello hello world world ``` </issue> <code> [start of click/_bashcomplete.py] 1 import os 2 import re 3 from .utils import echo 4 from .parser import split_arg_string 5 from .core import MultiCommand, Option 6 7 8 COMPLETION_SCRIPT = ''' 9 %(complete_func)s() { 10 COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\ 11 COMP_CWORD=$COMP_CWORD \\ 12 %(autocomplete_var)s=complete $1 ) ) 13 return 0 14 } 15 16 complete -F %(complete_func)s -o default %(script_names)s 17 ''' 18 19 _invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]') 20 21 22 def get_completion_script(prog_name, complete_var): 23 cf_name = _invalid_ident_char_re.sub('', prog_name.replace('-', '_')) 24 return (COMPLETION_SCRIPT % { 25 'complete_func': '_%s_completion' % cf_name, 26 'script_names': prog_name, 27 'autocomplete_var': complete_var, 28 }).strip() + ';' 29 30 31 def resolve_ctx(cli, prog_name, args): 32 ctx = cli.make_context(prog_name, args, resilient_parsing=True) 33 while ctx.args and isinstance(ctx.command, MultiCommand): 34 cmd = ctx.command.get_command(ctx, ctx.args[0]) 35 if cmd is None: 36 return None 37 ctx = cmd.make_context(ctx.args[0], ctx.args[1:], parent=ctx, 38 resilient_parsing=True) 39 return ctx 40 41 42 def do_complete(cli, prog_name): 43 cwords = split_arg_string(os.environ['COMP_WORDS']) 44 cword = int(os.environ['COMP_CWORD']) 45 args = cwords[1:cword] 46 try: 47 incomplete = cwords[cword] 48 except IndexError: 49 incomplete = '' 50 51 ctx = resolve_ctx(cli, prog_name, args) 52 if ctx is None: 53 return True 54 55 choices = [] 56 if incomplete and not incomplete[:1].isalnum(): 57 for param in ctx.command.params: 58 if not isinstance(param, Option): 59 continue 60 choices.extend(param.opts) 61 choices.extend(param.secondary_opts) 62 elif isinstance(ctx.command, MultiCommand): 63 choices.extend(ctx.command.list_commands(ctx)) 64 65 for item in choices: 66 if item.startswith(incomplete): 67 echo(item) 68 69 return True 70 71 72 def bashcomplete(cli, prog_name, complete_var, complete_instr): 73 if complete_instr == 'source': 74 echo(get_completion_script(prog_name, complete_var)) 75 return True 76 elif complete_instr == 'complete': 77 return do_complete(cli, prog_name) 78 return False 79 [end of click/_bashcomplete.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/click/_bashcomplete.py b/click/_bashcomplete.py --- a/click/_bashcomplete.py +++ b/click/_bashcomplete.py @@ -30,27 +30,19 @@ def resolve_ctx(cli, prog_name, args): ctx = cli.make_context(prog_name, args, resilient_parsing=True) - while ctx.args and isinstance(ctx.command, MultiCommand): - cmd = ctx.command.get_command(ctx, ctx.args[0]) + while ctx.args + ctx.protected_args and isinstance(ctx.command, MultiCommand): + a = ctx.args + ctx.protected_args + cmd = ctx.command.get_command(ctx, a[0]) if cmd is None: return None - ctx = cmd.make_context(ctx.args[0], ctx.args[1:], parent=ctx, - resilient_parsing=True) + ctx = cmd.make_context(a[0], a[1:], parent=ctx, resilient_parsing=True) return ctx -def do_complete(cli, prog_name): - cwords = split_arg_string(os.environ['COMP_WORDS']) - cword = int(os.environ['COMP_CWORD']) - args = cwords[1:cword] - try: - incomplete = cwords[cword] - except IndexError: - incomplete = '' - +def get_choices(cli, prog_name, args, incomplete): ctx = resolve_ctx(cli, prog_name, args) if ctx is None: - return True + return choices = [] if incomplete and not incomplete[:1].isalnum(): @@ -64,7 +56,20 @@ for item in choices: if item.startswith(incomplete): - echo(item) + yield item + + +def do_complete(cli, prog_name): + cwords = split_arg_string(os.environ['COMP_WORDS']) + cword = int(os.environ['COMP_CWORD']) + args = cwords[1:cword] + try: + incomplete = cwords[cword] + except IndexError: + incomplete = '' + + for item in get_choices(cli, prog_name, args, incomplete): + echo(item) return True
{"golden_diff": "diff --git a/click/_bashcomplete.py b/click/_bashcomplete.py\n--- a/click/_bashcomplete.py\n+++ b/click/_bashcomplete.py\n@@ -30,27 +30,19 @@\n \n def resolve_ctx(cli, prog_name, args):\n ctx = cli.make_context(prog_name, args, resilient_parsing=True)\n- while ctx.args and isinstance(ctx.command, MultiCommand):\n- cmd = ctx.command.get_command(ctx, ctx.args[0])\n+ while ctx.args + ctx.protected_args and isinstance(ctx.command, MultiCommand):\n+ a = ctx.args + ctx.protected_args\n+ cmd = ctx.command.get_command(ctx, a[0])\n if cmd is None:\n return None\n- ctx = cmd.make_context(ctx.args[0], ctx.args[1:], parent=ctx,\n- resilient_parsing=True)\n+ ctx = cmd.make_context(a[0], a[1:], parent=ctx, resilient_parsing=True)\n return ctx\n \n \n-def do_complete(cli, prog_name):\n- cwords = split_arg_string(os.environ['COMP_WORDS'])\n- cword = int(os.environ['COMP_CWORD'])\n- args = cwords[1:cword]\n- try:\n- incomplete = cwords[cword]\n- except IndexError:\n- incomplete = ''\n-\n+def get_choices(cli, prog_name, args, incomplete):\n ctx = resolve_ctx(cli, prog_name, args)\n if ctx is None:\n- return True\n+ return\n \n choices = []\n if incomplete and not incomplete[:1].isalnum():\n@@ -64,7 +56,20 @@\n \n for item in choices:\n if item.startswith(incomplete):\n- echo(item)\n+ yield item\n+\n+\n+def do_complete(cli, prog_name):\n+ cwords = split_arg_string(os.environ['COMP_WORDS'])\n+ cword = int(os.environ['COMP_CWORD'])\n+ args = cwords[1:cword]\n+ try:\n+ incomplete = cwords[cword]\n+ except IndexError:\n+ incomplete = ''\n+\n+ for item in get_choices(cli, prog_name, args, incomplete):\n+ echo(item)\n \n return True\n", "issue": "Bash completion for MultiCommands returns subcommands twice until it stops.\nPython 2.7.10\nbroken in click >= 6\nok in click (5.1)\n\n```\nmanage hello hello world world\n```\n\n", "before_files": [{"content": "import os\nimport re\nfrom .utils import echo\nfrom .parser import split_arg_string\nfrom .core import MultiCommand, Option\n\n\nCOMPLETION_SCRIPT = '''\n%(complete_func)s() {\n COMPREPLY=( $( env COMP_WORDS=\"${COMP_WORDS[*]}\" \\\\\n COMP_CWORD=$COMP_CWORD \\\\\n %(autocomplete_var)s=complete $1 ) )\n return 0\n}\n\ncomplete -F %(complete_func)s -o default %(script_names)s\n'''\n\n_invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]')\n\n\ndef get_completion_script(prog_name, complete_var):\n cf_name = _invalid_ident_char_re.sub('', prog_name.replace('-', '_'))\n return (COMPLETION_SCRIPT % {\n 'complete_func': '_%s_completion' % cf_name,\n 'script_names': prog_name,\n 'autocomplete_var': complete_var,\n }).strip() + ';'\n\n\ndef resolve_ctx(cli, prog_name, args):\n ctx = cli.make_context(prog_name, args, resilient_parsing=True)\n while ctx.args and isinstance(ctx.command, MultiCommand):\n cmd = ctx.command.get_command(ctx, ctx.args[0])\n if cmd is None:\n return None\n ctx = cmd.make_context(ctx.args[0], ctx.args[1:], parent=ctx,\n resilient_parsing=True)\n return ctx\n\n\ndef do_complete(cli, prog_name):\n cwords = split_arg_string(os.environ['COMP_WORDS'])\n cword = int(os.environ['COMP_CWORD'])\n args = cwords[1:cword]\n try:\n incomplete = cwords[cword]\n except IndexError:\n incomplete = ''\n\n ctx = resolve_ctx(cli, prog_name, args)\n if ctx is None:\n return True\n\n choices = []\n if incomplete and not incomplete[:1].isalnum():\n for param in ctx.command.params:\n if not isinstance(param, Option):\n continue\n choices.extend(param.opts)\n choices.extend(param.secondary_opts)\n elif isinstance(ctx.command, MultiCommand):\n 
choices.extend(ctx.command.list_commands(ctx))\n\n for item in choices:\n if item.startswith(incomplete):\n echo(item)\n\n return True\n\n\ndef bashcomplete(cli, prog_name, complete_var, complete_instr):\n if complete_instr == 'source':\n echo(get_completion_script(prog_name, complete_var))\n return True\n elif complete_instr == 'complete':\n return do_complete(cli, prog_name)\n return False\n", "path": "click/_bashcomplete.py"}]}
1,260
479
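The click fix above does two things: completion candidates come from a `get_choices` generator instead of being echoed inline, and subcommand resolution walks `ctx.args + ctx.protected_args`, so a group name the parser has already set aside is not resolved (and suggested) twice. A trimmed sketch of the resolution loop in the shape of the diff (assumes click's `Context`/`MultiCommand` internals of that era):

```python
from click import MultiCommand


def resolve_ctx(cli, prog_name, args):
    ctx = cli.make_context(prog_name, args, resilient_parsing=True)
    # protected_args typically carries the subcommand name the parser has
    # already claimed; including it keeps the walk from repeating itself.
    while ctx.args + ctx.protected_args and isinstance(ctx.command, MultiCommand):
        a = ctx.args + ctx.protected_args
        cmd = ctx.command.get_command(ctx, a[0])
        if cmd is None:
            return None
        ctx = cmd.make_context(a[0], a[1:], parent=ctx, resilient_parsing=True)
    return ctx
```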
gh_patches_debug_14200
rasdani/github-patches
git_diff
mitmproxy__mitmproxy-898
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Client Address is not displayed for flows from dumpfiles I have two clients connecting to mitmproxy now, and I'd like to see the client IP for each request in the "Detail" tab. Maybe show the server's IP too, since it can differ when connections come from different local IPs (e.g. 127.0.0.1 and 192.168.1.101). </issue> <code> [start of libmproxy/console/flowdetailview.py] 1 from __future__ import absolute_import 2 import urwid 3 from . import common, searchable 4 from .. import utils 5 6 7 def maybe_timestamp(base, attr): 8 if base and getattr(base, attr): 9 return utils.format_timestamp_with_milli(getattr(base, attr)) 10 else: 11 return "active" 12 pass 13 14 15 def flowdetails(state, flow): 16 text = [] 17 18 cc = flow.client_conn 19 sc = flow.server_conn 20 req = flow.request 21 resp = flow.response 22 23 if sc: 24 text.append(urwid.Text([("head", "Server Connection:")])) 25 parts = [ 26 ["Address", "%s:%s" % sc.address()], 27 ] 28 29 text.extend( 30 common.format_keyvals(parts, key="key", val="text", indent=4) 31 ) 32 33 c = sc.cert 34 if c: 35 text.append(urwid.Text([("head", "Server Certificate:")])) 36 parts = [ 37 ["Type", "%s, %s bits" % c.keyinfo], 38 ["SHA1 digest", c.digest("sha1")], 39 ["Valid to", str(c.notafter)], 40 ["Valid from", str(c.notbefore)], 41 ["Serial", str(c.serial)], 42 [ 43 "Subject", 44 urwid.BoxAdapter( 45 urwid.ListBox( 46 common.format_keyvals( 47 c.subject, 48 key="highlight", 49 val="text" 50 ) 51 ), 52 len(c.subject) 53 ) 54 ], 55 [ 56 "Issuer", 57 urwid.BoxAdapter( 58 urwid.ListBox( 59 common.format_keyvals( 60 c.issuer, key="highlight", val="text" 61 ) 62 ), 63 len(c.issuer) 64 ) 65 ] 66 ] 67 68 if c.altnames: 69 parts.append( 70 [ 71 "Alt names", 72 ", ".join(c.altnames) 73 ] 74 ) 75 text.extend( 76 common.format_keyvals(parts, key="key", val="text", indent=4) 77 ) 78 79 if cc: 80 text.append(urwid.Text([("head", "Client Connection:")])) 81 82 parts = [ 83 ["Address", "%s:%s" % cc.address()], 84 # ["Requests", "%s"%cc.requestcount], 85 ] 86 87 text.extend( 88 common.format_keyvals(parts, key="key", val="text", indent=4) 89 ) 90 91 parts = [] 92 93 parts.append( 94 [ 95 "Client conn. established", 96 maybe_timestamp(cc, "timestamp_start") 97 ] 98 ) 99 parts.append( 100 [ 101 "Server conn. initiated", 102 maybe_timestamp(sc, "timestamp_start") 103 ] 104 ) 105 parts.append( 106 [ 107 "Server conn. TCP handshake", 108 maybe_timestamp(sc, "timestamp_tcp_setup") 109 ] 110 ) 111 if sc.ssl_established: 112 parts.append( 113 [ 114 "Server conn. SSL handshake", 115 maybe_timestamp(sc, "timestamp_ssl_setup") 116 ] 117 ) 118 parts.append( 119 [ 120 "Client conn. 
SSL handshake", 121 maybe_timestamp(cc, "timestamp_ssl_setup") 122 ] 123 ) 124 parts.append( 125 [ 126 "First request byte", 127 maybe_timestamp(req, "timestamp_start") 128 ] 129 ) 130 parts.append( 131 [ 132 "Request complete", 133 maybe_timestamp(req, "timestamp_end") 134 ] 135 ) 136 parts.append( 137 [ 138 "First response byte", 139 maybe_timestamp(resp, "timestamp_start") 140 ] 141 ) 142 parts.append( 143 [ 144 "Response complete", 145 maybe_timestamp(resp, "timestamp_end") 146 ] 147 ) 148 149 # sort operations by timestamp 150 parts = sorted(parts, key=lambda p: p[1]) 151 152 text.append(urwid.Text([("head", "Timing:")])) 153 text.extend(common.format_keyvals(parts, key="key", val="text", indent=4)) 154 return searchable.Searchable(state, text) 155 [end of libmproxy/console/flowdetailview.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/libmproxy/console/flowdetailview.py b/libmproxy/console/flowdetailview.py --- a/libmproxy/console/flowdetailview.py +++ b/libmproxy/console/flowdetailview.py @@ -20,7 +20,7 @@ req = flow.request resp = flow.response - if sc: + if sc is not None: text.append(urwid.Text([("head", "Server Connection:")])) parts = [ ["Address", "%s:%s" % sc.address()], @@ -76,7 +76,7 @@ common.format_keyvals(parts, key="key", val="text", indent=4) ) - if cc: + if cc is not None: text.append(urwid.Text([("head", "Client Connection:")])) parts = [
{"golden_diff": "diff --git a/libmproxy/console/flowdetailview.py b/libmproxy/console/flowdetailview.py\n--- a/libmproxy/console/flowdetailview.py\n+++ b/libmproxy/console/flowdetailview.py\n@@ -20,7 +20,7 @@\n req = flow.request\n resp = flow.response\n \n- if sc:\n+ if sc is not None:\n text.append(urwid.Text([(\"head\", \"Server Connection:\")]))\n parts = [\n [\"Address\", \"%s:%s\" % sc.address()],\n@@ -76,7 +76,7 @@\n common.format_keyvals(parts, key=\"key\", val=\"text\", indent=4)\n )\n \n- if cc:\n+ if cc is not None:\n text.append(urwid.Text([(\"head\", \"Client Connection:\")]))\n \n parts = [\n", "issue": "Client Address is not displayed for flows from dumpfiles\nI have two clients connecting to mitmproxy now, and I'd like to see the client IP for each request in the \"Detail\" tab.\n\nMaybe show the server's IP too, since it can differ when connections come from different local IPs (e.g. 127.0.0.1 and 192.168.1.101).\n\n", "before_files": [{"content": "from __future__ import absolute_import\nimport urwid\nfrom . import common, searchable\nfrom .. import utils\n\n\ndef maybe_timestamp(base, attr):\n if base and getattr(base, attr):\n return utils.format_timestamp_with_milli(getattr(base, attr))\n else:\n return \"active\"\n pass\n\n\ndef flowdetails(state, flow):\n text = []\n\n cc = flow.client_conn\n sc = flow.server_conn\n req = flow.request\n resp = flow.response\n\n if sc:\n text.append(urwid.Text([(\"head\", \"Server Connection:\")]))\n parts = [\n [\"Address\", \"%s:%s\" % sc.address()],\n ]\n\n text.extend(\n common.format_keyvals(parts, key=\"key\", val=\"text\", indent=4)\n )\n\n c = sc.cert\n if c:\n text.append(urwid.Text([(\"head\", \"Server Certificate:\")]))\n parts = [\n [\"Type\", \"%s, %s bits\" % c.keyinfo],\n [\"SHA1 digest\", c.digest(\"sha1\")],\n [\"Valid to\", str(c.notafter)],\n [\"Valid from\", str(c.notbefore)],\n [\"Serial\", str(c.serial)],\n [\n \"Subject\",\n urwid.BoxAdapter(\n urwid.ListBox(\n common.format_keyvals(\n c.subject,\n key=\"highlight\",\n val=\"text\"\n )\n ),\n len(c.subject)\n )\n ],\n [\n \"Issuer\",\n urwid.BoxAdapter(\n urwid.ListBox(\n common.format_keyvals(\n c.issuer, key=\"highlight\", val=\"text\"\n )\n ),\n len(c.issuer)\n )\n ]\n ]\n\n if c.altnames:\n parts.append(\n [\n \"Alt names\",\n \", \".join(c.altnames)\n ]\n )\n text.extend(\n common.format_keyvals(parts, key=\"key\", val=\"text\", indent=4)\n )\n\n if cc:\n text.append(urwid.Text([(\"head\", \"Client Connection:\")]))\n\n parts = [\n [\"Address\", \"%s:%s\" % cc.address()],\n # [\"Requests\", \"%s\"%cc.requestcount],\n ]\n\n text.extend(\n common.format_keyvals(parts, key=\"key\", val=\"text\", indent=4)\n )\n\n parts = []\n\n parts.append(\n [\n \"Client conn. established\",\n maybe_timestamp(cc, \"timestamp_start\")\n ]\n )\n parts.append(\n [\n \"Server conn. initiated\",\n maybe_timestamp(sc, \"timestamp_start\")\n ]\n )\n parts.append(\n [\n \"Server conn. TCP handshake\",\n maybe_timestamp(sc, \"timestamp_tcp_setup\")\n ]\n )\n if sc.ssl_established:\n parts.append(\n [\n \"Server conn. SSL handshake\",\n maybe_timestamp(sc, \"timestamp_ssl_setup\")\n ]\n )\n parts.append(\n [\n \"Client conn. 
SSL handshake\",\n maybe_timestamp(cc, \"timestamp_ssl_setup\")\n ]\n )\n parts.append(\n [\n \"First request byte\",\n maybe_timestamp(req, \"timestamp_start\")\n ]\n )\n parts.append(\n [\n \"Request complete\",\n maybe_timestamp(req, \"timestamp_end\")\n ]\n )\n parts.append(\n [\n \"First response byte\",\n maybe_timestamp(resp, \"timestamp_start\")\n ]\n )\n parts.append(\n [\n \"Response complete\",\n maybe_timestamp(resp, \"timestamp_end\")\n ]\n )\n\n # sort operations by timestamp\n parts = sorted(parts, key=lambda p: p[1])\n\n text.append(urwid.Text([(\"head\", \"Timing:\")]))\n text.extend(common.format_keyvals(parts, key=\"key\", val=\"text\", indent=4))\n return searchable.Searchable(state, text)\n", "path": "libmproxy/console/flowdetailview.py"}]}
1,803
185
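The mitmproxy diff above replaces `if sc:` / `if cc:` with explicit `is not None` checks. The difference matters whenever an object defines its own truthiness — for instance, tied to a live socket — because a connection restored from a dump file can then be falsy while still carrying an address. A hypothetical illustration of the pitfall (class and fields invented for the example):

```python
class ClientConn:
    def __init__(self, address, connected=False):
        self.address = address
        self.connected = connected

    def __bool__(self):
        # Truthiness reflects the live socket, not whether data is present.
        return self.connected


cc = ClientConn(("192.168.1.101", 50100))   # read back from a dump file

if cc:                 # False: the detail view would silently skip the address
    print("truthy:", cc.address)
if cc is not None:     # True: the address is rendered as intended
    print("present:", cc.address)
```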
gh_patches_debug_16006
rasdani/github-patches
git_diff
openvinotoolkit__datumaro-275
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> CLI help usage is misleading Recommendation of usage in cli help message is misleading. **Steps:** ``` datum -h ``` **Expected result:** Correct form for usage recommendation **Current result:** ``` usage: datumaro [-h] [--version] [--loglevel LOGLEVEL] ``` ``` Run 'datumaro COMMAND --help``` **Notes:** datumaro is not correct form, and results in "command not found". The same is command specific help messages. </issue> <code> [start of datumaro/cli/__main__.py] 1 2 # Copyright (C) 2019-2021 Intel Corporation 3 # 4 # SPDX-License-Identifier: MIT 5 6 import argparse 7 import logging as log 8 import sys 9 10 from . import contexts, commands 11 from .util import CliException, add_subparser 12 from ..version import VERSION 13 14 15 _log_levels = { 16 'debug': log.DEBUG, 17 'info': log.INFO, 18 'warning': log.WARNING, 19 'error': log.ERROR, 20 'critical': log.CRITICAL 21 } 22 23 def loglevel(name): 24 return _log_levels[name] 25 26 class _LogManager: 27 @classmethod 28 def init_logger(cls, args=None): 29 # Define minimalistic parser only to obtain loglevel 30 parser = argparse.ArgumentParser(add_help=False) 31 cls._define_loglevel_option(parser) 32 args, _ = parser.parse_known_args(args) 33 34 log.basicConfig(format='%(asctime)s %(levelname)s: %(message)s', 35 level=args.loglevel) 36 37 @staticmethod 38 def _define_loglevel_option(parser): 39 parser.add_argument('--loglevel', type=loglevel, default='info', 40 help="Logging level (options: %s; default: %s)" % \ 41 (', '.join(_log_levels.keys()), "%(default)s")) 42 return parser 43 44 45 def _make_subcommands_help(commands, help_line_start=0): 46 desc = "" 47 for command_name, _, command_help in commands: 48 desc += (" %-" + str(max(0, help_line_start - 2 - 1)) + "s%s\n") % \ 49 (command_name, command_help) 50 return desc 51 52 def make_parser(): 53 parser = argparse.ArgumentParser(prog="datumaro", 54 description="Dataset Framework", 55 formatter_class=argparse.RawDescriptionHelpFormatter) 56 57 parser.add_argument('--version', action='version', version=VERSION) 58 _LogManager._define_loglevel_option(parser) 59 60 known_contexts = [ 61 ('project', contexts.project, "Actions with project (deprecated)"), 62 ('source', contexts.source, "Actions with data sources"), 63 ('model', contexts.model, "Actions with models"), 64 ] 65 known_commands = [ 66 ('create', commands.create, "Create project"), 67 ('import', commands.import_, "Create project from existing dataset"), 68 ('add', commands.add, "Add data source to project"), 69 ('remove', commands.remove, "Remove data source from project"), 70 ('export', commands.export, "Export project in some format"), 71 ('filter', commands.filter, "Filter project"), 72 ('transform', commands.transform, "Transform project"), 73 ('merge', commands.merge, "Merge projects"), 74 ('convert', commands.convert, "Convert dataset into another format"), 75 ('diff', commands.diff, "Compare projects with intersection"), 76 ('ediff', commands.ediff, "Compare projects for equality"), 77 ('stats', commands.stats, "Compute project statistics"), 78 ('info', commands.info, "Print project info"), 79 ('explain', commands.explain, "Run Explainable AI algorithm for model"), 80 ('validate', commands.validate, "Validate project") 81 ] 82 83 # Argparse doesn't support subparser groups: 84 # https://stackoverflow.com/questions/32017020/grouping-argparse-subparser-arguments 85 help_line_start = max((len(e[0]) for e in known_contexts + 
known_commands), 86 default=0) 87 help_line_start = max((2 + help_line_start) // 4 + 1, 6) * 4 # align to tabs 88 subcommands_desc = "" 89 if known_contexts: 90 subcommands_desc += "Contexts:\n" 91 subcommands_desc += _make_subcommands_help(known_contexts, 92 help_line_start) 93 if known_commands: 94 if subcommands_desc: 95 subcommands_desc += "\n" 96 subcommands_desc += "Commands:\n" 97 subcommands_desc += _make_subcommands_help(known_commands, 98 help_line_start) 99 if subcommands_desc: 100 subcommands_desc += \ 101 "\nRun '%s COMMAND --help' for more information on a command." % \ 102 parser.prog 103 104 subcommands = parser.add_subparsers(title=subcommands_desc, 105 description="", help=argparse.SUPPRESS) 106 for command_name, command, _ in known_contexts + known_commands: 107 add_subparser(subcommands, command_name, command.build_parser) 108 109 return parser 110 111 112 def main(args=None): 113 _LogManager.init_logger(args) 114 115 parser = make_parser() 116 args = parser.parse_args(args) 117 118 if 'command' not in args: 119 parser.print_help() 120 return 1 121 122 try: 123 return args.command(args) 124 except CliException as e: 125 log.error(e) 126 return 1 127 except Exception as e: 128 log.error(e) 129 raise 130 131 132 if __name__ == '__main__': 133 sys.exit(main()) [end of datumaro/cli/__main__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/datumaro/cli/__main__.py b/datumaro/cli/__main__.py --- a/datumaro/cli/__main__.py +++ b/datumaro/cli/__main__.py @@ -5,6 +5,7 @@ import argparse import logging as log +import os.path as osp import sys from . import contexts, commands @@ -50,9 +51,11 @@ return desc def make_parser(): - parser = argparse.ArgumentParser(prog="datumaro", + parser = argparse.ArgumentParser( description="Dataset Framework", formatter_class=argparse.RawDescriptionHelpFormatter) + if parser.prog == osp.basename(__file__): # python -m datumaro ... + parser.prog = 'datumaro' parser.add_argument('--version', action='version', version=VERSION) _LogManager._define_loglevel_option(parser)
{"golden_diff": "diff --git a/datumaro/cli/__main__.py b/datumaro/cli/__main__.py\n--- a/datumaro/cli/__main__.py\n+++ b/datumaro/cli/__main__.py\n@@ -5,6 +5,7 @@\n \n import argparse\n import logging as log\n+import os.path as osp\n import sys\n \n from . import contexts, commands\n@@ -50,9 +51,11 @@\n return desc\n \n def make_parser():\n- parser = argparse.ArgumentParser(prog=\"datumaro\",\n+ parser = argparse.ArgumentParser(\n description=\"Dataset Framework\",\n formatter_class=argparse.RawDescriptionHelpFormatter)\n+ if parser.prog == osp.basename(__file__): # python -m datumaro ...\n+ parser.prog = 'datumaro'\n \n parser.add_argument('--version', action='version', version=VERSION)\n _LogManager._define_loglevel_option(parser)\n", "issue": "CLI help usage is misleading\nRecommendation of usage in cli help message is misleading.\r\n\r\n**Steps:**\r\n```\r\ndatum -h\r\n```\r\n**Expected result:**\r\nCorrect form for usage recommendation\r\n\r\n**Current result:**\r\n```\r\nusage: datumaro [-h] [--version] [--loglevel LOGLEVEL]\r\n```\r\n``` Run 'datumaro COMMAND --help```\r\n\r\n**Notes:**\r\ndatumaro is not correct form, and results in \"command not found\". The same is command specific help messages. \r\n\n", "before_files": [{"content": "\n# Copyright (C) 2019-2021 Intel Corporation\n#\n# SPDX-License-Identifier: MIT\n\nimport argparse\nimport logging as log\nimport sys\n\nfrom . import contexts, commands\nfrom .util import CliException, add_subparser\nfrom ..version import VERSION\n\n\n_log_levels = {\n 'debug': log.DEBUG,\n 'info': log.INFO,\n 'warning': log.WARNING,\n 'error': log.ERROR,\n 'critical': log.CRITICAL\n}\n\ndef loglevel(name):\n return _log_levels[name]\n\nclass _LogManager:\n @classmethod\n def init_logger(cls, args=None):\n # Define minimalistic parser only to obtain loglevel\n parser = argparse.ArgumentParser(add_help=False)\n cls._define_loglevel_option(parser)\n args, _ = parser.parse_known_args(args)\n\n log.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',\n level=args.loglevel)\n\n @staticmethod\n def _define_loglevel_option(parser):\n parser.add_argument('--loglevel', type=loglevel, default='info',\n help=\"Logging level (options: %s; default: %s)\" % \\\n (', '.join(_log_levels.keys()), \"%(default)s\"))\n return parser\n\n\ndef _make_subcommands_help(commands, help_line_start=0):\n desc = \"\"\n for command_name, _, command_help in commands:\n desc += (\" %-\" + str(max(0, help_line_start - 2 - 1)) + \"s%s\\n\") % \\\n (command_name, command_help)\n return desc\n\ndef make_parser():\n parser = argparse.ArgumentParser(prog=\"datumaro\",\n description=\"Dataset Framework\",\n formatter_class=argparse.RawDescriptionHelpFormatter)\n\n parser.add_argument('--version', action='version', version=VERSION)\n _LogManager._define_loglevel_option(parser)\n\n known_contexts = [\n ('project', contexts.project, \"Actions with project (deprecated)\"),\n ('source', contexts.source, \"Actions with data sources\"),\n ('model', contexts.model, \"Actions with models\"),\n ]\n known_commands = [\n ('create', commands.create, \"Create project\"),\n ('import', commands.import_, \"Create project from existing dataset\"),\n ('add', commands.add, \"Add data source to project\"),\n ('remove', commands.remove, \"Remove data source from project\"),\n ('export', commands.export, \"Export project in some format\"),\n ('filter', commands.filter, \"Filter project\"),\n ('transform', commands.transform, \"Transform project\"),\n ('merge', commands.merge, \"Merge projects\"),\n 
('convert', commands.convert, \"Convert dataset into another format\"),\n ('diff', commands.diff, \"Compare projects with intersection\"),\n ('ediff', commands.ediff, \"Compare projects for equality\"),\n ('stats', commands.stats, \"Compute project statistics\"),\n ('info', commands.info, \"Print project info\"),\n ('explain', commands.explain, \"Run Explainable AI algorithm for model\"),\n ('validate', commands.validate, \"Validate project\")\n ]\n\n # Argparse doesn't support subparser groups:\n # https://stackoverflow.com/questions/32017020/grouping-argparse-subparser-arguments\n help_line_start = max((len(e[0]) for e in known_contexts + known_commands),\n default=0)\n help_line_start = max((2 + help_line_start) // 4 + 1, 6) * 4 # align to tabs\n subcommands_desc = \"\"\n if known_contexts:\n subcommands_desc += \"Contexts:\\n\"\n subcommands_desc += _make_subcommands_help(known_contexts,\n help_line_start)\n if known_commands:\n if subcommands_desc:\n subcommands_desc += \"\\n\"\n subcommands_desc += \"Commands:\\n\"\n subcommands_desc += _make_subcommands_help(known_commands,\n help_line_start)\n if subcommands_desc:\n subcommands_desc += \\\n \"\\nRun '%s COMMAND --help' for more information on a command.\" % \\\n parser.prog\n\n subcommands = parser.add_subparsers(title=subcommands_desc,\n description=\"\", help=argparse.SUPPRESS)\n for command_name, command, _ in known_contexts + known_commands:\n add_subparser(subcommands, command_name, command.build_parser)\n\n return parser\n\n\ndef main(args=None):\n _LogManager.init_logger(args)\n\n parser = make_parser()\n args = parser.parse_args(args)\n\n if 'command' not in args:\n parser.print_help()\n return 1\n\n try:\n return args.command(args)\n except CliException as e:\n log.error(e)\n return 1\n except Exception as e:\n log.error(e)\n raise\n\n\nif __name__ == '__main__':\n sys.exit(main())", "path": "datumaro/cli/__main__.py"}]}
1,978
194
gh_patches_debug_15173
rasdani/github-patches
git_diff
searxng__searxng-131
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> searx_extra/update/update_languages.py : crash on peertube engine The URL https://peer.tube/api/v1/videos/languages doesn't exist anymore: https://github.com/searxng/searxng/blob/ee83c99d2b40efaaf4f3e6635e7a43f7731cb128/searx/engines/peertube.py#L24-L25 See also https://framagit.org/framasoft/peertube/search-index/-/tree/master/server/controllers/api ---- See https://framagit.org/framasoft/peertube/search-index/-/blob/master/client/src/views/Search.vue#L644-667 </issue> <code> [start of searx/engines/peertube.py] 1 # SPDX-License-Identifier: AGPL-3.0-or-later 2 """ 3 peertube (Videos) 4 """ 5 6 from json import loads 7 from datetime import datetime 8 from urllib.parse import urlencode 9 from searx.utils import html_to_text 10 11 # about 12 about = { 13 "website": 'https://joinpeertube.org', 14 "wikidata_id": 'Q50938515', 15 "official_api_documentation": 'https://docs.joinpeertube.org/api-rest-reference.html', 16 "use_official_api": True, 17 "require_api_key": False, 18 "results": 'JSON', 19 } 20 21 # engine dependent config 22 categories = ["videos"] 23 paging = True 24 base_url = "https://peer.tube" 25 supported_languages_url = base_url + "/api/v1/videos/languages" 26 27 28 # do search-request 29 def request(query, params): 30 sanitized_url = base_url.rstrip("/") 31 pageno = (params["pageno"] - 1) * 15 32 search_url = sanitized_url + "/api/v1/search/videos/?pageno={pageno}&{query}" 33 query_dict = {"search": query} 34 language = params["language"].split("-")[0] 35 # pylint: disable=undefined-variable 36 if "all" != language and language in supported_languages: 37 query_dict["languageOneOf"] = language 38 params["url"] = search_url.format( 39 query=urlencode(query_dict), pageno=pageno 40 ) 41 return params 42 43 44 def _get_offset_from_pageno(pageno): 45 return (pageno - 1) * 15 + 1 46 47 48 # get response from search-request 49 def response(resp): 50 sanitized_url = base_url.rstrip("/") 51 results = [] 52 53 search_res = loads(resp.text) 54 55 embedded_url = ( 56 '<iframe width="560" height="315" sandbox="allow-same-origin allow-scripts allow-popups" ' 57 + 'src="' 58 + sanitized_url 59 + '{embed_path}" frameborder="0" allowfullscreen></iframe>' 60 ) 61 # return empty array if there are no results 62 if "data" not in search_res: 63 return [] 64 65 # parse results 66 for res in search_res["data"]: 67 title = res["name"] 68 url = sanitized_url + "/videos/watch/" + res["uuid"] 69 description = res["description"] 70 if description: 71 content = html_to_text(res["description"]) 72 else: 73 content = "" 74 thumbnail = sanitized_url + res["thumbnailPath"] 75 publishedDate = datetime.strptime(res["publishedAt"], "%Y-%m-%dT%H:%M:%S.%fZ") 76 embedded = embedded_url.format(embed_path=res["embedPath"]) 77 78 results.append( 79 { 80 "template": "videos.html", 81 "url": url, 82 "title": title, 83 "content": content, 84 "publishedDate": publishedDate, 85 "embedded": embedded, 86 "thumbnail": thumbnail, 87 } 88 ) 89 90 # return results 91 return results 92 93 94 def _fetch_supported_languages(resp): 95 peertube_languages = list(loads(resp.text).keys()) 96 return peertube_languages 97 [end of searx/engines/peertube.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/searx/engines/peertube.py b/searx/engines/peertube.py --- a/searx/engines/peertube.py +++ b/searx/engines/peertube.py @@ -22,7 +22,9 @@ categories = ["videos"] paging = True base_url = "https://peer.tube" -supported_languages_url = base_url + "/api/v1/videos/languages" +supported_languages_url = ( + 'https://framagit.org/framasoft/peertube/search-index/-/raw/master/client/src/views/Search.vue' +) # do search-request @@ -92,5 +94,9 @@ def _fetch_supported_languages(resp): - peertube_languages = list(loads(resp.text).keys()) + import re + + # https://docs.python.org/3/howto/regex.html#greedy-versus-non-greedy + videolanguages = re.search(r"videoLanguages \(\) \{(.*?)\]", resp.text, re.DOTALL) + peertube_languages = [m.group(1) for m in re.finditer(r"\{ id: '([a-z]+)', label:", videolanguages.group(1))] return peertube_languages
{"golden_diff": "diff --git a/searx/engines/peertube.py b/searx/engines/peertube.py\n--- a/searx/engines/peertube.py\n+++ b/searx/engines/peertube.py\n@@ -22,7 +22,9 @@\n categories = [\"videos\"]\n paging = True\n base_url = \"https://peer.tube\"\n-supported_languages_url = base_url + \"/api/v1/videos/languages\"\n+supported_languages_url = (\n+ 'https://framagit.org/framasoft/peertube/search-index/-/raw/master/client/src/views/Search.vue'\n+)\n \n \n # do search-request\n@@ -92,5 +94,9 @@\n \n \n def _fetch_supported_languages(resp):\n- peertube_languages = list(loads(resp.text).keys())\n+ import re\n+\n+ # https://docs.python.org/3/howto/regex.html#greedy-versus-non-greedy\n+ videolanguages = re.search(r\"videoLanguages \\(\\) \\{(.*?)\\]\", resp.text, re.DOTALL)\n+ peertube_languages = [m.group(1) for m in re.finditer(r\"\\{ id: '([a-z]+)', label:\", videolanguages.group(1))]\n return peertube_languages\n", "issue": "searx_extra/update/update_languages.py : crash on peertube engine\nThe URL https://peer.tube/api/v1/videos/languages doesn't exist anymore:\r\nhttps://github.com/searxng/searxng/blob/ee83c99d2b40efaaf4f3e6635e7a43f7731cb128/searx/engines/peertube.py#L24-L25\r\n\r\nSee also https://framagit.org/framasoft/peertube/search-index/-/tree/master/server/controllers/api\r\n\r\n----\r\n\r\nSee https://framagit.org/framasoft/peertube/search-index/-/blob/master/client/src/views/Search.vue#L644-667\n", "before_files": [{"content": "# SPDX-License-Identifier: AGPL-3.0-or-later\n\"\"\"\n peertube (Videos)\n\"\"\"\n\nfrom json import loads\nfrom datetime import datetime\nfrom urllib.parse import urlencode\nfrom searx.utils import html_to_text\n\n# about\nabout = {\n \"website\": 'https://joinpeertube.org',\n \"wikidata_id\": 'Q50938515',\n \"official_api_documentation\": 'https://docs.joinpeertube.org/api-rest-reference.html',\n \"use_official_api\": True,\n \"require_api_key\": False,\n \"results\": 'JSON',\n}\n\n# engine dependent config\ncategories = [\"videos\"]\npaging = True\nbase_url = \"https://peer.tube\"\nsupported_languages_url = base_url + \"/api/v1/videos/languages\"\n\n\n# do search-request\ndef request(query, params):\n sanitized_url = base_url.rstrip(\"/\")\n pageno = (params[\"pageno\"] - 1) * 15\n search_url = sanitized_url + \"/api/v1/search/videos/?pageno={pageno}&{query}\"\n query_dict = {\"search\": query}\n language = params[\"language\"].split(\"-\")[0]\n # pylint: disable=undefined-variable\n if \"all\" != language and language in supported_languages:\n query_dict[\"languageOneOf\"] = language\n params[\"url\"] = search_url.format(\n query=urlencode(query_dict), pageno=pageno\n )\n return params\n\n\ndef _get_offset_from_pageno(pageno):\n return (pageno - 1) * 15 + 1\n\n\n# get response from search-request\ndef response(resp):\n sanitized_url = base_url.rstrip(\"/\")\n results = []\n\n search_res = loads(resp.text)\n\n embedded_url = (\n '<iframe width=\"560\" height=\"315\" sandbox=\"allow-same-origin allow-scripts allow-popups\" '\n + 'src=\"'\n + sanitized_url\n + '{embed_path}\" frameborder=\"0\" allowfullscreen></iframe>'\n )\n # return empty array if there are no results\n if \"data\" not in search_res:\n return []\n\n # parse results\n for res in search_res[\"data\"]:\n title = res[\"name\"]\n url = sanitized_url + \"/videos/watch/\" + res[\"uuid\"]\n description = res[\"description\"]\n if description:\n content = html_to_text(res[\"description\"])\n else:\n content = \"\"\n thumbnail = sanitized_url + res[\"thumbnailPath\"]\n publishedDate = 
datetime.strptime(res[\"publishedAt\"], \"%Y-%m-%dT%H:%M:%S.%fZ\")\n embedded = embedded_url.format(embed_path=res[\"embedPath\"])\n\n results.append(\n {\n \"template\": \"videos.html\",\n \"url\": url,\n \"title\": title,\n \"content\": content,\n \"publishedDate\": publishedDate,\n \"embedded\": embedded,\n \"thumbnail\": thumbnail,\n }\n )\n\n # return results\n return results\n\n\ndef _fetch_supported_languages(resp):\n peertube_languages = list(loads(resp.text).keys())\n return peertube_languages\n", "path": "searx/engines/peertube.py"}]}
1,580
284
gh_patches_debug_12234
rasdani/github-patches
git_diff
ivy-llc__ivy-15998
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> multiply </issue> <code> [start of ivy/functional/frontends/paddle/tensor/math.py] 1 # global 2 import ivy 3 from ivy.func_wrapper import with_unsupported_dtypes, with_supported_dtypes 4 from ivy.functional.frontends.paddle.func_wrapper import ( 5 to_ivy_arrays_and_back, 6 ) 7 8 9 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 10 @to_ivy_arrays_and_back 11 def sin(x, name=None): 12 return ivy.sin(x) 13 14 15 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 16 @to_ivy_arrays_and_back 17 def cos(x, name=None): 18 return ivy.cos(x) 19 20 21 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 22 @to_ivy_arrays_and_back 23 def acos(x, name=None): 24 return ivy.acos(x) 25 26 27 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 28 @to_ivy_arrays_and_back 29 def cosh(x, name=None): 30 return ivy.cosh(x) 31 32 33 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 34 @to_ivy_arrays_and_back 35 def tanh(x, name=None): 36 return ivy.tanh(x) 37 38 39 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 40 @to_ivy_arrays_and_back 41 def acosh(x, name=None): 42 return ivy.acosh(x) 43 44 45 @with_supported_dtypes({"2.4.2 and below": ("float32", "float64")}, "paddle") 46 @to_ivy_arrays_and_back 47 def asin(x, name=None): 48 return ivy.asin(x) 49 50 51 @with_supported_dtypes({"2.4.2 and below": ("float32", "float64")}, "paddle") 52 @to_ivy_arrays_and_back 53 def log_softmax(x, name=None): 54 return ivy.log_softmax(x) 55 56 57 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 58 @to_ivy_arrays_and_back 59 def log(x, name=None): 60 return ivy.log(x) 61 62 63 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 64 @to_ivy_arrays_and_back 65 def divide(x, y, name=None): 66 return ivy.divide(x, y) 67 68 69 @with_supported_dtypes({"2.4.2 and below": ("float32", "float64")}, "paddle") 70 @to_ivy_arrays_and_back 71 def sqrt(x, name=None): 72 return ivy.sqrt(x) 73 74 75 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 76 @to_ivy_arrays_and_back 77 def atanh(x, name=None): 78 return ivy.atanh(x) 79 80 81 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 82 @to_ivy_arrays_and_back 83 def atan(x, name=None): 84 return ivy.atan(x) 85 86 87 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 88 @to_ivy_arrays_and_back 89 def round(x, name=None): 90 return ivy.round(x) 91 92 93 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 94 @to_ivy_arrays_and_back 95 def ceil(x, name=None): 96 return ivy.ceil(x) 97 [end of ivy/functional/frontends/paddle/tensor/math.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ivy/functional/frontends/paddle/tensor/math.py b/ivy/functional/frontends/paddle/tensor/math.py --- a/ivy/functional/frontends/paddle/tensor/math.py +++ b/ivy/functional/frontends/paddle/tensor/math.py @@ -66,6 +66,24 @@ return ivy.divide(x, y) +@with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") +@to_ivy_arrays_and_back +def multiply(x, y, name=None): + return ivy.multiply(x, y) + + +@with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") +@to_ivy_arrays_and_back +def add(x, y, name=None): + return ivy.add(x, y) + + +@with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") +@to_ivy_arrays_and_back +def subtract(x, y, name=None): + return ivy.subtract(x, y) + + @with_supported_dtypes({"2.4.2 and below": ("float32", "float64")}, "paddle") @to_ivy_arrays_and_back def sqrt(x, name=None):
{"golden_diff": "diff --git a/ivy/functional/frontends/paddle/tensor/math.py b/ivy/functional/frontends/paddle/tensor/math.py\n--- a/ivy/functional/frontends/paddle/tensor/math.py\n+++ b/ivy/functional/frontends/paddle/tensor/math.py\n@@ -66,6 +66,24 @@\n return ivy.divide(x, y)\n \n \n+@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n+@to_ivy_arrays_and_back\n+def multiply(x, y, name=None):\n+ return ivy.multiply(x, y)\n+\n+\n+@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n+@to_ivy_arrays_and_back\n+def add(x, y, name=None):\n+ return ivy.add(x, y)\n+\n+\n+@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n+@to_ivy_arrays_and_back\n+def subtract(x, y, name=None):\n+ return ivy.subtract(x, y)\n+\n+\n @with_supported_dtypes({\"2.4.2 and below\": (\"float32\", \"float64\")}, \"paddle\")\n @to_ivy_arrays_and_back\n def sqrt(x, name=None):\n", "issue": "multiply\n\n", "before_files": [{"content": "# global\nimport ivy\nfrom ivy.func_wrapper import with_unsupported_dtypes, with_supported_dtypes\nfrom ivy.functional.frontends.paddle.func_wrapper import (\n to_ivy_arrays_and_back,\n)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef sin(x, name=None):\n return ivy.sin(x)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef cos(x, name=None):\n return ivy.cos(x)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef acos(x, name=None):\n return ivy.acos(x)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef cosh(x, name=None):\n return ivy.cosh(x)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef tanh(x, name=None):\n return ivy.tanh(x)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef acosh(x, name=None):\n return ivy.acosh(x)\n\n\n@with_supported_dtypes({\"2.4.2 and below\": (\"float32\", \"float64\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef asin(x, name=None):\n return ivy.asin(x)\n\n\n@with_supported_dtypes({\"2.4.2 and below\": (\"float32\", \"float64\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef log_softmax(x, name=None):\n return ivy.log_softmax(x)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef log(x, name=None):\n return ivy.log(x)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef divide(x, y, name=None):\n return ivy.divide(x, y)\n\n\n@with_supported_dtypes({\"2.4.2 and below\": (\"float32\", \"float64\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef sqrt(x, name=None):\n return ivy.sqrt(x)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef atanh(x, name=None):\n return ivy.atanh(x)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef atan(x, name=None):\n return ivy.atan(x)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef round(x, name=None):\n return 
ivy.round(x)\n\n\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n@to_ivy_arrays_and_back\ndef ceil(x, name=None):\n return ivy.ceil(x)\n", "path": "ivy/functional/frontends/paddle/tensor/math.py"}]}
1,618
317
gh_patches_debug_59756
rasdani/github-patches
git_diff
pyca__cryptography-3819
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Signer/Verifier deprecation warning has wrong stacklevel Seeing this with Cryptography 2.0: ``` .../python3.5/site-packages/cryptography/hazmat/backends/openssl/rsa.py:477: DeprecationWarning: signer and verifier have been deprecated. Please use sign and verify instead. _warn_sign_verify_deprecated() .../python3.5/site-packages/cryptography/hazmat/backends/openssl/rsa.py:382: DeprecationWarning: signer and verifier have been deprecated. Please use sign and verify instead. _warn_sign_verify_deprecated() ``` I see a few open issues related to deprecations (e.g. #3794), but I'm not sure if any of them cover this particular message. </issue> <code> [start of src/cryptography/hazmat/backends/openssl/utils.py] 1 # This file is dual licensed under the terms of the Apache License, Version 2 # 2.0, and the BSD License. See the LICENSE file in the root of this repository 3 # for complete details. 4 5 from __future__ import absolute_import, division, print_function 6 7 import warnings 8 9 from cryptography import utils 10 from cryptography.hazmat.primitives import hashes 11 from cryptography.hazmat.primitives.asymmetric.utils import Prehashed 12 13 14 def _calculate_digest_and_algorithm(backend, data, algorithm): 15 if not isinstance(algorithm, Prehashed): 16 hash_ctx = hashes.Hash(algorithm, backend) 17 hash_ctx.update(data) 18 data = hash_ctx.finalize() 19 else: 20 algorithm = algorithm._algorithm 21 22 if len(data) != algorithm.digest_size: 23 raise ValueError( 24 "The provided data must be the same length as the hash " 25 "algorithm's digest size." 26 ) 27 28 return (data, algorithm) 29 30 31 def _check_not_prehashed(signature_algorithm): 32 if isinstance(signature_algorithm, Prehashed): 33 raise TypeError( 34 "Prehashed is only supported in the sign and verify methods. " 35 "It cannot be used with signer or verifier." 36 ) 37 38 39 def _warn_sign_verify_deprecated(): 40 warnings.warn( 41 "signer and verifier have been deprecated. Please use sign " 42 "and verify instead.", 43 utils.PersistentlyDeprecated, 44 stacklevel=2 45 ) 46 [end of src/cryptography/hazmat/backends/openssl/utils.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/cryptography/hazmat/backends/openssl/utils.py b/src/cryptography/hazmat/backends/openssl/utils.py --- a/src/cryptography/hazmat/backends/openssl/utils.py +++ b/src/cryptography/hazmat/backends/openssl/utils.py @@ -41,5 +41,5 @@ "signer and verifier have been deprecated. Please use sign " "and verify instead.", utils.PersistentlyDeprecated, - stacklevel=2 + stacklevel=3 )
{"golden_diff": "diff --git a/src/cryptography/hazmat/backends/openssl/utils.py b/src/cryptography/hazmat/backends/openssl/utils.py\n--- a/src/cryptography/hazmat/backends/openssl/utils.py\n+++ b/src/cryptography/hazmat/backends/openssl/utils.py\n@@ -41,5 +41,5 @@\n \"signer and verifier have been deprecated. Please use sign \"\n \"and verify instead.\",\n utils.PersistentlyDeprecated,\n- stacklevel=2\n+ stacklevel=3\n )\n", "issue": "Signer/Verifier deprecation warning has wrong stacklevel\nSeeing this with Cryptography 2.0:\r\n\r\n```\r\n.../python3.5/site-packages/cryptography/hazmat/backends/openssl/rsa.py:477: DeprecationWarning: signer and verifier have been deprecated. Please use sign and verify instead.\r\n _warn_sign_verify_deprecated()\r\n.../python3.5/site-packages/cryptography/hazmat/backends/openssl/rsa.py:382: DeprecationWarning: signer and verifier have been deprecated. Please use sign and verify instead.\r\n _warn_sign_verify_deprecated()\r\n```\r\n\r\nI see a few open issues related to deprecations (e.g. #3794), but I'm not sure if any of them cover this particular message.\n", "before_files": [{"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport warnings\n\nfrom cryptography import utils\nfrom cryptography.hazmat.primitives import hashes\nfrom cryptography.hazmat.primitives.asymmetric.utils import Prehashed\n\n\ndef _calculate_digest_and_algorithm(backend, data, algorithm):\n if not isinstance(algorithm, Prehashed):\n hash_ctx = hashes.Hash(algorithm, backend)\n hash_ctx.update(data)\n data = hash_ctx.finalize()\n else:\n algorithm = algorithm._algorithm\n\n if len(data) != algorithm.digest_size:\n raise ValueError(\n \"The provided data must be the same length as the hash \"\n \"algorithm's digest size.\"\n )\n\n return (data, algorithm)\n\n\ndef _check_not_prehashed(signature_algorithm):\n if isinstance(signature_algorithm, Prehashed):\n raise TypeError(\n \"Prehashed is only supported in the sign and verify methods. \"\n \"It cannot be used with signer or verifier.\"\n )\n\n\ndef _warn_sign_verify_deprecated():\n warnings.warn(\n \"signer and verifier have been deprecated. Please use sign \"\n \"and verify instead.\",\n utils.PersistentlyDeprecated,\n stacklevel=2\n )\n", "path": "src/cryptography/hazmat/backends/openssl/utils.py"}]}
1,096
116
gh_patches_debug_23200
rasdani/github-patches
git_diff
RedHatInsights__insights-core-3248
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Update pydoc for Messages (/var/log/messages) Correct the examples: https://github.com/RedHatInsights/insights-core/blob/master/insights/parsers/messages.py#L32 </issue> <code> [start of insights/parsers/messages.py] 1 """ 2 Messages file ``/var/log/messages`` 3 =================================== 4 """ 5 6 from .. import Syslog, parser 7 from insights.specs import Specs 8 9 10 @parser(Specs.messages) 11 class Messages(Syslog): 12 """ 13 Read the ``/var/log/messages`` file. 14 15 .. note:: 16 Please refer to its super-class :class:`insights.core.Syslog` for more 17 details. 18 19 Sample log lines:: 20 21 May 18 15:13:34 lxc-rhel68-sat56 jabberd/sm[11057]: session started: [email protected]/superclient 22 May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: --> Wrapper Started as Daemon 23 May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: Launching a JVM... 24 May 18 15:24:28 lxc-rhel68-sat56 yum[11597]: Installed: lynx-2.8.6-27.el6.x86_64 25 May 18 15:36:19 lxc-rhel68-sat56 yum[11954]: Updated: sos-3.2-40.el6.noarch 26 27 .. note:: 28 Because /var/log/messages timestamps by default have no year, 29 the year of the logs will be inferred from the year in your timestamp. 30 This will also work around December/January crossovers. 31 32 Examples: 33 >>> Messages.filters.append('wrapper') 34 >>> Messages.token_scan('daemon_start', 'Wrapper Started as Daemon') 35 >>> msgs = shared[Messages] 36 >>> len(msgs.lines) 37 >>> wrapper_msgs = msgs.get('wrapper') # Can only rely on lines filtered being present 38 >>> wrapper_msgs[0] 39 {'timestamp': 'May 18 15:13:36', 'hostname': 'lxc-rhel68-sat56', 40 'procname': wrapper[11375]', 'message': '--> Wrapper Started as Daemon', 41 'raw_message': 'May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: --> Wrapper Started as Daemon' 42 } 43 >>> msgs.daemon_start # Token set if matching lines present in logs 44 True 45 """ 46 pass 47 [end of insights/parsers/messages.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/insights/parsers/messages.py b/insights/parsers/messages.py --- a/insights/parsers/messages.py +++ b/insights/parsers/messages.py @@ -30,17 +30,14 @@ This will also work around December/January crossovers. Examples: - >>> Messages.filters.append('wrapper') >>> Messages.token_scan('daemon_start', 'Wrapper Started as Daemon') - >>> msgs = shared[Messages] + >>> type(msgs) + <class 'insights.parsers.messages.Messages'> >>> len(msgs.lines) + 9 >>> wrapper_msgs = msgs.get('wrapper') # Can only rely on lines filtered being present - >>> wrapper_msgs[0] - {'timestamp': 'May 18 15:13:36', 'hostname': 'lxc-rhel68-sat56', - 'procname': wrapper[11375]', 'message': '--> Wrapper Started as Daemon', - 'raw_message': 'May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: --> Wrapper Started as Daemon' - } - >>> msgs.daemon_start # Token set if matching lines present in logs + >>> result = {'raw_message': 'May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: --> Wrapper Started as Daemon', 'message': '--> Wrapper Started as Daemon', 'timestamp': 'May 18 15:13:36', 'hostname': 'lxc-rhel68-sat56', 'procname': 'wrapper[11375]'} + >>> wrapper_msgs[0] == result True """ pass
{"golden_diff": "diff --git a/insights/parsers/messages.py b/insights/parsers/messages.py\n--- a/insights/parsers/messages.py\n+++ b/insights/parsers/messages.py\n@@ -30,17 +30,14 @@\n This will also work around December/January crossovers.\n \n Examples:\n- >>> Messages.filters.append('wrapper')\n >>> Messages.token_scan('daemon_start', 'Wrapper Started as Daemon')\n- >>> msgs = shared[Messages]\n+ >>> type(msgs)\n+ <class 'insights.parsers.messages.Messages'>\n >>> len(msgs.lines)\n+ 9\n >>> wrapper_msgs = msgs.get('wrapper') # Can only rely on lines filtered being present\n- >>> wrapper_msgs[0]\n- {'timestamp': 'May 18 15:13:36', 'hostname': 'lxc-rhel68-sat56',\n- 'procname': wrapper[11375]', 'message': '--> Wrapper Started as Daemon',\n- 'raw_message': 'May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: --> Wrapper Started as Daemon'\n- }\n- >>> msgs.daemon_start # Token set if matching lines present in logs\n+ >>> result = {'raw_message': 'May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: --> Wrapper Started as Daemon', 'message': '--> Wrapper Started as Daemon', 'timestamp': 'May 18 15:13:36', 'hostname': 'lxc-rhel68-sat56', 'procname': 'wrapper[11375]'}\n+ >>> wrapper_msgs[0] == result\n True\n \"\"\"\n pass\n", "issue": "Update pydoc for Messages (/var/log/messages)\nCorrect the examples: https://github.com/RedHatInsights/insights-core/blob/master/insights/parsers/messages.py#L32\n", "before_files": [{"content": "\"\"\"\nMessages file ``/var/log/messages``\n===================================\n\"\"\"\n\nfrom .. import Syslog, parser\nfrom insights.specs import Specs\n\n\n@parser(Specs.messages)\nclass Messages(Syslog):\n \"\"\"\n Read the ``/var/log/messages`` file.\n\n .. note::\n Please refer to its super-class :class:`insights.core.Syslog` for more\n details.\n\n Sample log lines::\n\n May 18 15:13:34 lxc-rhel68-sat56 jabberd/sm[11057]: session started: [email protected]/superclient\n May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: --> Wrapper Started as Daemon\n May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: Launching a JVM...\n May 18 15:24:28 lxc-rhel68-sat56 yum[11597]: Installed: lynx-2.8.6-27.el6.x86_64\n May 18 15:36:19 lxc-rhel68-sat56 yum[11954]: Updated: sos-3.2-40.el6.noarch\n\n .. note::\n Because /var/log/messages timestamps by default have no year,\n the year of the logs will be inferred from the year in your timestamp.\n This will also work around December/January crossovers.\n\n Examples:\n >>> Messages.filters.append('wrapper')\n >>> Messages.token_scan('daemon_start', 'Wrapper Started as Daemon')\n >>> msgs = shared[Messages]\n >>> len(msgs.lines)\n >>> wrapper_msgs = msgs.get('wrapper') # Can only rely on lines filtered being present\n >>> wrapper_msgs[0]\n {'timestamp': 'May 18 15:13:36', 'hostname': 'lxc-rhel68-sat56',\n 'procname': wrapper[11375]', 'message': '--> Wrapper Started as Daemon',\n 'raw_message': 'May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: --> Wrapper Started as Daemon'\n }\n >>> msgs.daemon_start # Token set if matching lines present in logs\n True\n \"\"\"\n pass\n", "path": "insights/parsers/messages.py"}]}
1,234
409
gh_patches_debug_11114
rasdani/github-patches
git_diff
freedomofpress__securedrop-1890
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Tails 3.0 does not restore the original filename when uncompressing a file # Bug Tails 3.0 does not preserve the filename when extracting an archive. ## Description When extracting an archive, Tails 3.0 will rename the file inside the archive to _data_. ## Steps to Reproduce Download a SecureDrop submission, decrypt the submission and extract the archive. ## Expected Behavior Tails should preserve the filename and leave you with, say, _code_name.pdf_. ## Actual Behavior Tails 3.0 leaves you with a file called _data_ instead. ## Comments I suspect this is an issue with Tails 3.0 and not the way SecureDrop creates the archive in the first place. Filing here just in case. </issue> <code> [start of install_files/ansible-base/roles/tails-config/files/securedrop_init.py] 1 #!/usr/bin/python 2 3 import os 4 import sys 5 import subprocess 6 7 8 # check for root 9 if os.geteuid() != 0: 10 sys.exit('You need to run this as root') 11 12 # paths 13 path_torrc_additions = '/home/amnesia/Persistent/.securedrop/torrc_additions' 14 path_torrc_backup = '/etc/tor/torrc.bak' 15 path_torrc = '/etc/tor/torrc' 16 17 # load torrc_additions 18 if os.path.isfile(path_torrc_additions): 19 torrc_additions = open(path_torrc_additions).read() 20 else: 21 sys.exit('Error opening {0} for reading'.format(path_torrc_additions)) 22 23 # load torrc 24 if os.path.isfile(path_torrc_backup): 25 torrc = open(path_torrc_backup).read() 26 else: 27 if os.path.isfile(path_torrc): 28 torrc = open(path_torrc).read() 29 else: 30 sys.exit('Error opening {0} for reading'.format(path_torrc)) 31 32 # save a backup 33 open(path_torrc_backup, 'w').write(torrc) 34 35 # append the additions 36 open(path_torrc, 'w').write(torrc + torrc_additions) 37 38 # reload tor 39 try: 40 subprocess.check_call(['systemctl', 'reload', '[email protected]']) 41 except subprocess.CalledProcessError: 42 sys.exit('Error reloading Tor') 43 44 # notify the user 45 subprocess.call(['tails-notify-user', 46 'SecureDrop successfully auto-configured!', 47 'You can now access the Journalist Interface.\nIf you are an admin, you can now SSH to the servers.']) 48 [end of install_files/ansible-base/roles/tails-config/files/securedrop_init.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py --- a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py +++ b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py @@ -41,6 +41,13 @@ except subprocess.CalledProcessError: sys.exit('Error reloading Tor') +# Turn off "automatic-decompression" in Nautilus to ensure the original +# submission filename is restored (see +# https://github.com/freedomofpress/securedrop/issues/1862#issuecomment-311519750). +subprocess.call(['/usr/bin/dconf', 'write', + '/org/gnome/nautilus/preferences/automatic-decompression', + 'false']) + # notify the user subprocess.call(['tails-notify-user', 'SecureDrop successfully auto-configured!',
{"golden_diff": "diff --git a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py\n--- a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py\n+++ b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py\n@@ -41,6 +41,13 @@\n except subprocess.CalledProcessError:\n sys.exit('Error reloading Tor')\n \n+# Turn off \"automatic-decompression\" in Nautilus to ensure the original\n+# submission filename is restored (see\n+# https://github.com/freedomofpress/securedrop/issues/1862#issuecomment-311519750).\n+subprocess.call(['/usr/bin/dconf', 'write',\n+ '/org/gnome/nautilus/preferences/automatic-decompression',\n+ 'false'])\n+\n # notify the user\n subprocess.call(['tails-notify-user',\n 'SecureDrop successfully auto-configured!',\n", "issue": "Tails 3.0 does not restore the original filename when uncompressing a file\n# Bug\r\n\r\nTails 3.0 does not preserve the filename when extracting an archive.\r\n\r\n## Description\r\n\r\nWhen extracting an archive, Tails 3.0 will rename the file inside the archive to _data_.\r\n\r\n## Steps to Reproduce\r\n\r\nDownload a SecureDrop submission, decrypt the submission and extract the archive.\r\n\r\n## Expected Behavior\r\n\r\nTails should preserve the filename and leave you with, say, _code_name.pdf_.\r\n\r\n## Actual Behavior\r\n\r\nTails 3.0 leaves you with a file called _data_ instead.\r\n\r\n## Comments\r\n\r\nI suspect this is an issue with Tails 3.0 and not the way SecureDrop creates the archive in the first place. Filing here just in case.\n", "before_files": [{"content": "#!/usr/bin/python\n\nimport os\nimport sys\nimport subprocess\n\n\n# check for root\nif os.geteuid() != 0:\n sys.exit('You need to run this as root')\n\n# paths\npath_torrc_additions = '/home/amnesia/Persistent/.securedrop/torrc_additions'\npath_torrc_backup = '/etc/tor/torrc.bak'\npath_torrc = '/etc/tor/torrc'\n\n# load torrc_additions\nif os.path.isfile(path_torrc_additions):\n torrc_additions = open(path_torrc_additions).read()\nelse:\n sys.exit('Error opening {0} for reading'.format(path_torrc_additions))\n\n# load torrc\nif os.path.isfile(path_torrc_backup):\n torrc = open(path_torrc_backup).read()\nelse:\n if os.path.isfile(path_torrc):\n torrc = open(path_torrc).read()\n else:\n sys.exit('Error opening {0} for reading'.format(path_torrc))\n\n # save a backup\n open(path_torrc_backup, 'w').write(torrc)\n\n# append the additions\nopen(path_torrc, 'w').write(torrc + torrc_additions)\n\n# reload tor\ntry:\n subprocess.check_call(['systemctl', 'reload', '[email protected]'])\nexcept subprocess.CalledProcessError:\n sys.exit('Error reloading Tor')\n\n# notify the user\nsubprocess.call(['tails-notify-user',\n 'SecureDrop successfully auto-configured!',\n 'You can now access the Journalist Interface.\\nIf you are an admin, you can now SSH to the servers.'])\n", "path": "install_files/ansible-base/roles/tails-config/files/securedrop_init.py"}]}
1,178
219
gh_patches_debug_13980
rasdani/github-patches
git_diff
ocf__ocfweb-185
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> libsass>0.10.0 breaks dh-virtuaenv https://jenkins.ocf.berkeley.edu/view/ocfweb-deploy/job/ocfweb-build-deb/232/console ``` # build sass, output static files debian/ocfweb/usr/share/python/ocfweb/bin/python setup.py build_sass running build_sass building 'ocfweb' sass /opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/setuptools/dist.py:294: UserWarning: The version specified ('2016.01.30.23.27-git7cd5ebbd') is an invalid version, this may not work as expected with newer versions of setuptools, pip, and PyPI. Please see PEP 440 for more details. "details." % self.metadata.version Traceback (most recent call last): File "setup.py", line 44, in <module> 'ocfweb-run-periodic-functions = ocfweb.bin.run_periodic_functions:main', File "/usr/lib/python3.4/distutils/core.py", line 148, in setup dist.run_commands() File "/usr/lib/python3.4/distutils/dist.py", line 955, in run_commands self.run_command(cmd) File "/usr/lib/python3.4/distutils/dist.py", line 974, in run_command cmd_obj.run() File "/opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/sassutils/distutils.py", line 123, in run output_style=self.output_style File "/opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/sassutils/builder.py", line 177, in build ).values() File "/opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/sassutils/builder.py", line 73, in build_directory _root_css=_root_css) File "/opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/sassutils/builder.py", line 63, in build_directory include_paths=[_root_sass]) File "/opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/sass.py", line 644, in compile raise CompileError(v) sass.CompileError: b'Error: ".page-staff-hours .hour .ocf-staffhours-faces" failed to @extend ".list-inline".\n The selector ".list-inline" was not found.\n Use "@extend .list-inline !optional" if the extend should be able to fail.\n on line 37 of ocfweb/static/scss/pages/staff-hours.scss\n>> @extend .list-inline;\n ------------^\n' debian/rules:17: recipe for target 'override_dh_virtualenv' failed ``` reproduces locally, but only with `make builddeb` and not the regular `make scss` </issue> <code> [start of setup.py] 1 from setuptools import find_packages 2 from setuptools import setup 3 4 try: 5 with open('.version') as f: 6 VERSION = f.readline().strip() 7 except IOError: 8 VERSION = 'unknown' 9 10 setup( 11 name='ocfweb', 12 version=VERSION, 13 packages=find_packages(exclude=['debian', 'venv']), 14 include_package_data=True, 15 url='https://www.ocf.berkeley.edu/', 16 author='Open Computing Facility', 17 author_email='[email protected]', 18 install_requires=[ 19 'cached-property', 20 'celery[redis]', 21 'django-bootstrap-form', 22 'django-ipware', 23 'django-mathfilters', 24 'django-redis', 25 'django>=1.10,<1.10.999', 26 'gunicorn', 27 'libsass<=0.10.0', # https://github.com/ocf/ocfweb/issues/128 28 'matplotlib', 29 'mistune', 30 'numpy', 31 'ocflib', 32 'pycrypto', 33 'pygments', 34 'pymysql', 35 'python-dateutil', 36 ], 37 sass_manifests={ 38 'ocfweb': ('static/scss',), # XXX: must be tuple 39 }, 40 entry_points={ 
41 'console_scripts': [ 42 'ocfweb-run-periodic-functions = ocfweb.bin.run_periodic_functions:main', 43 ], 44 }, 45 classifiers=[ 46 'Programming Language :: Python :: 3', 47 ], 48 ) 49 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ 'django-redis', 'django>=1.10,<1.10.999', 'gunicorn', - 'libsass<=0.10.0', # https://github.com/ocf/ocfweb/issues/128 + 'libsass', 'matplotlib', 'mistune', 'numpy', @@ -34,9 +34,6 @@ 'pymysql', 'python-dateutil', ], - sass_manifests={ - 'ocfweb': ('static/scss',), # XXX: must be tuple - }, entry_points={ 'console_scripts': [ 'ocfweb-run-periodic-functions = ocfweb.bin.run_periodic_functions:main',
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -24,7 +24,7 @@\n 'django-redis',\n 'django>=1.10,<1.10.999',\n 'gunicorn',\n- 'libsass<=0.10.0', # https://github.com/ocf/ocfweb/issues/128\n+ 'libsass',\n 'matplotlib',\n 'mistune',\n 'numpy',\n@@ -34,9 +34,6 @@\n 'pymysql',\n 'python-dateutil',\n ],\n- sass_manifests={\n- 'ocfweb': ('static/scss',), # XXX: must be tuple\n- },\n entry_points={\n 'console_scripts': [\n 'ocfweb-run-periodic-functions = ocfweb.bin.run_periodic_functions:main',\n", "issue": "libsass>0.10.0 breaks dh-virtuaenv\nhttps://jenkins.ocf.berkeley.edu/view/ocfweb-deploy/job/ocfweb-build-deb/232/console\n\n```\n# build sass, output static files\ndebian/ocfweb/usr/share/python/ocfweb/bin/python setup.py build_sass\nrunning build_sass\nbuilding 'ocfweb' sass\n/opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/setuptools/dist.py:294: UserWarning: The version specified ('2016.01.30.23.27-git7cd5ebbd') is an invalid version, this may not work as expected with newer versions of setuptools, pip, and PyPI. Please see PEP 440 for more details.\n \"details.\" % self.metadata.version\nTraceback (most recent call last):\n File \"setup.py\", line 44, in <module>\n 'ocfweb-run-periodic-functions = ocfweb.bin.run_periodic_functions:main',\n File \"/usr/lib/python3.4/distutils/core.py\", line 148, in setup\n dist.run_commands()\n File \"/usr/lib/python3.4/distutils/dist.py\", line 955, in run_commands\n self.run_command(cmd)\n File \"/usr/lib/python3.4/distutils/dist.py\", line 974, in run_command\n cmd_obj.run()\n File \"/opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/sassutils/distutils.py\", line 123, in run\n output_style=self.output_style\n File \"/opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/sassutils/builder.py\", line 177, in build\n ).values()\n File \"/opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/sassutils/builder.py\", line 73, in build_directory\n _root_css=_root_css)\n File \"/opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/sassutils/builder.py\", line 63, in build_directory\n include_paths=[_root_sass])\n File \"/opt/jenkins/slave/workspace/ocfweb-build-deb/src/debian/ocfweb/usr/share/python/ocfweb/lib/python3.4/site-packages/sass.py\", line 644, in compile\n raise CompileError(v)\nsass.CompileError: b'Error: \".page-staff-hours .hour .ocf-staffhours-faces\" failed to @extend \".list-inline\".\\n The selector \".list-inline\" was not found.\\n Use \"@extend .list-inline !optional\" if the extend should be able to fail.\\n on line 37 of ocfweb/static/scss/pages/staff-hours.scss\\n>> @extend .list-inline;\\n ------------^\\n'\ndebian/rules:17: recipe for target 'override_dh_virtualenv' failed\n```\n\nreproduces locally, but only with `make builddeb` and not the regular `make scss`\n\n", "before_files": [{"content": "from setuptools import find_packages\nfrom setuptools import setup\n\ntry:\n with open('.version') as f:\n VERSION = f.readline().strip()\nexcept IOError:\n VERSION = 'unknown'\n\nsetup(\n name='ocfweb',\n version=VERSION,\n packages=find_packages(exclude=['debian', 'venv']),\n include_package_data=True,\n url='https://www.ocf.berkeley.edu/',\n author='Open Computing Facility',\n 
author_email='[email protected]',\n install_requires=[\n 'cached-property',\n 'celery[redis]',\n 'django-bootstrap-form',\n 'django-ipware',\n 'django-mathfilters',\n 'django-redis',\n 'django>=1.10,<1.10.999',\n 'gunicorn',\n 'libsass<=0.10.0', # https://github.com/ocf/ocfweb/issues/128\n 'matplotlib',\n 'mistune',\n 'numpy',\n 'ocflib',\n 'pycrypto',\n 'pygments',\n 'pymysql',\n 'python-dateutil',\n ],\n sass_manifests={\n 'ocfweb': ('static/scss',), # XXX: must be tuple\n },\n entry_points={\n 'console_scripts': [\n 'ocfweb-run-periodic-functions = ocfweb.bin.run_periodic_functions:main',\n ],\n },\n classifiers=[\n 'Programming Language :: Python :: 3',\n ],\n)\n", "path": "setup.py"}]}
1,718
200
gh_patches_debug_41281
rasdani/github-patches
git_diff
mozilla__bugbug-310
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Make NLP dependencies optional Some dependencies are only needed in some images, we should make some dependencies optional so that they are only installed when necessary. For example, Spacy is the biggest dependency, but it is currently not really used by any image. </issue> <code> [start of setup.py] 1 # -*- coding: utf-8 -*- 2 # This Source Code Form is subject to the terms of the Mozilla Public 3 # License, v. 2.0. If a copy of the MPL was not distributed with this file, 4 # You can obtain one at http://mozilla.org/MPL/2.0/. 5 6 import os 7 8 from setuptools import find_packages, setup 9 10 here = os.path.dirname(__file__) 11 12 13 def read_requirements(file_): 14 requires = [] 15 links = [] 16 with open(os.path.join(here, file_)) as f: 17 for line in f.readlines(): 18 line = line.strip() 19 20 if line.startswith("https://"): 21 links.append(line + "-1.0.0") 22 extras = "" 23 if "[" in line: 24 extras = "[" + line.split("[")[1].split("]")[0] + "]" 25 line = line.split("#")[1].split("egg=")[1] + extras 26 elif line == "" or line.startswith("#") or line.startswith("-"): 27 continue 28 line = line.split("#")[0].strip() 29 requires.append(line) 30 31 return sorted(list(set(requires))), links 32 33 34 install_requires, dependency_links = read_requirements("requirements.txt") 35 36 37 with open(os.path.join(here, "VERSION")) as f: 38 version = f.read().strip() 39 40 setup( 41 name="bugbug", 42 version=version, 43 description="ML tools for Mozilla projects", 44 author="Marco Castelluccio", 45 author_email="[email protected]", 46 install_requires=install_requires, 47 dependency_links=dependency_links, 48 packages=find_packages(exclude=["contrib", "docs", "tests"]), 49 include_package_data=True, 50 license="MPL2", 51 entry_points={ 52 "console_scripts": [ 53 "bugbug-data-commits = scripts.commit_retriever:main", 54 "bugbug-data-bugzilla = scripts.bug_retriever:main", 55 "bugbug-train = scripts.trainer:main", 56 ] 57 }, 58 ) 59 [end of setup.py] [start of bugbug/nlp.py] 1 # -*- coding: utf-8 -*- 2 # This Source Code Form is subject to the terms of the Mozilla Public 3 # License, v. 2.0. If a copy of the MPL was not distributed with this file, 4 # You can obtain one at http://mozilla.org/MPL/2.0/. 
5 6 from collections import defaultdict 7 from functools import lru_cache 8 9 import numpy as np 10 import spacy 11 from gensim.models import KeyedVectors 12 from sklearn.base import BaseEstimator, TransformerMixin 13 from sklearn.feature_extraction.text import TfidfVectorizer 14 from spacy.tokenizer import Tokenizer 15 16 nlp = spacy.load("en_core_web_sm") 17 18 19 def spacy_token_lemmatizer(text): 20 if len(text) > nlp.max_length: 21 text = text[: nlp.max_length - 1] 22 doc = nlp(text) 23 return [token.lemma_ for token in doc] 24 25 26 class SpacyVectorizer(TfidfVectorizer): 27 def __init__(self, *args, **kwargs): 28 super().__init__(tokenizer=spacy_token_lemmatizer, *args, **kwargs) 29 30 31 @lru_cache() 32 def get_word_embeddings(): 33 word_embeddings = KeyedVectors.load_word2vec_format("wiki-news-300d-1M-subword.vec") 34 word_embeddings.init_sims(replace=True) 35 return word_embeddings 36 37 38 class MeanEmbeddingTransformer(BaseEstimator, TransformerMixin): 39 def __init__(self): 40 self.model = get_word_embeddings() 41 self.dim = len(self.model["if"]) 42 43 def fit(self, x, y=None): 44 return self 45 46 def transform(self, data): 47 tokenizer = Tokenizer(nlp.vocab) 48 return np.array( 49 [ 50 np.mean( 51 [ 52 self.model[w.text.lower()] 53 for w in words 54 if w.text.lower() in self.model 55 ] 56 or [np.zeros(self.dim)], 57 axis=0, 58 ) 59 for words in tokenizer.pipe(data) 60 ] 61 ) 62 63 def get_feature_names(self): 64 return np.array([f"_{i}" for i in range(self.dim)], dtype=object) 65 66 67 class TfidfMeanEmbeddingTransformer(MeanEmbeddingTransformer): 68 def __init__(self): 69 super().__init__() 70 self.word2weight = None 71 72 def fit(self, X, y=None): 73 tfidf = TfidfVectorizer(analyzer=lambda x: x) 74 tfidf.fit(X) 75 76 # If a word was never seen, it must be at least as infrequent as any of the known words. 77 # So, the default idf is the max of known idfs. 78 max_idf = max(tfidf.idf_) 79 self.word2weight = defaultdict( 80 lambda: max_idf, [(w, tfidf.idf_[i]) for w, i in tfidf.vocabulary_.items()] 81 ) 82 83 return self 84 85 def transform(self, data): 86 tokenizer = Tokenizer(nlp.vocab) 87 return np.array( 88 [ 89 np.mean( 90 [ 91 self.model[w.text.lower()] * self.word2weight[w.text.lower()] 92 for w in words 93 if w.text.lower() in self.model 94 ] 95 or [np.zeros(self.dim)], 96 axis=0, 97 ) 98 for words in tokenizer.pipe(data) 99 ] 100 ) 101 [end of bugbug/nlp.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/bugbug/nlp.py b/bugbug/nlp.py --- a/bugbug/nlp.py +++ b/bugbug/nlp.py @@ -3,17 +3,41 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. +import sys from collections import defaultdict from functools import lru_cache import numpy as np -import spacy -from gensim.models import KeyedVectors from sklearn.base import BaseEstimator, TransformerMixin from sklearn.feature_extraction.text import TfidfVectorizer -from spacy.tokenizer import Tokenizer -nlp = spacy.load("en_core_web_sm") +HAS_OPTIONAL_DEPENDENCIES = False + +try: + import spacy + + from spacy.tokenizer import Tokenizer + from gensim.models import KeyedVectors + + HAS_OPTIONAL_DEPENDENCIES = True +except ImportError: + pass + +try: + if HAS_OPTIONAL_DEPENDENCIES: + nlp = spacy.load("en_core_web_sm") +except OSError: + msg = ( + "Spacy model is missing, install it with: " + f"{sys.executable} -m spacy download en_core_web_sm" + ) + print(msg, file=sys.stderr) + +OPT_MSG_MISSING = ( + "Optional dependencies are missing, install them with: pip install bugbug[nlp]\n" + "You might need also to download the models with: " + f"{sys.executable} -m spacy download en_core_web_sm" +) def spacy_token_lemmatizer(text): @@ -25,6 +49,11 @@ class SpacyVectorizer(TfidfVectorizer): def __init__(self, *args, **kwargs): + + # Detect when the Spacy optional dependency is missing + if not HAS_OPTIONAL_DEPENDENCIES: + raise NotImplementedError(OPT_MSG_MISSING) + super().__init__(tokenizer=spacy_token_lemmatizer, *args, **kwargs) @@ -37,6 +66,10 @@ class MeanEmbeddingTransformer(BaseEstimator, TransformerMixin): def __init__(self): + # Detect when the Gensim optional dependency are missing + if not HAS_OPTIONAL_DEPENDENCIES: + raise NotImplementedError(OPT_MSG_MISSING) + self.model = get_word_embeddings() self.dim = len(self.model["if"]) diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -37,6 +37,20 @@ with open(os.path.join(here, "VERSION")) as f: version = f.read().strip() +# Read the extra requirements +extras = ["nlp"] + +extras_require = {} + +for extra in extras: + extras_install, extra_links = read_requirements("extra-%s-requirements.txt" % extra) + + # Merge the dependency links + dependency_links.extend(extra_links) + + extras_require[extra] = extras_install + + setup( name="bugbug", version=version, @@ -44,6 +58,7 @@ author="Marco Castelluccio", author_email="[email protected]", install_requires=install_requires, + extras_require=extras_require, dependency_links=dependency_links, packages=find_packages(exclude=["contrib", "docs", "tests"]), include_package_data=True, @@ -55,4 +70,9 @@ "bugbug-train = scripts.trainer:main", ] }, + classifiers=[ + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3 :: Only", + "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", + ], )
{"golden_diff": "diff --git a/bugbug/nlp.py b/bugbug/nlp.py\n--- a/bugbug/nlp.py\n+++ b/bugbug/nlp.py\n@@ -3,17 +3,41 @@\n # License, v. 2.0. If a copy of the MPL was not distributed with this file,\n # You can obtain one at http://mozilla.org/MPL/2.0/.\n \n+import sys\n from collections import defaultdict\n from functools import lru_cache\n \n import numpy as np\n-import spacy\n-from gensim.models import KeyedVectors\n from sklearn.base import BaseEstimator, TransformerMixin\n from sklearn.feature_extraction.text import TfidfVectorizer\n-from spacy.tokenizer import Tokenizer\n \n-nlp = spacy.load(\"en_core_web_sm\")\n+HAS_OPTIONAL_DEPENDENCIES = False\n+\n+try:\n+ import spacy\n+\n+ from spacy.tokenizer import Tokenizer\n+ from gensim.models import KeyedVectors\n+\n+ HAS_OPTIONAL_DEPENDENCIES = True\n+except ImportError:\n+ pass\n+\n+try:\n+ if HAS_OPTIONAL_DEPENDENCIES:\n+ nlp = spacy.load(\"en_core_web_sm\")\n+except OSError:\n+ msg = (\n+ \"Spacy model is missing, install it with: \"\n+ f\"{sys.executable} -m spacy download en_core_web_sm\"\n+ )\n+ print(msg, file=sys.stderr)\n+\n+OPT_MSG_MISSING = (\n+ \"Optional dependencies are missing, install them with: pip install bugbug[nlp]\\n\"\n+ \"You might need also to download the models with: \"\n+ f\"{sys.executable} -m spacy download en_core_web_sm\"\n+)\n \n \n def spacy_token_lemmatizer(text):\n@@ -25,6 +49,11 @@\n \n class SpacyVectorizer(TfidfVectorizer):\n def __init__(self, *args, **kwargs):\n+\n+ # Detect when the Spacy optional dependency is missing\n+ if not HAS_OPTIONAL_DEPENDENCIES:\n+ raise NotImplementedError(OPT_MSG_MISSING)\n+\n super().__init__(tokenizer=spacy_token_lemmatizer, *args, **kwargs)\n \n \n@@ -37,6 +66,10 @@\n \n class MeanEmbeddingTransformer(BaseEstimator, TransformerMixin):\n def __init__(self):\n+ # Detect when the Gensim optional dependency are missing\n+ if not HAS_OPTIONAL_DEPENDENCIES:\n+ raise NotImplementedError(OPT_MSG_MISSING)\n+\n self.model = get_word_embeddings()\n self.dim = len(self.model[\"if\"])\n \ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -37,6 +37,20 @@\n with open(os.path.join(here, \"VERSION\")) as f:\n version = f.read().strip()\n \n+# Read the extra requirements\n+extras = [\"nlp\"]\n+\n+extras_require = {}\n+\n+for extra in extras:\n+ extras_install, extra_links = read_requirements(\"extra-%s-requirements.txt\" % extra)\n+\n+ # Merge the dependency links\n+ dependency_links.extend(extra_links)\n+\n+ extras_require[extra] = extras_install\n+\n+\n setup(\n name=\"bugbug\",\n version=version,\n@@ -44,6 +58,7 @@\n author=\"Marco Castelluccio\",\n author_email=\"[email protected]\",\n install_requires=install_requires,\n+ extras_require=extras_require,\n dependency_links=dependency_links,\n packages=find_packages(exclude=[\"contrib\", \"docs\", \"tests\"]),\n include_package_data=True,\n@@ -55,4 +70,9 @@\n \"bugbug-train = scripts.trainer:main\",\n ]\n },\n+ classifiers=[\n+ \"Programming Language :: Python :: 3.6\",\n+ \"Programming Language :: Python :: 3 :: Only\",\n+ \"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)\",\n+ ],\n )\n", "issue": "Make NLP dependencies optional\nSome dependencies are only needed in some images, we should make some dependencies optional so that they are only installed when necessary.\r\n\r\nFor example, Spacy is the biggest dependency, but it is currently not really used by any image.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# This Source Code Form is subject to the terms of the Mozilla Public\n# 
License, v. 2.0. If a copy of the MPL was not distributed with this file,\n# You can obtain one at http://mozilla.org/MPL/2.0/.\n\nimport os\n\nfrom setuptools import find_packages, setup\n\nhere = os.path.dirname(__file__)\n\n\ndef read_requirements(file_):\n requires = []\n links = []\n with open(os.path.join(here, file_)) as f:\n for line in f.readlines():\n line = line.strip()\n\n if line.startswith(\"https://\"):\n links.append(line + \"-1.0.0\")\n extras = \"\"\n if \"[\" in line:\n extras = \"[\" + line.split(\"[\")[1].split(\"]\")[0] + \"]\"\n line = line.split(\"#\")[1].split(\"egg=\")[1] + extras\n elif line == \"\" or line.startswith(\"#\") or line.startswith(\"-\"):\n continue\n line = line.split(\"#\")[0].strip()\n requires.append(line)\n\n return sorted(list(set(requires))), links\n\n\ninstall_requires, dependency_links = read_requirements(\"requirements.txt\")\n\n\nwith open(os.path.join(here, \"VERSION\")) as f:\n version = f.read().strip()\n\nsetup(\n name=\"bugbug\",\n version=version,\n description=\"ML tools for Mozilla projects\",\n author=\"Marco Castelluccio\",\n author_email=\"[email protected]\",\n install_requires=install_requires,\n dependency_links=dependency_links,\n packages=find_packages(exclude=[\"contrib\", \"docs\", \"tests\"]),\n include_package_data=True,\n license=\"MPL2\",\n entry_points={\n \"console_scripts\": [\n \"bugbug-data-commits = scripts.commit_retriever:main\",\n \"bugbug-data-bugzilla = scripts.bug_retriever:main\",\n \"bugbug-train = scripts.trainer:main\",\n ]\n },\n)\n", "path": "setup.py"}, {"content": "# -*- coding: utf-8 -*-\n# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this file,\n# You can obtain one at http://mozilla.org/MPL/2.0/.\n\nfrom collections import defaultdict\nfrom functools import lru_cache\n\nimport numpy as np\nimport spacy\nfrom gensim.models import KeyedVectors\nfrom sklearn.base import BaseEstimator, TransformerMixin\nfrom sklearn.feature_extraction.text import TfidfVectorizer\nfrom spacy.tokenizer import Tokenizer\n\nnlp = spacy.load(\"en_core_web_sm\")\n\n\ndef spacy_token_lemmatizer(text):\n if len(text) > nlp.max_length:\n text = text[: nlp.max_length - 1]\n doc = nlp(text)\n return [token.lemma_ for token in doc]\n\n\nclass SpacyVectorizer(TfidfVectorizer):\n def __init__(self, *args, **kwargs):\n super().__init__(tokenizer=spacy_token_lemmatizer, *args, **kwargs)\n\n\n@lru_cache()\ndef get_word_embeddings():\n word_embeddings = KeyedVectors.load_word2vec_format(\"wiki-news-300d-1M-subword.vec\")\n word_embeddings.init_sims(replace=True)\n return word_embeddings\n\n\nclass MeanEmbeddingTransformer(BaseEstimator, TransformerMixin):\n def __init__(self):\n self.model = get_word_embeddings()\n self.dim = len(self.model[\"if\"])\n\n def fit(self, x, y=None):\n return self\n\n def transform(self, data):\n tokenizer = Tokenizer(nlp.vocab)\n return np.array(\n [\n np.mean(\n [\n self.model[w.text.lower()]\n for w in words\n if w.text.lower() in self.model\n ]\n or [np.zeros(self.dim)],\n axis=0,\n )\n for words in tokenizer.pipe(data)\n ]\n )\n\n def get_feature_names(self):\n return np.array([f\"_{i}\" for i in range(self.dim)], dtype=object)\n\n\nclass TfidfMeanEmbeddingTransformer(MeanEmbeddingTransformer):\n def __init__(self):\n super().__init__()\n self.word2weight = None\n\n def fit(self, X, y=None):\n tfidf = TfidfVectorizer(analyzer=lambda x: x)\n tfidf.fit(X)\n\n # If a word was never seen, it must be at least as 
infrequent as any of the known words.\n # So, the default idf is the max of known idfs.\n max_idf = max(tfidf.idf_)\n self.word2weight = defaultdict(\n lambda: max_idf, [(w, tfidf.idf_[i]) for w, i in tfidf.vocabulary_.items()]\n )\n\n return self\n\n def transform(self, data):\n tokenizer = Tokenizer(nlp.vocab)\n return np.array(\n [\n np.mean(\n [\n self.model[w.text.lower()] * self.word2weight[w.text.lower()]\n for w in words\n if w.text.lower() in self.model\n ]\n or [np.zeros(self.dim)],\n axis=0,\n )\n for words in tokenizer.pipe(data)\n ]\n )\n", "path": "bugbug/nlp.py"}]}
2,040
866
gh_patches_debug_11621
rasdani/github-patches
git_diff
apache__airflow-12057
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Exception logging success function instead of failure https://github.com/apache/airflow/blob/fdd9b6f65b608c516b8a062b058972d9a45ec9e3/airflow/sensors/sql_sensor.py#L97 </issue> <code> [start of airflow/sensors/sql_sensor.py] 1 # 2 # Licensed to the Apache Software Foundation (ASF) under one 3 # or more contributor license agreements. See the NOTICE file 4 # distributed with this work for additional information 5 # regarding copyright ownership. The ASF licenses this file 6 # to you under the Apache License, Version 2.0 (the 7 # "License"); you may not use this file except in compliance 8 # with the License. You may obtain a copy of the License at 9 # 10 # http://www.apache.org/licenses/LICENSE-2.0 11 # 12 # Unless required by applicable law or agreed to in writing, 13 # software distributed under the License is distributed on an 14 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 # KIND, either express or implied. See the License for the 16 # specific language governing permissions and limitations 17 # under the License. 18 19 from typing import Iterable 20 21 from airflow.exceptions import AirflowException 22 from airflow.hooks.base_hook import BaseHook 23 from airflow.sensors.base_sensor_operator import BaseSensorOperator 24 from airflow.utils.decorators import apply_defaults 25 26 27 class SqlSensor(BaseSensorOperator): 28 """ 29 Runs a sql statement repeatedly until a criteria is met. It will keep trying until 30 success or failure criteria are met, or if the first cell is not in (0, '0', '', None). 31 Optional success and failure callables are called with the first cell returned as the argument. 32 If success callable is defined the sensor will keep retrying until the criteria is met. 33 If failure callable is defined and the criteria is met the sensor will raise AirflowException. 34 Failure criteria is evaluated before success criteria. A fail_on_empty boolean can also 35 be passed to the sensor in which case it will fail if no rows have been returned 36 37 :param conn_id: The connection to run the sensor against 38 :type conn_id: str 39 :param sql: The sql to run. To pass, it needs to return at least one cell 40 that contains a non-zero / empty string value. 41 :type sql: str 42 :param parameters: The parameters to render the SQL query with (optional). 43 :type parameters: dict or iterable 44 :param success: Success criteria for the sensor is a Callable that takes first_cell 45 as the only argument, and returns a boolean (optional). 46 :type: success: Optional<Callable[[Any], bool]> 47 :param failure: Failure criteria for the sensor is a Callable that takes first_cell 48 as the only argument and return a boolean (optional). 49 :type: failure: Optional<Callable[[Any], bool]> 50 :param fail_on_empty: Explicitly fail on no rows returned. 
51 :type: fail_on_empty: bool 52 """ 53 54 template_fields: Iterable[str] = ('sql',) 55 template_ext: Iterable[str] = ( 56 '.hql', 57 '.sql', 58 ) 59 ui_color = '#7c7287' 60 61 @apply_defaults 62 def __init__( 63 self, *, conn_id, sql, parameters=None, success=None, failure=None, fail_on_empty=False, **kwargs 64 ): 65 self.conn_id = conn_id 66 self.sql = sql 67 self.parameters = parameters 68 self.success = success 69 self.failure = failure 70 self.fail_on_empty = fail_on_empty 71 super().__init__(**kwargs) 72 73 def _get_hook(self): 74 conn = BaseHook.get_connection(self.conn_id) 75 76 allowed_conn_type = { 77 'google_cloud_platform', 78 'jdbc', 79 'mssql', 80 'mysql', 81 'odbc', 82 'oracle', 83 'postgres', 84 'presto', 85 'snowflake', 86 'sqlite', 87 'vertica', 88 } 89 if conn.conn_type not in allowed_conn_type: 90 raise AirflowException( 91 "The connection type is not supported by SqlSensor. " 92 + "Supported connection types: {}".format(list(allowed_conn_type)) 93 ) 94 return conn.get_hook() 95 96 def poke(self, context): 97 hook = self._get_hook() 98 99 self.log.info('Poking: %s (with parameters %s)', self.sql, self.parameters) 100 records = hook.get_records(self.sql, self.parameters) 101 if not records: 102 if self.fail_on_empty: 103 raise AirflowException("No rows returned, raising as per fail_on_empty flag") 104 else: 105 return False 106 first_cell = records[0][0] 107 if self.failure is not None: 108 if callable(self.failure): 109 if self.failure(first_cell): 110 raise AirflowException(f"Failure criteria met. self.failure({first_cell}) returned True") 111 else: 112 raise AirflowException(f"self.failure is present, but not callable -> {self.success}") 113 if self.success is not None: 114 if callable(self.success): 115 return self.success(first_cell) 116 else: 117 raise AirflowException(f"self.success is present, but not callable -> {self.success}") 118 return bool(first_cell) 119 [end of airflow/sensors/sql_sensor.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/airflow/sensors/sql_sensor.py b/airflow/sensors/sql_sensor.py --- a/airflow/sensors/sql_sensor.py +++ b/airflow/sensors/sql_sensor.py @@ -109,7 +109,7 @@ if self.failure(first_cell): raise AirflowException(f"Failure criteria met. self.failure({first_cell}) returned True") else: - raise AirflowException(f"self.failure is present, but not callable -> {self.success}") + raise AirflowException(f"self.failure is present, but not callable -> {self.failure}") if self.success is not None: if callable(self.success): return self.success(first_cell)
{"golden_diff": "diff --git a/airflow/sensors/sql_sensor.py b/airflow/sensors/sql_sensor.py\n--- a/airflow/sensors/sql_sensor.py\n+++ b/airflow/sensors/sql_sensor.py\n@@ -109,7 +109,7 @@\n if self.failure(first_cell):\n raise AirflowException(f\"Failure criteria met. self.failure({first_cell}) returned True\")\n else:\n- raise AirflowException(f\"self.failure is present, but not callable -> {self.success}\")\n+ raise AirflowException(f\"self.failure is present, but not callable -> {self.failure}\")\n if self.success is not None:\n if callable(self.success):\n return self.success(first_cell)\n", "issue": "Exception logging success function instead of failure\nhttps://github.com/apache/airflow/blob/fdd9b6f65b608c516b8a062b058972d9a45ec9e3/airflow/sensors/sql_sensor.py#L97\r\n\r\n\n", "before_files": [{"content": "#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\nfrom typing import Iterable\n\nfrom airflow.exceptions import AirflowException\nfrom airflow.hooks.base_hook import BaseHook\nfrom airflow.sensors.base_sensor_operator import BaseSensorOperator\nfrom airflow.utils.decorators import apply_defaults\n\n\nclass SqlSensor(BaseSensorOperator):\n \"\"\"\n Runs a sql statement repeatedly until a criteria is met. It will keep trying until\n success or failure criteria are met, or if the first cell is not in (0, '0', '', None).\n Optional success and failure callables are called with the first cell returned as the argument.\n If success callable is defined the sensor will keep retrying until the criteria is met.\n If failure callable is defined and the criteria is met the sensor will raise AirflowException.\n Failure criteria is evaluated before success criteria. A fail_on_empty boolean can also\n be passed to the sensor in which case it will fail if no rows have been returned\n\n :param conn_id: The connection to run the sensor against\n :type conn_id: str\n :param sql: The sql to run. 
To pass, it needs to return at least one cell\n that contains a non-zero / empty string value.\n :type sql: str\n :param parameters: The parameters to render the SQL query with (optional).\n :type parameters: dict or iterable\n :param success: Success criteria for the sensor is a Callable that takes first_cell\n as the only argument, and returns a boolean (optional).\n :type: success: Optional<Callable[[Any], bool]>\n :param failure: Failure criteria for the sensor is a Callable that takes first_cell\n as the only argument and return a boolean (optional).\n :type: failure: Optional<Callable[[Any], bool]>\n :param fail_on_empty: Explicitly fail on no rows returned.\n :type: fail_on_empty: bool\n \"\"\"\n\n template_fields: Iterable[str] = ('sql',)\n template_ext: Iterable[str] = (\n '.hql',\n '.sql',\n )\n ui_color = '#7c7287'\n\n @apply_defaults\n def __init__(\n self, *, conn_id, sql, parameters=None, success=None, failure=None, fail_on_empty=False, **kwargs\n ):\n self.conn_id = conn_id\n self.sql = sql\n self.parameters = parameters\n self.success = success\n self.failure = failure\n self.fail_on_empty = fail_on_empty\n super().__init__(**kwargs)\n\n def _get_hook(self):\n conn = BaseHook.get_connection(self.conn_id)\n\n allowed_conn_type = {\n 'google_cloud_platform',\n 'jdbc',\n 'mssql',\n 'mysql',\n 'odbc',\n 'oracle',\n 'postgres',\n 'presto',\n 'snowflake',\n 'sqlite',\n 'vertica',\n }\n if conn.conn_type not in allowed_conn_type:\n raise AirflowException(\n \"The connection type is not supported by SqlSensor. \"\n + \"Supported connection types: {}\".format(list(allowed_conn_type))\n )\n return conn.get_hook()\n\n def poke(self, context):\n hook = self._get_hook()\n\n self.log.info('Poking: %s (with parameters %s)', self.sql, self.parameters)\n records = hook.get_records(self.sql, self.parameters)\n if not records:\n if self.fail_on_empty:\n raise AirflowException(\"No rows returned, raising as per fail_on_empty flag\")\n else:\n return False\n first_cell = records[0][0]\n if self.failure is not None:\n if callable(self.failure):\n if self.failure(first_cell):\n raise AirflowException(f\"Failure criteria met. self.failure({first_cell}) returned True\")\n else:\n raise AirflowException(f\"self.failure is present, but not callable -> {self.success}\")\n if self.success is not None:\n if callable(self.success):\n return self.success(first_cell)\n else:\n raise AirflowException(f\"self.success is present, but not callable -> {self.success}\")\n return bool(first_cell)\n", "path": "airflow/sensors/sql_sensor.py"}]}
1,903
152
gh_patches_debug_8494
rasdani/github-patches
git_diff
cloudtools__troposphere-1040
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Appsync ApiKey Expires needs to be an int The expires prop on the appsync ApiKey class needs to be an int otherwise the decimal place causes cloudformation to throw a `Can not deserialize value of type java.lang.Long from String` error. </issue> <code> [start of troposphere/appsync.py] 1 # Copyright (c) 2012-2017, Mark Peek <[email protected]> 2 # All rights reserved. 3 # 4 # See LICENSE file for full license. 5 6 from . import AWSObject, AWSProperty 7 from .validators import boolean 8 9 10 class ApiKey(AWSObject): 11 resource_type = "AWS::AppSync::ApiKey" 12 13 props = { 14 'ApiId': (basestring, True), 15 'Description': (basestring, False), 16 'Expires': (float, False), 17 } 18 19 20 class DynamoDBConfig(AWSProperty): 21 props = { 22 'AwsRegion': (basestring, True), 23 'TableName': (basestring, True), 24 'UseCallerCredentials': (boolean, False), 25 } 26 27 28 class ElasticsearchConfig(AWSProperty): 29 props = { 30 'AwsRegion': (basestring, True), 31 'Endpoint': (basestring, True), 32 } 33 34 35 class LambdaConfig(AWSProperty): 36 props = { 37 'LambdaFunctionArn': (basestring, True), 38 } 39 40 41 class DataSource(AWSObject): 42 resource_type = "AWS::AppSync::DataSource" 43 44 props = { 45 'ApiId': (basestring, True), 46 'Description': (basestring, False), 47 'DynamoDBConfig': (DynamoDBConfig, False), 48 'ElasticsearchConfig': (ElasticsearchConfig, False), 49 'LambdaConfig': (LambdaConfig, False), 50 'Name': (basestring, True), 51 'ServiceRoleArn': (basestring, False), 52 'Type': (basestring, True), 53 } 54 55 56 class LogConfig(AWSProperty): 57 props = { 58 'CloudWatchLogsRoleArn': (basestring, False), 59 'FieldLogLevel': (basestring, False), 60 } 61 62 63 class OpenIDConnectConfig(AWSProperty): 64 props = { 65 'AuthTTL': (float, False), 66 'ClientId': (basestring, False), 67 'IatTTL': (float, False), 68 'Issuer': (basestring, True), 69 } 70 71 72 class UserPoolConfig(AWSProperty): 73 props = { 74 'AppIdClientRegex': (basestring, False), 75 'AwsRegion': (basestring, False), 76 'DefaultAction': (basestring, False), 77 'UserPoolId': (basestring, False), 78 } 79 80 81 class GraphQLApi(AWSObject): 82 resource_type = "AWS::AppSync::GraphQLApi" 83 84 props = { 85 'AuthenticationType': (basestring, True), 86 'LogConfig': (LogConfig, False), 87 'Name': (basestring, True), 88 'OpenIDConnectConfig': (OpenIDConnectConfig, False), 89 'UserPoolConfig': (UserPoolConfig, False), 90 } 91 92 93 class GraphQLSchema(AWSObject): 94 resource_type = "AWS::AppSync::GraphQLSchema" 95 96 props = { 97 'ApiId': (basestring, True), 98 'Definition': (basestring, False), 99 'DefinitionS3Location': (basestring, False), 100 } 101 102 103 class Resolver(AWSObject): 104 resource_type = "AWS::AppSync::Resolver" 105 106 props = { 107 'ApiId': (basestring, True), 108 'DataSourceName': (basestring, True), 109 'FieldName': (basestring, True), 110 'RequestMappingTemplate': (basestring, False), 111 'RequestMappingTemplateS3Location': (basestring, False), 112 'ResponseMappingTemplate': (basestring, False), 113 'ResponseMappingTemplateS3Location': (basestring, False), 114 'TypeName': (basestring, True), 115 } 116 [end of troposphere/appsync.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/troposphere/appsync.py b/troposphere/appsync.py --- a/troposphere/appsync.py +++ b/troposphere/appsync.py @@ -4,7 +4,7 @@ # See LICENSE file for full license. from . import AWSObject, AWSProperty -from .validators import boolean +from .validators import boolean, integer class ApiKey(AWSObject): @@ -13,7 +13,7 @@ props = { 'ApiId': (basestring, True), 'Description': (basestring, False), - 'Expires': (float, False), + 'Expires': (integer, False), }
{"golden_diff": "diff --git a/troposphere/appsync.py b/troposphere/appsync.py\n--- a/troposphere/appsync.py\n+++ b/troposphere/appsync.py\n@@ -4,7 +4,7 @@\n # See LICENSE file for full license.\n \n from . import AWSObject, AWSProperty\n-from .validators import boolean\n+from .validators import boolean, integer\n \n \n class ApiKey(AWSObject):\n@@ -13,7 +13,7 @@\n props = {\n 'ApiId': (basestring, True),\n 'Description': (basestring, False),\n- 'Expires': (float, False),\n+ 'Expires': (integer, False),\n }\n", "issue": "Appsync ApiKey Expires needs to be an int\nThe expires prop on the appsync ApiKey class needs to be an int otherwise the decimal place causes cloudformation to throw a `Can not deserialize value of type java.lang.Long from String` error. \n", "before_files": [{"content": "# Copyright (c) 2012-2017, Mark Peek <[email protected]>\n# All rights reserved.\n#\n# See LICENSE file for full license.\n\nfrom . import AWSObject, AWSProperty\nfrom .validators import boolean\n\n\nclass ApiKey(AWSObject):\n resource_type = \"AWS::AppSync::ApiKey\"\n\n props = {\n 'ApiId': (basestring, True),\n 'Description': (basestring, False),\n 'Expires': (float, False),\n }\n\n\nclass DynamoDBConfig(AWSProperty):\n props = {\n 'AwsRegion': (basestring, True),\n 'TableName': (basestring, True),\n 'UseCallerCredentials': (boolean, False),\n }\n\n\nclass ElasticsearchConfig(AWSProperty):\n props = {\n 'AwsRegion': (basestring, True),\n 'Endpoint': (basestring, True),\n }\n\n\nclass LambdaConfig(AWSProperty):\n props = {\n 'LambdaFunctionArn': (basestring, True),\n }\n\n\nclass DataSource(AWSObject):\n resource_type = \"AWS::AppSync::DataSource\"\n\n props = {\n 'ApiId': (basestring, True),\n 'Description': (basestring, False),\n 'DynamoDBConfig': (DynamoDBConfig, False),\n 'ElasticsearchConfig': (ElasticsearchConfig, False),\n 'LambdaConfig': (LambdaConfig, False),\n 'Name': (basestring, True),\n 'ServiceRoleArn': (basestring, False),\n 'Type': (basestring, True),\n }\n\n\nclass LogConfig(AWSProperty):\n props = {\n 'CloudWatchLogsRoleArn': (basestring, False),\n 'FieldLogLevel': (basestring, False),\n }\n\n\nclass OpenIDConnectConfig(AWSProperty):\n props = {\n 'AuthTTL': (float, False),\n 'ClientId': (basestring, False),\n 'IatTTL': (float, False),\n 'Issuer': (basestring, True),\n }\n\n\nclass UserPoolConfig(AWSProperty):\n props = {\n 'AppIdClientRegex': (basestring, False),\n 'AwsRegion': (basestring, False),\n 'DefaultAction': (basestring, False),\n 'UserPoolId': (basestring, False),\n }\n\n\nclass GraphQLApi(AWSObject):\n resource_type = \"AWS::AppSync::GraphQLApi\"\n\n props = {\n 'AuthenticationType': (basestring, True),\n 'LogConfig': (LogConfig, False),\n 'Name': (basestring, True),\n 'OpenIDConnectConfig': (OpenIDConnectConfig, False),\n 'UserPoolConfig': (UserPoolConfig, False),\n }\n\n\nclass GraphQLSchema(AWSObject):\n resource_type = \"AWS::AppSync::GraphQLSchema\"\n\n props = {\n 'ApiId': (basestring, True),\n 'Definition': (basestring, False),\n 'DefinitionS3Location': (basestring, False),\n }\n\n\nclass Resolver(AWSObject):\n resource_type = \"AWS::AppSync::Resolver\"\n\n props = {\n 'ApiId': (basestring, True),\n 'DataSourceName': (basestring, True),\n 'FieldName': (basestring, True),\n 'RequestMappingTemplate': (basestring, False),\n 'RequestMappingTemplateS3Location': (basestring, False),\n 'ResponseMappingTemplate': (basestring, False),\n 'ResponseMappingTemplateS3Location': (basestring, False),\n 'TypeName': (basestring, True),\n }\n", "path": "troposphere/appsync.py"}]}
1,608
147
gh_patches_debug_2684
rasdani/github-patches
git_diff
CTFd__CTFd-1918
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Users in admin scoreboard show user position instead of team position In teams mode on the admin panel, users are shown with their user position on the scoreboard instead of their teams position. We should be showing both. </issue> <code> [start of CTFd/admin/users.py] 1 from flask import render_template, request, url_for 2 from sqlalchemy.sql import not_ 3 4 from CTFd.admin import admin 5 from CTFd.models import Challenges, Tracking, Users 6 from CTFd.utils import get_config 7 from CTFd.utils.decorators import admins_only 8 from CTFd.utils.modes import TEAMS_MODE 9 10 11 @admin.route("/admin/users") 12 @admins_only 13 def users_listing(): 14 q = request.args.get("q") 15 field = request.args.get("field") 16 page = abs(request.args.get("page", 1, type=int)) 17 filters = [] 18 users = [] 19 20 if q: 21 # The field exists as an exposed column 22 if Users.__mapper__.has_property(field): 23 filters.append(getattr(Users, field).like("%{}%".format(q))) 24 25 if q and field == "ip": 26 users = ( 27 Users.query.join(Tracking, Users.id == Tracking.user_id) 28 .filter(Tracking.ip.like("%{}%".format(q))) 29 .order_by(Users.id.asc()) 30 .paginate(page=page, per_page=50) 31 ) 32 else: 33 users = ( 34 Users.query.filter(*filters) 35 .order_by(Users.id.asc()) 36 .paginate(page=page, per_page=50) 37 ) 38 39 args = dict(request.args) 40 args.pop("page", 1) 41 42 return render_template( 43 "admin/users/users.html", 44 users=users, 45 prev_page=url_for(request.endpoint, page=users.prev_num, **args), 46 next_page=url_for(request.endpoint, page=users.next_num, **args), 47 q=q, 48 field=field, 49 ) 50 51 52 @admin.route("/admin/users/new") 53 @admins_only 54 def users_new(): 55 return render_template("admin/users/new.html") 56 57 58 @admin.route("/admin/users/<int:user_id>") 59 @admins_only 60 def users_detail(user_id): 61 # Get user object 62 user = Users.query.filter_by(id=user_id).first_or_404() 63 64 # Get the user's solves 65 solves = user.get_solves(admin=True) 66 67 # Get challenges that the user is missing 68 if get_config("user_mode") == TEAMS_MODE: 69 if user.team: 70 all_solves = user.team.get_solves(admin=True) 71 else: 72 all_solves = user.get_solves(admin=True) 73 else: 74 all_solves = user.get_solves(admin=True) 75 76 solve_ids = [s.challenge_id for s in all_solves] 77 missing = Challenges.query.filter(not_(Challenges.id.in_(solve_ids))).all() 78 79 # Get IP addresses that the User has used 80 addrs = ( 81 Tracking.query.filter_by(user_id=user_id).order_by(Tracking.date.desc()).all() 82 ) 83 84 # Get Fails 85 fails = user.get_fails(admin=True) 86 87 # Get Awards 88 awards = user.get_awards(admin=True) 89 90 # Get user properties 91 score = user.get_score(admin=True) 92 place = user.get_place(admin=True) 93 94 return render_template( 95 "admin/users/user.html", 96 solves=solves, 97 user=user, 98 addrs=addrs, 99 score=score, 100 missing=missing, 101 place=place, 102 fails=fails, 103 awards=awards, 104 ) 105 [end of CTFd/admin/users.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/CTFd/admin/users.py b/CTFd/admin/users.py --- a/CTFd/admin/users.py +++ b/CTFd/admin/users.py @@ -88,8 +88,8 @@ awards = user.get_awards(admin=True) # Get user properties - score = user.get_score(admin=True) - place = user.get_place(admin=True) + score = user.account.get_score(admin=True) + place = user.account.get_place(admin=True) return render_template( "admin/users/user.html",
{"golden_diff": "diff --git a/CTFd/admin/users.py b/CTFd/admin/users.py\n--- a/CTFd/admin/users.py\n+++ b/CTFd/admin/users.py\n@@ -88,8 +88,8 @@\n awards = user.get_awards(admin=True)\n \n # Get user properties\n- score = user.get_score(admin=True)\n- place = user.get_place(admin=True)\n+ score = user.account.get_score(admin=True)\n+ place = user.account.get_place(admin=True)\n \n return render_template(\n \"admin/users/user.html\",\n", "issue": "Users in admin scoreboard show user position instead of team position\nIn teams mode on the admin panel, users are shown with their user position on the scoreboard instead of their teams position. We should be showing both. \n", "before_files": [{"content": "from flask import render_template, request, url_for\nfrom sqlalchemy.sql import not_\n\nfrom CTFd.admin import admin\nfrom CTFd.models import Challenges, Tracking, Users\nfrom CTFd.utils import get_config\nfrom CTFd.utils.decorators import admins_only\nfrom CTFd.utils.modes import TEAMS_MODE\n\n\[email protected](\"/admin/users\")\n@admins_only\ndef users_listing():\n q = request.args.get(\"q\")\n field = request.args.get(\"field\")\n page = abs(request.args.get(\"page\", 1, type=int))\n filters = []\n users = []\n\n if q:\n # The field exists as an exposed column\n if Users.__mapper__.has_property(field):\n filters.append(getattr(Users, field).like(\"%{}%\".format(q)))\n\n if q and field == \"ip\":\n users = (\n Users.query.join(Tracking, Users.id == Tracking.user_id)\n .filter(Tracking.ip.like(\"%{}%\".format(q)))\n .order_by(Users.id.asc())\n .paginate(page=page, per_page=50)\n )\n else:\n users = (\n Users.query.filter(*filters)\n .order_by(Users.id.asc())\n .paginate(page=page, per_page=50)\n )\n\n args = dict(request.args)\n args.pop(\"page\", 1)\n\n return render_template(\n \"admin/users/users.html\",\n users=users,\n prev_page=url_for(request.endpoint, page=users.prev_num, **args),\n next_page=url_for(request.endpoint, page=users.next_num, **args),\n q=q,\n field=field,\n )\n\n\[email protected](\"/admin/users/new\")\n@admins_only\ndef users_new():\n return render_template(\"admin/users/new.html\")\n\n\[email protected](\"/admin/users/<int:user_id>\")\n@admins_only\ndef users_detail(user_id):\n # Get user object\n user = Users.query.filter_by(id=user_id).first_or_404()\n\n # Get the user's solves\n solves = user.get_solves(admin=True)\n\n # Get challenges that the user is missing\n if get_config(\"user_mode\") == TEAMS_MODE:\n if user.team:\n all_solves = user.team.get_solves(admin=True)\n else:\n all_solves = user.get_solves(admin=True)\n else:\n all_solves = user.get_solves(admin=True)\n\n solve_ids = [s.challenge_id for s in all_solves]\n missing = Challenges.query.filter(not_(Challenges.id.in_(solve_ids))).all()\n\n # Get IP addresses that the User has used\n addrs = (\n Tracking.query.filter_by(user_id=user_id).order_by(Tracking.date.desc()).all()\n )\n\n # Get Fails\n fails = user.get_fails(admin=True)\n\n # Get Awards\n awards = user.get_awards(admin=True)\n\n # Get user properties\n score = user.get_score(admin=True)\n place = user.get_place(admin=True)\n\n return render_template(\n \"admin/users/user.html\",\n solves=solves,\n user=user,\n addrs=addrs,\n score=score,\n missing=missing,\n place=place,\n fails=fails,\n awards=awards,\n )\n", "path": "CTFd/admin/users.py"}]}
1,496
121
gh_patches_debug_7313
rasdani/github-patches
git_diff
MycroftAI__mycroft-core-1195
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Calling the pause() function does not pause the playback This [pause() function](https://github.com/MycroftAI/mycroft-core/blob/a9d627c979b98053719200c6084f3b31a9a97c62/mycroft/skills/audioservice.py#L83) does not have the desired effect of pausing the playback. `self.emitter.emit(Message('mycroft.sudio.service.stop'))` This stops the playback, but the Audioservice does not expose a stop function. I could make the commit to add the stop() function if agreed upon. </issue> <code> [start of mycroft/skills/audioservice.py] 1 # Copyright 2017 Mycroft AI Inc. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 # 15 import time 16 17 from os.path import abspath 18 19 from mycroft.messagebus.message import Message 20 21 22 def ensure_uri(s): 23 """ 24 Interprete paths as file:// uri's 25 26 Args: 27 s: string to be checked 28 29 Returns: 30 if s is uri, s is returned otherwise file:// is prepended 31 """ 32 if '://' not in s: 33 return 'file://' + abspath(s) 34 else: 35 return s 36 37 38 class AudioService(): 39 """ 40 AudioService object for interacting with the audio subsystem 41 42 Args: 43 emitter: eventemitter or websocket object 44 """ 45 46 def __init__(self, emitter): 47 self.emitter = emitter 48 self.emitter.on('mycroft.audio.service.track_info_reply', 49 self._track_info) 50 self.info = None 51 52 def _track_info(self, message=None): 53 """ 54 Handler for catching returning track info 55 """ 56 self.info = message.data 57 58 def play(self, tracks=[], utterance=''): 59 """ Start playback. 60 61 Args: 62 tracks: track uri or list of track uri's 63 utterance: forward utterance for further processing by the 64 audio service. 65 """ 66 if isinstance(tracks, basestring): 67 tracks = [tracks] 68 elif not isinstance(tracks, list): 69 raise ValueError 70 tracks = [ensure_uri(t) for t in tracks] 71 self.emitter.emit(Message('mycroft.audio.service.play', 72 data={'tracks': tracks, 73 'utterance': utterance})) 74 75 def next(self): 76 """ Change to next track. """ 77 self.emitter.emit(Message('mycroft.audio.service.next')) 78 79 def prev(self): 80 """ Change to previous track. """ 81 self.emitter.emit(Message('mycroft.audio.service.prev')) 82 83 def pause(self): 84 """ Pause playback. """ 85 self.emitter.emit(Message('mycroft.audio.service.pause')) 86 87 def resume(self): 88 """ Resume paused playback. """ 89 self.emitter.emit(Message('mycroft.audio.service.resume')) 90 91 def track_info(self): 92 """ Request information of current playing track. 93 94 Returns: 95 Dict with track info. 
96 """ 97 self.info = None 98 self.emitter.emit(Message('mycroft.audio.service.track_info')) 99 wait = 5.0 100 while self.info is None and wait >= 0: 101 time.sleep(0.1) 102 wait -= 0.1 103 104 return self.info or {} 105 106 @property 107 def is_playing(self): 108 return self.track_info() != {} 109 [end of mycroft/skills/audioservice.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/mycroft/skills/audioservice.py b/mycroft/skills/audioservice.py --- a/mycroft/skills/audioservice.py +++ b/mycroft/skills/audioservice.py @@ -72,6 +72,10 @@ data={'tracks': tracks, 'utterance': utterance})) + def stop(self): + """ Stop the track. """ + self.emitter.emit(Message('mycroft.audio.service.stop')) + def next(self): """ Change to next track. """ self.emitter.emit(Message('mycroft.audio.service.next'))
{"golden_diff": "diff --git a/mycroft/skills/audioservice.py b/mycroft/skills/audioservice.py\n--- a/mycroft/skills/audioservice.py\n+++ b/mycroft/skills/audioservice.py\n@@ -72,6 +72,10 @@\n data={'tracks': tracks,\n 'utterance': utterance}))\n \n+ def stop(self):\n+ \"\"\" Stop the track. \"\"\"\n+ self.emitter.emit(Message('mycroft.audio.service.stop'))\n+\n def next(self):\n \"\"\" Change to next track. \"\"\"\n self.emitter.emit(Message('mycroft.audio.service.next'))\n", "issue": "Calling the pause() function does not pause the playback\nThis [pause() function](https://github.com/MycroftAI/mycroft-core/blob/a9d627c979b98053719200c6084f3b31a9a97c62/mycroft/skills/audioservice.py#L83) does not have the desired effect of pausing the playback. \r\n`self.emitter.emit(Message('mycroft.sudio.service.stop'))`\r\nThis stops the playback, but the Audioservice does not expose a stop function. I could make the commit to add the stop() function if agreed upon.\n", "before_files": [{"content": "# Copyright 2017 Mycroft AI Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport time\n\nfrom os.path import abspath\n\nfrom mycroft.messagebus.message import Message\n\n\ndef ensure_uri(s):\n \"\"\"\n Interprete paths as file:// uri's\n\n Args:\n s: string to be checked\n\n Returns:\n if s is uri, s is returned otherwise file:// is prepended\n \"\"\"\n if '://' not in s:\n return 'file://' + abspath(s)\n else:\n return s\n\n\nclass AudioService():\n \"\"\"\n AudioService object for interacting with the audio subsystem\n\n Args:\n emitter: eventemitter or websocket object\n \"\"\"\n\n def __init__(self, emitter):\n self.emitter = emitter\n self.emitter.on('mycroft.audio.service.track_info_reply',\n self._track_info)\n self.info = None\n\n def _track_info(self, message=None):\n \"\"\"\n Handler for catching returning track info\n \"\"\"\n self.info = message.data\n\n def play(self, tracks=[], utterance=''):\n \"\"\" Start playback.\n\n Args:\n tracks: track uri or list of track uri's\n utterance: forward utterance for further processing by the\n audio service.\n \"\"\"\n if isinstance(tracks, basestring):\n tracks = [tracks]\n elif not isinstance(tracks, list):\n raise ValueError\n tracks = [ensure_uri(t) for t in tracks]\n self.emitter.emit(Message('mycroft.audio.service.play',\n data={'tracks': tracks,\n 'utterance': utterance}))\n\n def next(self):\n \"\"\" Change to next track. \"\"\"\n self.emitter.emit(Message('mycroft.audio.service.next'))\n\n def prev(self):\n \"\"\" Change to previous track. \"\"\"\n self.emitter.emit(Message('mycroft.audio.service.prev'))\n\n def pause(self):\n \"\"\" Pause playback. \"\"\"\n self.emitter.emit(Message('mycroft.audio.service.pause'))\n\n def resume(self):\n \"\"\" Resume paused playback. 
\"\"\"\n self.emitter.emit(Message('mycroft.audio.service.resume'))\n\n def track_info(self):\n \"\"\" Request information of current playing track.\n\n Returns:\n Dict with track info.\n \"\"\"\n self.info = None\n self.emitter.emit(Message('mycroft.audio.service.track_info'))\n wait = 5.0\n while self.info is None and wait >= 0:\n time.sleep(0.1)\n wait -= 0.1\n\n return self.info or {}\n\n @property\n def is_playing(self):\n return self.track_info() != {}\n", "path": "mycroft/skills/audioservice.py"}]}
1,583
133
gh_patches_debug_14746
rasdani/github-patches
git_diff
GoogleCloudPlatform__PerfKitBenchmarker-563
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Project id not respected when deleting disk in Cloudstack I forgot to pass project_id as an agrugment when deleting volume. </issue> <code> [start of perfkitbenchmarker/cloudstack/cloudstack_disk.py] 1 # Copyright 2014 Google Inc. All rights reserved. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 """Module containing classes related to CloudStack disks. 15 16 Disks can be created, deleted, attached to VMs, and detached from VMs. 17 """ 18 19 from perfkitbenchmarker import disk 20 from perfkitbenchmarker import vm_util 21 from perfkitbenchmarker import flags 22 from perfkitbenchmarker.cloudstack import util 23 24 import string 25 import logging 26 27 FLAGS = flags.FLAGS 28 29 30 class CloudStackDisk(disk.BaseDisk): 31 """Object representing a Cloudstack Disk.""" 32 33 34 def __init__(self, disk_spec, name, zone_id, project_id=None): 35 super(CloudStackDisk, self).__init__(disk_spec) 36 37 self.cs = util.CsClient( 38 FLAGS.CS_API_URL, 39 FLAGS.CS_API_KEY, 40 FLAGS.CS_API_SECRET 41 ) 42 43 self.attached_vm_name = None 44 self.attached_vm_id = None 45 self.name = name 46 47 self.zone_id = zone_id 48 self.project_id = project_id 49 50 self.disk_offering_id = self._GetBestOfferingId(self.disk_size) 51 assert self.disk_offering_id, "Unable get disk offering of given size" 52 53 if self.disk_spec.disk_type: 54 logging.warn("Cloudstack does not support disk types") 55 56 57 @vm_util.Retry(max_retries=3) 58 def _Create(self): 59 """Creates the disk.""" 60 61 62 volume = self.cs.create_volume(self.name, 63 self.disk_offering_id, 64 self.zone_id, 65 self.project_id) 66 67 assert volume, "Unable to create volume" 68 69 self.volume_id = volume['id'] 70 self.disk_type = volume['type'] 71 self.actual_disk_size = int(volume['size']) / (2 ** 30) # In GB 72 73 74 def _Delete(self): 75 """Deletes the disk.""" 76 vol = self.cs.get_volume(self.name) 77 if vol: 78 self.cs.delete_volume(self.volume_id) 79 80 81 def _Exists(self): 82 """Returns true if the disk exists.""" 83 vol = self.cs.get_volume(self.name, self.project_id) 84 if vol: 85 return True 86 return False 87 88 89 @vm_util.Retry(max_retries=3) 90 def Attach(self, vm): 91 """Attaches the disk to a VM. 92 93 Args: 94 vm: The CloudStackVirtualMachine instance to which 95 the disk will be attached. 96 97 """ 98 99 res = self.cs.attach_volume(self.volume_id, vm.id) 100 assert res, "Unable to attach volume" 101 102 self.device_id = res['deviceid'] 103 104 self.device_path = "/dev/xvd" + \ 105 str(string.ascii_lowercase[self.device_id]) 106 107 108 109 def Detach(self): 110 """Detaches the disk from a VM.""" 111 112 self.cs.detach_volume(self.volume_id) 113 114 115 def _GetBestOfferingId(self, disk_size): 116 """ Given a disk_size (in GB), try to find a disk 117 offering that is atleast as big as the requested 118 one. 
119 """ 120 121 disk_offerings = self.cs.list_disk_offerings() 122 sorted_do = sorted(disk_offerings, key=lambda x: x['disksize']) 123 124 for do in sorted_do: 125 if int(do['disksize']) >= disk_size: 126 return do['id'] 127 128 return None 129 [end of perfkitbenchmarker/cloudstack/cloudstack_disk.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/perfkitbenchmarker/cloudstack/cloudstack_disk.py b/perfkitbenchmarker/cloudstack/cloudstack_disk.py --- a/perfkitbenchmarker/cloudstack/cloudstack_disk.py +++ b/perfkitbenchmarker/cloudstack/cloudstack_disk.py @@ -50,7 +50,7 @@ self.disk_offering_id = self._GetBestOfferingId(self.disk_size) assert self.disk_offering_id, "Unable get disk offering of given size" - if self.disk_spec.disk_type: + if disk_spec.disk_type: logging.warn("Cloudstack does not support disk types") @@ -73,7 +73,7 @@ def _Delete(self): """Deletes the disk.""" - vol = self.cs.get_volume(self.name) + vol = self.cs.get_volume(self.name, self.project_id) if vol: self.cs.delete_volume(self.volume_id)
{"golden_diff": "diff --git a/perfkitbenchmarker/cloudstack/cloudstack_disk.py b/perfkitbenchmarker/cloudstack/cloudstack_disk.py\n--- a/perfkitbenchmarker/cloudstack/cloudstack_disk.py\n+++ b/perfkitbenchmarker/cloudstack/cloudstack_disk.py\n@@ -50,7 +50,7 @@\n self.disk_offering_id = self._GetBestOfferingId(self.disk_size)\n assert self.disk_offering_id, \"Unable get disk offering of given size\"\n \n- if self.disk_spec.disk_type:\n+ if disk_spec.disk_type:\n logging.warn(\"Cloudstack does not support disk types\")\n \n \n@@ -73,7 +73,7 @@\n \n def _Delete(self):\n \"\"\"Deletes the disk.\"\"\"\n- vol = self.cs.get_volume(self.name)\n+ vol = self.cs.get_volume(self.name, self.project_id)\n if vol:\n self.cs.delete_volume(self.volume_id)\n", "issue": "Project id not respected when deleting disk in Cloudstack\nI forgot to pass project_id as an agrugment when deleting volume. \n\n", "before_files": [{"content": "# Copyright 2014 Google Inc. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Module containing classes related to CloudStack disks.\n\nDisks can be created, deleted, attached to VMs, and detached from VMs.\n\"\"\"\n\nfrom perfkitbenchmarker import disk\nfrom perfkitbenchmarker import vm_util\nfrom perfkitbenchmarker import flags\nfrom perfkitbenchmarker.cloudstack import util\n\nimport string\nimport logging\n\nFLAGS = flags.FLAGS\n\n\nclass CloudStackDisk(disk.BaseDisk):\n \"\"\"Object representing a Cloudstack Disk.\"\"\"\n\n\n def __init__(self, disk_spec, name, zone_id, project_id=None):\n super(CloudStackDisk, self).__init__(disk_spec)\n\n self.cs = util.CsClient(\n FLAGS.CS_API_URL,\n FLAGS.CS_API_KEY,\n FLAGS.CS_API_SECRET\n )\n\n self.attached_vm_name = None\n self.attached_vm_id = None\n self.name = name\n\n self.zone_id = zone_id\n self.project_id = project_id\n\n self.disk_offering_id = self._GetBestOfferingId(self.disk_size)\n assert self.disk_offering_id, \"Unable get disk offering of given size\"\n\n if self.disk_spec.disk_type:\n logging.warn(\"Cloudstack does not support disk types\")\n\n\n @vm_util.Retry(max_retries=3)\n def _Create(self):\n \"\"\"Creates the disk.\"\"\"\n\n\n volume = self.cs.create_volume(self.name,\n self.disk_offering_id,\n self.zone_id,\n self.project_id)\n\n assert volume, \"Unable to create volume\"\n\n self.volume_id = volume['id']\n self.disk_type = volume['type']\n self.actual_disk_size = int(volume['size']) / (2 ** 30) # In GB\n\n\n def _Delete(self):\n \"\"\"Deletes the disk.\"\"\"\n vol = self.cs.get_volume(self.name)\n if vol:\n self.cs.delete_volume(self.volume_id)\n\n\n def _Exists(self):\n \"\"\"Returns true if the disk exists.\"\"\"\n vol = self.cs.get_volume(self.name, self.project_id)\n if vol:\n return True\n return False\n\n\n @vm_util.Retry(max_retries=3)\n def Attach(self, vm):\n \"\"\"Attaches the disk to a VM.\n\n Args:\n vm: The CloudStackVirtualMachine instance to which\n the disk will be attached.\n\n \"\"\"\n\n res = self.cs.attach_volume(self.volume_id, vm.id)\n assert res, \"Unable to 
attach volume\"\n\n self.device_id = res['deviceid']\n\n self.device_path = \"/dev/xvd\" + \\\n str(string.ascii_lowercase[self.device_id])\n\n\n\n def Detach(self):\n \"\"\"Detaches the disk from a VM.\"\"\"\n\n self.cs.detach_volume(self.volume_id)\n\n\n def _GetBestOfferingId(self, disk_size):\n \"\"\" Given a disk_size (in GB), try to find a disk\n offering that is atleast as big as the requested\n one.\n \"\"\"\n\n disk_offerings = self.cs.list_disk_offerings()\n sorted_do = sorted(disk_offerings, key=lambda x: x['disksize'])\n\n for do in sorted_do:\n if int(do['disksize']) >= disk_size:\n return do['id']\n\n return None\n", "path": "perfkitbenchmarker/cloudstack/cloudstack_disk.py"}]}
num_tokens_prompt: 1,697
num_tokens_diff: 199
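
The golden diff for this record is small: the disk-type warning in `__init__` should test the `disk_spec` argument rather than the not-yet-set `self.disk_spec`, and `_Delete` should pass the project id into `get_volume`, the same way `_Exists` already does, so volumes created inside a CloudStack project are actually found before deletion is attempted. A rough sketch of how `_Delete` reads once the patch is applied, using only names that appear in the snippet above:

```python
# Sketch of CloudStackDisk._Delete after the fix; self.cs, self.name,
# self.project_id and self.volume_id are the attributes set up in __init__
# in the record's code snippet.
def _Delete(self):
    """Deletes the disk."""
    # Scope the lookup to the project so project-owned volumes are found,
    # mirroring the existing _Exists check.
    vol = self.cs.get_volume(self.name, self.project_id)
    if vol:
        self.cs.delete_volume(self.volume_id)
```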

problem_id: gh_patches_debug_21325
source: rasdani/github-patches
task_type: git_diff
in_source_id: akvo__akvo-rsr-3020
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Incorrect mandatory validation for qualitative indicators The indicator measure for qualitative indicators is hidden, but the code checking for completion of sections doesn't ignore that field, and considers it mandatory. </issue> <code> [start of akvo/iati/checks/fields/results.py] 1 # -*- coding: utf-8 -*- 2 3 # Akvo RSR is covered by the GNU Affero General Public License. 4 # See more details in the license.txt file located at the root folder of the Akvo RSR module. 5 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >. 6 7 8 def results(project): 9 """ 10 Check if result has a type, title and at least one indicator. 11 Check if indicator has a measure and title. 12 Check if indicator baseline has year and value. 13 Check if indicator period has a start and end date, and start before end. 14 15 :param project: Project object 16 :return: All checks passed boolean, [Check results] 17 """ 18 checks = [] 19 all_checks_passed = True 20 21 for result in project.results.all(): 22 if not result.type: 23 all_checks_passed = False 24 checks.append((u'error', u'result (id: %s) has no type specified' % str(result.pk))) 25 26 if not result.title: 27 all_checks_passed = False 28 checks.append((u'error', u'result (id: %s) has no title specified' % str(result.pk))) 29 30 if not result.indicators.all(): 31 all_checks_passed = False 32 checks.append((u'error', u'result (id: %s) has no indicator(s)' % str(result.pk))) 33 34 for indicator in result.indicators.all(): 35 if not indicator.measure: 36 all_checks_passed = False 37 checks.append((u'error', u'indicator (id: %s) has no measure specified' % 38 str(indicator.pk))) 39 40 if not indicator.title: 41 all_checks_passed = False 42 checks.append((u'error', u'indicator (id: %s) has no title specified' % 43 str(indicator.pk))) 44 45 if (indicator.baseline_value and not indicator.baseline_year) or \ 46 (not indicator.baseline_value and indicator.baseline_year): 47 all_checks_passed = False 48 checks.append((u'error', u'indicator (id: %s) baseline has no value or year ' 49 u'specified' % str(indicator.pk))) 50 51 for reference in indicator.references.all(): 52 if not reference.reference: 53 all_checks_passed = False 54 checks.append((u'error', u'indicator reference (id: %s) has no code ' 55 u'specified' % str(reference.pk))) 56 57 if not reference.vocabulary: 58 all_checks_passed = False 59 checks.append((u'error', u'indicator reference (id: %s) has no vocabulary ' 60 u'specified' % str(reference.pk))) 61 62 if reference.vocabulary == '99' and not reference.vocabulary_uri: 63 all_checks_passed = False 64 checks.append((u'error', u'indicator reference (id: %s) has vocabulary 99 ' 65 u'(reporting organisation) but no vocabulary URI ' 66 u'specified' % str(reference.pk))) 67 68 for period in indicator.periods.all(): 69 if not period.period_start: 70 all_checks_passed = False 71 checks.append((u'error', u'indicator period (id: %s) has no start date ' 72 u'specified' % str(period.pk))) 73 74 if not period.period_end: 75 all_checks_passed = False 76 checks.append((u'error', u'indicator period (id: %s) has no end date ' 77 u'specified' % str(period.pk))) 78 79 if period.period_start and period.period_end and \ 80 period.period_start > period.period_end: 81 all_checks_passed = False 82 checks.append((u'error', u'indicator period (id: %s) has a start date ' 83 u'later than the end date' % str(period.pk))) 84 85 if 
not period.target_value and (period.target_comment or 86 period.target_locations.all() or 87 period.target_dimensions.all()): 88 all_checks_passed = False 89 checks.append((u'error', u'indicator period (id: %s) has no target value, but ' 90 u'does have a target comment, target location(s) or ' 91 u'target dimension(s)' % str(period.pk))) 92 93 if not period.actual_value and (period.actual_comment or 94 period.actual_locations.all() or 95 period.actual_dimensions.all()): 96 all_checks_passed = False 97 checks.append((u'error', u'indicator period (id: %s) has no actual value, but ' 98 u'does have an actual comment, actual location(s) or ' 99 u'actual dimension(s)' % str(period.pk))) 100 101 if project.results.all() and all_checks_passed: 102 checks.append((u'success', u'has valid result(s)')) 103 104 return all_checks_passed, checks 105 [end of akvo/iati/checks/fields/results.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/akvo/iati/checks/fields/results.py b/akvo/iati/checks/fields/results.py --- a/akvo/iati/checks/fields/results.py +++ b/akvo/iati/checks/fields/results.py @@ -4,6 +4,8 @@ # See more details in the license.txt file located at the root folder of the Akvo RSR module. # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >. +from akvo.rsr.models.result.utils import QUALITATIVE + def results(project): """ @@ -32,7 +34,7 @@ checks.append((u'error', u'result (id: %s) has no indicator(s)' % str(result.pk))) for indicator in result.indicators.all(): - if not indicator.measure: + if indicator.type != QUALITATIVE and not indicator.measure: all_checks_passed = False checks.append((u'error', u'indicator (id: %s) has no measure specified' % str(indicator.pk)))
{"golden_diff": "diff --git a/akvo/iati/checks/fields/results.py b/akvo/iati/checks/fields/results.py\n--- a/akvo/iati/checks/fields/results.py\n+++ b/akvo/iati/checks/fields/results.py\n@@ -4,6 +4,8 @@\n # See more details in the license.txt file located at the root folder of the Akvo RSR module.\n # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n \n+from akvo.rsr.models.result.utils import QUALITATIVE\n+\n \n def results(project):\n \"\"\"\n@@ -32,7 +34,7 @@\n checks.append((u'error', u'result (id: %s) has no indicator(s)' % str(result.pk)))\n \n for indicator in result.indicators.all():\n- if not indicator.measure:\n+ if indicator.type != QUALITATIVE and not indicator.measure:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) has no measure specified' %\n str(indicator.pk)))\n", "issue": "Incorrect mandatory validation for qualitative indicators\nThe indicator measure for qualitative indicators is hidden, but the code checking for completion of sections doesn't ignore that field, and considers it mandatory. \n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\n\ndef results(project):\n \"\"\"\n Check if result has a type, title and at least one indicator.\n Check if indicator has a measure and title.\n Check if indicator baseline has year and value.\n Check if indicator period has a start and end date, and start before end.\n\n :param project: Project object\n :return: All checks passed boolean, [Check results]\n \"\"\"\n checks = []\n all_checks_passed = True\n\n for result in project.results.all():\n if not result.type:\n all_checks_passed = False\n checks.append((u'error', u'result (id: %s) has no type specified' % str(result.pk)))\n\n if not result.title:\n all_checks_passed = False\n checks.append((u'error', u'result (id: %s) has no title specified' % str(result.pk)))\n\n if not result.indicators.all():\n all_checks_passed = False\n checks.append((u'error', u'result (id: %s) has no indicator(s)' % str(result.pk)))\n\n for indicator in result.indicators.all():\n if not indicator.measure:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) has no measure specified' %\n str(indicator.pk)))\n\n if not indicator.title:\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) has no title specified' %\n str(indicator.pk)))\n\n if (indicator.baseline_value and not indicator.baseline_year) or \\\n (not indicator.baseline_value and indicator.baseline_year):\n all_checks_passed = False\n checks.append((u'error', u'indicator (id: %s) baseline has no value or year '\n u'specified' % str(indicator.pk)))\n\n for reference in indicator.references.all():\n if not reference.reference:\n all_checks_passed = False\n checks.append((u'error', u'indicator reference (id: %s) has no code '\n u'specified' % str(reference.pk)))\n\n if not reference.vocabulary:\n all_checks_passed = False\n checks.append((u'error', u'indicator reference (id: %s) has no vocabulary '\n u'specified' % str(reference.pk)))\n\n if reference.vocabulary == '99' and not reference.vocabulary_uri:\n all_checks_passed = False\n checks.append((u'error', u'indicator reference (id: %s) has vocabulary 99 '\n u'(reporting organisation) but no vocabulary URI '\n 
u'specified' % str(reference.pk)))\n\n for period in indicator.periods.all():\n if not period.period_start:\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no start date '\n u'specified' % str(period.pk)))\n\n if not period.period_end:\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no end date '\n u'specified' % str(period.pk)))\n\n if period.period_start and period.period_end and \\\n period.period_start > period.period_end:\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has a start date '\n u'later than the end date' % str(period.pk)))\n\n if not period.target_value and (period.target_comment or\n period.target_locations.all() or\n period.target_dimensions.all()):\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no target value, but '\n u'does have a target comment, target location(s) or '\n u'target dimension(s)' % str(period.pk)))\n\n if not period.actual_value and (period.actual_comment or\n period.actual_locations.all() or\n period.actual_dimensions.all()):\n all_checks_passed = False\n checks.append((u'error', u'indicator period (id: %s) has no actual value, but '\n u'does have an actual comment, actual location(s) or '\n u'actual dimension(s)' % str(period.pk)))\n\n if project.results.all() and all_checks_passed:\n checks.append((u'success', u'has valid result(s)'))\n\n return all_checks_passed, checks\n", "path": "akvo/iati/checks/fields/results.py"}]}
num_tokens_prompt: 1,808
num_tokens_diff: 241
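
Here the fix is to stop treating `measure` as mandatory for qualitative indicators, whose measure field is hidden in the form. A sketch of the indicator loop after the golden diff is applied; `QUALITATIVE` is the constant the diff imports from `akvo.rsr.models.result.utils`, and the remaining names come from `results.py` in the record above:

```python
from akvo.rsr.models.result.utils import QUALITATIVE

for indicator in result.indicators.all():
    # A measure is only required for quantitative indicators; qualitative
    # indicators hide the measure field, so it must not be flagged as missing.
    if indicator.type != QUALITATIVE and not indicator.measure:
        all_checks_passed = False
        checks.append((u'error', u'indicator (id: %s) has no measure specified' %
                       str(indicator.pk)))
```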