problem_id
stringlengths
18
22
source
stringclasses
1 value
task_type
stringclasses
1 value
in_source_id
stringlengths
13
58
prompt
stringlengths
1.71k
9.01k
golden_diff
stringlengths
151
4.94k
verification_info
stringlengths
465
11.3k
num_tokens_prompt
int64
557
2.05k
num_tokens_diff
int64
48
1.02k
gh_patches_debug_28895
rasdani/github-patches
git_diff
translate__pootle-3780
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Legal pages don't remember your acceptance of the agreemenet This is likely only a problem on the [LibreOffice dev server](https://translations-test.documentfoundation.org/projects/) but tracking it here so we don't forget. </issue> <code> [start of pootle/apps/staticpages/managers.py] 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 # 4 # Copyright (C) Pootle contributors. 5 # 6 # This file is a part of the Pootle project. It is distributed under the GPL3 7 # or later license. See the LICENSE file for a copy of the license and the 8 # AUTHORS file for copyright and authorship information. 9 10 from django.db.models import F, Manager 11 12 13 class PageManager(Manager): 14 15 def live(self, user=None, **kwargs): 16 """Filters active (live) pages. 17 18 :param user: Current active user. If omitted or the user doesn't 19 have administration privileges, only active pages will be 20 returned. 21 """ 22 if user is not None and user.is_superuser: 23 return self.get_queryset() 24 25 return self.get_queryset().filter(active=True) 26 27 def pending_user_agreement(self, user, **kwargs): 28 """Filters active pages where the given `user` has pending 29 agreements. 30 """ 31 # FIXME: This should be a method exclusive to a LegalPage manager 32 return self.live().exclude( 33 agreement__user=user, 34 modified_on__lt=F('agreement__agreed_on'), 35 ).distinct() 36 [end of pootle/apps/staticpages/managers.py] [start of pootle/apps/pootle_misc/context_processors.py] 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 # 4 # Copyright (C) Pootle contributors. 5 # 6 # This file is a part of the Pootle project. It is distributed under the GPL3 7 # or later license. See the LICENSE file for a copy of the license and the 8 # AUTHORS file for copyright and authorship information. 9 10 from django.conf import settings 11 from django.utils import translation 12 13 from pootle import __version__ 14 from pootle_language.models import Language 15 from pootle_project.models import Project 16 from staticpages.models import LegalPage 17 18 19 def _agreement_context(request): 20 """Returns whether the agreement box should be displayed or not.""" 21 request_path = request.META['PATH_INFO'] 22 nocheck = filter(lambda x: request_path.startswith(x), 23 settings.LEGALPAGE_NOCHECK_PREFIXES) 24 25 if (request.user.is_authenticated() and not nocheck and 26 LegalPage.objects.pending_user_agreement(request.user).exists()): 27 return True 28 29 return False 30 31 32 def pootle_context(request): 33 """Exposes settings to templates.""" 34 #FIXME: maybe we should expose relevant settings only? 35 return { 36 'settings': { 37 'TITLE': settings.TITLE, 38 'POOTLE_INSTANCE_ID': settings.POOTLE_INSTANCE_ID, 39 'CAN_CONTACT': settings.CAN_CONTACT and settings.CONTACT_EMAIL, 40 'SCRIPT_NAME': settings.SCRIPT_NAME, 41 'POOTLE_VERSION': __version__, 42 'CACHE_TIMEOUT': settings.CACHE_MIDDLEWARE_SECONDS, 43 'POOTLE_CACHE_TIMEOUT': settings.POOTLE_CACHE_TIMEOUT, 44 'DEBUG': settings.DEBUG, 45 }, 46 'custom': settings.CUSTOM_TEMPLATE_CONTEXT, 47 'ALL_LANGUAGES': Language.live.cached_dict(translation.get_language()), 48 'ALL_PROJECTS': Project.objects.cached_dict(request.user), 49 'display_agreement': _agreement_context(request), 50 } 51 [end of pootle/apps/pootle_misc/context_processors.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pootle/apps/pootle_misc/context_processors.py b/pootle/apps/pootle_misc/context_processors.py --- a/pootle/apps/pootle_misc/context_processors.py +++ b/pootle/apps/pootle_misc/context_processors.py @@ -23,7 +23,7 @@ settings.LEGALPAGE_NOCHECK_PREFIXES) if (request.user.is_authenticated() and not nocheck and - LegalPage.objects.pending_user_agreement(request.user).exists()): + LegalPage.objects.has_pending_agreement(request.user)): return True return False diff --git a/pootle/apps/staticpages/managers.py b/pootle/apps/staticpages/managers.py --- a/pootle/apps/staticpages/managers.py +++ b/pootle/apps/staticpages/managers.py @@ -24,12 +24,24 @@ return self.get_queryset().filter(active=True) + def has_pending_agreement(self, user): + agreements = self.pending_user_agreement(user) + return len(list(agreements)) > 0 + def pending_user_agreement(self, user, **kwargs): """Filters active pages where the given `user` has pending agreements. """ # FIXME: This should be a method exclusive to a LegalPage manager - return self.live().exclude( - agreement__user=user, - modified_on__lt=F('agreement__agreed_on'), - ).distinct() + return self.raw(''' + SELECT DISTINCT staticpages_legalpage.id + FROM staticpages_legalpage + WHERE (staticpages_legalpage.active = TRUE + AND NOT (staticpages_legalpage.id IN + (SELECT A.document_id + FROM staticpages_legalpage AS LP + INNER JOIN staticpages_agreement AS A + ON LP.id = A.document_id + WHERE A.user_id = %s AND + A.agreed_on > LP.modified_on))) + ''', [user.id])
{"golden_diff": "diff --git a/pootle/apps/pootle_misc/context_processors.py b/pootle/apps/pootle_misc/context_processors.py\n--- a/pootle/apps/pootle_misc/context_processors.py\n+++ b/pootle/apps/pootle_misc/context_processors.py\n@@ -23,7 +23,7 @@\n settings.LEGALPAGE_NOCHECK_PREFIXES)\n \n if (request.user.is_authenticated() and not nocheck and\n- LegalPage.objects.pending_user_agreement(request.user).exists()):\n+ LegalPage.objects.has_pending_agreement(request.user)):\n return True\n \n return False\ndiff --git a/pootle/apps/staticpages/managers.py b/pootle/apps/staticpages/managers.py\n--- a/pootle/apps/staticpages/managers.py\n+++ b/pootle/apps/staticpages/managers.py\n@@ -24,12 +24,24 @@\n \n return self.get_queryset().filter(active=True)\n \n+ def has_pending_agreement(self, user):\n+ agreements = self.pending_user_agreement(user)\n+ return len(list(agreements)) > 0\n+\n def pending_user_agreement(self, user, **kwargs):\n \"\"\"Filters active pages where the given `user` has pending\n agreements.\n \"\"\"\n # FIXME: This should be a method exclusive to a LegalPage manager\n- return self.live().exclude(\n- agreement__user=user,\n- modified_on__lt=F('agreement__agreed_on'),\n- ).distinct()\n+ return self.raw('''\n+ SELECT DISTINCT staticpages_legalpage.id\n+ FROM staticpages_legalpage\n+ WHERE (staticpages_legalpage.active = TRUE\n+ AND NOT (staticpages_legalpage.id IN\n+ (SELECT A.document_id\n+ FROM staticpages_legalpage AS LP\n+ INNER JOIN staticpages_agreement AS A\n+ ON LP.id = A.document_id\n+ WHERE A.user_id = %s AND\n+ A.agreed_on > LP.modified_on)))\n+ ''', [user.id])\n", "issue": "Legal pages don't remember your acceptance of the agreemenet\nThis is likely only a problem on the [LibreOffice dev server](https://translations-test.documentfoundation.org/projects/) but tracking it here so we don't forget.\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.db.models import F, Manager\n\n\nclass PageManager(Manager):\n\n def live(self, user=None, **kwargs):\n \"\"\"Filters active (live) pages.\n\n :param user: Current active user. If omitted or the user doesn't\n have administration privileges, only active pages will be\n returned.\n \"\"\"\n if user is not None and user.is_superuser:\n return self.get_queryset()\n\n return self.get_queryset().filter(active=True)\n\n def pending_user_agreement(self, user, **kwargs):\n \"\"\"Filters active pages where the given `user` has pending\n agreements.\n \"\"\"\n # FIXME: This should be a method exclusive to a LegalPage manager\n return self.live().exclude(\n agreement__user=user,\n modified_on__lt=F('agreement__agreed_on'),\n ).distinct()\n", "path": "pootle/apps/staticpages/managers.py"}, {"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.conf import settings\nfrom django.utils import translation\n\nfrom pootle import __version__\nfrom pootle_language.models import Language\nfrom pootle_project.models import Project\nfrom staticpages.models import LegalPage\n\n\ndef _agreement_context(request):\n \"\"\"Returns whether the agreement box should be displayed or not.\"\"\"\n request_path = request.META['PATH_INFO']\n nocheck = filter(lambda x: request_path.startswith(x),\n settings.LEGALPAGE_NOCHECK_PREFIXES)\n\n if (request.user.is_authenticated() and not nocheck and\n LegalPage.objects.pending_user_agreement(request.user).exists()):\n return True\n\n return False\n\n\ndef pootle_context(request):\n \"\"\"Exposes settings to templates.\"\"\"\n #FIXME: maybe we should expose relevant settings only?\n return {\n 'settings': {\n 'TITLE': settings.TITLE,\n 'POOTLE_INSTANCE_ID': settings.POOTLE_INSTANCE_ID,\n 'CAN_CONTACT': settings.CAN_CONTACT and settings.CONTACT_EMAIL,\n 'SCRIPT_NAME': settings.SCRIPT_NAME,\n 'POOTLE_VERSION': __version__,\n 'CACHE_TIMEOUT': settings.CACHE_MIDDLEWARE_SECONDS,\n 'POOTLE_CACHE_TIMEOUT': settings.POOTLE_CACHE_TIMEOUT,\n 'DEBUG': settings.DEBUG,\n },\n 'custom': settings.CUSTOM_TEMPLATE_CONTEXT,\n 'ALL_LANGUAGES': Language.live.cached_dict(translation.get_language()),\n 'ALL_PROJECTS': Project.objects.cached_dict(request.user),\n 'display_agreement': _agreement_context(request),\n }\n", "path": "pootle/apps/pootle_misc/context_processors.py"}]}
1,438
440
gh_patches_debug_1241
rasdani/github-patches
git_diff
crytic__slither-1108
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [Bug]: Infinite loop in RTLO detector ### What happened? Slither hangs on this code indefinitely ### Can you share code with us to reproduce this bug? https://github.com/ethereum/solidity/blob/develop/test/libsolidity/syntaxTests/comments/multiline_unicode_direction_override_5.sol ### Version 0.8.2 ### Relevant log output _No response_ </issue> <code> [start of slither/detectors/source/rtlo.py] 1 import re 2 from slither.detectors.abstract_detector import AbstractDetector, DetectorClassification 3 4 5 class RightToLeftOverride(AbstractDetector): 6 """ 7 Detect the usage of a Right-To-Left-Override (U+202E) character 8 """ 9 10 ARGUMENT = "rtlo" 11 HELP = "Right-To-Left-Override control character is used" 12 IMPACT = DetectorClassification.HIGH 13 CONFIDENCE = DetectorClassification.HIGH 14 15 WIKI = "https://github.com/crytic/slither/wiki/Detector-Documentation#right-to-left-override-character" 16 WIKI_TITLE = "Right-to-Left-Override character" 17 WIKI_DESCRIPTION = "An attacker can manipulate the logic of the contract by using a right-to-left-override character (`U+202E)`." 18 19 # region wiki_exploit_scenario 20 WIKI_EXPLOIT_SCENARIO = """ 21 ```solidity 22 contract Token 23 { 24 25 address payable o; // owner 26 mapping(address => uint) tokens; 27 28 function withdraw() external returns(uint) 29 { 30 uint amount = tokens[msg.sender]; 31 address payable d = msg.sender; 32 tokens[msg.sender] = 0; 33 _withdraw(/*owner‮/*noitanitsed*/ d, o/*‭ 34 /*value */, amount); 35 } 36 37 function _withdraw(address payable fee_receiver, address payable destination, uint value) internal 38 { 39 fee_receiver.transfer(1); 40 destination.transfer(value); 41 } 42 } 43 ``` 44 45 `Token` uses the right-to-left-override character when calling `_withdraw`. As a result, the fee is incorrectly sent to `msg.sender`, and the token balance is sent to the owner. 46 47 """ 48 # endregion wiki_exploit_scenario 49 50 WIKI_RECOMMENDATION = "Special control characters must not be allowed." 51 52 RTLO_CHARACTER_ENCODED = "\u202e".encode("utf-8") 53 STANDARD_JSON = False 54 55 def _detect(self): 56 results = [] 57 pattern = re.compile(".*\u202e.*".encode("utf-8")) 58 59 for filename, source in self.slither.source_code.items(): 60 # Attempt to find all RTLO characters in this source file. 61 original_source_encoded = source.encode("utf-8") 62 start_index = 0 63 64 # Keep searching all file contents for the character. 65 while True: 66 source_encoded = original_source_encoded[start_index:] 67 result_index = source_encoded.find(self.RTLO_CHARACTER_ENCODED) 68 69 # If we couldn't find the character in the remainder of source, stop. 70 if result_index == -1: 71 break 72 73 # We found another instance of the character, define our output 74 idx = start_index + result_index 75 76 relative = self.slither.crytic_compile.filename_lookup(filename).relative 77 info = f"{relative} contains a unicode right-to-left-override character at byte offset {idx}:\n" 78 79 # We have a patch, so pattern.find will return at least one result 80 81 info += f"\t- {pattern.findall(source_encoded)[0]}\n" 82 res = self.generate_result(info) 83 res.add_other( 84 "rtlo-character", 85 (filename, idx, len(self.RTLO_CHARACTER_ENCODED)), 86 self.compilation_unit, 87 ) 88 results.append(res) 89 90 # Advance the start index for the next iteration 91 start_index = result_index + 1 92 93 return results 94 [end of slither/detectors/source/rtlo.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/slither/detectors/source/rtlo.py b/slither/detectors/source/rtlo.py --- a/slither/detectors/source/rtlo.py +++ b/slither/detectors/source/rtlo.py @@ -88,6 +88,6 @@ results.append(res) # Advance the start index for the next iteration - start_index = result_index + 1 + start_index = idx + 1 return results
{"golden_diff": "diff --git a/slither/detectors/source/rtlo.py b/slither/detectors/source/rtlo.py\n--- a/slither/detectors/source/rtlo.py\n+++ b/slither/detectors/source/rtlo.py\n@@ -88,6 +88,6 @@\n results.append(res)\n \n # Advance the start index for the next iteration\n- start_index = result_index + 1\n+ start_index = idx + 1\n \n return results\n", "issue": "[Bug]: Infinite loop in RTLO detector\n### What happened?\n\nSlither hangs on this code indefinitely\n\n### Can you share code with us to reproduce this bug?\n\nhttps://github.com/ethereum/solidity/blob/develop/test/libsolidity/syntaxTests/comments/multiline_unicode_direction_override_5.sol\n\n### Version\n\n0.8.2\n\n### Relevant log output\n\n_No response_\n", "before_files": [{"content": "import re\nfrom slither.detectors.abstract_detector import AbstractDetector, DetectorClassification\n\n\nclass RightToLeftOverride(AbstractDetector):\n \"\"\"\n Detect the usage of a Right-To-Left-Override (U+202E) character\n \"\"\"\n\n ARGUMENT = \"rtlo\"\n HELP = \"Right-To-Left-Override control character is used\"\n IMPACT = DetectorClassification.HIGH\n CONFIDENCE = DetectorClassification.HIGH\n\n WIKI = \"https://github.com/crytic/slither/wiki/Detector-Documentation#right-to-left-override-character\"\n WIKI_TITLE = \"Right-to-Left-Override character\"\n WIKI_DESCRIPTION = \"An attacker can manipulate the logic of the contract by using a right-to-left-override character (`U+202E)`.\"\n\n # region wiki_exploit_scenario\n WIKI_EXPLOIT_SCENARIO = \"\"\"\n```solidity\ncontract Token\n{\n\n address payable o; // owner\n mapping(address => uint) tokens;\n\n function withdraw() external returns(uint)\n {\n uint amount = tokens[msg.sender];\n address payable d = msg.sender;\n tokens[msg.sender] = 0;\n _withdraw(/*owner\u202e/*noitanitsed*/ d, o/*\u202d\n\t\t /*value */, amount);\n }\n\n function _withdraw(address payable fee_receiver, address payable destination, uint value) internal\n {\n\t\tfee_receiver.transfer(1);\n\t\tdestination.transfer(value);\n }\n}\n```\n\n`Token` uses the right-to-left-override character when calling `_withdraw`. As a result, the fee is incorrectly sent to `msg.sender`, and the token balance is sent to the owner.\n\n\"\"\"\n # endregion wiki_exploit_scenario\n\n WIKI_RECOMMENDATION = \"Special control characters must not be allowed.\"\n\n RTLO_CHARACTER_ENCODED = \"\\u202e\".encode(\"utf-8\")\n STANDARD_JSON = False\n\n def _detect(self):\n results = []\n pattern = re.compile(\".*\\u202e.*\".encode(\"utf-8\"))\n\n for filename, source in self.slither.source_code.items():\n # Attempt to find all RTLO characters in this source file.\n original_source_encoded = source.encode(\"utf-8\")\n start_index = 0\n\n # Keep searching all file contents for the character.\n while True:\n source_encoded = original_source_encoded[start_index:]\n result_index = source_encoded.find(self.RTLO_CHARACTER_ENCODED)\n\n # If we couldn't find the character in the remainder of source, stop.\n if result_index == -1:\n break\n\n # We found another instance of the character, define our output\n idx = start_index + result_index\n\n relative = self.slither.crytic_compile.filename_lookup(filename).relative\n info = f\"{relative} contains a unicode right-to-left-override character at byte offset {idx}:\\n\"\n\n # We have a patch, so pattern.find will return at least one result\n\n info += f\"\\t- {pattern.findall(source_encoded)[0]}\\n\"\n res = self.generate_result(info)\n res.add_other(\n \"rtlo-character\",\n (filename, idx, len(self.RTLO_CHARACTER_ENCODED)),\n self.compilation_unit,\n )\n results.append(res)\n\n # Advance the start index for the next iteration\n start_index = result_index + 1\n\n return results\n", "path": "slither/detectors/source/rtlo.py"}]}
1,552
107
gh_patches_debug_28367
rasdani/github-patches
git_diff
dbt-labs__dbt-core-8909
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [CT-3279] Remove usage of dbt.config in dbt/adapters </issue> <code> [start of core/dbt/adapters/base/plugin.py] 1 from typing import List, Optional, Type 2 3 from dbt.adapters.base import Credentials 4 from dbt.exceptions import CompilationError 5 from dbt.adapters.protocol import AdapterProtocol 6 7 8 def project_name_from_path(include_path: str) -> str: 9 # avoid an import cycle 10 from dbt.config.project import PartialProject 11 12 partial = PartialProject.from_project_root(include_path) 13 if partial.project_name is None: 14 raise CompilationError(f"Invalid project at {include_path}: name not set!") 15 return partial.project_name 16 17 18 class AdapterPlugin: 19 """Defines the basic requirements for a dbt adapter plugin. 20 21 :param include_path: The path to this adapter plugin's root 22 :param dependencies: A list of adapter names that this adapter depends 23 upon. 24 """ 25 26 def __init__( 27 self, 28 adapter: Type[AdapterProtocol], 29 credentials: Type[Credentials], 30 include_path: str, 31 dependencies: Optional[List[str]] = None, 32 ) -> None: 33 34 self.adapter: Type[AdapterProtocol] = adapter 35 self.credentials: Type[Credentials] = credentials 36 self.include_path: str = include_path 37 self.project_name: str = project_name_from_path(include_path) 38 self.dependencies: List[str] 39 if dependencies is None: 40 self.dependencies = [] 41 else: 42 self.dependencies = dependencies 43 [end of core/dbt/adapters/base/plugin.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/core/dbt/adapters/base/plugin.py b/core/dbt/adapters/base/plugin.py --- a/core/dbt/adapters/base/plugin.py +++ b/core/dbt/adapters/base/plugin.py @@ -1,20 +1,10 @@ from typing import List, Optional, Type +from pathlib import Path from dbt.adapters.base import Credentials -from dbt.exceptions import CompilationError from dbt.adapters.protocol import AdapterProtocol -def project_name_from_path(include_path: str) -> str: - # avoid an import cycle - from dbt.config.project import PartialProject - - partial = PartialProject.from_project_root(include_path) - if partial.project_name is None: - raise CompilationError(f"Invalid project at {include_path}: name not set!") - return partial.project_name - - class AdapterPlugin: """Defines the basic requirements for a dbt adapter plugin. @@ -29,12 +19,13 @@ credentials: Type[Credentials], include_path: str, dependencies: Optional[List[str]] = None, + project_name: Optional[str] = None, ) -> None: self.adapter: Type[AdapterProtocol] = adapter self.credentials: Type[Credentials] = credentials self.include_path: str = include_path - self.project_name: str = project_name_from_path(include_path) + self.project_name: str = project_name or f"dbt_{Path(include_path).name}" self.dependencies: List[str] if dependencies is None: self.dependencies = []
{"golden_diff": "diff --git a/core/dbt/adapters/base/plugin.py b/core/dbt/adapters/base/plugin.py\n--- a/core/dbt/adapters/base/plugin.py\n+++ b/core/dbt/adapters/base/plugin.py\n@@ -1,20 +1,10 @@\n from typing import List, Optional, Type\n+from pathlib import Path\n \n from dbt.adapters.base import Credentials\n-from dbt.exceptions import CompilationError\n from dbt.adapters.protocol import AdapterProtocol\n \n \n-def project_name_from_path(include_path: str) -> str:\n- # avoid an import cycle\n- from dbt.config.project import PartialProject\n-\n- partial = PartialProject.from_project_root(include_path)\n- if partial.project_name is None:\n- raise CompilationError(f\"Invalid project at {include_path}: name not set!\")\n- return partial.project_name\n-\n-\n class AdapterPlugin:\n \"\"\"Defines the basic requirements for a dbt adapter plugin.\n \n@@ -29,12 +19,13 @@\n credentials: Type[Credentials],\n include_path: str,\n dependencies: Optional[List[str]] = None,\n+ project_name: Optional[str] = None,\n ) -> None:\n \n self.adapter: Type[AdapterProtocol] = adapter\n self.credentials: Type[Credentials] = credentials\n self.include_path: str = include_path\n- self.project_name: str = project_name_from_path(include_path)\n+ self.project_name: str = project_name or f\"dbt_{Path(include_path).name}\"\n self.dependencies: List[str]\n if dependencies is None:\n self.dependencies = []\n", "issue": "[CT-3279] Remove usage of dbt.config in dbt/adapters\n\n", "before_files": [{"content": "from typing import List, Optional, Type\n\nfrom dbt.adapters.base import Credentials\nfrom dbt.exceptions import CompilationError\nfrom dbt.adapters.protocol import AdapterProtocol\n\n\ndef project_name_from_path(include_path: str) -> str:\n # avoid an import cycle\n from dbt.config.project import PartialProject\n\n partial = PartialProject.from_project_root(include_path)\n if partial.project_name is None:\n raise CompilationError(f\"Invalid project at {include_path}: name not set!\")\n return partial.project_name\n\n\nclass AdapterPlugin:\n \"\"\"Defines the basic requirements for a dbt adapter plugin.\n\n :param include_path: The path to this adapter plugin's root\n :param dependencies: A list of adapter names that this adapter depends\n upon.\n \"\"\"\n\n def __init__(\n self,\n adapter: Type[AdapterProtocol],\n credentials: Type[Credentials],\n include_path: str,\n dependencies: Optional[List[str]] = None,\n ) -> None:\n\n self.adapter: Type[AdapterProtocol] = adapter\n self.credentials: Type[Credentials] = credentials\n self.include_path: str = include_path\n self.project_name: str = project_name_from_path(include_path)\n self.dependencies: List[str]\n if dependencies is None:\n self.dependencies = []\n else:\n self.dependencies = dependencies\n", "path": "core/dbt/adapters/base/plugin.py"}]}
924
339
gh_patches_debug_10793
rasdani/github-patches
git_diff
pyro-ppl__pyro-724
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Deprecate dependency on scipy for pyro-ppl package Once distributions are migrated upstream and we do not have a core dependency on scipy, we should remove this from our requirements in `setup.py` (add it to test dependencies), so that users do not need to install a separate 16MB file when they do a pip install of `pyro-ppl`. </issue> <code> [start of setup.py] 1 from __future__ import absolute_import, division, print_function 2 3 import sys 4 5 from setuptools import find_packages, setup 6 7 # Find pyro version. 8 for line in open('pyro/__init__.py'): 9 if line.startswith('__version__ = '): 10 version = line.strip().split()[2][1:-1] 11 12 # Convert README.md to rst for display at https://pypi.python.org/pypi/pyro-ppl 13 # When releasing on pypi, make sure pandoc is on your system: 14 # $ brew install pandoc # OS X 15 # $ sudo apt-get install pandoc # Ubuntu Linux 16 try: 17 import pypandoc 18 long_description = pypandoc.convert('README.md', 'rst') 19 except (IOError, ImportError, OSError) as e: 20 sys.stderr.write('Failed to convert README.md to rst:\n {}\n'.format(e)) 21 sys.stderr.flush() 22 long_description = open('README.md').read() 23 24 # Remove badges since they will always be obsolete. 25 blacklist = ['Build Status', 'Latest Version', 'travis-ci.org', 'pypi.python.org'] 26 long_description = '\n'.join( 27 [line for line in long_description.split('\n') if not any(patt in line for patt in blacklist)]) 28 29 setup( 30 name='pyro-ppl', 31 version=version, 32 description='A Python library for probabilistic modeling and inference', 33 long_description=long_description, 34 packages=find_packages(exclude=('tests*',)), 35 url='http://pyro.ai', 36 author='Uber AI Labs', 37 author_email='[email protected]', 38 install_requires=[ 39 'numpy>=1.7', 40 'scipy>=0.19.0', 41 'cloudpickle>=0.3.1', 42 'graphviz>=0.8', 43 'networkx>=2.0.0', 44 'observations>=0.1.4', 45 'torch', 46 'six>=1.10.0', 47 ], 48 extras_require={ 49 'notebooks': ['jupyter>=1.0.0'], 50 'visualization': [ 51 'matplotlib>=1.3', 52 'visdom>=0.1.4', 53 'pillow', 54 ], 55 'test': [ 56 'pytest', 57 'pytest-cov', 58 'nbval', 59 # examples/tutorials 60 'matplotlib', 61 'visdom', 62 'torchvision', 63 ], 64 'profile': ['prettytable'], 65 'dev': [ 66 'torchvision', 67 'flake8', 68 'yapf', 69 'isort', 70 'pytest', 71 'pytest-xdist', 72 'nbval', 73 'nbstripout', 74 'pypandoc', 75 'sphinx', 76 'sphinx_rtd_theme', 77 ], 78 }, 79 tests_require=['flake8', 'pytest'], 80 keywords='machine learning statistics probabilistic programming bayesian modeling pytorch', 81 license='MIT License', 82 classifiers=[ 83 'Intended Audience :: Developers', 84 'Intended Audience :: Education', 85 'Intended Audience :: Science/Research', 86 'Operating System :: POSIX :: Linux', 87 'Operating System :: MacOS :: MacOS X', 88 'Programming Language :: Python :: 2.7', 89 'Programming Language :: Python :: 3.4', 90 ], 91 # yapf 92 ) 93 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -37,7 +37,6 @@ author_email='[email protected]', install_requires=[ 'numpy>=1.7', - 'scipy>=0.19.0', 'cloudpickle>=0.3.1', 'graphviz>=0.8', 'networkx>=2.0.0', @@ -56,6 +55,7 @@ 'pytest', 'pytest-cov', 'nbval', + 'scipy>=0.19.0', # examples/tutorials 'matplotlib', 'visdom',
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -37,7 +37,6 @@\n author_email='[email protected]',\n install_requires=[\n 'numpy>=1.7',\n- 'scipy>=0.19.0',\n 'cloudpickle>=0.3.1',\n 'graphviz>=0.8',\n 'networkx>=2.0.0',\n@@ -56,6 +55,7 @@\n 'pytest',\n 'pytest-cov',\n 'nbval',\n+ 'scipy>=0.19.0',\n # examples/tutorials\n 'matplotlib',\n 'visdom',\n", "issue": "Deprecate dependency on scipy for pyro-ppl package\nOnce distributions are migrated upstream and we do not have a core dependency on scipy, we should remove this from our requirements in `setup.py` (add it to test dependencies), so that users do not need to install a separate 16MB file when they do a pip install of `pyro-ppl`.\n", "before_files": [{"content": "from __future__ import absolute_import, division, print_function\n\nimport sys\n\nfrom setuptools import find_packages, setup\n\n# Find pyro version.\nfor line in open('pyro/__init__.py'):\n if line.startswith('__version__ = '):\n version = line.strip().split()[2][1:-1]\n\n# Convert README.md to rst for display at https://pypi.python.org/pypi/pyro-ppl\n# When releasing on pypi, make sure pandoc is on your system:\n# $ brew install pandoc # OS X\n# $ sudo apt-get install pandoc # Ubuntu Linux\ntry:\n import pypandoc\n long_description = pypandoc.convert('README.md', 'rst')\nexcept (IOError, ImportError, OSError) as e:\n sys.stderr.write('Failed to convert README.md to rst:\\n {}\\n'.format(e))\n sys.stderr.flush()\n long_description = open('README.md').read()\n\n# Remove badges since they will always be obsolete.\nblacklist = ['Build Status', 'Latest Version', 'travis-ci.org', 'pypi.python.org']\nlong_description = '\\n'.join(\n [line for line in long_description.split('\\n') if not any(patt in line for patt in blacklist)])\n\nsetup(\n name='pyro-ppl',\n version=version,\n description='A Python library for probabilistic modeling and inference',\n long_description=long_description,\n packages=find_packages(exclude=('tests*',)),\n url='http://pyro.ai',\n author='Uber AI Labs',\n author_email='[email protected]',\n install_requires=[\n 'numpy>=1.7',\n 'scipy>=0.19.0',\n 'cloudpickle>=0.3.1',\n 'graphviz>=0.8',\n 'networkx>=2.0.0',\n 'observations>=0.1.4',\n 'torch',\n 'six>=1.10.0',\n ],\n extras_require={\n 'notebooks': ['jupyter>=1.0.0'],\n 'visualization': [\n 'matplotlib>=1.3',\n 'visdom>=0.1.4',\n 'pillow',\n ],\n 'test': [\n 'pytest',\n 'pytest-cov',\n 'nbval',\n # examples/tutorials\n 'matplotlib',\n 'visdom',\n 'torchvision',\n ],\n 'profile': ['prettytable'],\n 'dev': [\n 'torchvision',\n 'flake8',\n 'yapf',\n 'isort',\n 'pytest',\n 'pytest-xdist',\n 'nbval',\n 'nbstripout',\n 'pypandoc',\n 'sphinx',\n 'sphinx_rtd_theme',\n ],\n },\n tests_require=['flake8', 'pytest'],\n keywords='machine learning statistics probabilistic programming bayesian modeling pytorch',\n license='MIT License',\n classifiers=[\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'Operating System :: POSIX :: Linux',\n 'Operating System :: MacOS :: MacOS X',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.4',\n ],\n # yapf\n)\n", "path": "setup.py"}]}
1,490
154
gh_patches_debug_17814
rasdani/github-patches
git_diff
Qiskit__qiskit-7328
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Parameters needs examples in the documentation ### What is the expected enhancement? Many users land in https://qiskit.org/documentation/stubs/qiskit.circuit.Parameter.html when search on how to parametrize circuits. @Cryoris did a good jobs at explaining them in https://quantumcomputing.stackexchange.com/questions/13558/how-to-add-several-parameters-in-qiskit-circuit . I think the Parameter docstring needs a short example. </issue> <code> [start of qiskit/circuit/parameter.py] 1 # This code is part of Qiskit. 2 # 3 # (C) Copyright IBM 2017, 2019. 4 # 5 # This code is licensed under the Apache License, Version 2.0. You may 6 # obtain a copy of this license in the LICENSE.txt file in the root directory 7 # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. 8 # 9 # Any modifications or derivative works of this code must retain this 10 # copyright notice, and modified files need to carry a notice indicating 11 # that they have been altered from the originals. 12 """ 13 Parameter Class for variable parameters. 14 """ 15 16 from uuid import uuid4 17 18 from .parameterexpression import ParameterExpression 19 20 try: 21 import symengine 22 23 HAS_SYMENGINE = True 24 except ImportError: 25 HAS_SYMENGINE = False 26 27 28 class Parameter(ParameterExpression): 29 """Parameter Class for variable parameters.""" 30 31 def __new__(cls, name, uuid=None): # pylint: disable=unused-argument 32 # Parameter relies on self._uuid being set prior to other attributes 33 # (e.g. symbol_map) which may depend on self._uuid for Parameter's hash 34 # or __eq__ functions. 35 obj = object.__new__(cls) 36 37 if uuid is None: 38 obj._uuid = uuid4() 39 else: 40 obj._uuid = uuid 41 42 obj._hash = hash(obj._uuid) 43 return obj 44 45 def __getnewargs__(self): 46 # Unpickling won't in general call __init__ but will always call 47 # __new__. Specify arguments to be passed to __new__ when unpickling. 48 49 return (self.name, self._uuid) 50 51 def __init__(self, name: str): 52 """Create a new named :class:`Parameter`. 53 54 Args: 55 name: name of the ``Parameter``, used for visual representation. This can 56 be any unicode string, e.g. "ϕ". 57 """ 58 self._name = name 59 if not HAS_SYMENGINE: 60 from sympy import Symbol 61 62 symbol = Symbol(name) 63 else: 64 symbol = symengine.Symbol(name) 65 super().__init__(symbol_map={self: symbol}, expr=symbol) 66 67 def subs(self, parameter_map: dict): 68 """Substitute self with the corresponding parameter in ``parameter_map``.""" 69 return parameter_map[self] 70 71 @property 72 def name(self): 73 """Returns the name of the :class:`Parameter`.""" 74 return self._name 75 76 def __str__(self): 77 return self.name 78 79 def __copy__(self): 80 return self 81 82 def __deepcopy__(self, memo=None): 83 return self 84 85 def __repr__(self): 86 return f"{self.__class__.__name__}({self.name})" 87 88 def __eq__(self, other): 89 if isinstance(other, Parameter): 90 return self._uuid == other._uuid 91 elif isinstance(other, ParameterExpression): 92 return super().__eq__(other) 93 else: 94 return False 95 96 def __hash__(self): 97 return self._hash 98 99 def __getstate__(self): 100 return {"name": self._name} 101 102 def __setstate__(self, state): 103 self._name = state["name"] 104 if not HAS_SYMENGINE: 105 from sympy import Symbol 106 107 symbol = Symbol(self._name) 108 else: 109 symbol = symengine.Symbol(self._name) 110 super().__init__(symbol_map={self: symbol}, expr=symbol) 111 [end of qiskit/circuit/parameter.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/qiskit/circuit/parameter.py b/qiskit/circuit/parameter.py --- a/qiskit/circuit/parameter.py +++ b/qiskit/circuit/parameter.py @@ -26,7 +26,32 @@ class Parameter(ParameterExpression): - """Parameter Class for variable parameters.""" + """Parameter Class for variable parameters. + + A parameter is a variable value that is not required to be fixed + at circuit definition. + + Examples: + + Construct a variable-rotation X gate using circuit parameters. + + .. jupyter-execute:: + + from qiskit.circuit import QuantumCircuit, Parameter + + # create the parameter + phi = Parameter('phi') + qc = QuantumCircuit(1) + + # parameterize the rotation + qc.rx(phi, 0) + qc.draw() + + # bind the parameters after circuit to create a bound circuit + bc = qc.bind_parameters({phi: 3.14}) + bc.measure_all() + bc.draw() + """ def __new__(cls, name, uuid=None): # pylint: disable=unused-argument # Parameter relies on self._uuid being set prior to other attributes
{"golden_diff": "diff --git a/qiskit/circuit/parameter.py b/qiskit/circuit/parameter.py\n--- a/qiskit/circuit/parameter.py\n+++ b/qiskit/circuit/parameter.py\n@@ -26,7 +26,32 @@\n \n \n class Parameter(ParameterExpression):\n- \"\"\"Parameter Class for variable parameters.\"\"\"\n+ \"\"\"Parameter Class for variable parameters.\n+\n+ A parameter is a variable value that is not required to be fixed\n+ at circuit definition.\n+\n+ Examples:\n+\n+ Construct a variable-rotation X gate using circuit parameters.\n+\n+ .. jupyter-execute::\n+\n+ from qiskit.circuit import QuantumCircuit, Parameter\n+\n+ # create the parameter\n+ phi = Parameter('phi')\n+ qc = QuantumCircuit(1)\n+\n+ # parameterize the rotation\n+ qc.rx(phi, 0)\n+ qc.draw()\n+\n+ # bind the parameters after circuit to create a bound circuit\n+ bc = qc.bind_parameters({phi: 3.14})\n+ bc.measure_all()\n+ bc.draw()\n+ \"\"\"\n \n def __new__(cls, name, uuid=None): # pylint: disable=unused-argument\n # Parameter relies on self._uuid being set prior to other attributes\n", "issue": "Parameters needs examples in the documentation\n### What is the expected enhancement?\n\nMany users land in https://qiskit.org/documentation/stubs/qiskit.circuit.Parameter.html when search on how to parametrize circuits. @Cryoris did a good jobs at explaining them in https://quantumcomputing.stackexchange.com/questions/13558/how-to-add-several-parameters-in-qiskit-circuit . I think the Parameter docstring needs a short example.\n", "before_files": [{"content": "# This code is part of Qiskit.\n#\n# (C) Copyright IBM 2017, 2019.\n#\n# This code is licensed under the Apache License, Version 2.0. You may\n# obtain a copy of this license in the LICENSE.txt file in the root directory\n# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.\n#\n# Any modifications or derivative works of this code must retain this\n# copyright notice, and modified files need to carry a notice indicating\n# that they have been altered from the originals.\n\"\"\"\nParameter Class for variable parameters.\n\"\"\"\n\nfrom uuid import uuid4\n\nfrom .parameterexpression import ParameterExpression\n\ntry:\n import symengine\n\n HAS_SYMENGINE = True\nexcept ImportError:\n HAS_SYMENGINE = False\n\n\nclass Parameter(ParameterExpression):\n \"\"\"Parameter Class for variable parameters.\"\"\"\n\n def __new__(cls, name, uuid=None): # pylint: disable=unused-argument\n # Parameter relies on self._uuid being set prior to other attributes\n # (e.g. symbol_map) which may depend on self._uuid for Parameter's hash\n # or __eq__ functions.\n obj = object.__new__(cls)\n\n if uuid is None:\n obj._uuid = uuid4()\n else:\n obj._uuid = uuid\n\n obj._hash = hash(obj._uuid)\n return obj\n\n def __getnewargs__(self):\n # Unpickling won't in general call __init__ but will always call\n # __new__. Specify arguments to be passed to __new__ when unpickling.\n\n return (self.name, self._uuid)\n\n def __init__(self, name: str):\n \"\"\"Create a new named :class:`Parameter`.\n\n Args:\n name: name of the ``Parameter``, used for visual representation. This can\n be any unicode string, e.g. \"\u03d5\".\n \"\"\"\n self._name = name\n if not HAS_SYMENGINE:\n from sympy import Symbol\n\n symbol = Symbol(name)\n else:\n symbol = symengine.Symbol(name)\n super().__init__(symbol_map={self: symbol}, expr=symbol)\n\n def subs(self, parameter_map: dict):\n \"\"\"Substitute self with the corresponding parameter in ``parameter_map``.\"\"\"\n return parameter_map[self]\n\n @property\n def name(self):\n \"\"\"Returns the name of the :class:`Parameter`.\"\"\"\n return self._name\n\n def __str__(self):\n return self.name\n\n def __copy__(self):\n return self\n\n def __deepcopy__(self, memo=None):\n return self\n\n def __repr__(self):\n return f\"{self.__class__.__name__}({self.name})\"\n\n def __eq__(self, other):\n if isinstance(other, Parameter):\n return self._uuid == other._uuid\n elif isinstance(other, ParameterExpression):\n return super().__eq__(other)\n else:\n return False\n\n def __hash__(self):\n return self._hash\n\n def __getstate__(self):\n return {\"name\": self._name}\n\n def __setstate__(self, state):\n self._name = state[\"name\"]\n if not HAS_SYMENGINE:\n from sympy import Symbol\n\n symbol = Symbol(self._name)\n else:\n symbol = symengine.Symbol(self._name)\n super().__init__(symbol_map={self: symbol}, expr=symbol)\n", "path": "qiskit/circuit/parameter.py"}]}
1,618
281
gh_patches_debug_21047
rasdani/github-patches
git_diff
dask__distributed-3387
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> When --dashboard-prefix is used root location is always redirected to /status without adding the prefix The problem arises when using --dashboard-prefix, using http://DASHBOARD[:PORT]/PREFIX/ redirects to http://DASHBOARD[:PORT]/status instead of http://DASHBOARD[:PORT]/PREFIX/status. </issue> <code> [start of distributed/dashboard/core.py] 1 from distutils.version import LooseVersion 2 import os 3 import warnings 4 5 import bokeh 6 from bokeh.server.server import Server 7 from tornado import web 8 9 10 if LooseVersion(bokeh.__version__) < LooseVersion("0.13.0"): 11 warnings.warn( 12 "\nDask needs bokeh >= 0.13.0 for the dashboard." 13 "\nContinuing without the dashboard." 14 ) 15 raise ImportError("Dask needs bokeh >= 0.13.0") 16 17 18 class BokehServer(object): 19 server_kwargs = {} 20 21 def listen(self, addr): 22 if self.server: 23 return 24 if isinstance(addr, tuple): 25 ip, port = addr 26 else: 27 port = addr 28 ip = None 29 for i in range(5): 30 try: 31 server_kwargs = dict( 32 port=port, 33 address=ip, 34 check_unused_sessions_milliseconds=500, 35 allow_websocket_origin=["*"], 36 use_index=False, 37 extra_patterns=[(r"/", web.RedirectHandler, {"url": "/status"})], 38 ) 39 server_kwargs.update(self.server_kwargs) 40 self.server = Server(self.apps, **server_kwargs) 41 self.server.start() 42 43 handlers = [ 44 ( 45 self.prefix + r"/statics/(.*)", 46 web.StaticFileHandler, 47 {"path": os.path.join(os.path.dirname(__file__), "static")}, 48 ) 49 ] 50 51 self.server._tornado.add_handlers(r".*", handlers) 52 53 return 54 except (SystemExit, EnvironmentError) as exc: 55 if port != 0: 56 if "already in use" in str( 57 exc 58 ) or "Only one usage of" in str( # Unix/Mac 59 exc 60 ): # Windows 61 msg = ( 62 "Port %d is already in use. " 63 "\nPerhaps you already have a cluster running?" 64 "\nHosting the diagnostics dashboard on a random port instead." 65 % port 66 ) 67 else: 68 msg = ( 69 "Failed to start diagnostics server on port %d. " % port 70 + str(exc) 71 ) 72 warnings.warn("\n" + msg) 73 port = 0 74 if i == 4: 75 raise 76 77 @property 78 def port(self): 79 return ( 80 self.server.port 81 or list(self.server._http._sockets.values())[0].getsockname()[1] 82 ) 83 84 def stop(self): 85 for context in self.server._tornado._applications.values(): 86 context.run_unload_hook() 87 88 self.server._tornado._stats_job.stop() 89 self.server._tornado._cleanup_job.stop() 90 if self.server._tornado._ping_job is not None: 91 self.server._tornado._ping_job.stop() 92 93 # https://github.com/bokeh/bokeh/issues/5494 94 if LooseVersion(bokeh.__version__) >= "0.12.4": 95 self.server.stop() 96 [end of distributed/dashboard/core.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/distributed/dashboard/core.py b/distributed/dashboard/core.py --- a/distributed/dashboard/core.py +++ b/distributed/dashboard/core.py @@ -5,6 +5,7 @@ import bokeh from bokeh.server.server import Server from tornado import web +from urllib.parse import urljoin if LooseVersion(bokeh.__version__) < LooseVersion("0.13.0"): @@ -34,7 +35,13 @@ check_unused_sessions_milliseconds=500, allow_websocket_origin=["*"], use_index=False, - extra_patterns=[(r"/", web.RedirectHandler, {"url": "/status"})], + extra_patterns=[ + ( + r"/", + web.RedirectHandler, + {"url": urljoin(self.prefix.rstrip("/") + "/", r"status")}, + ) + ], ) server_kwargs.update(self.server_kwargs) self.server = Server(self.apps, **server_kwargs)
{"golden_diff": "diff --git a/distributed/dashboard/core.py b/distributed/dashboard/core.py\n--- a/distributed/dashboard/core.py\n+++ b/distributed/dashboard/core.py\n@@ -5,6 +5,7 @@\n import bokeh\n from bokeh.server.server import Server\n from tornado import web\n+from urllib.parse import urljoin\n \n \n if LooseVersion(bokeh.__version__) < LooseVersion(\"0.13.0\"):\n@@ -34,7 +35,13 @@\n check_unused_sessions_milliseconds=500,\n allow_websocket_origin=[\"*\"],\n use_index=False,\n- extra_patterns=[(r\"/\", web.RedirectHandler, {\"url\": \"/status\"})],\n+ extra_patterns=[\n+ (\n+ r\"/\",\n+ web.RedirectHandler,\n+ {\"url\": urljoin(self.prefix.rstrip(\"/\") + \"/\", r\"status\")},\n+ )\n+ ],\n )\n server_kwargs.update(self.server_kwargs)\n self.server = Server(self.apps, **server_kwargs)\n", "issue": "When --dashboard-prefix is used root location is always redirected to /status without adding the prefix\nThe problem arises when using --dashboard-prefix, using http://DASHBOARD[:PORT]/PREFIX/ redirects to http://DASHBOARD[:PORT]/status instead of http://DASHBOARD[:PORT]/PREFIX/status.\n", "before_files": [{"content": "from distutils.version import LooseVersion\nimport os\nimport warnings\n\nimport bokeh\nfrom bokeh.server.server import Server\nfrom tornado import web\n\n\nif LooseVersion(bokeh.__version__) < LooseVersion(\"0.13.0\"):\n warnings.warn(\n \"\\nDask needs bokeh >= 0.13.0 for the dashboard.\"\n \"\\nContinuing without the dashboard.\"\n )\n raise ImportError(\"Dask needs bokeh >= 0.13.0\")\n\n\nclass BokehServer(object):\n server_kwargs = {}\n\n def listen(self, addr):\n if self.server:\n return\n if isinstance(addr, tuple):\n ip, port = addr\n else:\n port = addr\n ip = None\n for i in range(5):\n try:\n server_kwargs = dict(\n port=port,\n address=ip,\n check_unused_sessions_milliseconds=500,\n allow_websocket_origin=[\"*\"],\n use_index=False,\n extra_patterns=[(r\"/\", web.RedirectHandler, {\"url\": \"/status\"})],\n )\n server_kwargs.update(self.server_kwargs)\n self.server = Server(self.apps, **server_kwargs)\n self.server.start()\n\n handlers = [\n (\n self.prefix + r\"/statics/(.*)\",\n web.StaticFileHandler,\n {\"path\": os.path.join(os.path.dirname(__file__), \"static\")},\n )\n ]\n\n self.server._tornado.add_handlers(r\".*\", handlers)\n\n return\n except (SystemExit, EnvironmentError) as exc:\n if port != 0:\n if \"already in use\" in str(\n exc\n ) or \"Only one usage of\" in str( # Unix/Mac\n exc\n ): # Windows\n msg = (\n \"Port %d is already in use. \"\n \"\\nPerhaps you already have a cluster running?\"\n \"\\nHosting the diagnostics dashboard on a random port instead.\"\n % port\n )\n else:\n msg = (\n \"Failed to start diagnostics server on port %d. \" % port\n + str(exc)\n )\n warnings.warn(\"\\n\" + msg)\n port = 0\n if i == 4:\n raise\n\n @property\n def port(self):\n return (\n self.server.port\n or list(self.server._http._sockets.values())[0].getsockname()[1]\n )\n\n def stop(self):\n for context in self.server._tornado._applications.values():\n context.run_unload_hook()\n\n self.server._tornado._stats_job.stop()\n self.server._tornado._cleanup_job.stop()\n if self.server._tornado._ping_job is not None:\n self.server._tornado._ping_job.stop()\n\n # https://github.com/bokeh/bokeh/issues/5494\n if LooseVersion(bokeh.__version__) >= \"0.12.4\":\n self.server.stop()\n", "path": "distributed/dashboard/core.py"}]}
1,415
210
gh_patches_debug_40460
rasdani/github-patches
git_diff
vacanza__python-holidays-371
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Wrong workday info for country HR Today (Oct. 8, 2020) my alarmclock automation did not go off, because my workday sensor gave the wrong info (no workday). This day used to be a holiday in Croatia, but is not anymore. binary_sensor: - platform: workday country: HR ![image](https://user-images.githubusercontent.com/61622258/95419607-4082d000-093a-11eb-904c-77a2639b1719.png) </issue> <code> [start of holidays/countries/croatia.py] 1 # -*- coding: utf-8 -*- 2 3 # python-holidays 4 # --------------- 5 # A fast, efficient Python library for generating country, province and state 6 # specific sets of holidays on the fly. It aims to make determining whether a 7 # specific date is a holiday as fast and flexible as possible. 8 # 9 # Author: ryanss <[email protected]> (c) 2014-2017 10 # dr-prodigy <[email protected]> (c) 2017-2020 11 # Website: https://github.com/dr-prodigy/python-holidays 12 # License: MIT (see LICENSE file) 13 14 from datetime import date 15 16 from dateutil.easter import easter 17 from dateutil.relativedelta import relativedelta as rd 18 19 from holidays.constants import JAN, MAY, JUN, AUG, OCT, \ 20 NOV, DEC 21 from holidays.holiday_base import HolidayBase 22 23 24 class Croatia(HolidayBase): 25 26 # https://en.wikipedia.org/wiki/Public_holidays_in_Croatia 27 28 def __init__(self, **kwargs): 29 self.country = "HR" 30 HolidayBase.__init__(self, **kwargs) 31 32 def _populate(self, year): 33 # New years 34 self[date(year, JAN, 1)] = "Nova Godina" 35 # Epiphany 36 self[date(year, JAN, 6)] = "Sveta tri kralja" 37 easter_date = easter(year) 38 39 # Easter 40 self[easter_date] = "Uskrs" 41 # Easter Monday 42 self[easter_date + rd(days=1)] = "Uskršnji ponedjeljak" 43 44 # Corpus Christi 45 self[easter_date + rd(days=60)] = "Tijelovo" 46 47 # International Workers' Day 48 self[date(year, MAY, 1)] = "Međunarodni praznik rada" 49 50 if year >= 2020: 51 # Statehood day 52 self[date(year, MAY, 30)] = "Dan državnosti" 53 54 # Anti-fascist struggle day 55 self[date(year, JUN, 22)] = "Dan antifašističke borbe" 56 57 if year < 2020: 58 # Statehood day 59 self[date(year, JUN, 25)] = "Dan državnosti" 60 61 # Victory and Homeland Thanksgiving Day 62 self[date(year, AUG, 5)] = "Dan pobjede i domovinske zahvalnosti" 63 64 # Assumption of Mary 65 self[date(year, AUG, 15)] = "Velika Gospa" 66 67 if year < 2020: 68 # Independence Day 69 self[date(year, OCT, 8)] = "Dan neovisnosti" 70 71 # All Saints' Day 72 self[date(year, NOV, 1)] = "Dan svih svetih" 73 74 if year >= 2020: 75 # Memorial day 76 self[date(year, NOV, 18)] =\ 77 "Dan sjećanja na žrtve Domovinskog rata" 78 79 # Christmas day 80 self[date(year, DEC, 25)] = "Božić" 81 82 # St. Stephen's day 83 self[date(year, DEC, 26)] = "Sveti Stjepan" 84 85 86 class HR(Croatia): 87 pass 88 89 90 class HRV(Croatia): 91 pass 92 [end of holidays/countries/croatia.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/holidays/countries/croatia.py b/holidays/countries/croatia.py --- a/holidays/countries/croatia.py +++ b/holidays/countries/croatia.py @@ -11,18 +11,18 @@ # Website: https://github.com/dr-prodigy/python-holidays # License: MIT (see LICENSE file) -from datetime import date +from datetime import date, timedelta from dateutil.easter import easter -from dateutil.relativedelta import relativedelta as rd -from holidays.constants import JAN, MAY, JUN, AUG, OCT, \ - NOV, DEC +from holidays.constants import JAN, MAY, JUN, AUG, OCT, NOV, DEC from holidays.holiday_base import HolidayBase class Croatia(HolidayBase): + # Updated with act 022-03 / 19-01 / 219 of 14 November 2019 + # https://narodne-novine.nn.hr/clanci/sluzbeni/2019_11_110_2212.html # https://en.wikipedia.org/wiki/Public_holidays_in_Croatia def __init__(self, **kwargs): @@ -32,6 +32,7 @@ def _populate(self, year): # New years self[date(year, JAN, 1)] = "Nova Godina" + # Epiphany self[date(year, JAN, 6)] = "Sveta tri kralja" easter_date = easter(year) @@ -39,23 +40,23 @@ # Easter self[easter_date] = "Uskrs" # Easter Monday - self[easter_date + rd(days=1)] = "Uskršnji ponedjeljak" + self[easter_date + timedelta(days=1)] = "Uskrsni ponedjeljak" # Corpus Christi - self[easter_date + rd(days=60)] = "Tijelovo" + self[easter_date + timedelta(days=60)] = "Tijelovo" # International Workers' Day self[date(year, MAY, 1)] = "Međunarodni praznik rada" + # Statehood day (new) if year >= 2020: - # Statehood day self[date(year, MAY, 30)] = "Dan državnosti" # Anti-fascist struggle day self[date(year, JUN, 22)] = "Dan antifašističke borbe" + # Statehood day (old) if year < 2020: - # Statehood day self[date(year, JUN, 25)] = "Dan državnosti" # Victory and Homeland Thanksgiving Day @@ -64,17 +65,16 @@ # Assumption of Mary self[date(year, AUG, 15)] = "Velika Gospa" + # Independence Day (old) if year < 2020: - # Independence Day self[date(year, OCT, 8)] = "Dan neovisnosti" # All Saints' Day - self[date(year, NOV, 1)] = "Dan svih svetih" + self[date(year, NOV, 1)] = "Svi sveti" if year >= 2020: # Memorial day - self[date(year, NOV, 18)] =\ - "Dan sjećanja na žrtve Domovinskog rata" + self[date(year, NOV, 18)] = "Dan sjećanja" # Christmas day self[date(year, DEC, 25)] = "Božić"
{"golden_diff": "diff --git a/holidays/countries/croatia.py b/holidays/countries/croatia.py\n--- a/holidays/countries/croatia.py\n+++ b/holidays/countries/croatia.py\n@@ -11,18 +11,18 @@\n # Website: https://github.com/dr-prodigy/python-holidays\n # License: MIT (see LICENSE file)\n \n-from datetime import date\n+from datetime import date, timedelta\n \n from dateutil.easter import easter\n-from dateutil.relativedelta import relativedelta as rd\n \n-from holidays.constants import JAN, MAY, JUN, AUG, OCT, \\\n- NOV, DEC\n+from holidays.constants import JAN, MAY, JUN, AUG, OCT, NOV, DEC\n from holidays.holiday_base import HolidayBase\n \n \n class Croatia(HolidayBase):\n \n+ # Updated with act 022-03 / 19-01 / 219 of 14 November 2019\n+ # https://narodne-novine.nn.hr/clanci/sluzbeni/2019_11_110_2212.html\n # https://en.wikipedia.org/wiki/Public_holidays_in_Croatia\n \n def __init__(self, **kwargs):\n@@ -32,6 +32,7 @@\n def _populate(self, year):\n # New years\n self[date(year, JAN, 1)] = \"Nova Godina\"\n+\n # Epiphany\n self[date(year, JAN, 6)] = \"Sveta tri kralja\"\n easter_date = easter(year)\n@@ -39,23 +40,23 @@\n # Easter\n self[easter_date] = \"Uskrs\"\n # Easter Monday\n- self[easter_date + rd(days=1)] = \"Uskr\u0161nji ponedjeljak\"\n+ self[easter_date + timedelta(days=1)] = \"Uskrsni ponedjeljak\"\n \n # Corpus Christi\n- self[easter_date + rd(days=60)] = \"Tijelovo\"\n+ self[easter_date + timedelta(days=60)] = \"Tijelovo\"\n \n # International Workers' Day\n self[date(year, MAY, 1)] = \"Me\u0111unarodni praznik rada\"\n \n+ # Statehood day (new)\n if year >= 2020:\n- # Statehood day\n self[date(year, MAY, 30)] = \"Dan dr\u017eavnosti\"\n \n # Anti-fascist struggle day\n self[date(year, JUN, 22)] = \"Dan antifa\u0161isti\u010dke borbe\"\n \n+ # Statehood day (old)\n if year < 2020:\n- # Statehood day\n self[date(year, JUN, 25)] = \"Dan dr\u017eavnosti\"\n \n # Victory and Homeland Thanksgiving Day\n@@ -64,17 +65,16 @@\n # Assumption of Mary\n self[date(year, AUG, 15)] = \"Velika Gospa\"\n \n+ # Independence Day (old)\n if year < 2020:\n- # Independence Day\n self[date(year, OCT, 8)] = \"Dan neovisnosti\"\n \n # All Saints' Day\n- self[date(year, NOV, 1)] = \"Dan svih svetih\"\n+ self[date(year, NOV, 1)] = \"Svi sveti\"\n \n if year >= 2020:\n # Memorial day\n- self[date(year, NOV, 18)] =\\\n- \"Dan sje\u0107anja na \u017ertve Domovinskog rata\"\n+ self[date(year, NOV, 18)] = \"Dan sje\u0107anja\"\n \n # Christmas day\n self[date(year, DEC, 25)] = \"Bo\u017ei\u0107\"\n", "issue": "Wrong workday info for country HR\nToday (Oct. 8, 2020) my alarmclock automation did not go off, because my workday sensor gave the wrong info (no workday). This day used to be a holiday in Croatia, but is not anymore.\r\n\r\nbinary_sensor:\r\n - platform: workday\r\n country: HR\r\n\r\n![image](https://user-images.githubusercontent.com/61622258/95419607-4082d000-093a-11eb-904c-77a2639b1719.png)\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# python-holidays\n# ---------------\n# A fast, efficient Python library for generating country, province and state\n# specific sets of holidays on the fly. It aims to make determining whether a\n# specific date is a holiday as fast and flexible as possible.\n#\n# Author: ryanss <[email protected]> (c) 2014-2017\n# dr-prodigy <[email protected]> (c) 2017-2020\n# Website: https://github.com/dr-prodigy/python-holidays\n# License: MIT (see LICENSE file)\n\nfrom datetime import date\n\nfrom dateutil.easter import easter\nfrom dateutil.relativedelta import relativedelta as rd\n\nfrom holidays.constants import JAN, MAY, JUN, AUG, OCT, \\\n NOV, DEC\nfrom holidays.holiday_base import HolidayBase\n\n\nclass Croatia(HolidayBase):\n\n # https://en.wikipedia.org/wiki/Public_holidays_in_Croatia\n\n def __init__(self, **kwargs):\n self.country = \"HR\"\n HolidayBase.__init__(self, **kwargs)\n\n def _populate(self, year):\n # New years\n self[date(year, JAN, 1)] = \"Nova Godina\"\n # Epiphany\n self[date(year, JAN, 6)] = \"Sveta tri kralja\"\n easter_date = easter(year)\n\n # Easter\n self[easter_date] = \"Uskrs\"\n # Easter Monday\n self[easter_date + rd(days=1)] = \"Uskr\u0161nji ponedjeljak\"\n\n # Corpus Christi\n self[easter_date + rd(days=60)] = \"Tijelovo\"\n\n # International Workers' Day\n self[date(year, MAY, 1)] = \"Me\u0111unarodni praznik rada\"\n\n if year >= 2020:\n # Statehood day\n self[date(year, MAY, 30)] = \"Dan dr\u017eavnosti\"\n\n # Anti-fascist struggle day\n self[date(year, JUN, 22)] = \"Dan antifa\u0161isti\u010dke borbe\"\n\n if year < 2020:\n # Statehood day\n self[date(year, JUN, 25)] = \"Dan dr\u017eavnosti\"\n\n # Victory and Homeland Thanksgiving Day\n self[date(year, AUG, 5)] = \"Dan pobjede i domovinske zahvalnosti\"\n\n # Assumption of Mary\n self[date(year, AUG, 15)] = \"Velika Gospa\"\n\n if year < 2020:\n # Independence Day\n self[date(year, OCT, 8)] = \"Dan neovisnosti\"\n\n # All Saints' Day\n self[date(year, NOV, 1)] = \"Dan svih svetih\"\n\n if year >= 2020:\n # Memorial day\n self[date(year, NOV, 18)] =\\\n \"Dan sje\u0107anja na \u017ertve Domovinskog rata\"\n\n # Christmas day\n self[date(year, DEC, 25)] = \"Bo\u017ei\u0107\"\n\n # St. Stephen's day\n self[date(year, DEC, 26)] = \"Sveti Stjepan\"\n\n\nclass HR(Croatia):\n pass\n\n\nclass HRV(Croatia):\n pass\n", "path": "holidays/countries/croatia.py"}]}
1,651
884
gh_patches_debug_10492
rasdani/github-patches
git_diff
fossasia__open-event-server-2532
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Bug: Kubernetes Deployment Info shown on heroku Do not try to show Kubernetes deployment info when not hosted on Kubernetes </issue> <code> [start of app/views/admin/super_admin/super_admin.py] 1 import os 2 3 from flask_admin import expose 4 5 from app.views.admin.super_admin.super_admin_base import SuperAdminBaseView 6 from ....helpers.data_getter import DataGetter 7 from app.helpers.helpers import get_latest_heroku_release, get_commit_info, get_count 8 from app.models.user import ATTENDEE,TRACK_ORGANIZER, COORGANIZER, ORGANIZER 9 from app.helpers.kubernetes import KubernetesApi 10 11 12 class SuperAdminView(SuperAdminBaseView): 13 14 @expose('/') 15 def index_view(self): 16 events = DataGetter.get_all_events()[:5] 17 number_live_events = get_count(DataGetter.get_all_live_events()) 18 number_draft_events = get_count(DataGetter.get_all_draft_events()) 19 number_past_events = get_count(DataGetter.get_all_past_events()) 20 super_admins = DataGetter.get_all_super_admins() 21 admins = DataGetter.get_all_admins() 22 registered_users = DataGetter.get_all_registered_users() 23 # TODO Fix function and correct this 24 organizers = get_count(DataGetter.get_all_user_roles(ORGANIZER)) 25 co_organizers = get_count(DataGetter.get_all_user_roles(COORGANIZER)) 26 track_organizers = get_count(DataGetter.get_all_user_roles(TRACK_ORGANIZER)) 27 attendees = get_count(DataGetter.get_all_user_roles(ATTENDEE)) 28 accepted_sessions = DataGetter.get_all_accepted_sessions() 29 rejected_sessions = DataGetter.get_all_rejected_sessions() 30 draft_sessions = DataGetter.get_all_draft_sessions() 31 email_times = DataGetter.get_email_by_times() 32 33 commit_info = None 34 version = None 35 on_kubernetes = False 36 pods_info = None 37 38 if KubernetesApi.is_on_kubernetes(): 39 on_kubernetes = True 40 kubernetes_api = KubernetesApi() 41 pods_info = kubernetes_api.get_pods()['items'] 42 version = os.getenv('REPOSITORY', 'https://github.com/fossasia/open-event-orga-server.git') 43 commit_info = os.getenv('BRANCH', 'development') 44 else: 45 version = get_latest_heroku_release() 46 commit_info = None 47 commit_number = None 48 if version: 49 commit_number = version['description'].split(' ')[1] 50 commit_info = get_commit_info(commit_number) 51 52 return self.render('/gentelella/admin/super_admin/widgets/index.html', 53 events=events, 54 version=version, 55 commit_info=commit_info, 56 on_kubernetes=True, 57 pods_info=pods_info, 58 number_live_events=number_live_events, 59 number_draft_events=number_draft_events, 60 number_past_events=number_past_events, 61 super_admins=super_admins, 62 admins=admins, 63 registered_users=registered_users, 64 organizers=organizers, 65 co_organizers=co_organizers, 66 track_organizers=track_organizers, 67 attendees=attendees, 68 accepted_sessions=accepted_sessions, 69 rejected_sessions=rejected_sessions, 70 draft_sessions=draft_sessions, 71 email_times=email_times) 72 [end of app/views/admin/super_admin/super_admin.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/app/views/admin/super_admin/super_admin.py b/app/views/admin/super_admin/super_admin.py --- a/app/views/admin/super_admin/super_admin.py +++ b/app/views/admin/super_admin/super_admin.py @@ -53,7 +53,7 @@ events=events, version=version, commit_info=commit_info, - on_kubernetes=True, + on_kubernetes=on_kubernetes, pods_info=pods_info, number_live_events=number_live_events, number_draft_events=number_draft_events,
{"golden_diff": "diff --git a/app/views/admin/super_admin/super_admin.py b/app/views/admin/super_admin/super_admin.py\n--- a/app/views/admin/super_admin/super_admin.py\n+++ b/app/views/admin/super_admin/super_admin.py\n@@ -53,7 +53,7 @@\n events=events,\n version=version,\n commit_info=commit_info,\n- on_kubernetes=True,\n+ on_kubernetes=on_kubernetes,\n pods_info=pods_info,\n number_live_events=number_live_events,\n number_draft_events=number_draft_events,\n", "issue": "Bug: Kubernetes Deployment Info shown on heroku\nDo not try to show Kubernetes deployment info when not hosted on Kubernetes\n", "before_files": [{"content": "import os\n\nfrom flask_admin import expose\n\nfrom app.views.admin.super_admin.super_admin_base import SuperAdminBaseView\nfrom ....helpers.data_getter import DataGetter\nfrom app.helpers.helpers import get_latest_heroku_release, get_commit_info, get_count\nfrom app.models.user import ATTENDEE,TRACK_ORGANIZER, COORGANIZER, ORGANIZER\nfrom app.helpers.kubernetes import KubernetesApi\n\n\nclass SuperAdminView(SuperAdminBaseView):\n\n @expose('/')\n def index_view(self):\n events = DataGetter.get_all_events()[:5]\n number_live_events = get_count(DataGetter.get_all_live_events())\n number_draft_events = get_count(DataGetter.get_all_draft_events())\n number_past_events = get_count(DataGetter.get_all_past_events())\n super_admins = DataGetter.get_all_super_admins()\n admins = DataGetter.get_all_admins()\n registered_users = DataGetter.get_all_registered_users()\n # TODO Fix function and correct this\n organizers = get_count(DataGetter.get_all_user_roles(ORGANIZER))\n co_organizers = get_count(DataGetter.get_all_user_roles(COORGANIZER))\n track_organizers = get_count(DataGetter.get_all_user_roles(TRACK_ORGANIZER))\n attendees = get_count(DataGetter.get_all_user_roles(ATTENDEE))\n accepted_sessions = DataGetter.get_all_accepted_sessions()\n rejected_sessions = DataGetter.get_all_rejected_sessions()\n draft_sessions = DataGetter.get_all_draft_sessions()\n email_times = DataGetter.get_email_by_times()\n\n commit_info = None\n version = None\n on_kubernetes = False\n pods_info = None\n\n if KubernetesApi.is_on_kubernetes():\n on_kubernetes = True\n kubernetes_api = KubernetesApi()\n pods_info = kubernetes_api.get_pods()['items']\n version = os.getenv('REPOSITORY', 'https://github.com/fossasia/open-event-orga-server.git')\n commit_info = os.getenv('BRANCH', 'development')\n else:\n version = get_latest_heroku_release()\n commit_info = None\n commit_number = None\n if version:\n commit_number = version['description'].split(' ')[1]\n commit_info = get_commit_info(commit_number)\n\n return self.render('/gentelella/admin/super_admin/widgets/index.html',\n events=events,\n version=version,\n commit_info=commit_info,\n on_kubernetes=True,\n pods_info=pods_info,\n number_live_events=number_live_events,\n number_draft_events=number_draft_events,\n number_past_events=number_past_events,\n super_admins=super_admins,\n admins=admins,\n registered_users=registered_users,\n organizers=organizers,\n co_organizers=co_organizers,\n track_organizers=track_organizers,\n attendees=attendees,\n accepted_sessions=accepted_sessions,\n rejected_sessions=rejected_sessions,\n draft_sessions=draft_sessions,\n email_times=email_times)\n", "path": "app/views/admin/super_admin/super_admin.py"}]}
1,341
124
gh_patches_debug_9975
rasdani/github-patches
git_diff
cookiecutter__cookiecutter-852
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> # -*- coding: utf-8 -*- is not consistently used in all Python source files - Cookiecutter version: Cookiecutter 1.4.0 - Template project url: none - Python version: 3.5 - Operating System: OS X ### Description: # -_\- coding: utf-8 -_\- is not consistently used in all Python source files ### What I've run: Nothing, it's an observation of an inconsistency in the project. </issue> <code> [start of setup.py] 1 #!/usr/bin/env python 2 3 import os 4 import io 5 import sys 6 7 from setuptools import setup 8 9 version = "1.4.0" 10 11 if sys.argv[-1] == 'publish': 12 os.system('python setup.py sdist upload') 13 os.system('python setup.py bdist_wheel upload') 14 sys.exit() 15 16 if sys.argv[-1] == 'tag': 17 os.system("git tag -a %s -m 'version %s'" % (version, version)) 18 os.system("git push --tags") 19 sys.exit() 20 21 with io.open('README.rst', 'r', encoding='utf-8') as readme_file: 22 readme = readme_file.read() 23 24 with io.open('HISTORY.rst', 'r', encoding='utf-8') as history_file: 25 history = history_file.read().replace('.. :changelog:', '') 26 27 requirements = [ 28 'future>=0.15.2', 29 'binaryornot>=0.2.0', 30 'jinja2>=2.7', 31 'click>=5.0', 32 'whichcraft>=0.4.0', 33 'poyo>=0.1.0', 34 'jinja2-time>=0.1.0' 35 ] 36 37 long_description = readme + '\n\n' + history 38 39 if sys.argv[-1] == 'readme': 40 print(long_description) 41 sys.exit() 42 43 44 setup( 45 name='cookiecutter', 46 version=version, 47 description=('A command-line utility that creates projects from project ' 48 'templates, e.g. creating a Python package project from a ' 49 'Python package project template.'), 50 long_description=long_description, 51 author='Audrey Roy', 52 author_email='[email protected]', 53 url='https://github.com/audreyr/cookiecutter', 54 packages=[ 55 'cookiecutter', 56 ], 57 package_dir={'cookiecutter': 'cookiecutter'}, 58 entry_points={ 59 'console_scripts': [ 60 'cookiecutter = cookiecutter.__main__:main', 61 ] 62 }, 63 include_package_data=True, 64 install_requires=requirements, 65 license='BSD', 66 zip_safe=False, 67 classifiers=[ 68 'Development Status :: 5 - Production/Stable', 69 'Environment :: Console', 70 'Intended Audience :: Developers', 71 'Natural Language :: English', 72 'License :: OSI Approved :: BSD License', 73 'Programming Language :: Python', 74 'Programming Language :: Python :: 2', 75 'Programming Language :: Python :: 2.7', 76 'Programming Language :: Python :: 3', 77 'Programming Language :: Python :: 3.3', 78 'Programming Language :: Python :: 3.4', 79 'Programming Language :: Python :: 3.5', 80 'Programming Language :: Python :: Implementation :: CPython', 81 'Programming Language :: Python :: Implementation :: PyPy', 82 'Topic :: Software Development', 83 ], 84 keywords=( 85 'cookiecutter, Python, projects, project templates, Jinja2, ' 86 'skeleton, scaffolding, project directory, setup.py, package, ' 87 'packaging' 88 ), 89 ) 90 [end of setup.py] [start of cookiecutter/__main__.py] 1 """Allow cookiecutter to be executable through `python -m cookiecutter`.""" 2 from __future__ import absolute_import 3 4 from .cli import main 5 6 7 if __name__ == "__main__": # pragma: no cover 8 main(prog_name="cookiecutter") 9 [end of cookiecutter/__main__.py] [start of __main__.py] 1 """Allow cookiecutter to be executable from a checkout or zip file.""" 2 import runpy 3 4 5 if __name__ == "__main__": 6 runpy.run_module("cookiecutter", run_name="__main__") 7 [end of __main__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/__main__.py b/__main__.py --- a/__main__.py +++ b/__main__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """Allow cookiecutter to be executable from a checkout or zip file.""" import runpy diff --git a/cookiecutter/__main__.py b/cookiecutter/__main__.py --- a/cookiecutter/__main__.py +++ b/cookiecutter/__main__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """Allow cookiecutter to be executable through `python -m cookiecutter`.""" from __future__ import absolute_import diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- import os import io
{"golden_diff": "diff --git a/__main__.py b/__main__.py\n--- a/__main__.py\n+++ b/__main__.py\n@@ -1,3 +1,4 @@\n+# -*- coding: utf-8 -*-\n \"\"\"Allow cookiecutter to be executable from a checkout or zip file.\"\"\"\n import runpy\n \ndiff --git a/cookiecutter/__main__.py b/cookiecutter/__main__.py\n--- a/cookiecutter/__main__.py\n+++ b/cookiecutter/__main__.py\n@@ -1,3 +1,4 @@\n+# -*- coding: utf-8 -*-\n \"\"\"Allow cookiecutter to be executable through `python -m cookiecutter`.\"\"\"\n from __future__ import absolute_import\n \ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -1,4 +1,5 @@\n #!/usr/bin/env python\n+# -*- coding: utf-8 -*-\n \n import os\n import io\n", "issue": "# -*- coding: utf-8 -*- is not consistently used in all Python source files\n- Cookiecutter version:\n Cookiecutter 1.4.0\n- Template project url:\n none\n- Python version:\n 3.5\n- Operating System:\n OS X\n ### Description:\n # -_\\- coding: utf-8 -_\\- is not consistently used in all Python source files\n### What I've run:\n\nNothing, it's an observation of an inconsistency in the project.\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\nimport os\nimport io\nimport sys\n\nfrom setuptools import setup\n\nversion = \"1.4.0\"\n\nif sys.argv[-1] == 'publish':\n os.system('python setup.py sdist upload')\n os.system('python setup.py bdist_wheel upload')\n sys.exit()\n\nif sys.argv[-1] == 'tag':\n os.system(\"git tag -a %s -m 'version %s'\" % (version, version))\n os.system(\"git push --tags\")\n sys.exit()\n\nwith io.open('README.rst', 'r', encoding='utf-8') as readme_file:\n readme = readme_file.read()\n\nwith io.open('HISTORY.rst', 'r', encoding='utf-8') as history_file:\n history = history_file.read().replace('.. :changelog:', '')\n\nrequirements = [\n 'future>=0.15.2',\n 'binaryornot>=0.2.0',\n 'jinja2>=2.7',\n 'click>=5.0',\n 'whichcraft>=0.4.0',\n 'poyo>=0.1.0',\n 'jinja2-time>=0.1.0'\n]\n\nlong_description = readme + '\\n\\n' + history\n\nif sys.argv[-1] == 'readme':\n print(long_description)\n sys.exit()\n\n\nsetup(\n name='cookiecutter',\n version=version,\n description=('A command-line utility that creates projects from project '\n 'templates, e.g. creating a Python package project from a '\n 'Python package project template.'),\n long_description=long_description,\n author='Audrey Roy',\n author_email='[email protected]',\n url='https://github.com/audreyr/cookiecutter',\n packages=[\n 'cookiecutter',\n ],\n package_dir={'cookiecutter': 'cookiecutter'},\n entry_points={\n 'console_scripts': [\n 'cookiecutter = cookiecutter.__main__:main',\n ]\n },\n include_package_data=True,\n install_requires=requirements,\n license='BSD',\n zip_safe=False,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Natural Language :: English',\n 'License :: OSI Approved :: BSD License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy',\n 'Topic :: Software Development',\n ],\n keywords=(\n 'cookiecutter, Python, projects, project templates, Jinja2, '\n 'skeleton, scaffolding, project directory, setup.py, package, '\n 'packaging'\n ),\n)\n", "path": "setup.py"}, {"content": "\"\"\"Allow cookiecutter to be executable through `python -m cookiecutter`.\"\"\"\nfrom __future__ import absolute_import\n\nfrom .cli import main\n\n\nif __name__ == \"__main__\": # pragma: no cover\n main(prog_name=\"cookiecutter\")\n", "path": "cookiecutter/__main__.py"}, {"content": "\"\"\"Allow cookiecutter to be executable from a checkout or zip file.\"\"\"\nimport runpy\n\n\nif __name__ == \"__main__\":\n runpy.run_module(\"cookiecutter\", run_name=\"__main__\")\n", "path": "__main__.py"}]}
1,630
206
gh_patches_debug_50127
rasdani/github-patches
git_diff
scrapy__scrapy-4481
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> responsetypes.py ResponseTypes.from_headers typo as 'Content-type' Shall this line: https://github.com/scrapy/scrapy/blob/8845773d44329194ee73fa4985a94e768bf664e7/scrapy/responsetypes.py#L74 be changed to below? ` content_type=headers[b'Content-Type'], ` I guess this typo is with all the versions until today. </issue> <code> [start of scrapy/responsetypes.py] 1 """ 2 This module implements a class which returns the appropriate Response class 3 based on different criteria. 4 """ 5 from mimetypes import MimeTypes 6 from pkgutil import get_data 7 from io import StringIO 8 9 from scrapy.http import Response 10 from scrapy.utils.misc import load_object 11 from scrapy.utils.python import binary_is_text, to_bytes, to_unicode 12 13 14 class ResponseTypes: 15 16 CLASSES = { 17 'text/html': 'scrapy.http.HtmlResponse', 18 'application/atom+xml': 'scrapy.http.XmlResponse', 19 'application/rdf+xml': 'scrapy.http.XmlResponse', 20 'application/rss+xml': 'scrapy.http.XmlResponse', 21 'application/xhtml+xml': 'scrapy.http.HtmlResponse', 22 'application/vnd.wap.xhtml+xml': 'scrapy.http.HtmlResponse', 23 'application/xml': 'scrapy.http.XmlResponse', 24 'application/json': 'scrapy.http.TextResponse', 25 'application/x-json': 'scrapy.http.TextResponse', 26 'application/json-amazonui-streaming': 'scrapy.http.TextResponse', 27 'application/javascript': 'scrapy.http.TextResponse', 28 'application/x-javascript': 'scrapy.http.TextResponse', 29 'text/xml': 'scrapy.http.XmlResponse', 30 'text/*': 'scrapy.http.TextResponse', 31 } 32 33 def __init__(self): 34 self.classes = {} 35 self.mimetypes = MimeTypes() 36 mimedata = get_data('scrapy', 'mime.types').decode('utf8') 37 self.mimetypes.readfp(StringIO(mimedata)) 38 for mimetype, cls in self.CLASSES.items(): 39 self.classes[mimetype] = load_object(cls) 40 41 def from_mimetype(self, mimetype): 42 """Return the most appropriate Response class for the given mimetype""" 43 if mimetype is None: 44 return Response 45 elif mimetype in self.classes: 46 return self.classes[mimetype] 47 else: 48 basetype = "%s/*" % mimetype.split('/')[0] 49 return self.classes.get(basetype, Response) 50 51 def from_content_type(self, content_type, content_encoding=None): 52 """Return the most appropriate Response class from an HTTP Content-Type 53 header """ 54 if content_encoding: 55 return Response 56 mimetype = to_unicode(content_type).split(';')[0].strip().lower() 57 return self.from_mimetype(mimetype) 58 59 def from_content_disposition(self, content_disposition): 60 try: 61 filename = to_unicode(content_disposition, 62 encoding='latin-1', errors='replace').split(';')[1].split('=')[1] 63 filename = filename.strip('"\'') 64 return self.from_filename(filename) 65 except IndexError: 66 return Response 67 68 def from_headers(self, headers): 69 """Return the most appropriate Response class by looking at the HTTP 70 headers""" 71 cls = Response 72 if b'Content-Type' in headers: 73 cls = self.from_content_type( 74 content_type=headers[b'Content-type'], 75 content_encoding=headers.get(b'Content-Encoding') 76 ) 77 if cls is Response and b'Content-Disposition' in headers: 78 cls = self.from_content_disposition(headers[b'Content-Disposition']) 79 return cls 80 81 def from_filename(self, filename): 82 """Return the most appropriate Response class from a file name""" 83 mimetype, encoding = self.mimetypes.guess_type(filename) 84 if mimetype and not encoding: 85 return self.from_mimetype(mimetype) 86 else: 87 return Response 88 89 def from_body(self, body): 90 """Try to guess the appropriate response based on the body content. 91 This method is a bit magic and could be improved in the future, but 92 it's not meant to be used except for special cases where response types 93 cannot be guess using more straightforward methods.""" 94 chunk = body[:5000] 95 chunk = to_bytes(chunk) 96 if not binary_is_text(chunk): 97 return self.from_mimetype('application/octet-stream') 98 elif b"<html>" in chunk.lower(): 99 return self.from_mimetype('text/html') 100 elif b"<?xml" in chunk.lower(): 101 return self.from_mimetype('text/xml') 102 else: 103 return self.from_mimetype('text') 104 105 def from_args(self, headers=None, url=None, filename=None, body=None): 106 """Guess the most appropriate Response class based on 107 the given arguments.""" 108 cls = Response 109 if headers is not None: 110 cls = self.from_headers(headers) 111 if cls is Response and url is not None: 112 cls = self.from_filename(url) 113 if cls is Response and filename is not None: 114 cls = self.from_filename(filename) 115 if cls is Response and body is not None: 116 cls = self.from_body(body) 117 return cls 118 119 120 responsetypes = ResponseTypes() 121 [end of scrapy/responsetypes.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/scrapy/responsetypes.py b/scrapy/responsetypes.py --- a/scrapy/responsetypes.py +++ b/scrapy/responsetypes.py @@ -71,7 +71,7 @@ cls = Response if b'Content-Type' in headers: cls = self.from_content_type( - content_type=headers[b'Content-type'], + content_type=headers[b'Content-Type'], content_encoding=headers.get(b'Content-Encoding') ) if cls is Response and b'Content-Disposition' in headers:
{"golden_diff": "diff --git a/scrapy/responsetypes.py b/scrapy/responsetypes.py\n--- a/scrapy/responsetypes.py\n+++ b/scrapy/responsetypes.py\n@@ -71,7 +71,7 @@\n cls = Response\n if b'Content-Type' in headers:\n cls = self.from_content_type(\n- content_type=headers[b'Content-type'],\n+ content_type=headers[b'Content-Type'],\n content_encoding=headers.get(b'Content-Encoding')\n )\n if cls is Response and b'Content-Disposition' in headers:\n", "issue": "responsetypes.py ResponseTypes.from_headers typo as 'Content-type'\nShall this line:\r\n\r\nhttps://github.com/scrapy/scrapy/blob/8845773d44329194ee73fa4985a94e768bf664e7/scrapy/responsetypes.py#L74\r\n\r\nbe changed to below?\r\n`\r\ncontent_type=headers[b'Content-Type'],\r\n`\r\nI guess this typo is with all the versions until today.\n", "before_files": [{"content": "\"\"\"\nThis module implements a class which returns the appropriate Response class\nbased on different criteria.\n\"\"\"\nfrom mimetypes import MimeTypes\nfrom pkgutil import get_data\nfrom io import StringIO\n\nfrom scrapy.http import Response\nfrom scrapy.utils.misc import load_object\nfrom scrapy.utils.python import binary_is_text, to_bytes, to_unicode\n\n\nclass ResponseTypes:\n\n CLASSES = {\n 'text/html': 'scrapy.http.HtmlResponse',\n 'application/atom+xml': 'scrapy.http.XmlResponse',\n 'application/rdf+xml': 'scrapy.http.XmlResponse',\n 'application/rss+xml': 'scrapy.http.XmlResponse',\n 'application/xhtml+xml': 'scrapy.http.HtmlResponse',\n 'application/vnd.wap.xhtml+xml': 'scrapy.http.HtmlResponse',\n 'application/xml': 'scrapy.http.XmlResponse',\n 'application/json': 'scrapy.http.TextResponse',\n 'application/x-json': 'scrapy.http.TextResponse',\n 'application/json-amazonui-streaming': 'scrapy.http.TextResponse',\n 'application/javascript': 'scrapy.http.TextResponse',\n 'application/x-javascript': 'scrapy.http.TextResponse',\n 'text/xml': 'scrapy.http.XmlResponse',\n 'text/*': 'scrapy.http.TextResponse',\n }\n\n def __init__(self):\n self.classes = {}\n self.mimetypes = MimeTypes()\n mimedata = get_data('scrapy', 'mime.types').decode('utf8')\n self.mimetypes.readfp(StringIO(mimedata))\n for mimetype, cls in self.CLASSES.items():\n self.classes[mimetype] = load_object(cls)\n\n def from_mimetype(self, mimetype):\n \"\"\"Return the most appropriate Response class for the given mimetype\"\"\"\n if mimetype is None:\n return Response\n elif mimetype in self.classes:\n return self.classes[mimetype]\n else:\n basetype = \"%s/*\" % mimetype.split('/')[0]\n return self.classes.get(basetype, Response)\n\n def from_content_type(self, content_type, content_encoding=None):\n \"\"\"Return the most appropriate Response class from an HTTP Content-Type\n header \"\"\"\n if content_encoding:\n return Response\n mimetype = to_unicode(content_type).split(';')[0].strip().lower()\n return self.from_mimetype(mimetype)\n\n def from_content_disposition(self, content_disposition):\n try:\n filename = to_unicode(content_disposition,\n encoding='latin-1', errors='replace').split(';')[1].split('=')[1]\n filename = filename.strip('\"\\'')\n return self.from_filename(filename)\n except IndexError:\n return Response\n\n def from_headers(self, headers):\n \"\"\"Return the most appropriate Response class by looking at the HTTP\n headers\"\"\"\n cls = Response\n if b'Content-Type' in headers:\n cls = self.from_content_type(\n content_type=headers[b'Content-type'],\n content_encoding=headers.get(b'Content-Encoding')\n )\n if cls is Response and b'Content-Disposition' in headers:\n cls = self.from_content_disposition(headers[b'Content-Disposition'])\n return cls\n\n def from_filename(self, filename):\n \"\"\"Return the most appropriate Response class from a file name\"\"\"\n mimetype, encoding = self.mimetypes.guess_type(filename)\n if mimetype and not encoding:\n return self.from_mimetype(mimetype)\n else:\n return Response\n\n def from_body(self, body):\n \"\"\"Try to guess the appropriate response based on the body content.\n This method is a bit magic and could be improved in the future, but\n it's not meant to be used except for special cases where response types\n cannot be guess using more straightforward methods.\"\"\"\n chunk = body[:5000]\n chunk = to_bytes(chunk)\n if not binary_is_text(chunk):\n return self.from_mimetype('application/octet-stream')\n elif b\"<html>\" in chunk.lower():\n return self.from_mimetype('text/html')\n elif b\"<?xml\" in chunk.lower():\n return self.from_mimetype('text/xml')\n else:\n return self.from_mimetype('text')\n\n def from_args(self, headers=None, url=None, filename=None, body=None):\n \"\"\"Guess the most appropriate Response class based on\n the given arguments.\"\"\"\n cls = Response\n if headers is not None:\n cls = self.from_headers(headers)\n if cls is Response and url is not None:\n cls = self.from_filename(url)\n if cls is Response and filename is not None:\n cls = self.from_filename(filename)\n if cls is Response and body is not None:\n cls = self.from_body(body)\n return cls\n\n\nresponsetypes = ResponseTypes()\n", "path": "scrapy/responsetypes.py"}]}
1,894
122
gh_patches_debug_11555
rasdani/github-patches
git_diff
pypa__setuptools-753
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Setuptools doesn't play nice with Numpy See: https://github.com/numpy/numpy/blob/master/numpy/distutils/extension.py#L42-L52 That functionality was broken by: https://github.com/pypa/setuptools/blob/master/setuptools/extension.py#L39-L41 In this pr: https://github.com/pypa/setuptools/pull/718 Here's the the pdb session where I debugged this: ``` (Pdb) old_Extension.__module__ 'setuptools.extension' ``` </issue> <code> [start of setuptools/extension.py] 1 import sys 2 import re 3 import functools 4 import distutils.core 5 import distutils.errors 6 import distutils.extension 7 8 from setuptools.extern.six.moves import map 9 10 from .dist import _get_unpatched 11 from . import msvc 12 13 _Extension = _get_unpatched(distutils.core.Extension) 14 15 msvc.patch_for_specialized_compiler() 16 17 18 def _have_cython(): 19 """ 20 Return True if Cython can be imported. 21 """ 22 cython_impl = 'Cython.Distutils.build_ext' 23 try: 24 # from (cython_impl) import build_ext 25 __import__(cython_impl, fromlist=['build_ext']).build_ext 26 return True 27 except Exception: 28 pass 29 return False 30 31 32 # for compatibility 33 have_pyrex = _have_cython 34 35 36 class Extension(_Extension): 37 """Extension that uses '.c' files in place of '.pyx' files""" 38 39 def __init__(self, name, sources, py_limited_api=False, **kw): 40 self.py_limited_api = py_limited_api 41 _Extension.__init__(self, name, sources, **kw) 42 43 def _convert_pyx_sources_to_lang(self): 44 """ 45 Replace sources with .pyx extensions to sources with the target 46 language extension. This mechanism allows language authors to supply 47 pre-converted sources but to prefer the .pyx sources. 48 """ 49 if _have_cython(): 50 # the build has Cython, so allow it to compile the .pyx files 51 return 52 lang = self.language or '' 53 target_ext = '.cpp' if lang.lower() == 'c++' else '.c' 54 sub = functools.partial(re.sub, '.pyx$', target_ext) 55 self.sources = list(map(sub, self.sources)) 56 57 58 class Library(Extension): 59 """Just like a regular Extension, but built as a library instead""" 60 61 62 distutils.core.Extension = Extension 63 distutils.extension.Extension = Extension 64 if 'distutils.command.build_ext' in sys.modules: 65 sys.modules['distutils.command.build_ext'].Extension = Extension 66 [end of setuptools/extension.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setuptools/extension.py b/setuptools/extension.py --- a/setuptools/extension.py +++ b/setuptools/extension.py @@ -36,9 +36,11 @@ class Extension(_Extension): """Extension that uses '.c' files in place of '.pyx' files""" - def __init__(self, name, sources, py_limited_api=False, **kw): - self.py_limited_api = py_limited_api - _Extension.__init__(self, name, sources, **kw) + def __init__(self, name, sources, *args, **kw): + # The *args is needed for compatibility as calls may use positional + # arguments. py_limited_api may be set only via keyword. + self.py_limited_api = kw.pop("py_limited_api", False) + _Extension.__init__(self, name, sources, *args, **kw) def _convert_pyx_sources_to_lang(self): """
{"golden_diff": "diff --git a/setuptools/extension.py b/setuptools/extension.py\n--- a/setuptools/extension.py\n+++ b/setuptools/extension.py\n@@ -36,9 +36,11 @@\n class Extension(_Extension):\n \"\"\"Extension that uses '.c' files in place of '.pyx' files\"\"\"\n \n- def __init__(self, name, sources, py_limited_api=False, **kw):\n- self.py_limited_api = py_limited_api\n- _Extension.__init__(self, name, sources, **kw)\n+ def __init__(self, name, sources, *args, **kw):\n+ # The *args is needed for compatibility as calls may use positional\n+ # arguments. py_limited_api may be set only via keyword.\n+ self.py_limited_api = kw.pop(\"py_limited_api\", False)\n+ _Extension.__init__(self, name, sources, *args, **kw)\n \n def _convert_pyx_sources_to_lang(self):\n \"\"\"\n", "issue": "Setuptools doesn't play nice with Numpy\nSee: https://github.com/numpy/numpy/blob/master/numpy/distutils/extension.py#L42-L52\n\nThat functionality was broken by:\nhttps://github.com/pypa/setuptools/blob/master/setuptools/extension.py#L39-L41\n\nIn this pr: https://github.com/pypa/setuptools/pull/718\n\nHere's the the pdb session where I debugged this:\n\n```\n(Pdb) old_Extension.__module__\n'setuptools.extension'\n```\n\n", "before_files": [{"content": "import sys\nimport re\nimport functools\nimport distutils.core\nimport distutils.errors\nimport distutils.extension\n\nfrom setuptools.extern.six.moves import map\n\nfrom .dist import _get_unpatched\nfrom . import msvc\n\n_Extension = _get_unpatched(distutils.core.Extension)\n\nmsvc.patch_for_specialized_compiler()\n\n\ndef _have_cython():\n \"\"\"\n Return True if Cython can be imported.\n \"\"\"\n cython_impl = 'Cython.Distutils.build_ext'\n try:\n # from (cython_impl) import build_ext\n __import__(cython_impl, fromlist=['build_ext']).build_ext\n return True\n except Exception:\n pass\n return False\n\n\n# for compatibility\nhave_pyrex = _have_cython\n\n\nclass Extension(_Extension):\n \"\"\"Extension that uses '.c' files in place of '.pyx' files\"\"\"\n\n def __init__(self, name, sources, py_limited_api=False, **kw):\n self.py_limited_api = py_limited_api\n _Extension.__init__(self, name, sources, **kw)\n\n def _convert_pyx_sources_to_lang(self):\n \"\"\"\n Replace sources with .pyx extensions to sources with the target\n language extension. This mechanism allows language authors to supply\n pre-converted sources but to prefer the .pyx sources.\n \"\"\"\n if _have_cython():\n # the build has Cython, so allow it to compile the .pyx files\n return\n lang = self.language or ''\n target_ext = '.cpp' if lang.lower() == 'c++' else '.c'\n sub = functools.partial(re.sub, '.pyx$', target_ext)\n self.sources = list(map(sub, self.sources))\n\n\nclass Library(Extension):\n \"\"\"Just like a regular Extension, but built as a library instead\"\"\"\n\n\ndistutils.core.Extension = Extension\ndistutils.extension.Extension = Extension\nif 'distutils.command.build_ext' in sys.modules:\n sys.modules['distutils.command.build_ext'].Extension = Extension\n", "path": "setuptools/extension.py"}]}
1,216
220
gh_patches_debug_22181
rasdani/github-patches
git_diff
fossasia__open-event-server-6129
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> order_expiry_time migration error **Describe the bug** The latest migration, `2c7ff9781032` had a bug where the server_default was supposed to be a string. **Additional context** Working on it. </issue> <code> [start of migrations/versions/2c7ff9781032_.py] 1 """empty message 2 3 Revision ID: 2c7ff9781032 4 Revises: b08a4ffff5dd 5 Create Date: 2019-06-21 04:33:59.021835 6 7 """ 8 9 from alembic import op 10 import sqlalchemy as sa 11 import sqlalchemy_utils 12 13 14 # revision identifiers, used by Alembic. 15 revision = '2c7ff9781032' 16 down_revision = 'b08a4ffff5dd' 17 18 19 def upgrade(): 20 # ### commands auto generated by Alembic - please adjust! ### 21 op.drop_column('events', 'order_expiry_time') 22 op.drop_column('events_version', 'order_expiry_time') 23 op.add_column('settings', sa.Column('order_expiry_time', sa.Integer(), default=15, nullable=False)) 24 # ### end Alembic commands ### 25 26 27 def downgrade(): 28 # ### commands auto generated by Alembic - please adjust! ### 29 op.drop_column('settings', 'order_expiry_time') 30 op.add_column('events_version', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=False)) 31 op.add_column('events', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=False)) 32 # ### end Alembic commands ### 33 [end of migrations/versions/2c7ff9781032_.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/migrations/versions/2c7ff9781032_.py b/migrations/versions/2c7ff9781032_.py --- a/migrations/versions/2c7ff9781032_.py +++ b/migrations/versions/2c7ff9781032_.py @@ -20,13 +20,13 @@ # ### commands auto generated by Alembic - please adjust! ### op.drop_column('events', 'order_expiry_time') op.drop_column('events_version', 'order_expiry_time') - op.add_column('settings', sa.Column('order_expiry_time', sa.Integer(), default=15, nullable=False)) + op.add_column('settings', sa.Column('order_expiry_time', sa.Integer(), server_default='15', nullable=False)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('settings', 'order_expiry_time') - op.add_column('events_version', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=False)) - op.add_column('events', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=False)) + op.add_column('events_version', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=True)) + op.add_column('events', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=True)) # ### end Alembic commands ###
{"golden_diff": "diff --git a/migrations/versions/2c7ff9781032_.py b/migrations/versions/2c7ff9781032_.py\n--- a/migrations/versions/2c7ff9781032_.py\n+++ b/migrations/versions/2c7ff9781032_.py\n@@ -20,13 +20,13 @@\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_column('events', 'order_expiry_time')\n op.drop_column('events_version', 'order_expiry_time')\n- op.add_column('settings', sa.Column('order_expiry_time', sa.Integer(), default=15, nullable=False))\n+ op.add_column('settings', sa.Column('order_expiry_time', sa.Integer(), server_default='15', nullable=False))\n # ### end Alembic commands ###\n \n \n def downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_column('settings', 'order_expiry_time')\n- op.add_column('events_version', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=False))\n- op.add_column('events', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=False))\n+ op.add_column('events_version', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=True))\n+ op.add_column('events', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=True))\n # ### end Alembic commands ###\n", "issue": "order_expiry_time migration error\n**Describe the bug**\r\nThe latest migration, `2c7ff9781032` had a bug where the server_default was supposed to be a string. \r\n\r\n\r\n**Additional context**\r\nWorking on it.\n", "before_files": [{"content": "\"\"\"empty message\n\nRevision ID: 2c7ff9781032\nRevises: b08a4ffff5dd\nCreate Date: 2019-06-21 04:33:59.021835\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlalchemy_utils\n\n\n# revision identifiers, used by Alembic.\nrevision = '2c7ff9781032'\ndown_revision = 'b08a4ffff5dd'\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_column('events', 'order_expiry_time')\n op.drop_column('events_version', 'order_expiry_time')\n op.add_column('settings', sa.Column('order_expiry_time', sa.Integer(), default=15, nullable=False))\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_column('settings', 'order_expiry_time')\n op.add_column('events_version', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=False))\n op.add_column('events', sa.Column('order_expiry_time', sa.INTEGER(), autoincrement=False, nullable=False))\n # ### end Alembic commands ###\n", "path": "migrations/versions/2c7ff9781032_.py"}]}
962
343
gh_patches_debug_40616
rasdani/github-patches
git_diff
optuna__optuna-1248
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> `experimental` decorator adds extra indents to docs An extra indent appears in the docs when the [`experimental`](https://github.com/optuna/optuna/blob/2d5f24b06eed56ece72b8dfa878135bb4bb63779/optuna/_experimental.py#L60) decorator is added to a method of a class. This does not happen with a top level (i.e., not nested) class or function. ## Steps to reproduce 1. add an `experimental` decorator to a method of a class 2. make the document ## Reproducible examples The following figure shows the docs of [`Study.enqueue_trial`](https://optuna.readthedocs.io/en/latest/reference/study.html#optuna.study.Study.enqueue_trial). An extra indent appears after the second paragraph. ![image](https://user-images.githubusercontent.com/1061922/81144389-e6f32f80-8fae-11ea-8dd2-33368293dafa.png) ## Expected behavior no extra indent is shown after the second paragraph For example, the docs of [`Study.get_trials`](https://optuna.readthedocs.io/en/latest/reference/study.html#optuna.study.Study.get_trials), which does not have the `experimental` decorator, appears as expected. ![image](https://user-images.githubusercontent.com/1061922/81143489-f1142e80-8fac-11ea-9896-e56086228168.png) The following figure shows another example. The docs of the [`train`](https://optuna.readthedocs.io/en/latest/reference/integration.html#optuna.integration.lightgbm.train) function of LightGBMTuner has no extra indent. Although the function has an `experimental` decorator, it belongs to the top level classes. ![image](https://user-images.githubusercontent.com/1061922/81146606-98945f80-8fb3-11ea-8d67-5cf1cfbf8768.png) </issue> <code> [start of optuna/_experimental.py] 1 import functools 2 import inspect 3 from typing import Any 4 from typing import Callable 5 import warnings 6 7 from optuna.exceptions import ExperimentalWarning 8 9 10 # White spaces of each line are necessary to beautifully rendered documentation. 11 # NOTE(crcrpar): When `experimental` decorator is applied to member methods, these lines require 12 # another four spaces. 13 _EXPERIMENTAL_DOCSTRING_TEMPLATE = """ 14 15 .. note:: 16 Added in v{ver} as an experimental feature. The interface may change in newer versions 17 without prior notice. See https://github.com/optuna/optuna/releases/tag/v{ver}. 18 """ 19 20 21 def _validate_version(version: str) -> None: 22 23 if not isinstance(version, str) or len(version.split(".")) != 3: 24 raise ValueError( 25 "Invalid version specification. Must follow `x.y.z` format but `{}` is given".format( 26 version 27 ) 28 ) 29 30 31 def experimental(version: str, name: str = None) -> Any: 32 """Decorate class or function as experimental. 33 34 Args: 35 version: The first version that supports the target feature. 36 name: The name of the feature. Defaults to the function or class name. Optional. 37 """ 38 39 _validate_version(version) 40 41 def _experimental_wrapper(f: Any) -> Any: 42 # f is either func or class. 43 44 def _experimental_func(func: Callable[[Any], Any]) -> Callable[[Any], Any]: 45 46 docstring = _EXPERIMENTAL_DOCSTRING_TEMPLATE.format(ver=version) 47 if func.__doc__ is None: 48 func.__doc__ = "" 49 func.__doc__ += docstring 50 51 # TODO(crcrpar): Annotate this correctly. 52 @functools.wraps(func) 53 def new_func(*args: Any, **kwargs: Any) -> Any: 54 """Wrapped function.""" 55 56 warnings.warn( 57 "{} is experimental (supported from v{}). " 58 "The interface can change in the future.".format( 59 name if name is not None else func.__name__, version 60 ), 61 ExperimentalWarning, 62 ) 63 64 return func(*args, **kwargs) # type: ignore 65 66 return new_func 67 68 def _experimental_class(cls: Any) -> Any: 69 """Decorates a class as experimental. 70 71 This decorator is supposed to be applied to the experimental class. 72 """ 73 74 _original_init = cls.__init__ 75 76 @functools.wraps(_original_init) 77 def wrapped_init(self, *args, **kwargs) -> None: # type: ignore 78 warnings.warn( 79 "{} is experimental (supported from v{}). " 80 "The interface can change in the future.".format( 81 name if name is not None else cls.__name__, version 82 ), 83 ExperimentalWarning, 84 ) 85 86 _original_init(self, *args, **kwargs) 87 88 cls.__init__ = wrapped_init 89 90 if cls.__doc__ is None: 91 cls.__doc__ = "" 92 cls.__doc__ += _EXPERIMENTAL_DOCSTRING_TEMPLATE.format(ver=version) 93 return cls 94 95 return _experimental_class(f) if inspect.isclass(f) else _experimental_func(f) 96 97 return _experimental_wrapper 98 [end of optuna/_experimental.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/optuna/_experimental.py b/optuna/_experimental.py --- a/optuna/_experimental.py +++ b/optuna/_experimental.py @@ -1,5 +1,6 @@ import functools import inspect +import textwrap from typing import Any from typing import Callable import warnings @@ -7,14 +8,11 @@ from optuna.exceptions import ExperimentalWarning -# White spaces of each line are necessary to beautifully rendered documentation. -# NOTE(crcrpar): When `experimental` decorator is applied to member methods, these lines require -# another four spaces. -_EXPERIMENTAL_DOCSTRING_TEMPLATE = """ +_EXPERIMENTAL_NOTE_TEMPLATE = """ - .. note:: - Added in v{ver} as an experimental feature. The interface may change in newer versions - without prior notice. See https://github.com/optuna/optuna/releases/tag/v{ver}. +.. note:: + Added in v{ver} as an experimental feature. The interface may change in newer versions + without prior notice. See https://github.com/optuna/optuna/releases/tag/v{ver}. """ @@ -28,6 +26,10 @@ ) +def _get_docstring_indent(docstring: str) -> str: + return docstring.split("\n")[-1] if "\n" in docstring else "" + + def experimental(version: str, name: str = None) -> Any: """Decorate class or function as experimental. @@ -43,16 +45,16 @@ def _experimental_func(func: Callable[[Any], Any]) -> Callable[[Any], Any]: - docstring = _EXPERIMENTAL_DOCSTRING_TEMPLATE.format(ver=version) if func.__doc__ is None: func.__doc__ = "" - func.__doc__ += docstring + + note = _EXPERIMENTAL_NOTE_TEMPLATE.format(ver=version) + indent = _get_docstring_indent(func.__doc__) + func.__doc__ = func.__doc__.strip() + textwrap.indent(note, indent) + indent # TODO(crcrpar): Annotate this correctly. @functools.wraps(func) def new_func(*args: Any, **kwargs: Any) -> Any: - """Wrapped function.""" - warnings.warn( "{} is experimental (supported from v{}). " "The interface can change in the future.".format( @@ -70,7 +72,6 @@ This decorator is supposed to be applied to the experimental class. """ - _original_init = cls.__init__ @functools.wraps(_original_init) @@ -89,7 +90,11 @@ if cls.__doc__ is None: cls.__doc__ = "" - cls.__doc__ += _EXPERIMENTAL_DOCSTRING_TEMPLATE.format(ver=version) + + note = _EXPERIMENTAL_NOTE_TEMPLATE.format(ver=version) + indent = _get_docstring_indent(cls.__doc__) + cls.__doc__ = cls.__doc__.strip() + textwrap.indent(note, indent) + indent + return cls return _experimental_class(f) if inspect.isclass(f) else _experimental_func(f)
{"golden_diff": "diff --git a/optuna/_experimental.py b/optuna/_experimental.py\n--- a/optuna/_experimental.py\n+++ b/optuna/_experimental.py\n@@ -1,5 +1,6 @@\n import functools\n import inspect\n+import textwrap\n from typing import Any\n from typing import Callable\n import warnings\n@@ -7,14 +8,11 @@\n from optuna.exceptions import ExperimentalWarning\n \n \n-# White spaces of each line are necessary to beautifully rendered documentation.\n-# NOTE(crcrpar): When `experimental` decorator is applied to member methods, these lines require\n-# another four spaces.\n-_EXPERIMENTAL_DOCSTRING_TEMPLATE = \"\"\"\n+_EXPERIMENTAL_NOTE_TEMPLATE = \"\"\"\n \n- .. note::\n- Added in v{ver} as an experimental feature. The interface may change in newer versions\n- without prior notice. See https://github.com/optuna/optuna/releases/tag/v{ver}.\n+.. note::\n+ Added in v{ver} as an experimental feature. The interface may change in newer versions\n+ without prior notice. See https://github.com/optuna/optuna/releases/tag/v{ver}.\n \"\"\"\n \n \n@@ -28,6 +26,10 @@\n )\n \n \n+def _get_docstring_indent(docstring: str) -> str:\n+ return docstring.split(\"\\n\")[-1] if \"\\n\" in docstring else \"\"\n+\n+\n def experimental(version: str, name: str = None) -> Any:\n \"\"\"Decorate class or function as experimental.\n \n@@ -43,16 +45,16 @@\n \n def _experimental_func(func: Callable[[Any], Any]) -> Callable[[Any], Any]:\n \n- docstring = _EXPERIMENTAL_DOCSTRING_TEMPLATE.format(ver=version)\n if func.__doc__ is None:\n func.__doc__ = \"\"\n- func.__doc__ += docstring\n+\n+ note = _EXPERIMENTAL_NOTE_TEMPLATE.format(ver=version)\n+ indent = _get_docstring_indent(func.__doc__)\n+ func.__doc__ = func.__doc__.strip() + textwrap.indent(note, indent) + indent\n \n # TODO(crcrpar): Annotate this correctly.\n @functools.wraps(func)\n def new_func(*args: Any, **kwargs: Any) -> Any:\n- \"\"\"Wrapped function.\"\"\"\n-\n warnings.warn(\n \"{} is experimental (supported from v{}). \"\n \"The interface can change in the future.\".format(\n@@ -70,7 +72,6 @@\n \n This decorator is supposed to be applied to the experimental class.\n \"\"\"\n-\n _original_init = cls.__init__\n \n @functools.wraps(_original_init)\n@@ -89,7 +90,11 @@\n \n if cls.__doc__ is None:\n cls.__doc__ = \"\"\n- cls.__doc__ += _EXPERIMENTAL_DOCSTRING_TEMPLATE.format(ver=version)\n+\n+ note = _EXPERIMENTAL_NOTE_TEMPLATE.format(ver=version)\n+ indent = _get_docstring_indent(cls.__doc__)\n+ cls.__doc__ = cls.__doc__.strip() + textwrap.indent(note, indent) + indent\n+\n return cls\n \n return _experimental_class(f) if inspect.isclass(f) else _experimental_func(f)\n", "issue": "`experimental` decorator adds extra indents to docs\nAn extra indent appears in the docs when the [`experimental`](https://github.com/optuna/optuna/blob/2d5f24b06eed56ece72b8dfa878135bb4bb63779/optuna/_experimental.py#L60) decorator is added to a method of a class. This does not happen with a top level (i.e., not nested) class or function.\r\n\r\n## Steps to reproduce\r\n\r\n1. add an `experimental` decorator to a method of a class\r\n2. make the document\r\n\r\n## Reproducible examples\r\nThe following figure shows the docs of [`Study.enqueue_trial`](https://optuna.readthedocs.io/en/latest/reference/study.html#optuna.study.Study.enqueue_trial). An extra indent appears after the second paragraph.\r\n\r\n![image](https://user-images.githubusercontent.com/1061922/81144389-e6f32f80-8fae-11ea-8dd2-33368293dafa.png)\r\n\r\n## Expected behavior\r\nno extra indent is shown after the second paragraph\r\n\r\nFor example, the docs of [`Study.get_trials`](https://optuna.readthedocs.io/en/latest/reference/study.html#optuna.study.Study.get_trials), which does not have the `experimental` decorator, appears as expected.\r\n\r\n![image](https://user-images.githubusercontent.com/1061922/81143489-f1142e80-8fac-11ea-9896-e56086228168.png)\r\n\r\nThe following figure shows another example. The docs of the [`train`](https://optuna.readthedocs.io/en/latest/reference/integration.html#optuna.integration.lightgbm.train) function of LightGBMTuner has no extra indent. Although the function has an `experimental` decorator, it belongs to the top level classes.\r\n\r\n![image](https://user-images.githubusercontent.com/1061922/81146606-98945f80-8fb3-11ea-8d67-5cf1cfbf8768.png)\r\n\n", "before_files": [{"content": "import functools\nimport inspect\nfrom typing import Any\nfrom typing import Callable\nimport warnings\n\nfrom optuna.exceptions import ExperimentalWarning\n\n\n# White spaces of each line are necessary to beautifully rendered documentation.\n# NOTE(crcrpar): When `experimental` decorator is applied to member methods, these lines require\n# another four spaces.\n_EXPERIMENTAL_DOCSTRING_TEMPLATE = \"\"\"\n\n .. note::\n Added in v{ver} as an experimental feature. The interface may change in newer versions\n without prior notice. See https://github.com/optuna/optuna/releases/tag/v{ver}.\n\"\"\"\n\n\ndef _validate_version(version: str) -> None:\n\n if not isinstance(version, str) or len(version.split(\".\")) != 3:\n raise ValueError(\n \"Invalid version specification. Must follow `x.y.z` format but `{}` is given\".format(\n version\n )\n )\n\n\ndef experimental(version: str, name: str = None) -> Any:\n \"\"\"Decorate class or function as experimental.\n\n Args:\n version: The first version that supports the target feature.\n name: The name of the feature. Defaults to the function or class name. Optional.\n \"\"\"\n\n _validate_version(version)\n\n def _experimental_wrapper(f: Any) -> Any:\n # f is either func or class.\n\n def _experimental_func(func: Callable[[Any], Any]) -> Callable[[Any], Any]:\n\n docstring = _EXPERIMENTAL_DOCSTRING_TEMPLATE.format(ver=version)\n if func.__doc__ is None:\n func.__doc__ = \"\"\n func.__doc__ += docstring\n\n # TODO(crcrpar): Annotate this correctly.\n @functools.wraps(func)\n def new_func(*args: Any, **kwargs: Any) -> Any:\n \"\"\"Wrapped function.\"\"\"\n\n warnings.warn(\n \"{} is experimental (supported from v{}). \"\n \"The interface can change in the future.\".format(\n name if name is not None else func.__name__, version\n ),\n ExperimentalWarning,\n )\n\n return func(*args, **kwargs) # type: ignore\n\n return new_func\n\n def _experimental_class(cls: Any) -> Any:\n \"\"\"Decorates a class as experimental.\n\n This decorator is supposed to be applied to the experimental class.\n \"\"\"\n\n _original_init = cls.__init__\n\n @functools.wraps(_original_init)\n def wrapped_init(self, *args, **kwargs) -> None: # type: ignore\n warnings.warn(\n \"{} is experimental (supported from v{}). \"\n \"The interface can change in the future.\".format(\n name if name is not None else cls.__name__, version\n ),\n ExperimentalWarning,\n )\n\n _original_init(self, *args, **kwargs)\n\n cls.__init__ = wrapped_init\n\n if cls.__doc__ is None:\n cls.__doc__ = \"\"\n cls.__doc__ += _EXPERIMENTAL_DOCSTRING_TEMPLATE.format(ver=version)\n return cls\n\n return _experimental_class(f) if inspect.isclass(f) else _experimental_func(f)\n\n return _experimental_wrapper\n", "path": "optuna/_experimental.py"}]}
1,899
712
gh_patches_debug_14346
rasdani/github-patches
git_diff
svthalia__concrexit-2857
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Become a member menu item is missing! ### Describe the bug Become a member menu item is missing! ### How to reproduce 1. Check staging, the menu item is missing ### Expected behaviour It should be there ### Screenshots If applicable, add screenshots to help explain your problem. ### Additional context Probably this was missed in #2728 </issue> <code> [start of website/registrations/apps.py] 1 """Configuration for the newsletters package.""" 2 from django.apps import AppConfig 3 from django.utils.translation import gettext_lazy as _ 4 5 6 class RegistrationsConfig(AppConfig): 7 """AppConfig for the registrations package.""" 8 9 name = "registrations" 10 verbose_name = _("Registrations") 11 12 def ready(self): 13 """Import the signals when the app is ready.""" 14 # pylint: disable=unused-import,import-outside-toplevel 15 from . import signals 16 from .payables import register 17 18 register() 19 [end of website/registrations/apps.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/website/registrations/apps.py b/website/registrations/apps.py --- a/website/registrations/apps.py +++ b/website/registrations/apps.py @@ -1,5 +1,6 @@ """Configuration for the newsletters package.""" from django.apps import AppConfig +from django.urls import reverse from django.utils.translation import gettext_lazy as _ @@ -16,3 +17,16 @@ from .payables import register register() + + def menu_items(self): + return { + "categories": [{"name": "association", "title": "Association", "key": 1}], + "items": [ + { + "category": "association", + "title": "Become a member", + "url": reverse("registrations:index"), + "key": 6, + }, + ], + }
{"golden_diff": "diff --git a/website/registrations/apps.py b/website/registrations/apps.py\n--- a/website/registrations/apps.py\n+++ b/website/registrations/apps.py\n@@ -1,5 +1,6 @@\n \"\"\"Configuration for the newsletters package.\"\"\"\n from django.apps import AppConfig\n+from django.urls import reverse\n from django.utils.translation import gettext_lazy as _\n \n \n@@ -16,3 +17,16 @@\n from .payables import register\n \n register()\n+\n+ def menu_items(self):\n+ return {\n+ \"categories\": [{\"name\": \"association\", \"title\": \"Association\", \"key\": 1}],\n+ \"items\": [\n+ {\n+ \"category\": \"association\",\n+ \"title\": \"Become a member\",\n+ \"url\": reverse(\"registrations:index\"),\n+ \"key\": 6,\n+ },\n+ ],\n+ }\n", "issue": "Become a member menu item is missing!\n### Describe the bug\r\nBecome a member menu item is missing!\r\n\r\n### How to reproduce\r\n1. Check staging, the menu item is missing\r\n\r\n\r\n### Expected behaviour\r\nIt should be there\r\n\r\n### Screenshots\r\nIf applicable, add screenshots to help explain your problem.\r\n\r\n### Additional context\r\nProbably this was missed in #2728 \n", "before_files": [{"content": "\"\"\"Configuration for the newsletters package.\"\"\"\nfrom django.apps import AppConfig\nfrom django.utils.translation import gettext_lazy as _\n\n\nclass RegistrationsConfig(AppConfig):\n \"\"\"AppConfig for the registrations package.\"\"\"\n\n name = \"registrations\"\n verbose_name = _(\"Registrations\")\n\n def ready(self):\n \"\"\"Import the signals when the app is ready.\"\"\"\n # pylint: disable=unused-import,import-outside-toplevel\n from . import signals\n from .payables import register\n\n register()\n", "path": "website/registrations/apps.py"}]}
750
195
gh_patches_debug_13199
rasdani/github-patches
git_diff
Nitrate__Nitrate-671
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Replace smart_text() with smart_str() To suppress warning: `RemovedInDjango40Warning: smart_text() is deprecated in favor of smart_str()` Run command `runserver`, find out the warning messages from output. </issue> <code> [start of src/tcms/logs/views.py] 1 # -*- coding: utf-8 -*- 2 3 from django.conf import settings 4 from django.contrib.contenttypes.models import ContentType 5 6 from .models import TCMSLogModel 7 8 from django.utils.encoding import smart_text as smart_unicode 9 10 11 class TCMSLog: 12 """TCMS Log""" 13 14 def __init__(self, model): 15 super().__init__() 16 self.model = model 17 18 def get_new_log_object(self): 19 elements = ['who', 'field', 'original_value', 'new_value'] 20 21 for element in elements: 22 if not hasattr(self, element): 23 raise NotImplementedError( 24 f'Log does not have attribute {element}') 25 26 model = self.get_log_model() 27 new = model(**self.get_log_create_data()) 28 29 return new 30 31 def get_log_model(self): 32 """ 33 Get the log model to create with this class. 34 """ 35 return TCMSLogModel 36 37 def get_log_create_data(self): 38 return { 39 'content_object': self.model, 40 'site_id': settings.SITE_ID, 41 'who': self.who, 42 'field': self.field, 43 'original_value': self.original_value, 44 'new_value': self.new_value, 45 } 46 47 def make(self, who, new_value, field=None, original_value=None): 48 """Create new log""" 49 self.who = who 50 self.field = field or '' 51 self.original_value = original_value or '' 52 self.new_value = new_value 53 54 model = self.get_new_log_object() 55 model.save() 56 57 def lookup_content_type(self): 58 return ContentType.objects.get_for_model(self.model) 59 60 def get_query_set(self): 61 ctype = self.lookup_content_type() 62 model = self.get_log_model() 63 64 qs = model.objects.filter(content_type=ctype, 65 object_pk=smart_unicode(self.model.pk), 66 site=settings.SITE_ID) 67 qs = qs.select_related('who') 68 return qs 69 70 def list(self): 71 """List the logs""" 72 return self.get_query_set().all() 73 [end of src/tcms/logs/views.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/tcms/logs/views.py b/src/tcms/logs/views.py --- a/src/tcms/logs/views.py +++ b/src/tcms/logs/views.py @@ -5,7 +5,7 @@ from .models import TCMSLogModel -from django.utils.encoding import smart_text as smart_unicode +from django.utils.encoding import smart_str class TCMSLog: @@ -62,7 +62,7 @@ model = self.get_log_model() qs = model.objects.filter(content_type=ctype, - object_pk=smart_unicode(self.model.pk), + object_pk=smart_str(self.model.pk), site=settings.SITE_ID) qs = qs.select_related('who') return qs
{"golden_diff": "diff --git a/src/tcms/logs/views.py b/src/tcms/logs/views.py\n--- a/src/tcms/logs/views.py\n+++ b/src/tcms/logs/views.py\n@@ -5,7 +5,7 @@\n \n from .models import TCMSLogModel\n \n-from django.utils.encoding import smart_text as smart_unicode\n+from django.utils.encoding import smart_str\n \n \n class TCMSLog:\n@@ -62,7 +62,7 @@\n model = self.get_log_model()\n \n qs = model.objects.filter(content_type=ctype,\n- object_pk=smart_unicode(self.model.pk),\n+ object_pk=smart_str(self.model.pk),\n site=settings.SITE_ID)\n qs = qs.select_related('who')\n return qs\n", "issue": "Replace smart_text() with smart_str()\nTo suppress warning: `RemovedInDjango40Warning: smart_text() is deprecated in favor of smart_str()`\r\n\r\nRun command `runserver`, find out the warning messages from output.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nfrom django.conf import settings\nfrom django.contrib.contenttypes.models import ContentType\n\nfrom .models import TCMSLogModel\n\nfrom django.utils.encoding import smart_text as smart_unicode\n\n\nclass TCMSLog:\n \"\"\"TCMS Log\"\"\"\n\n def __init__(self, model):\n super().__init__()\n self.model = model\n\n def get_new_log_object(self):\n elements = ['who', 'field', 'original_value', 'new_value']\n\n for element in elements:\n if not hasattr(self, element):\n raise NotImplementedError(\n f'Log does not have attribute {element}')\n\n model = self.get_log_model()\n new = model(**self.get_log_create_data())\n\n return new\n\n def get_log_model(self):\n \"\"\"\n Get the log model to create with this class.\n \"\"\"\n return TCMSLogModel\n\n def get_log_create_data(self):\n return {\n 'content_object': self.model,\n 'site_id': settings.SITE_ID,\n 'who': self.who,\n 'field': self.field,\n 'original_value': self.original_value,\n 'new_value': self.new_value,\n }\n\n def make(self, who, new_value, field=None, original_value=None):\n \"\"\"Create new log\"\"\"\n self.who = who\n self.field = field or ''\n self.original_value = original_value or ''\n self.new_value = new_value\n\n model = self.get_new_log_object()\n model.save()\n\n def lookup_content_type(self):\n return ContentType.objects.get_for_model(self.model)\n\n def get_query_set(self):\n ctype = self.lookup_content_type()\n model = self.get_log_model()\n\n qs = model.objects.filter(content_type=ctype,\n object_pk=smart_unicode(self.model.pk),\n site=settings.SITE_ID)\n qs = qs.select_related('who')\n return qs\n\n def list(self):\n \"\"\"List the logs\"\"\"\n return self.get_query_set().all()\n", "path": "src/tcms/logs/views.py"}]}
1,152
157
gh_patches_debug_10819
rasdani/github-patches
git_diff
aws-powertools__powertools-lambda-python-921
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> DeprecationWarning for distutils in Python 3.10 (will be removed in Python 3.12) <!--- Provide a general summary of the issue in the Title above --> <!--- How has this issue affected you? What are you trying to accomplish? --> **What were you trying to accomplish?** Run tests utilizing aws-lambda-powertools. ## Expected Behavior <!--- If you're describing a bug, tell us what should happen --> <!--- If you're suggesting a change/improvement, tell us how it should work --> No DeprecationWarning to appear. ## Current Behavior <!--- If describing a bug, tell us what happens instead of the expected behavior --> <!--- If suggesting a change/improvement, explain the difference from current behavior --> When running tests (specifically using `pytest`) that import `aws_lamdba_powertools`, a `DeprecationWarning` appears. ``` .../aws_lambda_powertools/shared/functions.py:1: DeprecationWarning: The distutils package is deprecated and slated for removal in Python 3.12. Use setuptools or check PEP 632 for potential alternatives from distutils.util import strtobool ``` ## Possible Solution <!--- Not obligatory, but suggest a fix/reason for the bug, --> <!--- or ideas how to implement the addition or change --> The only function used out of distutils is `strtobool`, and it is only used in one place, so that logic can simply be copied into the `resolve_truthy_env_var_choice` function in `aws_lambda_powertools.shared.functions`. ## Steps to Reproduce (for bugs) <!--- Provide a link to a live example, or an unambiguous set of steps to --> <!--- reproduce this bug. Include code to reproduce, if relevant --> 1. Use python3.10 2. Activate a new venv (`python3.10 -m venv .venv` and `. .venv/bin/activate`) 3. `pip install aws-lambda-powertools pytest` 4. `echo "import aws_lambda_powertools" > reproduce.py` 5. `pytest reproduce.py` ## Environment * **Powertools version used**: `1.22.0` * **Packaging format (Layers, PyPi)**: PyPi This warning appears in development workflows, but will be eventually breaking when Python 3.12 is released. </issue> <code> [start of aws_lambda_powertools/shared/functions.py] 1 from distutils.util import strtobool 2 from typing import Any, Optional, Union 3 4 5 def resolve_truthy_env_var_choice(env: str, choice: Optional[bool] = None) -> bool: 6 """Pick explicit choice over truthy env value, if available, otherwise return truthy env value 7 8 NOTE: Environment variable should be resolved by the caller. 9 10 Parameters 11 ---------- 12 env : str 13 environment variable actual value 14 choice : bool 15 explicit choice 16 17 Returns 18 ------- 19 choice : str 20 resolved choice as either bool or environment value 21 """ 22 return choice if choice is not None else strtobool(env) 23 24 25 def resolve_env_var_choice(env: Any, choice: Optional[Any] = None) -> Union[bool, Any]: 26 """Pick explicit choice over env, if available, otherwise return env value received 27 28 NOTE: Environment variable should be resolved by the caller. 29 30 Parameters 31 ---------- 32 env : Any 33 environment variable actual value 34 choice : bool 35 explicit choice 36 37 Returns 38 ------- 39 choice : str 40 resolved choice as either bool or environment value 41 """ 42 return choice if choice is not None else env 43 [end of aws_lambda_powertools/shared/functions.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py --- a/aws_lambda_powertools/shared/functions.py +++ b/aws_lambda_powertools/shared/functions.py @@ -1,7 +1,16 @@ -from distutils.util import strtobool from typing import Any, Optional, Union +def strtobool(value): + value = value.lower() + if value in ("y", "yes", "t", "true", "on", "1"): + return 1 + elif value in ("n", "no", "f", "false", "off", "0"): + return 0 + else: + raise ValueError("invalid truth value %r" % (value,)) + + def resolve_truthy_env_var_choice(env: str, choice: Optional[bool] = None) -> bool: """Pick explicit choice over truthy env value, if available, otherwise return truthy env value
{"golden_diff": "diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py\n--- a/aws_lambda_powertools/shared/functions.py\n+++ b/aws_lambda_powertools/shared/functions.py\n@@ -1,7 +1,16 @@\n-from distutils.util import strtobool\n from typing import Any, Optional, Union\n \n \n+def strtobool(value):\n+ value = value.lower()\n+ if value in (\"y\", \"yes\", \"t\", \"true\", \"on\", \"1\"):\n+ return 1\n+ elif value in (\"n\", \"no\", \"f\", \"false\", \"off\", \"0\"):\n+ return 0\n+ else:\n+ raise ValueError(\"invalid truth value %r\" % (value,))\n+\n+\n def resolve_truthy_env_var_choice(env: str, choice: Optional[bool] = None) -> bool:\n \"\"\"Pick explicit choice over truthy env value, if available, otherwise return truthy env value\n", "issue": "DeprecationWarning for distutils in Python 3.10 (will be removed in Python 3.12)\n<!--- Provide a general summary of the issue in the Title above -->\r\n<!--- How has this issue affected you? What are you trying to accomplish? -->\r\n\r\n**What were you trying to accomplish?**\r\n\r\nRun tests utilizing aws-lambda-powertools.\r\n\r\n## Expected Behavior\r\n<!--- If you're describing a bug, tell us what should happen -->\r\n<!--- If you're suggesting a change/improvement, tell us how it should work -->\r\n\r\nNo DeprecationWarning to appear.\r\n\r\n## Current Behavior\r\n<!--- If describing a bug, tell us what happens instead of the expected behavior -->\r\n<!--- If suggesting a change/improvement, explain the difference from current behavior -->\r\n\r\nWhen running tests (specifically using `pytest`) that import `aws_lamdba_powertools`, a `DeprecationWarning` appears.\r\n\r\n```\r\n.../aws_lambda_powertools/shared/functions.py:1: DeprecationWarning: The distutils package is deprecated and slated for removal in Python 3.12. Use setuptools or check PEP 632 for potential alternatives\r\n from distutils.util import strtobool\r\n```\r\n\r\n## Possible Solution\r\n<!--- Not obligatory, but suggest a fix/reason for the bug, -->\r\n<!--- or ideas how to implement the addition or change -->\r\n\r\nThe only function used out of distutils is `strtobool`, and it is only used in one place, so that logic can simply be copied into the `resolve_truthy_env_var_choice` function in `aws_lambda_powertools.shared.functions`.\r\n\r\n## Steps to Reproduce (for bugs)\r\n<!--- Provide a link to a live example, or an unambiguous set of steps to -->\r\n<!--- reproduce this bug. Include code to reproduce, if relevant -->\r\n1. Use python3.10\r\n2. Activate a new venv (`python3.10 -m venv .venv` and `. .venv/bin/activate`)\r\n3. `pip install aws-lambda-powertools pytest`\r\n4. `echo \"import aws_lambda_powertools\" > reproduce.py`\r\n5. `pytest reproduce.py`\r\n\r\n## Environment\r\n\r\n* **Powertools version used**: `1.22.0`\r\n* **Packaging format (Layers, PyPi)**: PyPi\r\n\r\nThis warning appears in development workflows, but will be eventually breaking when Python 3.12 is released.\n", "before_files": [{"content": "from distutils.util import strtobool\nfrom typing import Any, Optional, Union\n\n\ndef resolve_truthy_env_var_choice(env: str, choice: Optional[bool] = None) -> bool:\n \"\"\"Pick explicit choice over truthy env value, if available, otherwise return truthy env value\n\n NOTE: Environment variable should be resolved by the caller.\n\n Parameters\n ----------\n env : str\n environment variable actual value\n choice : bool\n explicit choice\n\n Returns\n -------\n choice : str\n resolved choice as either bool or environment value\n \"\"\"\n return choice if choice is not None else strtobool(env)\n\n\ndef resolve_env_var_choice(env: Any, choice: Optional[Any] = None) -> Union[bool, Any]:\n \"\"\"Pick explicit choice over env, if available, otherwise return env value received\n\n NOTE: Environment variable should be resolved by the caller.\n\n Parameters\n ----------\n env : Any\n environment variable actual value\n choice : bool\n explicit choice\n\n Returns\n -------\n choice : str\n resolved choice as either bool or environment value\n \"\"\"\n return choice if choice is not None else env\n", "path": "aws_lambda_powertools/shared/functions.py"}]}
1,382
208
gh_patches_debug_13051
rasdani/github-patches
git_diff
ckan__ckan-7871
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> current_user.is_anonymous doesn't care if context has an user ## CKAN version 2.10 ## Describe the bug While looking into why https://github.com/ckan/ckan/pull/7266 started failing on unmodified ckan, I noticed that deciphering anonymous users has changed. On 2.9 `restrict_anon` works as follows https://github.com/ckan/ckan/blob/c4e2818818e08e60bb69d64229f8dbba531f8439/ckan/logic/auth/__init__.py#L51-L55 where `authz.auth_is_anon_user` checks if the context has an user in it. On 2.10 the functionality has changed: https://github.com/ckan/ckan/blob/d46613e346f9dc551aedb54c8c24baad919f78c1/ckan/logic/auth/__init__.py#L93-L97 which does not check for context at all and the password reset started failing on 2.10. Should the `is_anonymous` check for user in context or are we just relying on what flask login says about the user? </issue> <code> [start of ckan/logic/auth/__init__.py] 1 # encoding: utf-8 2 3 ''' 4 Helper functions to be used in the auth check functions 5 ''' 6 7 from typing import Any, Optional, TYPE_CHECKING, overload 8 from typing_extensions import Literal 9 10 import ckan.logic as logic 11 from ckan.types import Context, AuthResult, DataDict 12 from ckan.common import current_user 13 14 if TYPE_CHECKING: 15 import ckan.model as model_ 16 17 18 @overload 19 def _get_object(context: Context, 20 data_dict: Optional[DataDict], name: str, 21 class_name: Literal['Package']) -> 'model_.Package': 22 ... 23 24 25 @overload 26 def _get_object(context: Context, 27 data_dict: Optional[DataDict], name: str, 28 class_name: Literal['Resource']) -> 'model_.Resource': 29 ... 30 31 32 @overload 33 def _get_object(context: Context, 34 data_dict: Optional[DataDict], name: str, 35 class_name: Literal['Group']) -> 'model_.Group': 36 ... 37 38 39 @overload 40 def _get_object(context: Context, 41 data_dict: Optional[DataDict], name: str, 42 class_name: Literal['User']) -> 'model_.User': 43 ... 44 45 46 def _get_object(context: Context, 47 data_dict: Optional[DataDict], name: str, 48 class_name: str) -> Any: 49 # return the named item from model.class_name 50 model = context['model'] 51 if not data_dict: 52 data_dict = {} 53 id = data_dict.get('id', None) 54 if not id: 55 raise logic.ValidationError({ 56 "message": 'Missing id, can not get {0} object'.format( 57 class_name)}) 58 obj = getattr(model, class_name).get(id) 59 if not obj: 60 raise logic.NotFound 61 return obj 62 63 64 def get_package_object( 65 context: Context, 66 data_dict: Optional[DataDict] = None) -> 'model_.Package': 67 return _get_object(context, data_dict, 'package', 'Package') 68 69 70 def get_resource_object( 71 context: Context, 72 data_dict: Optional[DataDict] = None) -> 'model_.Resource': 73 return _get_object(context, data_dict, 'resource', 'Resource') 74 75 76 def get_group_object( 77 context: Context, 78 data_dict: Optional[DataDict] = None) -> 'model_.Group': 79 return _get_object(context, data_dict, 'group', 'Group') 80 81 82 def get_user_object( 83 context: Context, 84 data_dict: Optional[DataDict] = None) -> 'model_.User': 85 return _get_object(context, data_dict, 'user_obj', 'User') 86 87 88 def restrict_anon(context: Context) -> AuthResult: 89 if current_user.is_anonymous: 90 return {'success': False} 91 else: 92 return {'success': True} 93 [end of ckan/logic/auth/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ckan/logic/auth/__init__.py b/ckan/logic/auth/__init__.py --- a/ckan/logic/auth/__init__.py +++ b/ckan/logic/auth/__init__.py @@ -8,8 +8,8 @@ from typing_extensions import Literal import ckan.logic as logic +import ckan.authz as authz from ckan.types import Context, AuthResult, DataDict -from ckan.common import current_user if TYPE_CHECKING: import ckan.model as model_ @@ -86,7 +86,7 @@ def restrict_anon(context: Context) -> AuthResult: - if current_user.is_anonymous: + if authz.auth_is_anon_user(context): return {'success': False} else: return {'success': True}
{"golden_diff": "diff --git a/ckan/logic/auth/__init__.py b/ckan/logic/auth/__init__.py\n--- a/ckan/logic/auth/__init__.py\n+++ b/ckan/logic/auth/__init__.py\n@@ -8,8 +8,8 @@\n from typing_extensions import Literal\n \n import ckan.logic as logic\n+import ckan.authz as authz\n from ckan.types import Context, AuthResult, DataDict\n-from ckan.common import current_user\n \n if TYPE_CHECKING:\n import ckan.model as model_\n@@ -86,7 +86,7 @@\n \n \n def restrict_anon(context: Context) -> AuthResult:\n- if current_user.is_anonymous:\n+ if authz.auth_is_anon_user(context):\n return {'success': False}\n else:\n return {'success': True}\n", "issue": "current_user.is_anonymous doesn't care if context has an user\n## CKAN version\r\n2.10\r\n\r\n## Describe the bug\r\nWhile looking into why https://github.com/ckan/ckan/pull/7266 started failing on unmodified ckan, I noticed that deciphering anonymous users has changed. On 2.9 `restrict_anon` works as follows \r\nhttps://github.com/ckan/ckan/blob/c4e2818818e08e60bb69d64229f8dbba531f8439/ckan/logic/auth/__init__.py#L51-L55\r\n\r\nwhere `authz.auth_is_anon_user` checks if the context has an user in it. On 2.10 the functionality has changed:\r\n\r\nhttps://github.com/ckan/ckan/blob/d46613e346f9dc551aedb54c8c24baad919f78c1/ckan/logic/auth/__init__.py#L93-L97\r\n\r\nwhich does not check for context at all and the password reset started failing on 2.10. Should the `is_anonymous` check for user in context or are we just relying on what flask login says about the user? \r\n\r\n\n", "before_files": [{"content": "# encoding: utf-8\n\n'''\nHelper functions to be used in the auth check functions\n'''\n\nfrom typing import Any, Optional, TYPE_CHECKING, overload\nfrom typing_extensions import Literal\n\nimport ckan.logic as logic\nfrom ckan.types import Context, AuthResult, DataDict\nfrom ckan.common import current_user\n\nif TYPE_CHECKING:\n import ckan.model as model_\n\n\n@overload\ndef _get_object(context: Context,\n data_dict: Optional[DataDict], name: str,\n class_name: Literal['Package']) -> 'model_.Package':\n ...\n\n\n@overload\ndef _get_object(context: Context,\n data_dict: Optional[DataDict], name: str,\n class_name: Literal['Resource']) -> 'model_.Resource':\n ...\n\n\n@overload\ndef _get_object(context: Context,\n data_dict: Optional[DataDict], name: str,\n class_name: Literal['Group']) -> 'model_.Group':\n ...\n\n\n@overload\ndef _get_object(context: Context,\n data_dict: Optional[DataDict], name: str,\n class_name: Literal['User']) -> 'model_.User':\n ...\n\n\ndef _get_object(context: Context,\n data_dict: Optional[DataDict], name: str,\n class_name: str) -> Any:\n # return the named item from model.class_name\n model = context['model']\n if not data_dict:\n data_dict = {}\n id = data_dict.get('id', None)\n if not id:\n raise logic.ValidationError({\n \"message\": 'Missing id, can not get {0} object'.format(\n class_name)})\n obj = getattr(model, class_name).get(id)\n if not obj:\n raise logic.NotFound\n return obj\n\n\ndef get_package_object(\n context: Context,\n data_dict: Optional[DataDict] = None) -> 'model_.Package':\n return _get_object(context, data_dict, 'package', 'Package')\n\n\ndef get_resource_object(\n context: Context,\n data_dict: Optional[DataDict] = None) -> 'model_.Resource':\n return _get_object(context, data_dict, 'resource', 'Resource')\n\n\ndef get_group_object(\n context: Context,\n data_dict: Optional[DataDict] = None) -> 'model_.Group':\n return _get_object(context, data_dict, 'group', 'Group')\n\n\ndef get_user_object(\n context: Context,\n data_dict: Optional[DataDict] = None) -> 'model_.User':\n return _get_object(context, data_dict, 'user_obj', 'User')\n\n\ndef restrict_anon(context: Context) -> AuthResult:\n if current_user.is_anonymous:\n return {'success': False}\n else:\n return {'success': True}\n", "path": "ckan/logic/auth/__init__.py"}]}
1,624
183
gh_patches_debug_18077
rasdani/github-patches
git_diff
freedomofpress__securedrop-3258
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [qt-journalist-updater] Add a time estimate to the update progress bar ## Description [See epic #3076 for primary feature, PRs for this ticket should go into `qt-journalist-updater`] We should add a reasonable time estimate to the update progress bar as the upgrade progresses ## User Stories As a journalist user, I want an estimate of how long it will take until updates are finished so that I can plan. </issue> <code> [start of journalist_gui/journalist_gui/strings.py] 1 window_title = 'SecureDrop Updater' 2 update_in_progress = ("SecureDrop workstation updates are available! " 3 "You should install them now. If you don\'t want to, " 4 "you can install them the next time you reboot.") 5 fetching_update = 'Fetching and verifying latest update...' 6 updating_tails_env = 'Configuring local Tails environment...' 7 finished = 'Update successfully completed!' 8 finished_dialog_message = 'Updates completed successfully. Click OK to close.' 9 finished_dialog_title = 'SecureDrop Workstation is up to date!' 10 update_failed_dialog_title = 'Error Updating SecureDrop Workstation' 11 update_failed_generic_reason = ("Update failed. " 12 "Please contact your SecureDrop " 13 "administrator.") 14 update_failed_sig_failure = ("WARNING: Signature verification failed. " 15 "Contact your SecureDrop administrator " 16 "immediately.") 17 tailsconfig_failed_sudo_password = 'Sudo password incorrect' 18 tailsconfig_failed_generic_reason = ("Tails workstation configuration failed. " 19 "Contact your administrator.") 20 install_update_button = 'Install Now' 21 install_later_button = 'Install Later' 22 [end of journalist_gui/journalist_gui/strings.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/journalist_gui/journalist_gui/strings.py b/journalist_gui/journalist_gui/strings.py --- a/journalist_gui/journalist_gui/strings.py +++ b/journalist_gui/journalist_gui/strings.py @@ -2,8 +2,10 @@ update_in_progress = ("SecureDrop workstation updates are available! " "You should install them now. If you don\'t want to, " "you can install them the next time you reboot.") -fetching_update = 'Fetching and verifying latest update...' -updating_tails_env = 'Configuring local Tails environment...' +fetching_update = ('Fetching and verifying latest update...' + ' (4 mins remaining)') +updating_tails_env = ('Configuring local Tails environment...' + ' (1 min remaining)') finished = 'Update successfully completed!' finished_dialog_message = 'Updates completed successfully. Click OK to close.' finished_dialog_title = 'SecureDrop Workstation is up to date!'
{"golden_diff": "diff --git a/journalist_gui/journalist_gui/strings.py b/journalist_gui/journalist_gui/strings.py\n--- a/journalist_gui/journalist_gui/strings.py\n+++ b/journalist_gui/journalist_gui/strings.py\n@@ -2,8 +2,10 @@\n update_in_progress = (\"SecureDrop workstation updates are available! \"\n \"You should install them now. If you don\\'t want to, \"\n \"you can install them the next time you reboot.\")\n-fetching_update = 'Fetching and verifying latest update...'\n-updating_tails_env = 'Configuring local Tails environment...'\n+fetching_update = ('Fetching and verifying latest update...'\n+ ' (4 mins remaining)')\n+updating_tails_env = ('Configuring local Tails environment...'\n+ ' (1 min remaining)')\n finished = 'Update successfully completed!'\n finished_dialog_message = 'Updates completed successfully. Click OK to close.'\n finished_dialog_title = 'SecureDrop Workstation is up to date!'\n", "issue": "[qt-journalist-updater] Add a time estimate to the update progress bar\n## Description\r\n\r\n[See epic #3076 for primary feature, PRs for this ticket should go into `qt-journalist-updater`]\r\n\r\nWe should add a reasonable time estimate to the update progress bar as the upgrade progresses\r\n\r\n## User Stories\r\n\r\nAs a journalist user, I want an estimate of how long it will take until updates are finished so that I can plan. \n", "before_files": [{"content": "window_title = 'SecureDrop Updater'\nupdate_in_progress = (\"SecureDrop workstation updates are available! \"\n \"You should install them now. If you don\\'t want to, \"\n \"you can install them the next time you reboot.\")\nfetching_update = 'Fetching and verifying latest update...'\nupdating_tails_env = 'Configuring local Tails environment...'\nfinished = 'Update successfully completed!'\nfinished_dialog_message = 'Updates completed successfully. Click OK to close.'\nfinished_dialog_title = 'SecureDrop Workstation is up to date!'\nupdate_failed_dialog_title = 'Error Updating SecureDrop Workstation'\nupdate_failed_generic_reason = (\"Update failed. \"\n \"Please contact your SecureDrop \"\n \"administrator.\")\nupdate_failed_sig_failure = (\"WARNING: Signature verification failed. \"\n \"Contact your SecureDrop administrator \"\n \"immediately.\")\ntailsconfig_failed_sudo_password = 'Sudo password incorrect'\ntailsconfig_failed_generic_reason = (\"Tails workstation configuration failed. \"\n \"Contact your administrator.\")\ninstall_update_button = 'Install Now'\ninstall_later_button = 'Install Later'\n", "path": "journalist_gui/journalist_gui/strings.py"}]}
896
212
gh_patches_debug_10610
rasdani/github-patches
git_diff
spacetelescope__jwql-465
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Import environment bokeh version in HTML As described in #425, having the environment Bokeh version NOT match the HTML template Bokeh version can cause problems. I wrote some tests #429 that will test and make sure this is always the case in the future, but we could be even smarter about this if we programmatically set the Bokeh version using a Django variable. We could even include it in `context_processors` so it happens behind the scenes. </issue> <code> [start of jwql/website/apps/jwql/context_processors.py] 1 """Provides functions that define context inherent to all views. 2 3 The functions within this module define ``context`` that will be 4 included in requests, in addition to any specific ``context`` provided 5 in the view. 6 7 Authors 8 ------- 9 10 - Matthew Bourque 11 12 Use 13 --- 14 15 This module is defined under the ``TEMPLATES.OPTIONS`` setting in 16 ``settings.py``, e.g.: 17 :: 18 19 TEMPLATES = [ 20 {'OPTIONS': {'context_processors': ['jwql.website.apps.jwql.context_processors.base_context'],},} 21 ] 22 23 As such, it will automatically be executed upon each request. 24 """ 25 26 import jwql 27 from jwql.utils.constants import JWST_INSTRUMENT_NAMES, MONITORS 28 29 from .oauth import auth_info 30 31 32 @auth_info 33 def base_context(request, user): 34 """Provide the context needed for the ``base.html`` template. 35 36 Parameters 37 ---------- 38 request : HttpRequest object 39 Incoming request from the webpage 40 41 Returns 42 ------- 43 context : dict 44 A dictionary containing data needed to render the ``base.html`` 45 template 46 """ 47 48 context = {} 49 context['inst_list'] = JWST_INSTRUMENT_NAMES 50 context['tools'] = MONITORS 51 context['user'] = user 52 context['version'] = jwql.__version__ 53 54 return context 55 [end of jwql/website/apps/jwql/context_processors.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/jwql/website/apps/jwql/context_processors.py b/jwql/website/apps/jwql/context_processors.py --- a/jwql/website/apps/jwql/context_processors.py +++ b/jwql/website/apps/jwql/context_processors.py @@ -23,6 +23,8 @@ As such, it will automatically be executed upon each request. """ +import bokeh + import jwql from jwql.utils.constants import JWST_INSTRUMENT_NAMES, MONITORS @@ -50,5 +52,6 @@ context['tools'] = MONITORS context['user'] = user context['version'] = jwql.__version__ + context['bokeh_version'] = bokeh.__version__ return context
{"golden_diff": "diff --git a/jwql/website/apps/jwql/context_processors.py b/jwql/website/apps/jwql/context_processors.py\n--- a/jwql/website/apps/jwql/context_processors.py\n+++ b/jwql/website/apps/jwql/context_processors.py\n@@ -23,6 +23,8 @@\n As such, it will automatically be executed upon each request.\n \"\"\"\n \n+import bokeh\n+\n import jwql\n from jwql.utils.constants import JWST_INSTRUMENT_NAMES, MONITORS\n \n@@ -50,5 +52,6 @@\n context['tools'] = MONITORS\n context['user'] = user\n context['version'] = jwql.__version__\n+ context['bokeh_version'] = bokeh.__version__\n \n return context\n", "issue": "Import environment bokeh version in HTML\nAs described in #425, having the environment Bokeh version NOT match the HTML template Bokeh version can cause problems.\r\n\r\nI wrote some tests #429 that will test and make sure this is always the case in the future, but we could be even smarter about this if we programmatically set the Bokeh version using a Django variable. We could even include it in `context_processors` so it happens behind the scenes.\n", "before_files": [{"content": "\"\"\"Provides functions that define context inherent to all views.\n\nThe functions within this module define ``context`` that will be\nincluded in requests, in addition to any specific ``context`` provided\nin the view.\n\nAuthors\n-------\n\n - Matthew Bourque\n\nUse\n---\n\n This module is defined under the ``TEMPLATES.OPTIONS`` setting in\n ``settings.py``, e.g.:\n ::\n\n TEMPLATES = [\n {'OPTIONS': {'context_processors': ['jwql.website.apps.jwql.context_processors.base_context'],},}\n ]\n\n As such, it will automatically be executed upon each request.\n\"\"\"\n\nimport jwql\nfrom jwql.utils.constants import JWST_INSTRUMENT_NAMES, MONITORS\n\nfrom .oauth import auth_info\n\n\n@auth_info\ndef base_context(request, user):\n \"\"\"Provide the context needed for the ``base.html`` template.\n\n Parameters\n ----------\n request : HttpRequest object\n Incoming request from the webpage\n\n Returns\n -------\n context : dict\n A dictionary containing data needed to render the ``base.html``\n template\n \"\"\"\n\n context = {}\n context['inst_list'] = JWST_INSTRUMENT_NAMES\n context['tools'] = MONITORS\n context['user'] = user\n context['version'] = jwql.__version__\n\n return context\n", "path": "jwql/website/apps/jwql/context_processors.py"}]}
1,043
178
gh_patches_debug_5092
rasdani/github-patches
git_diff
fossasia__open-event-server-5383
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Improper exception handling in manage.py **Describe the bug** Exceptions are not handled properly in `initialize_db` method of `manage` module. **To Reproduce** Steps to reproduce the behavior: 1. Open 'manage.py' 2. See https://github.com/fossasia/open-event-server/blob/05cbd1abee0d81dd24d701420c3530e3b9bd645f/manage.py#L77 **Expected behavior** It should follow standard exception handling format using `Exception` class. </issue> <code> [start of manage.py] 1 import os 2 3 from app.api.helpers.db import save_to_db 4 from app.models.event import Event, get_new_event_identifier 5 from app import manager 6 from app import current_app as app 7 from app.models import db 8 from app.models.speaker import Speaker 9 from populate_db import populate 10 from flask_migrate import stamp 11 from sqlalchemy.engine import reflection 12 13 from tests.unittests.auth_helper import create_super_admin 14 15 16 @manager.command 17 def list_routes(): 18 import urllib 19 20 output = [] 21 for rule in app.url_map.iter_rules(): 22 methods = ','.join(rule.methods) 23 line = urllib.unquote("{:50s} {:20s} {}".format( 24 rule.endpoint, methods, rule)) 25 output.append(line) 26 27 for line in sorted(output): 28 print(line) 29 30 31 @manager.command 32 def add_event_identifier(): 33 events = Event.query.all() 34 for event in events: 35 event.identifier = get_new_event_identifier() 36 save_to_db(event) 37 38 39 @manager.option('-e', '--event', help='Event ID. Eg. 1') 40 def fix_speaker_images(event): 41 from app.helpers.sessions_speakers.speakers import speaker_image_sizes 42 from app.helpers.sessions_speakers.speakers import save_resized_photo 43 import urllib 44 from app.helpers.storage import generate_hash 45 event_id = int(event) 46 image_sizes = speaker_image_sizes() 47 speakers = Speaker.query.filter_by(event_id=event_id).all() 48 for speaker in speakers: 49 if speaker.photo and speaker.photo.strip() != '': 50 file_relative_path = 'static/media/temp/' + generate_hash(str(speaker.id)) + '.jpg' 51 file_path = app.config['BASE_DIR'] + '/' + file_relative_path 52 urllib.urlretrieve(speaker.photo, file_path) 53 speaker.small = save_resized_photo(file_path, event_id, speaker.id, 'small', image_sizes) 54 speaker.thumbnail = save_resized_photo(file_path, event_id, speaker.id, 'thumbnail', image_sizes) 55 speaker.icon = save_resized_photo(file_path, event_id, speaker.id, 'icon', image_sizes) 56 db.session.add(speaker) 57 os.remove(file_path) 58 print("Downloaded " + speaker.photo + " into " + file_relative_path) 59 print("Processed - " + str(speaker.id)) 60 db.session.commit() 61 62 63 @manager.option('-c', '--credentials', help='Super admin credentials. Eg. username:password') 64 def initialize_db(credentials): 65 with app.app_context(): 66 populate_data = True 67 inspector = reflection.Inspector.from_engine(db.engine) 68 table_name = 'events' 69 table_names = inspector.get_table_names() 70 print("[LOG] Existing tables:") 71 print("[LOG] " + ','.join(table_names)) 72 if table_name not in table_names: 73 print("[LOG] Table not found. Attempting creation") 74 try: 75 db.create_all() 76 stamp() 77 except: 78 populate_data = False 79 print("[LOG] Could not create tables. Either database does not exist or tables already created") 80 if populate_data: 81 credentials = credentials.split(":") 82 create_super_admin(credentials[0], credentials[1]) 83 populate() 84 else: 85 print("[LOG] Tables already exist. Skipping data population & creation.") 86 87 88 @manager.command 89 def prepare_kubernetes_db(): 90 with app.app_context(): 91 initialize_db('[email protected]:fossasia') 92 93 94 if __name__ == "__main__": 95 manager.run() 96 [end of manage.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/manage.py b/manage.py --- a/manage.py +++ b/manage.py @@ -74,7 +74,7 @@ try: db.create_all() stamp() - except: + except Exception: populate_data = False print("[LOG] Could not create tables. Either database does not exist or tables already created") if populate_data:
{"golden_diff": "diff --git a/manage.py b/manage.py\n--- a/manage.py\n+++ b/manage.py\n@@ -74,7 +74,7 @@\n try:\n db.create_all()\n stamp()\n- except:\n+ except Exception:\n populate_data = False\n print(\"[LOG] Could not create tables. Either database does not exist or tables already created\")\n if populate_data:\n", "issue": "Improper exception handling in manage.py\n**Describe the bug**\r\nExceptions are not handled properly in `initialize_db` method of `manage` module.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Open 'manage.py'\r\n2. See https://github.com/fossasia/open-event-server/blob/05cbd1abee0d81dd24d701420c3530e3b9bd645f/manage.py#L77\r\n\r\n**Expected behavior**\r\nIt should follow standard exception handling format using `Exception` class.\n", "before_files": [{"content": "import os\n\nfrom app.api.helpers.db import save_to_db\nfrom app.models.event import Event, get_new_event_identifier\nfrom app import manager\nfrom app import current_app as app\nfrom app.models import db\nfrom app.models.speaker import Speaker\nfrom populate_db import populate\nfrom flask_migrate import stamp\nfrom sqlalchemy.engine import reflection\n\nfrom tests.unittests.auth_helper import create_super_admin\n\n\[email protected]\ndef list_routes():\n import urllib\n\n output = []\n for rule in app.url_map.iter_rules():\n methods = ','.join(rule.methods)\n line = urllib.unquote(\"{:50s} {:20s} {}\".format(\n rule.endpoint, methods, rule))\n output.append(line)\n\n for line in sorted(output):\n print(line)\n\n\[email protected]\ndef add_event_identifier():\n events = Event.query.all()\n for event in events:\n event.identifier = get_new_event_identifier()\n save_to_db(event)\n\n\[email protected]('-e', '--event', help='Event ID. Eg. 1')\ndef fix_speaker_images(event):\n from app.helpers.sessions_speakers.speakers import speaker_image_sizes\n from app.helpers.sessions_speakers.speakers import save_resized_photo\n import urllib\n from app.helpers.storage import generate_hash\n event_id = int(event)\n image_sizes = speaker_image_sizes()\n speakers = Speaker.query.filter_by(event_id=event_id).all()\n for speaker in speakers:\n if speaker.photo and speaker.photo.strip() != '':\n file_relative_path = 'static/media/temp/' + generate_hash(str(speaker.id)) + '.jpg'\n file_path = app.config['BASE_DIR'] + '/' + file_relative_path\n urllib.urlretrieve(speaker.photo, file_path)\n speaker.small = save_resized_photo(file_path, event_id, speaker.id, 'small', image_sizes)\n speaker.thumbnail = save_resized_photo(file_path, event_id, speaker.id, 'thumbnail', image_sizes)\n speaker.icon = save_resized_photo(file_path, event_id, speaker.id, 'icon', image_sizes)\n db.session.add(speaker)\n os.remove(file_path)\n print(\"Downloaded \" + speaker.photo + \" into \" + file_relative_path)\n print(\"Processed - \" + str(speaker.id))\n db.session.commit()\n\n\[email protected]('-c', '--credentials', help='Super admin credentials. Eg. username:password')\ndef initialize_db(credentials):\n with app.app_context():\n populate_data = True\n inspector = reflection.Inspector.from_engine(db.engine)\n table_name = 'events'\n table_names = inspector.get_table_names()\n print(\"[LOG] Existing tables:\")\n print(\"[LOG] \" + ','.join(table_names))\n if table_name not in table_names:\n print(\"[LOG] Table not found. Attempting creation\")\n try:\n db.create_all()\n stamp()\n except:\n populate_data = False\n print(\"[LOG] Could not create tables. Either database does not exist or tables already created\")\n if populate_data:\n credentials = credentials.split(\":\")\n create_super_admin(credentials[0], credentials[1])\n populate()\n else:\n print(\"[LOG] Tables already exist. Skipping data population & creation.\")\n\n\[email protected]\ndef prepare_kubernetes_db():\n with app.app_context():\n initialize_db('[email protected]:fossasia')\n\n\nif __name__ == \"__main__\":\n manager.run()\n", "path": "manage.py"}]}
1,570
84
gh_patches_debug_1067
rasdani/github-patches
git_diff
encode__httpx-1799
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Update h2 pin? ### Discussed in https://github.com/encode/httpx/discussions/1485 <div type='discussions-op-text'> <sup>Originally posted by **HarrySky** February 24, 2021</sup> Hi, some time ago `h2` pin was updated in `httpcore`: https://github.com/encode/httpcore/pull/208 But it is still pinned to `3.*` in this package's `http2` extra: https://github.com/encode/httpx/blob/0f280af8b170ed5cc48c12a894f71a8b5762f748/setup.py#L65 This is not an issue, as I can just add `h2>=3,<5` to my setup.py instead of using `httpx[http2]`, but maybe you want dependencies to be in sync with `httpcore`. EDIT: Using git blame we can see that before `http2` extra - `httpcore[http2]` was used instead of `h2` dependency directly.</div> </issue> <code> [start of setup.py] 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 4 import re 5 from pathlib import Path 6 7 from setuptools import setup 8 9 10 def get_version(package): 11 """ 12 Return package version as listed in `__version__` in `init.py`. 13 """ 14 version = Path(package, "__version__.py").read_text() 15 return re.search("__version__ = ['\"]([^'\"]+)['\"]", version).group(1) 16 17 18 def get_long_description(): 19 """ 20 Return the README. 21 """ 22 long_description = "" 23 with open("README.md", encoding="utf8") as f: 24 long_description += f.read() 25 long_description += "\n\n" 26 with open("CHANGELOG.md", encoding="utf8") as f: 27 long_description += f.read() 28 return long_description 29 30 31 def get_packages(package): 32 """ 33 Return root package and all sub-packages. 34 """ 35 return [str(path.parent) for path in Path(package).glob("**/__init__.py")] 36 37 38 setup( 39 name="httpx", 40 python_requires=">=3.6", 41 version=get_version("httpx"), 42 url="https://github.com/encode/httpx", 43 project_urls={ 44 "Changelog": "https://github.com/encode/httpx/blob/master/CHANGELOG.md", 45 "Documentation": "https://www.python-httpx.org", 46 "Source": "https://github.com/encode/httpx", 47 }, 48 license="BSD", 49 description="The next generation HTTP client.", 50 long_description=get_long_description(), 51 long_description_content_type="text/markdown", 52 author="Tom Christie", 53 author_email="[email protected]", 54 package_data={"httpx": ["py.typed"]}, 55 packages=get_packages("httpx"), 56 include_package_data=True, 57 zip_safe=False, 58 install_requires=[ 59 "certifi", 60 "sniffio", 61 "rfc3986[idna2008]>=1.3,<2", 62 "httpcore>=0.13.3,<0.14.0", 63 "async_generator; python_version < '3.7'" 64 ], 65 extras_require={ 66 "http2": "h2==3.*", 67 "brotli": "brotlicffi==1.*", 68 }, 69 classifiers=[ 70 "Development Status :: 4 - Beta", 71 "Environment :: Web Environment", 72 "Intended Audience :: Developers", 73 "License :: OSI Approved :: BSD License", 74 "Operating System :: OS Independent", 75 "Topic :: Internet :: WWW/HTTP", 76 "Framework :: AsyncIO", 77 "Framework :: Trio", 78 "Programming Language :: Python :: 3", 79 "Programming Language :: Python :: 3.6", 80 "Programming Language :: Python :: 3.7", 81 "Programming Language :: Python :: 3.8", 82 "Programming Language :: Python :: 3.9", 83 "Programming Language :: Python :: 3.10", 84 "Programming Language :: Python :: 3 :: Only", 85 ], 86 ) 87 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -63,7 +63,7 @@ "async_generator; python_version < '3.7'" ], extras_require={ - "http2": "h2==3.*", + "http2": "h2>=3,<5", "brotli": "brotlicffi==1.*", }, classifiers=[
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -63,7 +63,7 @@\n \"async_generator; python_version < '3.7'\"\n ],\n extras_require={\n- \"http2\": \"h2==3.*\",\n+ \"http2\": \"h2>=3,<5\",\n \"brotli\": \"brotlicffi==1.*\",\n },\n classifiers=[\n", "issue": "Update h2 pin?\n### Discussed in https://github.com/encode/httpx/discussions/1485\r\n\r\n<div type='discussions-op-text'>\r\n\r\n<sup>Originally posted by **HarrySky** February 24, 2021</sup>\r\nHi, some time ago `h2` pin was updated in `httpcore`: https://github.com/encode/httpcore/pull/208\r\n\r\nBut it is still pinned to `3.*` in this package's `http2` extra:\r\nhttps://github.com/encode/httpx/blob/0f280af8b170ed5cc48c12a894f71a8b5762f748/setup.py#L65\r\n\r\nThis is not an issue, as I can just add `h2>=3,<5` to my setup.py instead of using `httpx[http2]`, but maybe you want dependencies to be in sync with `httpcore`.\r\n\r\nEDIT: Using git blame we can see that before `http2` extra - `httpcore[http2]` was used instead of `h2` dependency directly.</div>\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport re\nfrom pathlib import Path\n\nfrom setuptools import setup\n\n\ndef get_version(package):\n \"\"\"\n Return package version as listed in `__version__` in `init.py`.\n \"\"\"\n version = Path(package, \"__version__.py\").read_text()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", version).group(1)\n\n\ndef get_long_description():\n \"\"\"\n Return the README.\n \"\"\"\n long_description = \"\"\n with open(\"README.md\", encoding=\"utf8\") as f:\n long_description += f.read()\n long_description += \"\\n\\n\"\n with open(\"CHANGELOG.md\", encoding=\"utf8\") as f:\n long_description += f.read()\n return long_description\n\n\ndef get_packages(package):\n \"\"\"\n Return root package and all sub-packages.\n \"\"\"\n return [str(path.parent) for path in Path(package).glob(\"**/__init__.py\")]\n\n\nsetup(\n name=\"httpx\",\n python_requires=\">=3.6\",\n version=get_version(\"httpx\"),\n url=\"https://github.com/encode/httpx\",\n project_urls={\n \"Changelog\": \"https://github.com/encode/httpx/blob/master/CHANGELOG.md\",\n \"Documentation\": \"https://www.python-httpx.org\",\n \"Source\": \"https://github.com/encode/httpx\",\n },\n license=\"BSD\",\n description=\"The next generation HTTP client.\",\n long_description=get_long_description(),\n long_description_content_type=\"text/markdown\",\n author=\"Tom Christie\",\n author_email=\"[email protected]\",\n package_data={\"httpx\": [\"py.typed\"]},\n packages=get_packages(\"httpx\"),\n include_package_data=True,\n zip_safe=False,\n install_requires=[\n \"certifi\",\n \"sniffio\",\n \"rfc3986[idna2008]>=1.3,<2\",\n \"httpcore>=0.13.3,<0.14.0\",\n \"async_generator; python_version < '3.7'\"\n ],\n extras_require={\n \"http2\": \"h2==3.*\",\n \"brotli\": \"brotlicffi==1.*\",\n },\n classifiers=[\n \"Development Status :: 4 - Beta\",\n \"Environment :: Web Environment\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Framework :: AsyncIO\",\n \"Framework :: Trio\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3 :: Only\",\n ],\n)\n", "path": "setup.py"}]}
1,589
99
gh_patches_debug_15412
rasdani/github-patches
git_diff
bridgecrewio__checkov-1928
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> CKV_AZURE_53 is a duplicate of CKV_AZURE_90 **Is your feature request related to a problem? Please describe.** CKV_AZURE_53 is a duplicate of CKV_AZURE_90. We should use only one. **Describe the solution you'd like** N/A **Describe alternatives you've considered** N/A **Additional context** N/A </issue> <code> [start of checkov/terraform/checks/resource/azure/MySQLServerPublicAccessDisabled.py] 1 from checkov.common.models.enums import CheckCategories, CheckResult 2 from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck 3 4 5 class MySQLServerHasPublicAccessDisabled(BaseResourceValueCheck): 6 def __init__(self): 7 name = "Ensure that MySQL server disables public network access" 8 id = "CKV_AZURE_90" 9 supported_resources = ['azurerm_mysql_server'] 10 categories = [CheckCategories.NETWORKING] 11 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) 12 13 def get_inspected_key(self): 14 return 'public_network_access_enabled' 15 16 def scan_resource_conf(self, conf): 17 public_access = conf.get('public_network_access_enabled', [True]) 18 if public_access[0]: 19 return CheckResult.FAILED 20 return CheckResult.PASSED 21 22 23 check = MySQLServerHasPublicAccessDisabled() 24 [end of checkov/terraform/checks/resource/azure/MySQLServerPublicAccessDisabled.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/checkov/terraform/checks/resource/azure/MySQLServerPublicAccessDisabled.py b/checkov/terraform/checks/resource/azure/MySQLServerPublicAccessDisabled.py deleted file mode 100644 --- a/checkov/terraform/checks/resource/azure/MySQLServerPublicAccessDisabled.py +++ /dev/null @@ -1,23 +0,0 @@ -from checkov.common.models.enums import CheckCategories, CheckResult -from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck - - -class MySQLServerHasPublicAccessDisabled(BaseResourceValueCheck): - def __init__(self): - name = "Ensure that MySQL server disables public network access" - id = "CKV_AZURE_90" - supported_resources = ['azurerm_mysql_server'] - categories = [CheckCategories.NETWORKING] - super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) - - def get_inspected_key(self): - return 'public_network_access_enabled' - - def scan_resource_conf(self, conf): - public_access = conf.get('public_network_access_enabled', [True]) - if public_access[0]: - return CheckResult.FAILED - return CheckResult.PASSED - - -check = MySQLServerHasPublicAccessDisabled()
{"golden_diff": "diff --git a/checkov/terraform/checks/resource/azure/MySQLServerPublicAccessDisabled.py b/checkov/terraform/checks/resource/azure/MySQLServerPublicAccessDisabled.py\ndeleted file mode 100644\n--- a/checkov/terraform/checks/resource/azure/MySQLServerPublicAccessDisabled.py\n+++ /dev/null\n@@ -1,23 +0,0 @@\n-from checkov.common.models.enums import CheckCategories, CheckResult\n-from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n-\n-\n-class MySQLServerHasPublicAccessDisabled(BaseResourceValueCheck):\n- def __init__(self):\n- name = \"Ensure that MySQL server disables public network access\"\n- id = \"CKV_AZURE_90\"\n- supported_resources = ['azurerm_mysql_server']\n- categories = [CheckCategories.NETWORKING]\n- super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n-\n- def get_inspected_key(self):\n- return 'public_network_access_enabled'\n-\n- def scan_resource_conf(self, conf):\n- public_access = conf.get('public_network_access_enabled', [True])\n- if public_access[0]:\n- return CheckResult.FAILED\n- return CheckResult.PASSED\n-\n-\n-check = MySQLServerHasPublicAccessDisabled()\n", "issue": "CKV_AZURE_53 is a duplicate of CKV_AZURE_90\n**Is your feature request related to a problem? Please describe.**\r\nCKV_AZURE_53 is a duplicate of CKV_AZURE_90. We should use only one.\r\n\r\n**Describe the solution you'd like**\r\nN/A\r\n\r\n**Describe alternatives you've considered**\r\nN/A\r\n\r\n**Additional context**\r\nN/A\n", "before_files": [{"content": "from checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n\n\nclass MySQLServerHasPublicAccessDisabled(BaseResourceValueCheck):\n def __init__(self):\n name = \"Ensure that MySQL server disables public network access\"\n id = \"CKV_AZURE_90\"\n supported_resources = ['azurerm_mysql_server']\n categories = [CheckCategories.NETWORKING]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def get_inspected_key(self):\n return 'public_network_access_enabled'\n\n def scan_resource_conf(self, conf):\n public_access = conf.get('public_network_access_enabled', [True])\n if public_access[0]:\n return CheckResult.FAILED\n return CheckResult.PASSED\n\n\ncheck = MySQLServerHasPublicAccessDisabled()\n", "path": "checkov/terraform/checks/resource/azure/MySQLServerPublicAccessDisabled.py"}]}
883
294
gh_patches_debug_196
rasdani/github-patches
git_diff
twisted__twisted-11622
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Release 22.8.0 This is the ticket to track the release of 22.8.0 </issue> <code> [start of src/twisted/_version.py] 1 """ 2 Provides Twisted version information. 3 """ 4 5 # This file is auto-generated! Do not edit! 6 # Use `python -m incremental.update Twisted` to change this file. 7 8 from incremental import Version 9 10 __version__ = Version("Twisted", 22, 4, 0, post=0) 11 __all__ = ["__version__"] 12 [end of src/twisted/_version.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/twisted/_version.py b/src/twisted/_version.py --- a/src/twisted/_version.py +++ b/src/twisted/_version.py @@ -7,5 +7,5 @@ from incremental import Version -__version__ = Version("Twisted", 22, 4, 0, post=0) +__version__ = Version("Twisted", 22, 8, 0, post=0) __all__ = ["__version__"]
{"golden_diff": "diff --git a/src/twisted/_version.py b/src/twisted/_version.py\n--- a/src/twisted/_version.py\n+++ b/src/twisted/_version.py\n@@ -7,5 +7,5 @@\n \n from incremental import Version\n \n-__version__ = Version(\"Twisted\", 22, 4, 0, post=0)\n+__version__ = Version(\"Twisted\", 22, 8, 0, post=0)\n __all__ = [\"__version__\"]\n", "issue": "Release 22.8.0\nThis is the ticket to track the release of 22.8.0\n", "before_files": [{"content": "\"\"\"\nProvides Twisted version information.\n\"\"\"\n\n# This file is auto-generated! Do not edit!\n# Use `python -m incremental.update Twisted` to change this file.\n\nfrom incremental import Version\n\n__version__ = Version(\"Twisted\", 22, 4, 0, post=0)\n__all__ = [\"__version__\"]\n", "path": "src/twisted/_version.py"}]}
656
114
gh_patches_debug_24456
rasdani/github-patches
git_diff
bridgecrewio__checkov-1096
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Duplicate check **Describe the bug** The new check `CKV_AWS_125` popped up complaining about me not activating point in time recovery for a DynamoDB table, which is surprising, because I already acknowledged it in the past. After digging into the code I found out that check `CKV_AWS_125` and `CKV_AWS_28` are identical. It would be great to delete one of them, probably the new one. **Additional context** https://github.com/bridgecrewio/checkov/blob/caafcbb83d6901b57c9a39464673670c150f0a33/checkov/terraform/checks/resource/aws/DynamodbRecovery.py#L5-L14 https://github.com/bridgecrewio/checkov/blob/caafcbb83d6901b57c9a39464673670c150f0a33/checkov/terraform/checks/resource/aws/DynamodbPointInTimeRecoveryEnabled.py#L5-L17 </issue> <code> [start of checkov/terraform/checks/resource/aws/DynamodbPointInTimeRecoveryEnabled.py] 1 from checkov.common.models.enums import CheckCategories, CheckResult 2 from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck 3 4 5 class DynamodbPointInTimeRecoveryEnabled(BaseResourceValueCheck): 6 def __init__(self): 7 name = "Ensure that that point in time recovery is enabled for Amazon DynamoDB tables" 8 id = "CKV_AWS_125" 9 supported_resources = ['aws_dynamodb_table'] 10 categories = [CheckCategories.BACKUP_AND_RECOVERY] 11 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) 12 13 def get_inspected_key(self): 14 return "point_in_time_recovery/[0]/enabled" 15 16 def get_expected_value(self): 17 return True 18 19 20 check = DynamodbPointInTimeRecoveryEnabled() 21 [end of checkov/terraform/checks/resource/aws/DynamodbPointInTimeRecoveryEnabled.py] [start of checkov/terraform/checks/resource/aws/KMSKeyRotation.py] 1 from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck 2 from checkov.common.models.enums import CheckCategories 3 4 5 class KMSKeyRotation(BaseResourceValueCheck): 6 7 def __init__(self): 8 name = "Ensure KMS have rotation policy" 9 id = "CKV_AWS_132" 10 supported_resources = ['aws_kms_key'] 11 categories = [CheckCategories.ENCRYPTION] 12 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) 13 14 def get_inspected_key(self): 15 return "enable_key_rotation" 16 17 18 check = KMSKeyRotation() 19 [end of checkov/terraform/checks/resource/aws/KMSKeyRotation.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/checkov/terraform/checks/resource/aws/DynamodbPointInTimeRecoveryEnabled.py b/checkov/terraform/checks/resource/aws/DynamodbPointInTimeRecoveryEnabled.py deleted file mode 100644 --- a/checkov/terraform/checks/resource/aws/DynamodbPointInTimeRecoveryEnabled.py +++ /dev/null @@ -1,20 +0,0 @@ -from checkov.common.models.enums import CheckCategories, CheckResult -from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck - - -class DynamodbPointInTimeRecoveryEnabled(BaseResourceValueCheck): - def __init__(self): - name = "Ensure that that point in time recovery is enabled for Amazon DynamoDB tables" - id = "CKV_AWS_125" - supported_resources = ['aws_dynamodb_table'] - categories = [CheckCategories.BACKUP_AND_RECOVERY] - super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) - - def get_inspected_key(self): - return "point_in_time_recovery/[0]/enabled" - - def get_expected_value(self): - return True - - -check = DynamodbPointInTimeRecoveryEnabled() diff --git a/checkov/terraform/checks/resource/aws/KMSKeyRotation.py b/checkov/terraform/checks/resource/aws/KMSKeyRotation.py deleted file mode 100644 --- a/checkov/terraform/checks/resource/aws/KMSKeyRotation.py +++ /dev/null @@ -1,18 +0,0 @@ -from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck -from checkov.common.models.enums import CheckCategories - - -class KMSKeyRotation(BaseResourceValueCheck): - - def __init__(self): - name = "Ensure KMS have rotation policy" - id = "CKV_AWS_132" - supported_resources = ['aws_kms_key'] - categories = [CheckCategories.ENCRYPTION] - super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) - - def get_inspected_key(self): - return "enable_key_rotation" - - -check = KMSKeyRotation()
{"golden_diff": "diff --git a/checkov/terraform/checks/resource/aws/DynamodbPointInTimeRecoveryEnabled.py b/checkov/terraform/checks/resource/aws/DynamodbPointInTimeRecoveryEnabled.py\ndeleted file mode 100644\n--- a/checkov/terraform/checks/resource/aws/DynamodbPointInTimeRecoveryEnabled.py\n+++ /dev/null\n@@ -1,20 +0,0 @@\n-from checkov.common.models.enums import CheckCategories, CheckResult\n-from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n-\n-\n-class DynamodbPointInTimeRecoveryEnabled(BaseResourceValueCheck):\n- def __init__(self):\n- name = \"Ensure that that point in time recovery is enabled for Amazon DynamoDB tables\"\n- id = \"CKV_AWS_125\"\n- supported_resources = ['aws_dynamodb_table']\n- categories = [CheckCategories.BACKUP_AND_RECOVERY]\n- super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n-\n- def get_inspected_key(self):\n- return \"point_in_time_recovery/[0]/enabled\"\n-\n- def get_expected_value(self):\n- return True\n-\n-\n-check = DynamodbPointInTimeRecoveryEnabled()\ndiff --git a/checkov/terraform/checks/resource/aws/KMSKeyRotation.py b/checkov/terraform/checks/resource/aws/KMSKeyRotation.py\ndeleted file mode 100644\n--- a/checkov/terraform/checks/resource/aws/KMSKeyRotation.py\n+++ /dev/null\n@@ -1,18 +0,0 @@\n-from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n-from checkov.common.models.enums import CheckCategories\n-\n-\n-class KMSKeyRotation(BaseResourceValueCheck):\n-\n- def __init__(self):\n- name = \"Ensure KMS have rotation policy\"\n- id = \"CKV_AWS_132\"\n- supported_resources = ['aws_kms_key']\n- categories = [CheckCategories.ENCRYPTION]\n- super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n-\n- def get_inspected_key(self):\n- return \"enable_key_rotation\"\n-\n-\n-check = KMSKeyRotation()\n", "issue": "Duplicate check\n**Describe the bug**\r\nThe new check `CKV_AWS_125` popped up complaining about me not activating point in time recovery for a DynamoDB table, which is surprising, because I already acknowledged it in the past. After digging into the code I found out that check `CKV_AWS_125` and `CKV_AWS_28` are identical. It would be great to delete one of them, probably the new one.\r\n\r\n**Additional context**\r\nhttps://github.com/bridgecrewio/checkov/blob/caafcbb83d6901b57c9a39464673670c150f0a33/checkov/terraform/checks/resource/aws/DynamodbRecovery.py#L5-L14\r\nhttps://github.com/bridgecrewio/checkov/blob/caafcbb83d6901b57c9a39464673670c150f0a33/checkov/terraform/checks/resource/aws/DynamodbPointInTimeRecoveryEnabled.py#L5-L17\r\n\n", "before_files": [{"content": "from checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n\n\nclass DynamodbPointInTimeRecoveryEnabled(BaseResourceValueCheck):\n def __init__(self):\n name = \"Ensure that that point in time recovery is enabled for Amazon DynamoDB tables\"\n id = \"CKV_AWS_125\"\n supported_resources = ['aws_dynamodb_table']\n categories = [CheckCategories.BACKUP_AND_RECOVERY]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def get_inspected_key(self):\n return \"point_in_time_recovery/[0]/enabled\"\n\n def get_expected_value(self):\n return True\n\n\ncheck = DynamodbPointInTimeRecoveryEnabled()\n", "path": "checkov/terraform/checks/resource/aws/DynamodbPointInTimeRecoveryEnabled.py"}, {"content": "from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\nfrom checkov.common.models.enums import CheckCategories\n\n\nclass KMSKeyRotation(BaseResourceValueCheck):\n\n def __init__(self):\n name = \"Ensure KMS have rotation policy\"\n id = \"CKV_AWS_132\"\n supported_resources = ['aws_kms_key']\n categories = [CheckCategories.ENCRYPTION]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def get_inspected_key(self):\n return \"enable_key_rotation\"\n\n\ncheck = KMSKeyRotation()\n", "path": "checkov/terraform/checks/resource/aws/KMSKeyRotation.py"}]}
1,210
494
gh_patches_debug_730
rasdani/github-patches
git_diff
pytorch__pytorch-116517
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Missing packaging dependency in torch 2.1.x ### 🐛 Describe the bug Hi, [torch.utils.tensorboard requires "packaging"](https://github.com/pytorch/pytorch/blob/fa1ccc34c4f65756bc50c3e3ab135c88b175b18c/torch/utils/tensorboard/__init__.py#L2C1-L3C1) to be installed but that dependency is [missing on torch 2.1.x](https://github.com/pytorch/pytorch/blob/v2.1.2-rc1/requirements.txt). Here's some example code: ```python from torch.utils.tensorboard import SummaryWriter ``` The links above point to a RC version of 2.1.2 but this is also the case for 2.1.1. Would it be possible to make a patch release to add the dependency? ### Versions Python version: 3.9.16 (main, Dec 7 2022, 10:16:11) [Clang 14.0.0 (clang-140[0.0.29.202](http://0.0.29.202/) )] (64-bit runtime) Python platform: macOS-13.6.1-x86_64-i386-64bit Is CUDA available: False CUDA runtime version: No CUDA CUDA_MODULE_LOADING set to: N/A GPU models and configuration: No CUDA Nvidia driver version: No CUDA cuDNN version: No CUDA HIP runtime version: N/A MIOpen runtime version: N/A Is XNNPACK available: True CPU: Intel(R) Core(TM) i5-7287U CPU @ 3.30GHz Versions of relevant libraries: [pip3] numpy==1.23.5 [pip3] torch==2.1.1 [pip3] torchvision==0.16.1 [conda] Could not collect </issue> <code> [start of torch/utils/tensorboard/__init__.py] 1 import tensorboard 2 from packaging.version import Version 3 4 if not hasattr(tensorboard, "__version__") or Version( 5 tensorboard.__version__ 6 ) < Version("1.15"): 7 raise ImportError("TensorBoard logging requires TensorBoard version 1.15 or above") 8 9 del Version 10 del tensorboard 11 12 from .writer import FileWriter, SummaryWriter # noqa: F401 13 from tensorboard.summary.writer.record_writer import RecordWriter # noqa: F401 14 [end of torch/utils/tensorboard/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/torch/utils/tensorboard/__init__.py b/torch/utils/tensorboard/__init__.py --- a/torch/utils/tensorboard/__init__.py +++ b/torch/utils/tensorboard/__init__.py @@ -1,5 +1,5 @@ import tensorboard -from packaging.version import Version +from torch._vendor.packaging.version import Version if not hasattr(tensorboard, "__version__") or Version( tensorboard.__version__
{"golden_diff": "diff --git a/torch/utils/tensorboard/__init__.py b/torch/utils/tensorboard/__init__.py\n--- a/torch/utils/tensorboard/__init__.py\n+++ b/torch/utils/tensorboard/__init__.py\n@@ -1,5 +1,5 @@\n import tensorboard\n-from packaging.version import Version\n+from torch._vendor.packaging.version import Version\n \n if not hasattr(tensorboard, \"__version__\") or Version(\n tensorboard.__version__\n", "issue": "Missing packaging dependency in torch 2.1.x\n### \ud83d\udc1b Describe the bug\n\nHi,\r\n[torch.utils.tensorboard requires \"packaging\"](https://github.com/pytorch/pytorch/blob/fa1ccc34c4f65756bc50c3e3ab135c88b175b18c/torch/utils/tensorboard/__init__.py#L2C1-L3C1) to be installed but that dependency is [missing on torch 2.1.x](https://github.com/pytorch/pytorch/blob/v2.1.2-rc1/requirements.txt).\r\n\r\nHere's some example code:\r\n```python\r\nfrom torch.utils.tensorboard import SummaryWriter\r\n```\r\n\r\nThe links above point to a RC version of 2.1.2 but this is also the case for 2.1.1. Would it be possible to make a patch release to add the dependency?\n\n### Versions\n\nPython version: 3.9.16 (main, Dec 7 2022, 10:16:11) [Clang 14.0.0 (clang-140[0.0.29.202](http://0.0.29.202/)\r\n)] (64-bit runtime)\r\nPython platform: macOS-13.6.1-x86_64-i386-64bit\r\nIs CUDA available: False\r\nCUDA runtime version: No CUDA\r\nCUDA_MODULE_LOADING set to: N/A\r\nGPU models and configuration: No CUDA\r\nNvidia driver version: No CUDA\r\ncuDNN version: No CUDA\r\nHIP runtime version: N/A\r\nMIOpen runtime version: N/A\r\nIs XNNPACK available: True\r\n\r\nCPU:\r\nIntel(R) Core(TM) i5-7287U CPU @ 3.30GHz\r\n\r\nVersions of relevant libraries:\r\n[pip3] numpy==1.23.5\r\n[pip3] torch==2.1.1\r\n[pip3] torchvision==0.16.1\r\n[conda] Could not collect\n", "before_files": [{"content": "import tensorboard\nfrom packaging.version import Version\n\nif not hasattr(tensorboard, \"__version__\") or Version(\n tensorboard.__version__\n) < Version(\"1.15\"):\n raise ImportError(\"TensorBoard logging requires TensorBoard version 1.15 or above\")\n\ndel Version\ndel tensorboard\n\nfrom .writer import FileWriter, SummaryWriter # noqa: F401\nfrom tensorboard.summary.writer.record_writer import RecordWriter # noqa: F401\n", "path": "torch/utils/tensorboard/__init__.py"}]}
1,121
103
gh_patches_debug_21259
rasdani/github-patches
git_diff
archlinux__archinstall-914
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> desktop profiles, replace pipewire-media-session with wireplumber, wireplumber seembs to be the newest replacement media-session replaceing pipewire-media-session hi, would it be possible to replace pipewire-media-session with wireplumber? or, to make things flexable, add a question in asking, "would you like to use wireplumber or pipewire-media-session? p.s i'm using wireplumber currently. wireplumber will remove pipewire-media-session and the systemd units. a total replacement. thank you for considdering my request! Majid Hussain </issue> <code> [start of profiles/applications/pipewire.py] 1 import archinstall 2 import logging 3 4 # Define the package list in order for lib to source 5 # which packages will be installed by this profile 6 __packages__ = ["pipewire", "pipewire-alsa", "pipewire-jack", "pipewire-media-session", "pipewire-pulse", "gst-plugin-pipewire", "libpulse"] 7 8 archinstall.log('Installing pipewire', level=logging.INFO) 9 archinstall.storage['installation_session'].add_additional_packages(__packages__) 10 11 @archinstall.plugin 12 def on_user_created(installation :archinstall.Installer, user :str): 13 archinstall.log(f"Enabling pipewire-pulse for {user}", level=logging.INFO) 14 installation.chroot('systemctl enable --user pipewire-pulse.service', run_as=user) [end of profiles/applications/pipewire.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/profiles/applications/pipewire.py b/profiles/applications/pipewire.py --- a/profiles/applications/pipewire.py +++ b/profiles/applications/pipewire.py @@ -3,7 +3,7 @@ # Define the package list in order for lib to source # which packages will be installed by this profile -__packages__ = ["pipewire", "pipewire-alsa", "pipewire-jack", "pipewire-media-session", "pipewire-pulse", "gst-plugin-pipewire", "libpulse"] +__packages__ = ["pipewire", "pipewire-alsa", "pipewire-jack", "pipewire-pulse", "gst-plugin-pipewire", "libpulse", "wireplumber"] archinstall.log('Installing pipewire', level=logging.INFO) archinstall.storage['installation_session'].add_additional_packages(__packages__) @@ -11,4 +11,4 @@ @archinstall.plugin def on_user_created(installation :archinstall.Installer, user :str): archinstall.log(f"Enabling pipewire-pulse for {user}", level=logging.INFO) - installation.chroot('systemctl enable --user pipewire-pulse.service', run_as=user) \ No newline at end of file + installation.chroot('systemctl enable --user pipewire-pulse.service', run_as=user)
{"golden_diff": "diff --git a/profiles/applications/pipewire.py b/profiles/applications/pipewire.py\n--- a/profiles/applications/pipewire.py\n+++ b/profiles/applications/pipewire.py\n@@ -3,7 +3,7 @@\n \n # Define the package list in order for lib to source\n # which packages will be installed by this profile\n-__packages__ = [\"pipewire\", \"pipewire-alsa\", \"pipewire-jack\", \"pipewire-media-session\", \"pipewire-pulse\", \"gst-plugin-pipewire\", \"libpulse\"]\n+__packages__ = [\"pipewire\", \"pipewire-alsa\", \"pipewire-jack\", \"pipewire-pulse\", \"gst-plugin-pipewire\", \"libpulse\", \"wireplumber\"]\n \n archinstall.log('Installing pipewire', level=logging.INFO)\n archinstall.storage['installation_session'].add_additional_packages(__packages__)\n@@ -11,4 +11,4 @@\n @archinstall.plugin\n def on_user_created(installation :archinstall.Installer, user :str):\n \tarchinstall.log(f\"Enabling pipewire-pulse for {user}\", level=logging.INFO)\n-\tinstallation.chroot('systemctl enable --user pipewire-pulse.service', run_as=user)\n\\ No newline at end of file\n+\tinstallation.chroot('systemctl enable --user pipewire-pulse.service', run_as=user)\n", "issue": "desktop profiles, replace pipewire-media-session with wireplumber, wireplumber seembs to be the newest replacement media-session replaceing pipewire-media-session\nhi,\r\nwould it be possible to replace pipewire-media-session with wireplumber?\r\nor, to make things flexable,\r\nadd a question in asking,\r\n\"would you like to use wireplumber or pipewire-media-session?\r\np.s\r\ni'm using wireplumber currently.\r\nwireplumber will remove pipewire-media-session and the systemd units.\r\na total replacement.\r\nthank you for considdering my request!\r\nMajid Hussain\n", "before_files": [{"content": "import archinstall\nimport logging\n\n# Define the package list in order for lib to source\n# which packages will be installed by this profile\n__packages__ = [\"pipewire\", \"pipewire-alsa\", \"pipewire-jack\", \"pipewire-media-session\", \"pipewire-pulse\", \"gst-plugin-pipewire\", \"libpulse\"]\n\narchinstall.log('Installing pipewire', level=logging.INFO)\narchinstall.storage['installation_session'].add_additional_packages(__packages__)\n\[email protected]\ndef on_user_created(installation :archinstall.Installer, user :str):\n\tarchinstall.log(f\"Enabling pipewire-pulse for {user}\", level=logging.INFO)\n\tinstallation.chroot('systemctl enable --user pipewire-pulse.service', run_as=user)", "path": "profiles/applications/pipewire.py"}]}
844
296
gh_patches_debug_14876
rasdani/github-patches
git_diff
pyca__cryptography-2445
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> cryptography.x509.Name implements __eq__ but not __hash__ </issue> <code> [start of src/cryptography/x509/name.py] 1 # This file is dual licensed under the terms of the Apache License, Version 2 # 2.0, and the BSD License. See the LICENSE file in the root of this repository 3 # for complete details. 4 5 from __future__ import absolute_import, division, print_function 6 7 import six 8 9 from cryptography import utils 10 from cryptography.x509.oid import ObjectIdentifier 11 12 13 class NameAttribute(object): 14 def __init__(self, oid, value): 15 if not isinstance(oid, ObjectIdentifier): 16 raise TypeError( 17 "oid argument must be an ObjectIdentifier instance." 18 ) 19 20 if not isinstance(value, six.text_type): 21 raise TypeError( 22 "value argument must be a text type." 23 ) 24 25 self._oid = oid 26 self._value = value 27 28 oid = utils.read_only_property("_oid") 29 value = utils.read_only_property("_value") 30 31 def __eq__(self, other): 32 if not isinstance(other, NameAttribute): 33 return NotImplemented 34 35 return ( 36 self.oid == other.oid and 37 self.value == other.value 38 ) 39 40 def __ne__(self, other): 41 return not self == other 42 43 def __repr__(self): 44 return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self) 45 46 47 class Name(object): 48 def __init__(self, attributes): 49 self._attributes = attributes 50 51 def get_attributes_for_oid(self, oid): 52 return [i for i in self if i.oid == oid] 53 54 def __eq__(self, other): 55 if not isinstance(other, Name): 56 return NotImplemented 57 58 return self._attributes == other._attributes 59 60 def __ne__(self, other): 61 return not self == other 62 63 def __iter__(self): 64 return iter(self._attributes) 65 66 def __len__(self): 67 return len(self._attributes) 68 69 def __repr__(self): 70 return "<Name({0!r})>".format(self._attributes) 71 [end of src/cryptography/x509/name.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/cryptography/x509/name.py b/src/cryptography/x509/name.py --- a/src/cryptography/x509/name.py +++ b/src/cryptography/x509/name.py @@ -40,6 +40,9 @@ def __ne__(self, other): return not self == other + def __hash__(self): + return hash((self.oid, self.value)) + def __repr__(self): return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self) @@ -60,6 +63,11 @@ def __ne__(self, other): return not self == other + def __hash__(self): + # TODO: this is relatively expensive, if this looks like a bottleneck + # for you, consider optimizing! + return hash(tuple(self._attributes)) + def __iter__(self): return iter(self._attributes)
{"golden_diff": "diff --git a/src/cryptography/x509/name.py b/src/cryptography/x509/name.py\n--- a/src/cryptography/x509/name.py\n+++ b/src/cryptography/x509/name.py\n@@ -40,6 +40,9 @@\n def __ne__(self, other):\n return not self == other\n \n+ def __hash__(self):\n+ return hash((self.oid, self.value))\n+\n def __repr__(self):\n return \"<NameAttribute(oid={0.oid}, value={0.value!r})>\".format(self)\n \n@@ -60,6 +63,11 @@\n def __ne__(self, other):\n return not self == other\n \n+ def __hash__(self):\n+ # TODO: this is relatively expensive, if this looks like a bottleneck\n+ # for you, consider optimizing!\n+ return hash(tuple(self._attributes))\n+\n def __iter__(self):\n return iter(self._attributes)\n", "issue": "cryptography.x509.Name implements __eq__ but not __hash__\n\n", "before_files": [{"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport six\n\nfrom cryptography import utils\nfrom cryptography.x509.oid import ObjectIdentifier\n\n\nclass NameAttribute(object):\n def __init__(self, oid, value):\n if not isinstance(oid, ObjectIdentifier):\n raise TypeError(\n \"oid argument must be an ObjectIdentifier instance.\"\n )\n\n if not isinstance(value, six.text_type):\n raise TypeError(\n \"value argument must be a text type.\"\n )\n\n self._oid = oid\n self._value = value\n\n oid = utils.read_only_property(\"_oid\")\n value = utils.read_only_property(\"_value\")\n\n def __eq__(self, other):\n if not isinstance(other, NameAttribute):\n return NotImplemented\n\n return (\n self.oid == other.oid and\n self.value == other.value\n )\n\n def __ne__(self, other):\n return not self == other\n\n def __repr__(self):\n return \"<NameAttribute(oid={0.oid}, value={0.value!r})>\".format(self)\n\n\nclass Name(object):\n def __init__(self, attributes):\n self._attributes = attributes\n\n def get_attributes_for_oid(self, oid):\n return [i for i in self if i.oid == oid]\n\n def __eq__(self, other):\n if not isinstance(other, Name):\n return NotImplemented\n\n return self._attributes == other._attributes\n\n def __ne__(self, other):\n return not self == other\n\n def __iter__(self):\n return iter(self._attributes)\n\n def __len__(self):\n return len(self._attributes)\n\n def __repr__(self):\n return \"<Name({0!r})>\".format(self._attributes)\n", "path": "src/cryptography/x509/name.py"}]}
1,126
216
gh_patches_debug_31457
rasdani/github-patches
git_diff
mozmeao__snippets-service-1206
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Create Freq Capping Goals for Jobs Create Global Frequency Capping goals for Jobs (introduced in #1034). Goals: - Total impressions - Total clicks and button clicks (combined) - Total blocks A `Job` can have multiple goals (freq capping, datetime), first one reached will un-publish the `Job`. Requires #1036. Also `Job`s with Freq Cap goals must get data from Redash more often (TBD). ## Use case: - Publish a Snippet for 1M impressions. It's fine it snippet gets 5K-10K impressions more. ## Questions - [x] How do we handle Jobs with Freq Capping when no Telemetry data is available? - Jobs that have impression, block or click limits set and no metrics data updated for at least 24 hours will be marked Complete. </issue> <code> [start of snippets/base/management/commands/update_jobs.py] 1 from datetime import datetime 2 3 from django.contrib.auth import get_user_model 4 from django.core.management.base import BaseCommand 5 from django.db import transaction 6 from django.db.models import F, Q 7 8 from snippets.base.models import Job 9 10 11 class Command(BaseCommand): 12 args = "(no args)" 13 help = "Update Jobs" 14 15 @transaction.atomic 16 def handle(self, *args, **options): 17 now = datetime.utcnow() 18 user = get_user_model().objects.get_or_create(username='snippets_bot')[0] 19 count_total_completed = 0 20 21 # Publish Scheduled Jobs with `publish_start` before now or without 22 # publish_start. 23 jobs = Job.objects.filter(status=Job.SCHEDULED).filter( 24 Q(publish_start__lte=now) | Q(publish_start=None) 25 ) 26 count_published = jobs.count() 27 for job in jobs: 28 job.change_status( 29 status=Job.PUBLISHED, 30 user=user, 31 reason='Published start date reached.', 32 ) 33 34 # Disable Published Jobs with `publish_end` before now. 35 jobs = Job.objects.filter(status=Job.PUBLISHED, publish_end__lte=now) 36 count_publication_end = jobs.count() 37 count_total_completed += count_publication_end 38 39 for job in jobs: 40 job.change_status( 41 status=Job.COMPLETED, 42 user=user, 43 reason='Publication end date reached.', 44 ) 45 46 # Disable Jobs that reached Impression, Click or Block limits. 47 count_limit = {} 48 for limit in ['impressions', 'clicks', 'blocks']: 49 jobs = (Job.objects 50 .filter(status=Job.PUBLISHED) 51 .exclude(**{f'limit_{limit}': 0}) 52 .filter(**{f'limit_{limit}__lte': F(f'metric_{limit}')})) 53 for job in jobs: 54 job.change_status( 55 status=Job.COMPLETED, 56 user=user, 57 reason=f'Limit reached: {limit}.', 58 ) 59 60 count_limit[limit] = jobs.count() 61 count_total_completed += count_limit[limit] 62 63 count_running = Job.objects.filter(status=Job.PUBLISHED).count() 64 65 self.stdout.write( 66 f'Jobs Published: {count_published}\n' 67 f'Jobs Completed: {count_total_completed}\n' 68 f' - Reached Publication End Date: {count_publication_end}\n' 69 f' - Reached Impressions Limit: {count_limit["impressions"]}\n' 70 f' - Reached Clicks Limit: {count_limit["clicks"]}\n' 71 f' - Reached Blocks Limit: {count_limit["blocks"]}\n' 72 f'Total Jobs Running: {count_running}\n' 73 ) 74 [end of snippets/base/management/commands/update_jobs.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/snippets/base/management/commands/update_jobs.py b/snippets/base/management/commands/update_jobs.py --- a/snippets/base/management/commands/update_jobs.py +++ b/snippets/base/management/commands/update_jobs.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timedelta from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand @@ -60,6 +60,23 @@ count_limit[limit] = jobs.count() count_total_completed += count_limit[limit] + # Disable Jobs that have Impression, Click or Block limits but don't + # have metrics data for at least 24h. This is to handle cases where the + # Metrics Pipeline is broken. + yesterday = datetime.utcnow() - timedelta(days=1) + jobs = (Job.objects + .filter(status=Job.PUBLISHED) + .exclude(limit_impressions=0, limit_clicks=0, limit_blocks=0) + .filter(metric_last_update__lt=yesterday)) + for job in jobs: + job.change_status( + status=Job.COMPLETED, + user=user, + reason=f'Premature termination due to missing metrics.', + ) + count_premature_termination = jobs.count() + count_total_completed += count_premature_termination + count_running = Job.objects.filter(status=Job.PUBLISHED).count() self.stdout.write( @@ -69,5 +86,6 @@ f' - Reached Impressions Limit: {count_limit["impressions"]}\n' f' - Reached Clicks Limit: {count_limit["clicks"]}\n' f' - Reached Blocks Limit: {count_limit["blocks"]}\n' + f' - Premature Termination due to missing metrics: {count_premature_termination}\n' f'Total Jobs Running: {count_running}\n' )
{"golden_diff": "diff --git a/snippets/base/management/commands/update_jobs.py b/snippets/base/management/commands/update_jobs.py\n--- a/snippets/base/management/commands/update_jobs.py\n+++ b/snippets/base/management/commands/update_jobs.py\n@@ -1,4 +1,4 @@\n-from datetime import datetime\n+from datetime import datetime, timedelta\n \n from django.contrib.auth import get_user_model\n from django.core.management.base import BaseCommand\n@@ -60,6 +60,23 @@\n count_limit[limit] = jobs.count()\n count_total_completed += count_limit[limit]\n \n+ # Disable Jobs that have Impression, Click or Block limits but don't\n+ # have metrics data for at least 24h. This is to handle cases where the\n+ # Metrics Pipeline is broken.\n+ yesterday = datetime.utcnow() - timedelta(days=1)\n+ jobs = (Job.objects\n+ .filter(status=Job.PUBLISHED)\n+ .exclude(limit_impressions=0, limit_clicks=0, limit_blocks=0)\n+ .filter(metric_last_update__lt=yesterday))\n+ for job in jobs:\n+ job.change_status(\n+ status=Job.COMPLETED,\n+ user=user,\n+ reason=f'Premature termination due to missing metrics.',\n+ )\n+ count_premature_termination = jobs.count()\n+ count_total_completed += count_premature_termination\n+\n count_running = Job.objects.filter(status=Job.PUBLISHED).count()\n \n self.stdout.write(\n@@ -69,5 +86,6 @@\n f' - Reached Impressions Limit: {count_limit[\"impressions\"]}\\n'\n f' - Reached Clicks Limit: {count_limit[\"clicks\"]}\\n'\n f' - Reached Blocks Limit: {count_limit[\"blocks\"]}\\n'\n+ f' - Premature Termination due to missing metrics: {count_premature_termination}\\n'\n f'Total Jobs Running: {count_running}\\n'\n )\n", "issue": "Create Freq Capping Goals for Jobs\nCreate Global Frequency Capping goals for Jobs (introduced in #1034). \r\n\r\nGoals:\r\n - Total impressions\r\n - Total clicks and button clicks (combined)\r\n - Total blocks\r\n\r\nA `Job` can have multiple goals (freq capping, datetime), first one reached will un-publish the `Job`.\r\n\r\nRequires #1036. Also `Job`s with Freq Cap goals must get data from Redash more often (TBD).\r\n\r\n## Use case:\r\n - Publish a Snippet for 1M impressions. It's fine it snippet gets 5K-10K impressions more.\r\n\r\n\r\n## Questions\r\n - [x] How do we handle Jobs with Freq Capping when no Telemetry data is available? \r\n - Jobs that have impression, block or click limits set and no metrics data updated for at least 24 hours will be marked Complete.\n", "before_files": [{"content": "from datetime import datetime\n\nfrom django.contrib.auth import get_user_model\nfrom django.core.management.base import BaseCommand\nfrom django.db import transaction\nfrom django.db.models import F, Q\n\nfrom snippets.base.models import Job\n\n\nclass Command(BaseCommand):\n args = \"(no args)\"\n help = \"Update Jobs\"\n\n @transaction.atomic\n def handle(self, *args, **options):\n now = datetime.utcnow()\n user = get_user_model().objects.get_or_create(username='snippets_bot')[0]\n count_total_completed = 0\n\n # Publish Scheduled Jobs with `publish_start` before now or without\n # publish_start.\n jobs = Job.objects.filter(status=Job.SCHEDULED).filter(\n Q(publish_start__lte=now) | Q(publish_start=None)\n )\n count_published = jobs.count()\n for job in jobs:\n job.change_status(\n status=Job.PUBLISHED,\n user=user,\n reason='Published start date reached.',\n )\n\n # Disable Published Jobs with `publish_end` before now.\n jobs = Job.objects.filter(status=Job.PUBLISHED, publish_end__lte=now)\n count_publication_end = jobs.count()\n count_total_completed += count_publication_end\n\n for job in jobs:\n job.change_status(\n status=Job.COMPLETED,\n user=user,\n reason='Publication end date reached.',\n )\n\n # Disable Jobs that reached Impression, Click or Block limits.\n count_limit = {}\n for limit in ['impressions', 'clicks', 'blocks']:\n jobs = (Job.objects\n .filter(status=Job.PUBLISHED)\n .exclude(**{f'limit_{limit}': 0})\n .filter(**{f'limit_{limit}__lte': F(f'metric_{limit}')}))\n for job in jobs:\n job.change_status(\n status=Job.COMPLETED,\n user=user,\n reason=f'Limit reached: {limit}.',\n )\n\n count_limit[limit] = jobs.count()\n count_total_completed += count_limit[limit]\n\n count_running = Job.objects.filter(status=Job.PUBLISHED).count()\n\n self.stdout.write(\n f'Jobs Published: {count_published}\\n'\n f'Jobs Completed: {count_total_completed}\\n'\n f' - Reached Publication End Date: {count_publication_end}\\n'\n f' - Reached Impressions Limit: {count_limit[\"impressions\"]}\\n'\n f' - Reached Clicks Limit: {count_limit[\"clicks\"]}\\n'\n f' - Reached Blocks Limit: {count_limit[\"blocks\"]}\\n'\n f'Total Jobs Running: {count_running}\\n'\n )\n", "path": "snippets/base/management/commands/update_jobs.py"}]}
1,456
441
gh_patches_debug_10443
rasdani/github-patches
git_diff
conan-io__conan-4096
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Conan Server 1.10 is broken #### Description Hi! I'm trying to start conan server but seems like is broken. $ conan_server > Traceback (most recent call last): > File "/opt/miniconda3/bin/conan_server", line 7, in <module> > from conans.conan_server import run > File "/opt/miniconda3/lib/python3.6/site-packages/conans/conan_server.py", line 1, in <module> > from conans.server.server_launcher import main > File "/opt/miniconda3/lib/python3.6/site-packages/conans/server/server_launcher.py", line 53, in <module> > launcher = ServerLauncher() > File "/opt/miniconda3/lib/python3.6/site-packages/conans/server/server_launcher.py", line 47, in __init__ server_capabilities, only_v1=False) > TypeError: __init__() got an unexpected keyword argument 'only_v1' #### Environment Conan Versions: 1.10 Python version: 3.6, 3.7 OS: Linux Ubuntu 18.04 #### Steps to reproduce pip3 install conan==1.10.0 conan_server or using Conan Docker Tools: BUILD_CONAN_SERVER_IMAGE=1 python build.py docker run -t -p 9300:9300 --name conan_server conanio/conan_server:latest - [X] I've read the [CONTRIBUTING guide](https://raw.githubusercontent.com/conan-io/conan/develop/.github/CONTRIBUTING.md). - [X] I've specified the Conan version, operating system version and any tool that can be relevant. - [X] I've explained the steps to reproduce the error or the motivation/use case of the question/suggestion. </issue> <code> [start of conans/server/server_launcher.py] 1 #!/usr/bin/python 2 import os 3 from conans.server.service.authorize import BasicAuthorizer, BasicAuthenticator 4 from conans.server.conf import get_server_store 5 from conans.server.rest.server import ConanServer 6 from conans.server.crypto.jwt.jwt_credentials_manager import JWTCredentialsManager 7 from conans.server.crypto.jwt.jwt_updown_manager import JWTUpDownAuthManager 8 from conans.server.conf import MIN_CLIENT_COMPATIBLE_VERSION 9 from conans.server.plugin_loader import load_authentication_plugin 10 from conans.model.version import Version 11 from conans.server.migrate import migrate_and_get_server_config 12 from conans import __version__ as SERVER_VERSION, REVISIONS 13 from conans.paths import conan_expand_user 14 from conans import SERVER_CAPABILITIES 15 16 17 class ServerLauncher(object): 18 def __init__(self): 19 user_folder = conan_expand_user("~") 20 server_folder = os.path.join(user_folder, '.conan_server') 21 22 server_config = migrate_and_get_server_config(user_folder) 23 custom_auth = server_config.custom_authenticator 24 if custom_auth: 25 authenticator = load_authentication_plugin(server_folder, custom_auth) 26 else: 27 authenticator = BasicAuthenticator(dict(server_config.users)) 28 29 authorizer = BasicAuthorizer(server_config.read_permissions, 30 server_config.write_permissions) 31 credentials_manager = JWTCredentialsManager(server_config.jwt_secret, 32 server_config.jwt_expire_time) 33 34 updown_auth_manager = JWTUpDownAuthManager(server_config.updown_secret, 35 server_config.authorize_timeout) 36 37 server_store = get_server_store(server_config.disk_storage_path, 38 server_config.public_url, 39 updown_auth_manager=updown_auth_manager) 40 41 server_capabilities = SERVER_CAPABILITIES 42 server_capabilities.append(REVISIONS) 43 44 self.ra = ConanServer(server_config.port, credentials_manager, updown_auth_manager, 45 authorizer, authenticator, server_store, 46 Version(SERVER_VERSION), Version(MIN_CLIENT_COMPATIBLE_VERSION), 47 server_capabilities, only_v1=False) 48 49 def launch(self): 50 self.ra.run(host="0.0.0.0") 51 52 53 launcher = ServerLauncher() 54 app = launcher.ra.root_app 55 56 57 def main(*args): 58 launcher.launch() 59 60 61 if __name__ == "__main__": 62 main() 63 [end of conans/server/server_launcher.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/conans/server/server_launcher.py b/conans/server/server_launcher.py --- a/conans/server/server_launcher.py +++ b/conans/server/server_launcher.py @@ -44,7 +44,7 @@ self.ra = ConanServer(server_config.port, credentials_manager, updown_auth_manager, authorizer, authenticator, server_store, Version(SERVER_VERSION), Version(MIN_CLIENT_COMPATIBLE_VERSION), - server_capabilities, only_v1=False) + server_capabilities) def launch(self): self.ra.run(host="0.0.0.0")
{"golden_diff": "diff --git a/conans/server/server_launcher.py b/conans/server/server_launcher.py\n--- a/conans/server/server_launcher.py\n+++ b/conans/server/server_launcher.py\n@@ -44,7 +44,7 @@\n self.ra = ConanServer(server_config.port, credentials_manager, updown_auth_manager,\n authorizer, authenticator, server_store,\n Version(SERVER_VERSION), Version(MIN_CLIENT_COMPATIBLE_VERSION),\n- server_capabilities, only_v1=False)\n+ server_capabilities)\n \n def launch(self):\n self.ra.run(host=\"0.0.0.0\")\n", "issue": "Conan Server 1.10 is broken\n#### Description\r\nHi!\r\n\r\nI'm trying to start conan server but seems like is broken.\r\n\r\n $ conan_server\r\n\r\n> Traceback (most recent call last):\r\n> File \"/opt/miniconda3/bin/conan_server\", line 7, in <module>\r\n> from conans.conan_server import run\r\n> File \"/opt/miniconda3/lib/python3.6/site-packages/conans/conan_server.py\", line 1, in <module>\r\n> from conans.server.server_launcher import main\r\n> File \"/opt/miniconda3/lib/python3.6/site-packages/conans/server/server_launcher.py\", line 53, in <module>\r\n> launcher = ServerLauncher()\r\n> File \"/opt/miniconda3/lib/python3.6/site-packages/conans/server/server_launcher.py\", line 47, in __init__\r\n server_capabilities, only_v1=False)\r\n> TypeError: __init__() got an unexpected keyword argument 'only_v1'\r\n\r\n#### Environment\r\nConan Versions: 1.10\r\nPython version: 3.6, 3.7\r\nOS: Linux Ubuntu 18.04\r\n\r\n#### Steps to reproduce\r\n pip3 install conan==1.10.0\r\n conan_server\r\n\r\nor using Conan Docker Tools:\r\n\r\n BUILD_CONAN_SERVER_IMAGE=1 python build.py\r\n docker run -t -p 9300:9300 --name conan_server conanio/conan_server:latest \r\n\r\n\r\n- [X] I've read the [CONTRIBUTING guide](https://raw.githubusercontent.com/conan-io/conan/develop/.github/CONTRIBUTING.md).\r\n- [X] I've specified the Conan version, operating system version and any tool that can be relevant.\r\n- [X] I've explained the steps to reproduce the error or the motivation/use case of the question/suggestion.\r\n\r\n\n", "before_files": [{"content": "#!/usr/bin/python\nimport os\nfrom conans.server.service.authorize import BasicAuthorizer, BasicAuthenticator\nfrom conans.server.conf import get_server_store\nfrom conans.server.rest.server import ConanServer\nfrom conans.server.crypto.jwt.jwt_credentials_manager import JWTCredentialsManager\nfrom conans.server.crypto.jwt.jwt_updown_manager import JWTUpDownAuthManager\nfrom conans.server.conf import MIN_CLIENT_COMPATIBLE_VERSION\nfrom conans.server.plugin_loader import load_authentication_plugin\nfrom conans.model.version import Version\nfrom conans.server.migrate import migrate_and_get_server_config\nfrom conans import __version__ as SERVER_VERSION, REVISIONS\nfrom conans.paths import conan_expand_user\nfrom conans import SERVER_CAPABILITIES\n\n\nclass ServerLauncher(object):\n def __init__(self):\n user_folder = conan_expand_user(\"~\")\n server_folder = os.path.join(user_folder, '.conan_server')\n\n server_config = migrate_and_get_server_config(user_folder)\n custom_auth = server_config.custom_authenticator\n if custom_auth:\n authenticator = load_authentication_plugin(server_folder, custom_auth)\n else:\n authenticator = BasicAuthenticator(dict(server_config.users))\n\n authorizer = BasicAuthorizer(server_config.read_permissions,\n server_config.write_permissions)\n credentials_manager = JWTCredentialsManager(server_config.jwt_secret,\n server_config.jwt_expire_time)\n\n updown_auth_manager = JWTUpDownAuthManager(server_config.updown_secret,\n server_config.authorize_timeout)\n\n server_store = get_server_store(server_config.disk_storage_path,\n server_config.public_url,\n updown_auth_manager=updown_auth_manager)\n\n server_capabilities = SERVER_CAPABILITIES\n server_capabilities.append(REVISIONS)\n\n self.ra = ConanServer(server_config.port, credentials_manager, updown_auth_manager,\n authorizer, authenticator, server_store,\n Version(SERVER_VERSION), Version(MIN_CLIENT_COMPATIBLE_VERSION),\n server_capabilities, only_v1=False)\n\n def launch(self):\n self.ra.run(host=\"0.0.0.0\")\n\n\nlauncher = ServerLauncher()\napp = launcher.ra.root_app\n\n\ndef main(*args):\n launcher.launch()\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "conans/server/server_launcher.py"}]}
1,537
126
gh_patches_debug_9464
rasdani/github-patches
git_diff
Mailu__Mailu-1358
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Add ability to specify SMTP port Some relayed domains may live on machines that accept SMTP on a nonstandard port, e.g., port 2525. This would allow users to relay for domains by appending `:port` to the hostname or IP address. Add ability to specify SMTP port Some relayed domains may live on machines that accept SMTP on a nonstandard port, e.g., port 2525. This would allow users to relay for domains by appending `:port` to the hostname or IP address. </issue> <code> [start of core/admin/mailu/internal/views/postfix.py] 1 from mailu import models 2 from mailu.internal import internal 3 4 import flask 5 import re 6 7 8 @internal.route("/postfix/domain/<domain_name>") 9 def postfix_mailbox_domain(domain_name): 10 if re.match("^\[.*\]$", domain_name): 11 return flask.abort(404) 12 domain = models.Domain.query.get(domain_name) or \ 13 models.Alternative.query.get(domain_name) or \ 14 flask.abort(404) 15 return flask.jsonify(domain.name) 16 17 18 @internal.route("/postfix/mailbox/<path:email>") 19 def postfix_mailbox_map(email): 20 user = models.User.query.get(email) or flask.abort(404) 21 return flask.jsonify(user.email) 22 23 24 @internal.route("/postfix/alias/<path:alias>") 25 def postfix_alias_map(alias): 26 localpart, domain_name = models.Email.resolve_domain(alias) 27 if localpart is None: 28 return flask.jsonify(domain_name) 29 destination = models.Email.resolve_destination(localpart, domain_name) 30 return flask.jsonify(",".join(destination)) if destination else flask.abort(404) 31 32 33 @internal.route("/postfix/transport/<path:email>") 34 def postfix_transport(email): 35 if email == '*' or re.match("(^|.*@)\[.*\]$", email): 36 return flask.abort(404) 37 localpart, domain_name = models.Email.resolve_domain(email) 38 relay = models.Relay.query.get(domain_name) or flask.abort(404) 39 return flask.jsonify("smtp:[{}]".format(relay.smtp)) 40 41 42 @internal.route("/postfix/sender/login/<path:sender>") 43 def postfix_sender_login(sender): 44 localpart, domain_name = models.Email.resolve_domain(sender) 45 if localpart is None: 46 return flask.abort(404) 47 destination = models.Email.resolve_destination(localpart, domain_name, True) 48 return flask.jsonify(",".join(destination)) if destination else flask.abort(404) 49 50 51 @internal.route("/postfix/sender/access/<path:sender>") 52 def postfix_sender_access(sender): 53 """ Simply reject any sender that pretends to be from a local domain 54 """ 55 if not is_void_address(sender): 56 localpart, domain_name = models.Email.resolve_domain(sender) 57 return flask.jsonify("REJECT") if models.Domain.query.get(domain_name) else flask.abort(404) 58 else: 59 return flask.abort(404) 60 61 62 def is_void_address(email): 63 '''True if the email is void (null) email address. 64 ''' 65 if email.startswith('<') and email.endswith('>'): 66 email = email[1:-1] 67 # Some MTAs use things like '<MAILER-DAEMON>' instead of '<>'; so let's 68 # consider void any such thing. 69 return '@' not in email 70 [end of core/admin/mailu/internal/views/postfix.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/core/admin/mailu/internal/views/postfix.py b/core/admin/mailu/internal/views/postfix.py --- a/core/admin/mailu/internal/views/postfix.py +++ b/core/admin/mailu/internal/views/postfix.py @@ -36,7 +36,11 @@ return flask.abort(404) localpart, domain_name = models.Email.resolve_domain(email) relay = models.Relay.query.get(domain_name) or flask.abort(404) - return flask.jsonify("smtp:[{}]".format(relay.smtp)) + ret = "smtp:[{0}]".format(relay.smtp) + if ":" in relay.smtp: + split = relay.smtp.split(':') + ret = "smtp:[{0}]:{1}".format(split[0], split[1]) + return flask.jsonify(ret) @internal.route("/postfix/sender/login/<path:sender>")
{"golden_diff": "diff --git a/core/admin/mailu/internal/views/postfix.py b/core/admin/mailu/internal/views/postfix.py\n--- a/core/admin/mailu/internal/views/postfix.py\n+++ b/core/admin/mailu/internal/views/postfix.py\n@@ -36,7 +36,11 @@\n return flask.abort(404)\n localpart, domain_name = models.Email.resolve_domain(email)\n relay = models.Relay.query.get(domain_name) or flask.abort(404)\n- return flask.jsonify(\"smtp:[{}]\".format(relay.smtp))\n+ ret = \"smtp:[{0}]\".format(relay.smtp)\n+ if \":\" in relay.smtp:\n+ split = relay.smtp.split(':')\n+ ret = \"smtp:[{0}]:{1}\".format(split[0], split[1])\n+ return flask.jsonify(ret)\n \n \n @internal.route(\"/postfix/sender/login/<path:sender>\")\n", "issue": "Add ability to specify SMTP port\nSome relayed domains may live on machines that accept SMTP on a nonstandard port, e.g., port 2525. This would allow users to relay for domains by appending `:port` to the hostname or IP address.\nAdd ability to specify SMTP port\nSome relayed domains may live on machines that accept SMTP on a nonstandard port, e.g., port 2525. This would allow users to relay for domains by appending `:port` to the hostname or IP address.\n", "before_files": [{"content": "from mailu import models\nfrom mailu.internal import internal\n\nimport flask\nimport re\n\n\[email protected](\"/postfix/domain/<domain_name>\")\ndef postfix_mailbox_domain(domain_name):\n if re.match(\"^\\[.*\\]$\", domain_name):\n return flask.abort(404)\n domain = models.Domain.query.get(domain_name) or \\\n models.Alternative.query.get(domain_name) or \\\n flask.abort(404)\n return flask.jsonify(domain.name)\n\n\[email protected](\"/postfix/mailbox/<path:email>\")\ndef postfix_mailbox_map(email):\n user = models.User.query.get(email) or flask.abort(404)\n return flask.jsonify(user.email)\n\n\[email protected](\"/postfix/alias/<path:alias>\")\ndef postfix_alias_map(alias):\n localpart, domain_name = models.Email.resolve_domain(alias)\n if localpart is None:\n return flask.jsonify(domain_name)\n destination = models.Email.resolve_destination(localpart, domain_name)\n return flask.jsonify(\",\".join(destination)) if destination else flask.abort(404)\n\n\[email protected](\"/postfix/transport/<path:email>\")\ndef postfix_transport(email):\n if email == '*' or re.match(\"(^|.*@)\\[.*\\]$\", email):\n return flask.abort(404)\n localpart, domain_name = models.Email.resolve_domain(email)\n relay = models.Relay.query.get(domain_name) or flask.abort(404)\n return flask.jsonify(\"smtp:[{}]\".format(relay.smtp))\n\n\[email protected](\"/postfix/sender/login/<path:sender>\")\ndef postfix_sender_login(sender):\n localpart, domain_name = models.Email.resolve_domain(sender)\n if localpart is None:\n return flask.abort(404)\n destination = models.Email.resolve_destination(localpart, domain_name, True)\n return flask.jsonify(\",\".join(destination)) if destination else flask.abort(404)\n\n\[email protected](\"/postfix/sender/access/<path:sender>\")\ndef postfix_sender_access(sender):\n \"\"\" Simply reject any sender that pretends to be from a local domain\n \"\"\"\n if not is_void_address(sender):\n localpart, domain_name = models.Email.resolve_domain(sender)\n return flask.jsonify(\"REJECT\") if models.Domain.query.get(domain_name) else flask.abort(404)\n else:\n return flask.abort(404)\n\n\ndef is_void_address(email):\n '''True if the email is void (null) email address.\n '''\n if email.startswith('<') and email.endswith('>'):\n email = email[1:-1]\n # Some MTAs use things like '<MAILER-DAEMON>' instead of '<>'; so let's\n # consider void any such thing.\n return '@' not in email\n", "path": "core/admin/mailu/internal/views/postfix.py"}]}
1,382
198
gh_patches_debug_27614
rasdani/github-patches
git_diff
plone__Products.CMFPlone-2326
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Some Errors cannot render their exceptions during the creation of portal This is a follow-up for #2203. A exception of the following type cannot rendered in Plone 5.2: ``` ValueError Traceback (innermost last): Module ZServer.ZPublisher.Publish, line 144, in publish Module ZPublisher.mapply, line 85, in mapply Module Products.PDBDebugMode.runcall, line 68, in pdb_runcall Module ZServer.ZPublisher.Publish, line 44, in call_object Module Products.CMFPlone.browser.admin, line 271, in __call__ Module Products.CMFPlone.factory, line 146, in addPloneSite Module Products.GenericSetup.tool, line 388, in runAllImportStepsFromProfile __traceback_info__: profile-Products.CMFPlone:plone Module Products.GenericSetup.tool, line 1433, in _runImportStepsFromContext Module Products.GenericSetup.tool, line 1245, in _doRunImportStep __traceback_info__: rolemap Module Products.GenericSetup.rolemap, line 102, in importRolemap Module <string>, line 8, in manage_permission Module AccessControl.requestmethod, line 79, in _curried Module OFS.role, line 86, in manage_permission Module AccessControl.rolemanager, line 224, in manage_permission ValueError: The permission <em>Use Database Methods</em> is invalid. ``` The issue is that the portal and the main_template exist during rendering of ExceptionView but the main_template cannot be rendered because the context of the exception is not the Plone site but the Application. Thus the exception-template cannot be rendered properly. To fix that i suggest we try to render main_template and switch to the basic_template when that fails. </issue> <code> [start of Products/CMFPlone/browser/exceptions.py] 1 # -*- coding: utf-8 -*- 2 from AccessControl import getSecurityManager 3 from Products.Five import BrowserView 4 from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile 5 from zExceptions.ExceptionFormatter import format_exception 6 from zope.component.hooks import getSite 7 8 import json 9 import sys 10 11 12 class ExceptionView(BrowserView): 13 basic_template = ViewPageTemplateFile('templates/basic_error_message.pt') 14 15 def is_manager(self): 16 return getSecurityManager().checkPermission( 17 'Manage portal', self.context) 18 19 def __call__(self): 20 exception = self.context 21 self.context = self.__parent__ 22 request = self.request 23 24 error_type = exception.__class__.__name__ 25 exc_type, value, traceback = sys.exc_info() 26 error_tb = ''.join( 27 format_exception(exc_type, value, traceback, as_html=True)) 28 request.response.setStatus(exc_type) 29 30 # Indicate exception as JSON 31 if "text/html" not in request.getHeader('Accept', ''): 32 request.response.setHeader("Content-Type", "application/json") 33 return json.dumps({ 34 'error_type': error_type, 35 }) 36 37 if getSite() is None: 38 # We cannot get the site, so we cannot render our nice template 39 template = self.basic_template 40 else: 41 # Use a simplified template if main_template is not available 42 try: 43 self.context.unrestrictedTraverse('main_template') 44 except: 45 template = self.basic_template 46 else: 47 template = self.index 48 49 # Render page with user-facing error notice 50 request.set('disable_border', True) 51 request.set('disable_plone.leftcolumn', True) 52 request.set('disable_plone.rightcolumn', True) 53 54 return template( 55 error_type=error_type, 56 error_tb=error_tb, 57 ) 58 [end of Products/CMFPlone/browser/exceptions.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/Products/CMFPlone/browser/exceptions.py b/Products/CMFPlone/browser/exceptions.py --- a/Products/CMFPlone/browser/exceptions.py +++ b/Products/CMFPlone/browser/exceptions.py @@ -3,7 +3,6 @@ from Products.Five import BrowserView from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile from zExceptions.ExceptionFormatter import format_exception -from zope.component.hooks import getSite import json import sys @@ -34,24 +33,16 @@ 'error_type': error_type, }) - if getSite() is None: - # We cannot get the site, so we cannot render our nice template - template = self.basic_template - else: - # Use a simplified template if main_template is not available - try: - self.context.unrestrictedTraverse('main_template') - except: - template = self.basic_template - else: - template = self.index - # Render page with user-facing error notice request.set('disable_border', True) request.set('disable_plone.leftcolumn', True) request.set('disable_plone.rightcolumn', True) - return template( - error_type=error_type, - error_tb=error_tb, - ) + try: + return self.index( + error_type=error_type, + error_tb=error_tb) + except: + return self.basic_template( + error_type=error_type, + error_tb=error_tb)
{"golden_diff": "diff --git a/Products/CMFPlone/browser/exceptions.py b/Products/CMFPlone/browser/exceptions.py\n--- a/Products/CMFPlone/browser/exceptions.py\n+++ b/Products/CMFPlone/browser/exceptions.py\n@@ -3,7 +3,6 @@\n from Products.Five import BrowserView\n from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile\n from zExceptions.ExceptionFormatter import format_exception\n-from zope.component.hooks import getSite\n \n import json\n import sys\n@@ -34,24 +33,16 @@\n 'error_type': error_type,\n })\n \n- if getSite() is None:\n- # We cannot get the site, so we cannot render our nice template\n- template = self.basic_template\n- else:\n- # Use a simplified template if main_template is not available\n- try:\n- self.context.unrestrictedTraverse('main_template')\n- except:\n- template = self.basic_template\n- else:\n- template = self.index\n-\n # Render page with user-facing error notice\n request.set('disable_border', True)\n request.set('disable_plone.leftcolumn', True)\n request.set('disable_plone.rightcolumn', True)\n \n- return template(\n- error_type=error_type,\n- error_tb=error_tb,\n- )\n+ try:\n+ return self.index(\n+ error_type=error_type,\n+ error_tb=error_tb)\n+ except:\n+ return self.basic_template(\n+ error_type=error_type,\n+ error_tb=error_tb)\n", "issue": "Some Errors cannot render their exceptions during the creation of portal\nThis is a follow-up for #2203.\r\n\r\nA exception of the following type cannot rendered in Plone 5.2:\r\n\r\n```\r\nValueError\r\n\r\nTraceback (innermost last):\r\n\r\n Module ZServer.ZPublisher.Publish, line 144, in publish\r\n Module ZPublisher.mapply, line 85, in mapply\r\n Module Products.PDBDebugMode.runcall, line 68, in pdb_runcall\r\n Module ZServer.ZPublisher.Publish, line 44, in call_object\r\n Module Products.CMFPlone.browser.admin, line 271, in __call__\r\n Module Products.CMFPlone.factory, line 146, in addPloneSite\r\n Module Products.GenericSetup.tool, line 388, in runAllImportStepsFromProfile\r\n __traceback_info__: profile-Products.CMFPlone:plone\r\n Module Products.GenericSetup.tool, line 1433, in _runImportStepsFromContext\r\n Module Products.GenericSetup.tool, line 1245, in _doRunImportStep\r\n __traceback_info__: rolemap\r\n Module Products.GenericSetup.rolemap, line 102, in importRolemap\r\n Module <string>, line 8, in manage_permission\r\n Module AccessControl.requestmethod, line 79, in _curried\r\n Module OFS.role, line 86, in manage_permission\r\n Module AccessControl.rolemanager, line 224, in manage_permission\r\n\r\nValueError: The permission <em>Use Database Methods</em> is invalid. \r\n```\r\n\r\nThe issue is that the portal and the main_template exist during rendering of ExceptionView but the main_template cannot be rendered because the context of the exception is not the Plone site but the Application. Thus the exception-template cannot be rendered properly. To fix that i suggest we try to render main_template and switch to the basic_template when that fails.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom AccessControl import getSecurityManager\nfrom Products.Five import BrowserView\nfrom Products.Five.browser.pagetemplatefile import ViewPageTemplateFile\nfrom zExceptions.ExceptionFormatter import format_exception\nfrom zope.component.hooks import getSite\n\nimport json\nimport sys\n\n\nclass ExceptionView(BrowserView):\n basic_template = ViewPageTemplateFile('templates/basic_error_message.pt')\n\n def is_manager(self):\n return getSecurityManager().checkPermission(\n 'Manage portal', self.context)\n\n def __call__(self):\n exception = self.context\n self.context = self.__parent__\n request = self.request\n\n error_type = exception.__class__.__name__\n exc_type, value, traceback = sys.exc_info()\n error_tb = ''.join(\n format_exception(exc_type, value, traceback, as_html=True))\n request.response.setStatus(exc_type)\n\n # Indicate exception as JSON\n if \"text/html\" not in request.getHeader('Accept', ''):\n request.response.setHeader(\"Content-Type\", \"application/json\")\n return json.dumps({\n 'error_type': error_type,\n })\n\n if getSite() is None:\n # We cannot get the site, so we cannot render our nice template\n template = self.basic_template\n else:\n # Use a simplified template if main_template is not available\n try:\n self.context.unrestrictedTraverse('main_template')\n except:\n template = self.basic_template\n else:\n template = self.index\n\n # Render page with user-facing error notice\n request.set('disable_border', True)\n request.set('disable_plone.leftcolumn', True)\n request.set('disable_plone.rightcolumn', True)\n\n return template(\n error_type=error_type,\n error_tb=error_tb,\n )\n", "path": "Products/CMFPlone/browser/exceptions.py"}]}
1,467
353
gh_patches_debug_2405
rasdani/github-patches
git_diff
googleapis__google-api-python-client-1221
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Published package is missing discovery files in discovery_cache Many thanks to the @wyk9787 for noticing this and reaching out. All calls to `discovery.build()` using `2.0.0` fail with "unknown api name or version". ```python from googleapiclient import discovery client = discovery.build("cloudprofiler", "v2") ``` This is because the published package has no `discovery_cache/documents` directory. 1. `python3 -m venv env` 2. `source env/bin/activate` 3. `python3 -m pip install google-api-python-client` 4. `ls env/lib/python*/site-packages/googleapiclient/discovery_cache` ``` busunkim@busunkim:~/github$ ls env/lib/python*/site-packages/googleapiclient/discovery_cache appengine_memcache.py base.py file_cache.py __init__.py __pycache__ ``` </issue> <code> [start of setup.py] 1 # Copyright 2014 Google Inc. All Rights Reserved. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 """Setup script for Google API Python client. 16 17 Also installs included versions of third party libraries, if those libraries 18 are not already installed. 19 """ 20 from __future__ import print_function 21 22 import sys 23 24 if sys.version_info < (3, 6): 25 print("google-api-python-client requires python3 version >= 3.6.", file=sys.stderr) 26 sys.exit(1) 27 28 import io 29 import os 30 from setuptools import setup 31 32 packages = ["apiclient", "googleapiclient", "googleapiclient/discovery_cache"] 33 34 install_requires = [ 35 "httplib2>=0.15.0,<1dev", 36 "google-auth>=1.16.0,<2dev", 37 "google-auth-httplib2>=0.0.3", 38 "google-api-core>=1.21.0,<2dev", 39 "six>=1.13.0,<2dev", 40 "uritemplate>=3.0.0,<4dev", 41 ] 42 43 package_root = os.path.abspath(os.path.dirname(__file__)) 44 45 readme_filename = os.path.join(package_root, "README.md") 46 with io.open(readme_filename, encoding="utf-8") as readme_file: 47 readme = readme_file.read() 48 49 version = "2.0.1" 50 51 setup( 52 name="google-api-python-client", 53 version=version, 54 description="Google API Client Library for Python", 55 long_description=readme, 56 long_description_content_type='text/markdown', 57 author="Google LLC", 58 author_email="[email protected]", 59 url="https://github.com/googleapis/google-api-python-client/", 60 install_requires=install_requires, 61 python_requires=">=3.6", 62 packages=packages, 63 package_data={}, 64 license="Apache 2.0", 65 keywords="google api client", 66 classifiers=[ 67 "Programming Language :: Python :: 3", 68 "Programming Language :: Python :: 3.6", 69 "Programming Language :: Python :: 3.7", 70 "Programming Language :: Python :: 3.8", 71 "Programming Language :: Python :: 3.9", 72 "Development Status :: 5 - Production/Stable", 73 "Intended Audience :: Developers", 74 "License :: OSI Approved :: Apache Software License", 75 "Operating System :: OS Independent", 76 "Topic :: Internet :: WWW/HTTP", 77 ], 78 ) 79 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -60,7 +60,7 @@ install_requires=install_requires, python_requires=">=3.6", packages=packages, - package_data={}, + package_data={"googleapiclient": ["discovery_cache/documents/*.json"]}, license="Apache 2.0", keywords="google api client", classifiers=[
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -60,7 +60,7 @@\n install_requires=install_requires,\n python_requires=\">=3.6\",\n packages=packages,\n- package_data={},\n+ package_data={\"googleapiclient\": [\"discovery_cache/documents/*.json\"]},\n license=\"Apache 2.0\",\n keywords=\"google api client\",\n classifiers=[\n", "issue": "Published package is missing discovery files in discovery_cache\nMany thanks to the @wyk9787 for noticing this and reaching out.\r\n\r\nAll calls to `discovery.build()` using `2.0.0` fail with \"unknown api name or version\".\r\n\r\n```python\r\nfrom googleapiclient import discovery\r\n\r\nclient = discovery.build(\"cloudprofiler\", \"v2\")\r\n```\r\n\r\n\r\nThis is because the published package has no `discovery_cache/documents` directory. \r\n\r\n1. `python3 -m venv env`\r\n2. `source env/bin/activate`\r\n3. `python3 -m pip install google-api-python-client`\r\n4. `ls env/lib/python*/site-packages/googleapiclient/discovery_cache`\r\n\r\n```\r\nbusunkim@busunkim:~/github$ ls env/lib/python*/site-packages/googleapiclient/discovery_cache\r\nappengine_memcache.py base.py file_cache.py __init__.py __pycache__\r\n```\r\n\r\n\n", "before_files": [{"content": "# Copyright 2014 Google Inc. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Setup script for Google API Python client.\n\nAlso installs included versions of third party libraries, if those libraries\nare not already installed.\n\"\"\"\nfrom __future__ import print_function\n\nimport sys\n\nif sys.version_info < (3, 6):\n print(\"google-api-python-client requires python3 version >= 3.6.\", file=sys.stderr)\n sys.exit(1)\n\nimport io\nimport os\nfrom setuptools import setup\n\npackages = [\"apiclient\", \"googleapiclient\", \"googleapiclient/discovery_cache\"]\n\ninstall_requires = [\n \"httplib2>=0.15.0,<1dev\",\n \"google-auth>=1.16.0,<2dev\",\n \"google-auth-httplib2>=0.0.3\",\n \"google-api-core>=1.21.0,<2dev\",\n \"six>=1.13.0,<2dev\",\n \"uritemplate>=3.0.0,<4dev\",\n]\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, \"README.md\")\nwith io.open(readme_filename, encoding=\"utf-8\") as readme_file:\n readme = readme_file.read()\n\nversion = \"2.0.1\"\n\nsetup(\n name=\"google-api-python-client\",\n version=version,\n description=\"Google API Client Library for Python\",\n long_description=readme,\n long_description_content_type='text/markdown',\n author=\"Google LLC\",\n author_email=\"[email protected]\",\n url=\"https://github.com/googleapis/google-api-python-client/\",\n install_requires=install_requires,\n python_requires=\">=3.6\",\n packages=packages,\n package_data={},\n license=\"Apache 2.0\",\n keywords=\"google api client\",\n classifiers=[\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet :: WWW/HTTP\",\n ],\n)\n", "path": "setup.py"}]}
1,519
98
gh_patches_debug_9729
rasdani/github-patches
git_diff
gratipay__gratipay.com-1494
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> can't stay logged in from @amaltson via [Twitter](https://twitter.com/amaltson/status/332678066985893889): > trying to log in with GitHub, each time I go to my account page, I get logged out.. </issue> <code> [start of gittip/security/authentication.py] 1 """Defines website authentication helpers. 2 """ 3 import rfc822 4 import time 5 6 from aspen import Response 7 from gittip.security import csrf 8 from gittip.security.user import User 9 10 11 BEGINNING_OF_EPOCH = rfc822.formatdate(0) 12 TIMEOUT = 60 * 60 * 24 * 7 # one week 13 ROLES = ['anonymous', 'authenticated', 'owner', 'admin'] 14 ROLES_SHOULD_BE = "It should be one of: {}.".format(', '.join(ROLES)) 15 16 17 class NoMinimumRoleSpecified(Exception): 18 def __str__(self): 19 return "There is no minimum_role specified in the simplate at {}. {}" \ 20 .format(self.args[0], ROLES_SHOULD_BE) 21 22 class BadMinimumRole(Exception): 23 def __str__(self): 24 return "The minimum_role specific in {} is bad: {}. {}" \ 25 .format(self.args[0], self.args[1], ROLES_SHOULD_BE) 26 27 28 def inbound(request): 29 """Authenticate from a cookie or an API key in basic auth. 30 """ 31 user = None 32 if 'Authorization' in request.headers: 33 header = request.headers['authorization'] 34 if header.startswith('Basic '): 35 creds = header[len('Basic '):].decode('base64') 36 token, ignored = creds.split(':') 37 user = User.from_api_key(token) 38 39 # We don't require CSRF if they basically authenticated. 40 csrf_token = csrf._get_new_csrf_key() 41 request.headers.cookie['csrf_token'] = csrf_token 42 request.headers['X-CSRF-TOKEN'] = csrf_token 43 if 'Referer' not in request.headers: 44 request.headers['Referer'] = \ 45 'https://%s/' % csrf._get_host(request) 46 elif 'session' in request.headers.cookie: 47 token = request.headers.cookie['session'].value 48 user = User.from_session_token(token) 49 50 if user is None: 51 user = User() 52 request.context['user'] = user 53 54 55 def check_role(request): 56 """Given a request object, possibly raise Response(403). 57 """ 58 59 # XXX We can't use this yet because we don't have an inbound Aspen hook 60 # that fires after the first page of the simplate is exec'd. 61 62 context = request.context 63 path = request.line.uri.path 64 65 if 'minimum_role' not in context: 66 raise NoMinimumRoleSpecified(request.fs) 67 68 minimum_role = context['minimum_role'] 69 if minimum_role not in ROLES: 70 raise BadMinimumRole(request.fs, minimum_role) 71 72 user = context['user'] 73 highest_role = user.get_highest_role(path.get('username', None)) 74 if ROLES.index(highest_role) < ROLES.index(minimum_role): 75 request.redirect('..') 76 77 78 def outbound(response): 79 if 'user' in response.request.context: 80 user = response.request.context['user'] 81 if not isinstance(user, User): 82 raise Response(400, "If you define 'user' in a simplate it has to " 83 "be a User instance.") 84 else: 85 user = User() 86 87 if user.ANON: # user is anonymous 88 if 'session' not in response.request.headers.cookie: 89 # no cookie in the request, don't set one on response 90 return 91 else: 92 # expired cookie in the request, instruct browser to delete it 93 response.headers.cookie['session'] = '' 94 expires = 0 95 else: # user is authenticated 96 response.headers['Expires'] = BEGINNING_OF_EPOCH # don't cache 97 response.headers.cookie['session'] = user.participant.session_token 98 expires = time.time() + TIMEOUT 99 user.keep_signed_in_until(expires) 100 101 cookie = response.headers.cookie['session'] 102 # I am not setting domain, because it is supposed to default to what we 103 # want: the domain of the object requested. 104 #cookie['domain'] 105 cookie['path'] = '/' 106 cookie['expires'] = rfc822.formatdate(expires) 107 cookie['httponly'] = "Yes, please." 108 [end of gittip/security/authentication.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/gittip/security/authentication.py b/gittip/security/authentication.py --- a/gittip/security/authentication.py +++ b/gittip/security/authentication.py @@ -3,6 +3,7 @@ import rfc822 import time +import gittip from aspen import Response from gittip.security import csrf from gittip.security.user import User @@ -105,3 +106,5 @@ cookie['path'] = '/' cookie['expires'] = rfc822.formatdate(expires) cookie['httponly'] = "Yes, please." + if gittip.canonical_scheme == 'https': + cookie['secure'] = "Yes, please."
{"golden_diff": "diff --git a/gittip/security/authentication.py b/gittip/security/authentication.py\n--- a/gittip/security/authentication.py\n+++ b/gittip/security/authentication.py\n@@ -3,6 +3,7 @@\n import rfc822\n import time\n \n+import gittip\n from aspen import Response\n from gittip.security import csrf\n from gittip.security.user import User\n@@ -105,3 +106,5 @@\n cookie['path'] = '/'\n cookie['expires'] = rfc822.formatdate(expires)\n cookie['httponly'] = \"Yes, please.\"\n+ if gittip.canonical_scheme == 'https':\n+ cookie['secure'] = \"Yes, please.\"\n", "issue": "can't stay logged in\nfrom @amaltson via [Twitter](https://twitter.com/amaltson/status/332678066985893889):\n\n> trying to log in with GitHub, each time I go to my account page, I get logged out.. \n\n", "before_files": [{"content": "\"\"\"Defines website authentication helpers.\n\"\"\"\nimport rfc822\nimport time\n\nfrom aspen import Response\nfrom gittip.security import csrf\nfrom gittip.security.user import User\n\n\nBEGINNING_OF_EPOCH = rfc822.formatdate(0)\nTIMEOUT = 60 * 60 * 24 * 7 # one week\nROLES = ['anonymous', 'authenticated', 'owner', 'admin']\nROLES_SHOULD_BE = \"It should be one of: {}.\".format(', '.join(ROLES))\n\n\nclass NoMinimumRoleSpecified(Exception):\n def __str__(self):\n return \"There is no minimum_role specified in the simplate at {}. {}\" \\\n .format(self.args[0], ROLES_SHOULD_BE)\n\nclass BadMinimumRole(Exception):\n def __str__(self):\n return \"The minimum_role specific in {} is bad: {}. {}\" \\\n .format(self.args[0], self.args[1], ROLES_SHOULD_BE)\n\n\ndef inbound(request):\n \"\"\"Authenticate from a cookie or an API key in basic auth.\n \"\"\"\n user = None\n if 'Authorization' in request.headers:\n header = request.headers['authorization']\n if header.startswith('Basic '):\n creds = header[len('Basic '):].decode('base64')\n token, ignored = creds.split(':')\n user = User.from_api_key(token)\n\n # We don't require CSRF if they basically authenticated.\n csrf_token = csrf._get_new_csrf_key()\n request.headers.cookie['csrf_token'] = csrf_token\n request.headers['X-CSRF-TOKEN'] = csrf_token\n if 'Referer' not in request.headers:\n request.headers['Referer'] = \\\n 'https://%s/' % csrf._get_host(request)\n elif 'session' in request.headers.cookie:\n token = request.headers.cookie['session'].value\n user = User.from_session_token(token)\n\n if user is None:\n user = User()\n request.context['user'] = user\n\n\ndef check_role(request):\n \"\"\"Given a request object, possibly raise Response(403).\n \"\"\"\n\n # XXX We can't use this yet because we don't have an inbound Aspen hook\n # that fires after the first page of the simplate is exec'd.\n\n context = request.context\n path = request.line.uri.path\n\n if 'minimum_role' not in context:\n raise NoMinimumRoleSpecified(request.fs)\n\n minimum_role = context['minimum_role']\n if minimum_role not in ROLES:\n raise BadMinimumRole(request.fs, minimum_role)\n\n user = context['user']\n highest_role = user.get_highest_role(path.get('username', None))\n if ROLES.index(highest_role) < ROLES.index(minimum_role):\n request.redirect('..')\n\n\ndef outbound(response):\n if 'user' in response.request.context:\n user = response.request.context['user']\n if not isinstance(user, User):\n raise Response(400, \"If you define 'user' in a simplate it has to \"\n \"be a User instance.\")\n else:\n user = User()\n\n if user.ANON: # user is anonymous\n if 'session' not in response.request.headers.cookie:\n # no cookie in the request, don't set one on response\n return\n else:\n # expired cookie in the request, instruct browser to delete it\n response.headers.cookie['session'] = ''\n expires = 0\n else: # user is authenticated\n response.headers['Expires'] = BEGINNING_OF_EPOCH # don't cache\n response.headers.cookie['session'] = user.participant.session_token\n expires = time.time() + TIMEOUT\n user.keep_signed_in_until(expires)\n\n cookie = response.headers.cookie['session']\n # I am not setting domain, because it is supposed to default to what we\n # want: the domain of the object requested.\n #cookie['domain']\n cookie['path'] = '/'\n cookie['expires'] = rfc822.formatdate(expires)\n cookie['httponly'] = \"Yes, please.\"\n", "path": "gittip/security/authentication.py"}]}
1,712
161
gh_patches_debug_31388
rasdani/github-patches
git_diff
conan-io__conan-3100
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Issues with proxies I've started Conan server in my organization's local network. For external sites (like github) proxy must be used, but not for computers in local network. So, I have there env variables: ``` HTTP_PROXY=http://proxy.domain:3128 HTTP_PROXY=https://proxy.domain:3128 NO_PROXY=localhost,127.0.0.1,*.domain,192.168.* ``` With these variables I can create packages with recipes in my local cache (and this recipes can download external sources), but cannot download recipes and prebuilt packages from organization's Conan server. I've tried to set same settings in conan.conf, but had no luck. Also I've tried to use Conan server domain name and/or IP address in NO_PROXY with same result: ``` > conan search -r <remotename> "OpenCV*" ERROR: Permission denied for user: '<username>'. [Remote: <remotename>] ``` But if I disable all proxies (comment in conan.conf and unset env variables), it works just fine: ``` Existing package recipes: OpenCV/3.3.1@lukyanets/testing ``` Conan v. 1.0.1, OS: Arch Linux, CentOS 6, Windows 10. </issue> <code> [start of conans/client/rest/conan_requester.py] 1 import fnmatch 2 import os 3 4 from conans.util.files import save 5 6 7 class ConanRequester(object): 8 9 def __init__(self, requester, client_cache, timeout): 10 self.proxies = client_cache.conan_config.proxies or {} 11 self._no_proxy_match = [el.strip() for el in 12 self.proxies.pop("no_proxy_match", "").split(",")] 13 self._timeout_seconds = timeout 14 15 # Retrocompatibility with deprecated no_proxy 16 # Account for the requests NO_PROXY env variable, not defined as a proxy like http= 17 no_proxy = self.proxies.pop("no_proxy", None) 18 if no_proxy: 19 os.environ["NO_PROXY"] = no_proxy 20 21 self._requester = requester 22 self._client_cache = client_cache 23 24 if not os.path.exists(self._client_cache.cacert_path): 25 from conans.client.rest.cacert import cacert 26 save(self._client_cache.cacert_path, cacert) 27 28 if not os.path.exists(client_cache.client_cert_path): 29 self._client_certificates = None 30 else: 31 if os.path.exists(client_cache.client_cert_key_path): 32 # Requests can accept a tuple with cert and key, or just an string with a 33 # file having both 34 self._client_certificates = (client_cache.client_cert_path, 35 client_cache.client_cert_key_path) 36 else: 37 self._client_certificates = client_cache.client_cert_path 38 39 def _should_skip_proxy(self, url): 40 41 for entry in self._no_proxy_match: 42 if fnmatch.fnmatch(url, entry): 43 return True 44 45 return False 46 47 def _add_kwargs(self, url, kwargs): 48 if kwargs.get("verify", None) is True: 49 kwargs["verify"] = self._client_cache.cacert_path 50 else: 51 kwargs["verify"] = False 52 kwargs["cert"] = self._client_certificates 53 if self.proxies: 54 if not self._should_skip_proxy(url): 55 kwargs["proxies"] = self.proxies 56 if self._timeout_seconds: 57 kwargs["timeout"] = self._timeout_seconds 58 return kwargs 59 60 def get(self, url, **kwargs): 61 return self._requester.get(url, **self._add_kwargs(url, kwargs)) 62 63 def put(self, url, **kwargs): 64 return self._requester.put(url, **self._add_kwargs(url, kwargs)) 65 66 def delete(self, url, **kwargs): 67 return self._requester.delete(url, **self._add_kwargs(url, kwargs)) 68 69 def post(self, url, **kwargs): 70 return self._requester.post(url, **self._add_kwargs(url, kwargs)) 71 72 [end of conans/client/rest/conan_requester.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/conans/client/rest/conan_requester.py b/conans/client/rest/conan_requester.py --- a/conans/client/rest/conan_requester.py +++ b/conans/client/rest/conan_requester.py @@ -9,7 +9,7 @@ def __init__(self, requester, client_cache, timeout): self.proxies = client_cache.conan_config.proxies or {} self._no_proxy_match = [el.strip() for el in - self.proxies.pop("no_proxy_match", "").split(",")] + self.proxies.pop("no_proxy_match", "").split(",") if el] self._timeout_seconds = timeout # Retrocompatibility with deprecated no_proxy @@ -58,14 +58,28 @@ return kwargs def get(self, url, **kwargs): - return self._requester.get(url, **self._add_kwargs(url, kwargs)) + return self._call_method("get", url, **kwargs) def put(self, url, **kwargs): - return self._requester.put(url, **self._add_kwargs(url, kwargs)) + return self._call_method("put", url, **kwargs) def delete(self, url, **kwargs): - return self._requester.delete(url, **self._add_kwargs(url, kwargs)) + return self._call_method("delete", url, **kwargs) def post(self, url, **kwargs): - return self._requester.post(url, **self._add_kwargs(url, kwargs)) - + return self._call_method("post", url, **kwargs) + + def _call_method(self, method, url, **kwargs): + popped = False + if self.proxies or self._no_proxy_match: + old_env = dict(os.environ) + # Clean the proxies from the environ and use the conan specified proxies + for var_name in ("http_proxy", "https_proxy", "no_proxy"): + popped = popped or os.environ.pop(var_name, None) + popped = popped or os.environ.pop(var_name.upper(), None) + try: + return getattr(self._requester, method)(url, **self._add_kwargs(url, kwargs)) + finally: + if popped: + os.environ.clear() + os.environ.update(old_env)
{"golden_diff": "diff --git a/conans/client/rest/conan_requester.py b/conans/client/rest/conan_requester.py\n--- a/conans/client/rest/conan_requester.py\n+++ b/conans/client/rest/conan_requester.py\n@@ -9,7 +9,7 @@\n def __init__(self, requester, client_cache, timeout):\n self.proxies = client_cache.conan_config.proxies or {}\n self._no_proxy_match = [el.strip() for el in\n- self.proxies.pop(\"no_proxy_match\", \"\").split(\",\")]\n+ self.proxies.pop(\"no_proxy_match\", \"\").split(\",\") if el]\n self._timeout_seconds = timeout\n \n # Retrocompatibility with deprecated no_proxy\n@@ -58,14 +58,28 @@\n return kwargs\n \n def get(self, url, **kwargs):\n- return self._requester.get(url, **self._add_kwargs(url, kwargs))\n+ return self._call_method(\"get\", url, **kwargs)\n \n def put(self, url, **kwargs):\n- return self._requester.put(url, **self._add_kwargs(url, kwargs))\n+ return self._call_method(\"put\", url, **kwargs)\n \n def delete(self, url, **kwargs):\n- return self._requester.delete(url, **self._add_kwargs(url, kwargs))\n+ return self._call_method(\"delete\", url, **kwargs)\n \n def post(self, url, **kwargs):\n- return self._requester.post(url, **self._add_kwargs(url, kwargs))\n-\n+ return self._call_method(\"post\", url, **kwargs)\n+\n+ def _call_method(self, method, url, **kwargs):\n+ popped = False\n+ if self.proxies or self._no_proxy_match:\n+ old_env = dict(os.environ)\n+ # Clean the proxies from the environ and use the conan specified proxies\n+ for var_name in (\"http_proxy\", \"https_proxy\", \"no_proxy\"):\n+ popped = popped or os.environ.pop(var_name, None)\n+ popped = popped or os.environ.pop(var_name.upper(), None)\n+ try:\n+ return getattr(self._requester, method)(url, **self._add_kwargs(url, kwargs))\n+ finally:\n+ if popped:\n+ os.environ.clear()\n+ os.environ.update(old_env)\n", "issue": "Issues with proxies\nI've started Conan server in my organization's local network. For external sites (like github) proxy must be used, but not for computers in local network.\r\nSo, I have there env variables:\r\n```\r\nHTTP_PROXY=http://proxy.domain:3128\r\nHTTP_PROXY=https://proxy.domain:3128\r\nNO_PROXY=localhost,127.0.0.1,*.domain,192.168.*\r\n```\r\nWith these variables I can create packages with recipes in my local cache (and this recipes can download external sources), but cannot download recipes and prebuilt packages from organization's Conan server.\r\n\r\nI've tried to set same settings in conan.conf, but had no luck. Also I've tried to use Conan server domain name and/or IP address in NO_PROXY with same result:\r\n```\r\n> conan search -r <remotename> \"OpenCV*\"\r\nERROR: Permission denied for user: '<username>'. [Remote: <remotename>]\r\n```\r\n\r\nBut if I disable all proxies (comment in conan.conf and unset env variables), it works just fine:\r\n```\r\nExisting package recipes:\r\n\r\nOpenCV/3.3.1@lukyanets/testing\r\n```\r\n\r\nConan v. 1.0.1, OS: Arch Linux, CentOS 6, Windows 10.\n", "before_files": [{"content": "import fnmatch\nimport os\n\nfrom conans.util.files import save\n\n\nclass ConanRequester(object):\n\n def __init__(self, requester, client_cache, timeout):\n self.proxies = client_cache.conan_config.proxies or {}\n self._no_proxy_match = [el.strip() for el in\n self.proxies.pop(\"no_proxy_match\", \"\").split(\",\")]\n self._timeout_seconds = timeout\n\n # Retrocompatibility with deprecated no_proxy\n # Account for the requests NO_PROXY env variable, not defined as a proxy like http=\n no_proxy = self.proxies.pop(\"no_proxy\", None)\n if no_proxy:\n os.environ[\"NO_PROXY\"] = no_proxy\n\n self._requester = requester\n self._client_cache = client_cache\n\n if not os.path.exists(self._client_cache.cacert_path):\n from conans.client.rest.cacert import cacert\n save(self._client_cache.cacert_path, cacert)\n\n if not os.path.exists(client_cache.client_cert_path):\n self._client_certificates = None\n else:\n if os.path.exists(client_cache.client_cert_key_path):\n # Requests can accept a tuple with cert and key, or just an string with a\n # file having both\n self._client_certificates = (client_cache.client_cert_path,\n client_cache.client_cert_key_path)\n else:\n self._client_certificates = client_cache.client_cert_path\n\n def _should_skip_proxy(self, url):\n\n for entry in self._no_proxy_match:\n if fnmatch.fnmatch(url, entry):\n return True\n\n return False\n\n def _add_kwargs(self, url, kwargs):\n if kwargs.get(\"verify\", None) is True:\n kwargs[\"verify\"] = self._client_cache.cacert_path\n else:\n kwargs[\"verify\"] = False\n kwargs[\"cert\"] = self._client_certificates\n if self.proxies:\n if not self._should_skip_proxy(url):\n kwargs[\"proxies\"] = self.proxies\n if self._timeout_seconds:\n kwargs[\"timeout\"] = self._timeout_seconds\n return kwargs\n\n def get(self, url, **kwargs):\n return self._requester.get(url, **self._add_kwargs(url, kwargs))\n\n def put(self, url, **kwargs):\n return self._requester.put(url, **self._add_kwargs(url, kwargs))\n\n def delete(self, url, **kwargs):\n return self._requester.delete(url, **self._add_kwargs(url, kwargs))\n\n def post(self, url, **kwargs):\n return self._requester.post(url, **self._add_kwargs(url, kwargs))\n\n", "path": "conans/client/rest/conan_requester.py"}]}
1,540
508
gh_patches_debug_1142
rasdani/github-patches
git_diff
nipy__nipype-2852
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> nipype/conftest.py should be excluded from API documentation ### Summary The auto-generated API docs include `conftest.py`, which has a fixture. Pytest has turned calling a fixture directly into an error, and apparently the fixture is getting called when the docs are generated. This is what's currently breaking the Circle builds. </issue> <code> [start of tools/build_interface_docs.py] 1 #!/usr/bin/env python 2 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 # vi: set ft=python sts=4 ts=4 sw=4 et: 4 """Script to auto-generate interface docs. 5 """ 6 from __future__ import print_function, unicode_literals 7 # stdlib imports 8 import os 9 import sys 10 11 # ***************************************************************************** 12 if __name__ == '__main__': 13 nipypepath = os.path.abspath('..') 14 sys.path.insert(1, nipypepath) 15 # local imports 16 from interfacedocgen import InterfaceHelpWriter 17 package = 'nipype' 18 outdir = os.path.join('interfaces', 'generated') 19 docwriter = InterfaceHelpWriter(package) 20 # Packages that should not be included in generated API docs. 21 docwriter.package_skip_patterns += [ 22 '\.external$', 23 '\.fixes$', 24 '\.utils$', 25 '\.pipeline', 26 '\.testing', 27 '\.caching', 28 '\.scripts', 29 ] 30 # Modules that should not be included in generated API docs. 31 docwriter.module_skip_patterns += [ 32 '\.version$', 33 '\.interfaces\.base$', 34 '\.interfaces\.matlab$', 35 '\.interfaces\.rest$', 36 '\.interfaces\.pymvpa$', 37 '\.interfaces\.slicer\.generate_classes$', 38 '\.interfaces\.spm\.base$', 39 '\.interfaces\.traits', 40 '\.pipeline\.alloy$', 41 '\.pipeline\.s3_node_wrapper$', 42 '\.testing', 43 '\.scripts', 44 ] 45 docwriter.class_skip_patterns += [ 46 'AFNICommand', 47 'ANTS', 48 'FSLCommand', 49 'FS', 50 'Info', 51 '^SPM', 52 'Tester', 53 'Spec$', 54 'Numpy' 55 # NipypeTester raises an 56 # exception when instantiated in 57 # InterfaceHelpWriter.generate_api_doc 58 'NipypeTester', 59 ] 60 docwriter.write_api_docs(outdir) 61 docwriter.write_index(outdir, 'gen', relative_to='interfaces') 62 print('%d files written' % len(docwriter.written_modules)) 63 [end of tools/build_interface_docs.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/tools/build_interface_docs.py b/tools/build_interface_docs.py --- a/tools/build_interface_docs.py +++ b/tools/build_interface_docs.py @@ -41,6 +41,7 @@ '\.pipeline\.s3_node_wrapper$', '\.testing', '\.scripts', + '\.conftest', ] docwriter.class_skip_patterns += [ 'AFNICommand',
{"golden_diff": "diff --git a/tools/build_interface_docs.py b/tools/build_interface_docs.py\n--- a/tools/build_interface_docs.py\n+++ b/tools/build_interface_docs.py\n@@ -41,6 +41,7 @@\n '\\.pipeline\\.s3_node_wrapper$',\n '\\.testing',\n '\\.scripts',\n+ '\\.conftest',\n ]\n docwriter.class_skip_patterns += [\n 'AFNICommand',\n", "issue": "nipype/conftest.py should be excluded from API documentation\n### Summary\r\n\r\nThe auto-generated API docs include `conftest.py`, which has a fixture. Pytest has turned calling a fixture directly into an error, and apparently the fixture is getting called when the docs are generated.\r\n\r\nThis is what's currently breaking the Circle builds.\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-\n# vi: set ft=python sts=4 ts=4 sw=4 et:\n\"\"\"Script to auto-generate interface docs.\n\"\"\"\nfrom __future__ import print_function, unicode_literals\n# stdlib imports\nimport os\nimport sys\n\n# *****************************************************************************\nif __name__ == '__main__':\n nipypepath = os.path.abspath('..')\n sys.path.insert(1, nipypepath)\n # local imports\n from interfacedocgen import InterfaceHelpWriter\n package = 'nipype'\n outdir = os.path.join('interfaces', 'generated')\n docwriter = InterfaceHelpWriter(package)\n # Packages that should not be included in generated API docs.\n docwriter.package_skip_patterns += [\n '\\.external$',\n '\\.fixes$',\n '\\.utils$',\n '\\.pipeline',\n '\\.testing',\n '\\.caching',\n '\\.scripts',\n ]\n # Modules that should not be included in generated API docs.\n docwriter.module_skip_patterns += [\n '\\.version$',\n '\\.interfaces\\.base$',\n '\\.interfaces\\.matlab$',\n '\\.interfaces\\.rest$',\n '\\.interfaces\\.pymvpa$',\n '\\.interfaces\\.slicer\\.generate_classes$',\n '\\.interfaces\\.spm\\.base$',\n '\\.interfaces\\.traits',\n '\\.pipeline\\.alloy$',\n '\\.pipeline\\.s3_node_wrapper$',\n '\\.testing',\n '\\.scripts',\n ]\n docwriter.class_skip_patterns += [\n 'AFNICommand',\n 'ANTS',\n 'FSLCommand',\n 'FS',\n 'Info',\n '^SPM',\n 'Tester',\n 'Spec$',\n 'Numpy'\n # NipypeTester raises an\n # exception when instantiated in\n # InterfaceHelpWriter.generate_api_doc\n 'NipypeTester',\n ]\n docwriter.write_api_docs(outdir)\n docwriter.write_index(outdir, 'gen', relative_to='interfaces')\n print('%d files written' % len(docwriter.written_modules))\n", "path": "tools/build_interface_docs.py"}]}
1,192
90
gh_patches_debug_32346
rasdani/github-patches
git_diff
pwndbg__pwndbg-1800
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Fix Arch Linux tests The Arch Linux tests are failing due to new versions of pip needing to use the `--break-system-packages` flag to install packages globally. We should update the CI to use a virtualenv here. CI: try to bring back Arch Linux CI Let's see if this works now with virtualenv setup <!-- Please make sure to read the testing and linting instructions at https://github.com/pwndbg/pwndbg/blob/dev/DEVELOPING.md before creating a PR --> </issue> <code> [start of pwndbg/wrappers/readelf.py] 1 from enum import Enum 2 from typing import Dict 3 from typing import List 4 5 import pwndbg.wrappers 6 7 cmd_name = "readelf" 8 9 10 class RelocationType(Enum): 11 # For x86_64, some details about these flag can be found in 4.4.1 Relocation Types in https://www.intel.com/content/dam/develop/external/us/en/documents/mpx-linux64-abi.pdf 12 # The definitions of these flags can be found in this file: https://elixir.bootlin.com/glibc/glibc-2.37/source/elf/elf.h 13 JUMP_SLOT = 1 # e.g.: R_X86_64_JUMP_SLOT 14 GLOB_DAT = 2 # e.g.: R_X86_64_GLOB_DAT 15 IRELATIVE = 3 # e.g.: R_X86_64_IRELATIVE 16 17 18 @pwndbg.wrappers.OnlyWithCommand(cmd_name) 19 def get_got_entry(local_path: str) -> Dict[RelocationType, List[str]]: 20 # --wide is for showing the full information, e.g.: R_X86_64_JUMP_SLOT instead of R_X86_64_JUMP_SLO 21 cmd = get_got_entry.cmd + ["--relocs", "--wide", local_path] 22 readelf_out = pwndbg.wrappers.call_cmd(cmd) 23 24 entries: Dict[RelocationType, List[str]] = {category: [] for category in RelocationType} 25 for line in readelf_out.splitlines(): 26 if not line or not line[0].isdigit(): 27 continue 28 category = line.split()[2] 29 # TODO/FIXME: There's a bug here, somehow the IRELATIVE relocation might point to somewhere in .data.rel.ro, which is not in .got or .got.plt 30 for c in RelocationType: 31 if c.name in category: 32 entries[c].append(line) 33 return entries 34 [end of pwndbg/wrappers/readelf.py] [start of gdbinit.py] 1 import cProfile 2 import glob 3 import locale 4 import os 5 import sys 6 import time 7 from glob import glob 8 from os import environ 9 from os import path 10 11 _profiler = cProfile.Profile() 12 13 _start_time = None 14 if environ.get("PWNDBG_PROFILE") == "1": 15 _start_time = time.time() 16 _profiler.enable() 17 18 directory, file = path.split(__file__) 19 directory = path.expanduser(directory) 20 directory = path.abspath(directory) 21 22 # Get virtualenv's site-packages path 23 venv_path = os.environ.get("PWNDBG_VENV_PATH") 24 if not venv_path: 25 venv_path = os.path.join(directory, ".venv") 26 27 if not os.path.exists(venv_path): 28 print(f"Cannot find Pwndbg virtualenv directory: {venv_path}: please re-run setup.sh") 29 sys.exit(1) 30 31 site_pkgs_path = glob(os.path.join(venv_path, "lib/*/site-packages"))[0] 32 33 # Set virtualenv's bin path (needed for utility tools like ropper, pwntools etc) 34 bin_path = os.path.join(venv_path, "bin") 35 os.environ["PATH"] = bin_path + os.pathsep + os.environ.get("PATH") 36 37 # Add gdb-pt-dump directory to sys.path so it can be imported 38 gdbpt = path.join(directory, "gdb-pt-dump") 39 sys.path.append(directory) 40 sys.path.append(site_pkgs_path) 41 sys.path.append(gdbpt) 42 43 # warn if the user has different encoding than utf-8 44 encoding = locale.getpreferredencoding() 45 46 if encoding != "UTF-8": 47 print("******") 48 print(f"Your encoding ({encoding}) is different than UTF-8. pwndbg might not work properly.") 49 print("You might try launching GDB with:") 50 print(" LC_CTYPE=C.UTF-8 gdb") 51 print( 52 "If that does not work, make sure that en_US.UTF-8 is uncommented in /etc/locale.gen and that you called `locale-gen` command" 53 ) 54 print("******") 55 56 environ["PWNLIB_NOTERM"] = "1" 57 58 import pwndbg # noqa: F401 59 import pwndbg.profiling 60 61 pwndbg.profiling.init(_profiler, _start_time) 62 if environ.get("PWNDBG_PROFILE") == "1": 63 pwndbg.profiling.profiler.stop("pwndbg-load.pstats") 64 pwndbg.profiling.profiler.start() 65 [end of gdbinit.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/gdbinit.py b/gdbinit.py --- a/gdbinit.py +++ b/gdbinit.py @@ -2,6 +2,7 @@ import glob import locale import os +import site import sys import time from glob import glob @@ -30,6 +31,14 @@ site_pkgs_path = glob(os.path.join(venv_path, "lib/*/site-packages"))[0] +# add virtualenv's site-packages to sys.path and run .pth files +site.addsitedir(site_pkgs_path) + +# remove existing, system-level site-packages from sys.path +for site_packages in site.getsitepackages(): + if site_packages in sys.path: + sys.path.remove(site_packages) + # Set virtualenv's bin path (needed for utility tools like ropper, pwntools etc) bin_path = os.path.join(venv_path, "bin") os.environ["PATH"] = bin_path + os.pathsep + os.environ.get("PATH") @@ -37,7 +46,6 @@ # Add gdb-pt-dump directory to sys.path so it can be imported gdbpt = path.join(directory, "gdb-pt-dump") sys.path.append(directory) -sys.path.append(site_pkgs_path) sys.path.append(gdbpt) # warn if the user has different encoding than utf-8 diff --git a/pwndbg/wrappers/readelf.py b/pwndbg/wrappers/readelf.py --- a/pwndbg/wrappers/readelf.py +++ b/pwndbg/wrappers/readelf.py @@ -23,7 +23,7 @@ entries: Dict[RelocationType, List[str]] = {category: [] for category in RelocationType} for line in readelf_out.splitlines(): - if not line or not line[0].isdigit(): + if not line or not line[0].isdigit() or " " not in line: continue category = line.split()[2] # TODO/FIXME: There's a bug here, somehow the IRELATIVE relocation might point to somewhere in .data.rel.ro, which is not in .got or .got.plt
{"golden_diff": "diff --git a/gdbinit.py b/gdbinit.py\n--- a/gdbinit.py\n+++ b/gdbinit.py\n@@ -2,6 +2,7 @@\n import glob\n import locale\n import os\n+import site\n import sys\n import time\n from glob import glob\n@@ -30,6 +31,14 @@\n \n site_pkgs_path = glob(os.path.join(venv_path, \"lib/*/site-packages\"))[0]\n \n+# add virtualenv's site-packages to sys.path and run .pth files\n+site.addsitedir(site_pkgs_path)\n+\n+# remove existing, system-level site-packages from sys.path\n+for site_packages in site.getsitepackages():\n+ if site_packages in sys.path:\n+ sys.path.remove(site_packages)\n+\n # Set virtualenv's bin path (needed for utility tools like ropper, pwntools etc)\n bin_path = os.path.join(venv_path, \"bin\")\n os.environ[\"PATH\"] = bin_path + os.pathsep + os.environ.get(\"PATH\")\n@@ -37,7 +46,6 @@\n # Add gdb-pt-dump directory to sys.path so it can be imported\n gdbpt = path.join(directory, \"gdb-pt-dump\")\n sys.path.append(directory)\n-sys.path.append(site_pkgs_path)\n sys.path.append(gdbpt)\n \n # warn if the user has different encoding than utf-8\ndiff --git a/pwndbg/wrappers/readelf.py b/pwndbg/wrappers/readelf.py\n--- a/pwndbg/wrappers/readelf.py\n+++ b/pwndbg/wrappers/readelf.py\n@@ -23,7 +23,7 @@\n \n entries: Dict[RelocationType, List[str]] = {category: [] for category in RelocationType}\n for line in readelf_out.splitlines():\n- if not line or not line[0].isdigit():\n+ if not line or not line[0].isdigit() or \" \" not in line:\n continue\n category = line.split()[2]\n # TODO/FIXME: There's a bug here, somehow the IRELATIVE relocation might point to somewhere in .data.rel.ro, which is not in .got or .got.plt\n", "issue": "Fix Arch Linux tests\nThe Arch Linux tests are failing due to new versions of pip needing to use the `--break-system-packages` flag to install packages globally. We should update the CI to use a virtualenv here.\nCI: try to bring back Arch Linux CI\nLet's see if this works now with virtualenv setup\r\n\r\n<!-- Please make sure to read the testing and linting instructions at https://github.com/pwndbg/pwndbg/blob/dev/DEVELOPING.md before creating a PR -->\r\n\n", "before_files": [{"content": "from enum import Enum\nfrom typing import Dict\nfrom typing import List\n\nimport pwndbg.wrappers\n\ncmd_name = \"readelf\"\n\n\nclass RelocationType(Enum):\n # For x86_64, some details about these flag can be found in 4.4.1 Relocation Types in https://www.intel.com/content/dam/develop/external/us/en/documents/mpx-linux64-abi.pdf\n # The definitions of these flags can be found in this file: https://elixir.bootlin.com/glibc/glibc-2.37/source/elf/elf.h\n JUMP_SLOT = 1 # e.g.: R_X86_64_JUMP_SLOT\n GLOB_DAT = 2 # e.g.: R_X86_64_GLOB_DAT\n IRELATIVE = 3 # e.g.: R_X86_64_IRELATIVE\n\n\[email protected](cmd_name)\ndef get_got_entry(local_path: str) -> Dict[RelocationType, List[str]]:\n # --wide is for showing the full information, e.g.: R_X86_64_JUMP_SLOT instead of R_X86_64_JUMP_SLO\n cmd = get_got_entry.cmd + [\"--relocs\", \"--wide\", local_path]\n readelf_out = pwndbg.wrappers.call_cmd(cmd)\n\n entries: Dict[RelocationType, List[str]] = {category: [] for category in RelocationType}\n for line in readelf_out.splitlines():\n if not line or not line[0].isdigit():\n continue\n category = line.split()[2]\n # TODO/FIXME: There's a bug here, somehow the IRELATIVE relocation might point to somewhere in .data.rel.ro, which is not in .got or .got.plt\n for c in RelocationType:\n if c.name in category:\n entries[c].append(line)\n return entries\n", "path": "pwndbg/wrappers/readelf.py"}, {"content": "import cProfile\nimport glob\nimport locale\nimport os\nimport sys\nimport time\nfrom glob import glob\nfrom os import environ\nfrom os import path\n\n_profiler = cProfile.Profile()\n\n_start_time = None\nif environ.get(\"PWNDBG_PROFILE\") == \"1\":\n _start_time = time.time()\n _profiler.enable()\n\ndirectory, file = path.split(__file__)\ndirectory = path.expanduser(directory)\ndirectory = path.abspath(directory)\n\n# Get virtualenv's site-packages path\nvenv_path = os.environ.get(\"PWNDBG_VENV_PATH\")\nif not venv_path:\n venv_path = os.path.join(directory, \".venv\")\n\nif not os.path.exists(venv_path):\n print(f\"Cannot find Pwndbg virtualenv directory: {venv_path}: please re-run setup.sh\")\n sys.exit(1)\n\nsite_pkgs_path = glob(os.path.join(venv_path, \"lib/*/site-packages\"))[0]\n\n# Set virtualenv's bin path (needed for utility tools like ropper, pwntools etc)\nbin_path = os.path.join(venv_path, \"bin\")\nos.environ[\"PATH\"] = bin_path + os.pathsep + os.environ.get(\"PATH\")\n\n# Add gdb-pt-dump directory to sys.path so it can be imported\ngdbpt = path.join(directory, \"gdb-pt-dump\")\nsys.path.append(directory)\nsys.path.append(site_pkgs_path)\nsys.path.append(gdbpt)\n\n# warn if the user has different encoding than utf-8\nencoding = locale.getpreferredencoding()\n\nif encoding != \"UTF-8\":\n print(\"******\")\n print(f\"Your encoding ({encoding}) is different than UTF-8. pwndbg might not work properly.\")\n print(\"You might try launching GDB with:\")\n print(\" LC_CTYPE=C.UTF-8 gdb\")\n print(\n \"If that does not work, make sure that en_US.UTF-8 is uncommented in /etc/locale.gen and that you called `locale-gen` command\"\n )\n print(\"******\")\n\nenviron[\"PWNLIB_NOTERM\"] = \"1\"\n\nimport pwndbg # noqa: F401\nimport pwndbg.profiling\n\npwndbg.profiling.init(_profiler, _start_time)\nif environ.get(\"PWNDBG_PROFILE\") == \"1\":\n pwndbg.profiling.profiler.stop(\"pwndbg-load.pstats\")\n pwndbg.profiling.profiler.start()\n", "path": "gdbinit.py"}]}
1,802
480
gh_patches_debug_13533
rasdani/github-patches
git_diff
pre-commit__pre-commit-306
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Improve error message when attempting to run non-existent hook Hook id in `/.pre-commit-config.yaml` doesn't exist in the included repository (From https://github.com/pre-commit/pre-commit-hooks/issues/37) It should probably suggest updating hooks or checking the spelling of the hookid </issue> <code> [start of pre_commit/repository.py] 1 from __future__ import unicode_literals 2 3 import logging 4 import shutil 5 from collections import defaultdict 6 7 from cached_property import cached_property 8 9 from pre_commit import git 10 from pre_commit.clientlib.validate_config import is_local_hooks 11 from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA 12 from pre_commit.jsonschema_extensions import apply_defaults 13 from pre_commit.languages.all import languages 14 from pre_commit.languages.helpers import environment_dir 15 from pre_commit.manifest import Manifest 16 from pre_commit.prefixed_command_runner import PrefixedCommandRunner 17 18 19 logger = logging.getLogger('pre_commit') 20 21 22 class Repository(object): 23 def __init__(self, repo_config, repo_path_getter): 24 self.repo_config = repo_config 25 self.repo_path_getter = repo_path_getter 26 self.__installed = False 27 28 @classmethod 29 def create(cls, config, store): 30 if is_local_hooks(config): 31 return LocalRepository(config) 32 else: 33 repo_path_getter = store.get_repo_path_getter( 34 config['repo'], config['sha'] 35 ) 36 return cls(config, repo_path_getter) 37 38 @cached_property 39 def repo_url(self): 40 return self.repo_config['repo'] 41 42 @cached_property 43 def sha(self): 44 return self.repo_config['sha'] 45 46 @cached_property 47 def languages(self): 48 return set( 49 (hook['language'], hook['language_version']) 50 for _, hook in self.hooks 51 ) 52 53 @cached_property 54 def additional_dependencies(self): 55 dep_dict = defaultdict(lambda: defaultdict(set)) 56 for _, hook in self.hooks: 57 dep_dict[hook['language']][hook['language_version']].update( 58 hook.get('additional_dependencies', []), 59 ) 60 return dep_dict 61 62 @cached_property 63 def hooks(self): 64 # TODO: merging in manifest dicts is a smell imo 65 return tuple( 66 (hook['id'], dict(self.manifest.hooks[hook['id']], **hook)) 67 for hook in self.repo_config['hooks'] 68 ) 69 70 @cached_property 71 def manifest(self): 72 return Manifest(self.repo_path_getter) 73 74 @cached_property 75 def cmd_runner(self): 76 return PrefixedCommandRunner(self.repo_path_getter.repo_path) 77 78 def require_installed(self): 79 if self.__installed: 80 return 81 82 self.install() 83 self.__installed = True 84 85 def install(self): 86 """Install the hook repository.""" 87 def language_is_installed(language_name, language_version): 88 language = languages[language_name] 89 directory = environment_dir( 90 language.ENVIRONMENT_DIR, language_version, 91 ) 92 return ( 93 directory is None or 94 self.cmd_runner.exists(directory, '.installed') 95 ) 96 97 if not all( 98 language_is_installed(language_name, language_version) 99 for language_name, language_version in self.languages 100 ): 101 logger.info( 102 'Installing environment for {0}.'.format(self.repo_url) 103 ) 104 logger.info('Once installed this environment will be reused.') 105 logger.info('This may take a few minutes...') 106 107 for language_name, language_version in self.languages: 108 language = languages[language_name] 109 if language_is_installed(language_name, language_version): 110 continue 111 112 directory = environment_dir( 113 language.ENVIRONMENT_DIR, language_version, 114 ) 115 # There's potentially incomplete cleanup from previous runs 116 # Clean it up! 117 if self.cmd_runner.exists(directory): 118 shutil.rmtree(self.cmd_runner.path(directory)) 119 120 language.install_environment( 121 self.cmd_runner, language_version, 122 self.additional_dependencies[language_name][language_version], 123 ) 124 # Touch the .installed file (atomic) to indicate we've installed 125 open(self.cmd_runner.path(directory, '.installed'), 'w').close() 126 127 def run_hook(self, hook, file_args): 128 """Run a hook. 129 130 Args: 131 hook - Hook dictionary 132 file_args - List of files to run 133 """ 134 self.require_installed() 135 return languages[hook['language']].run_hook( 136 self.cmd_runner, hook, file_args, 137 ) 138 139 140 class LocalRepository(Repository): 141 def __init__(self, repo_config): 142 super(LocalRepository, self).__init__(repo_config, None) 143 144 @cached_property 145 def hooks(self): 146 return tuple( 147 (hook['id'], apply_defaults(hook, MANIFEST_JSON_SCHEMA['items'])) 148 for hook in self.repo_config['hooks'] 149 ) 150 151 @cached_property 152 def cmd_runner(self): 153 return PrefixedCommandRunner(git.get_root()) 154 155 @cached_property 156 def sha(self): 157 raise NotImplementedError 158 159 @cached_property 160 def manifest(self): 161 raise NotImplementedError 162 [end of pre_commit/repository.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pre_commit/repository.py b/pre_commit/repository.py --- a/pre_commit/repository.py +++ b/pre_commit/repository.py @@ -61,7 +61,16 @@ @cached_property def hooks(self): - # TODO: merging in manifest dicts is a smell imo + for hook in self.repo_config['hooks']: + if hook['id'] not in self.manifest.hooks: + logger.error( + '`{0}` is not present in repository {1}. ' + 'Typo? Perhaps it is introduced in a newer version? ' + 'Often `pre-commit autoupdate` fixes this.'.format( + hook['id'], self.repo_config['repo'], + ) + ) + exit(1) return tuple( (hook['id'], dict(self.manifest.hooks[hook['id']], **hook)) for hook in self.repo_config['hooks']
{"golden_diff": "diff --git a/pre_commit/repository.py b/pre_commit/repository.py\n--- a/pre_commit/repository.py\n+++ b/pre_commit/repository.py\n@@ -61,7 +61,16 @@\n \n @cached_property\n def hooks(self):\n- # TODO: merging in manifest dicts is a smell imo\n+ for hook in self.repo_config['hooks']:\n+ if hook['id'] not in self.manifest.hooks:\n+ logger.error(\n+ '`{0}` is not present in repository {1}. '\n+ 'Typo? Perhaps it is introduced in a newer version? '\n+ 'Often `pre-commit autoupdate` fixes this.'.format(\n+ hook['id'], self.repo_config['repo'],\n+ )\n+ )\n+ exit(1)\n return tuple(\n (hook['id'], dict(self.manifest.hooks[hook['id']], **hook))\n for hook in self.repo_config['hooks']\n", "issue": "Improve error message when attempting to run non-existent hook\nHook id in `/.pre-commit-config.yaml` doesn't exist in the included repository\n\n(From https://github.com/pre-commit/pre-commit-hooks/issues/37)\n\nIt should probably suggest updating hooks or checking the spelling of the hookid\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nimport logging\nimport shutil\nfrom collections import defaultdict\n\nfrom cached_property import cached_property\n\nfrom pre_commit import git\nfrom pre_commit.clientlib.validate_config import is_local_hooks\nfrom pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA\nfrom pre_commit.jsonschema_extensions import apply_defaults\nfrom pre_commit.languages.all import languages\nfrom pre_commit.languages.helpers import environment_dir\nfrom pre_commit.manifest import Manifest\nfrom pre_commit.prefixed_command_runner import PrefixedCommandRunner\n\n\nlogger = logging.getLogger('pre_commit')\n\n\nclass Repository(object):\n def __init__(self, repo_config, repo_path_getter):\n self.repo_config = repo_config\n self.repo_path_getter = repo_path_getter\n self.__installed = False\n\n @classmethod\n def create(cls, config, store):\n if is_local_hooks(config):\n return LocalRepository(config)\n else:\n repo_path_getter = store.get_repo_path_getter(\n config['repo'], config['sha']\n )\n return cls(config, repo_path_getter)\n\n @cached_property\n def repo_url(self):\n return self.repo_config['repo']\n\n @cached_property\n def sha(self):\n return self.repo_config['sha']\n\n @cached_property\n def languages(self):\n return set(\n (hook['language'], hook['language_version'])\n for _, hook in self.hooks\n )\n\n @cached_property\n def additional_dependencies(self):\n dep_dict = defaultdict(lambda: defaultdict(set))\n for _, hook in self.hooks:\n dep_dict[hook['language']][hook['language_version']].update(\n hook.get('additional_dependencies', []),\n )\n return dep_dict\n\n @cached_property\n def hooks(self):\n # TODO: merging in manifest dicts is a smell imo\n return tuple(\n (hook['id'], dict(self.manifest.hooks[hook['id']], **hook))\n for hook in self.repo_config['hooks']\n )\n\n @cached_property\n def manifest(self):\n return Manifest(self.repo_path_getter)\n\n @cached_property\n def cmd_runner(self):\n return PrefixedCommandRunner(self.repo_path_getter.repo_path)\n\n def require_installed(self):\n if self.__installed:\n return\n\n self.install()\n self.__installed = True\n\n def install(self):\n \"\"\"Install the hook repository.\"\"\"\n def language_is_installed(language_name, language_version):\n language = languages[language_name]\n directory = environment_dir(\n language.ENVIRONMENT_DIR, language_version,\n )\n return (\n directory is None or\n self.cmd_runner.exists(directory, '.installed')\n )\n\n if not all(\n language_is_installed(language_name, language_version)\n for language_name, language_version in self.languages\n ):\n logger.info(\n 'Installing environment for {0}.'.format(self.repo_url)\n )\n logger.info('Once installed this environment will be reused.')\n logger.info('This may take a few minutes...')\n\n for language_name, language_version in self.languages:\n language = languages[language_name]\n if language_is_installed(language_name, language_version):\n continue\n\n directory = environment_dir(\n language.ENVIRONMENT_DIR, language_version,\n )\n # There's potentially incomplete cleanup from previous runs\n # Clean it up!\n if self.cmd_runner.exists(directory):\n shutil.rmtree(self.cmd_runner.path(directory))\n\n language.install_environment(\n self.cmd_runner, language_version,\n self.additional_dependencies[language_name][language_version],\n )\n # Touch the .installed file (atomic) to indicate we've installed\n open(self.cmd_runner.path(directory, '.installed'), 'w').close()\n\n def run_hook(self, hook, file_args):\n \"\"\"Run a hook.\n\n Args:\n hook - Hook dictionary\n file_args - List of files to run\n \"\"\"\n self.require_installed()\n return languages[hook['language']].run_hook(\n self.cmd_runner, hook, file_args,\n )\n\n\nclass LocalRepository(Repository):\n def __init__(self, repo_config):\n super(LocalRepository, self).__init__(repo_config, None)\n\n @cached_property\n def hooks(self):\n return tuple(\n (hook['id'], apply_defaults(hook, MANIFEST_JSON_SCHEMA['items']))\n for hook in self.repo_config['hooks']\n )\n\n @cached_property\n def cmd_runner(self):\n return PrefixedCommandRunner(git.get_root())\n\n @cached_property\n def sha(self):\n raise NotImplementedError\n\n @cached_property\n def manifest(self):\n raise NotImplementedError\n", "path": "pre_commit/repository.py"}]}
1,963
206
gh_patches_debug_36673
rasdani/github-patches
git_diff
translate__pootle-3883
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> list_language no longer has an idea of a changeid We dropped change_id concept with 2.7.0 and now we have the idea of `revision`. Previously I could get a list of active languages since a `change_id` using `list_languages`. Now however I cannot. I used `change_id` in the past to allow me to decide which languages to sync. I also use it to record in a log which languages have been active since the last changeid. So this is to either 1. Bring back `--revision` to `list_languages` so we can see languages active since some revision 2. Explain what other process could be used to gather this info </issue> <code> [start of pootle/apps/pootle_app/management/commands/list_projects.py] 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 # 4 # Copyright (C) Pootle contributors. 5 # 6 # This file is a part of the Pootle project. It is distributed under the GPL3 7 # or later license. See the LICENSE file for a copy of the license and the 8 # AUTHORS file for copyright and authorship information. 9 10 import os 11 os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings' 12 13 from django.core.management.base import NoArgsCommand 14 15 from pootle_project.models import Project 16 17 18 class Command(NoArgsCommand): 19 20 def handle_noargs(self, **options): 21 self.list_projects(**options) 22 23 def list_projects(self, **options): 24 """List all projects on the server.""" 25 26 for project in Project.objects.all(): 27 self.stdout.write(project.code) 28 [end of pootle/apps/pootle_app/management/commands/list_projects.py] [start of pootle/apps/pootle_app/management/commands/list_languages.py] 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 # 4 # Copyright (C) Pootle contributors. 5 # 6 # This file is a part of the Pootle project. It is distributed under the GPL3 7 # or later license. See the LICENSE file for a copy of the license and the 8 # AUTHORS file for copyright and authorship information. 9 10 import os 11 os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings' 12 13 from optparse import make_option 14 15 from django.core.management.base import NoArgsCommand 16 17 18 class Command(NoArgsCommand): 19 option_list = NoArgsCommand.option_list + ( 20 make_option('--project', action='append', dest='projects', 21 help='Limit to PROJECTS'), 22 ) 23 help = "List language codes." 24 25 def handle_noargs(self, **options): 26 self.list_languages(**options) 27 28 def list_languages(self, **options): 29 """List all languages on the server or the given projects.""" 30 projects = options.get('projects', []) 31 32 from pootle_translationproject.models import TranslationProject 33 tps = TranslationProject.objects.distinct() 34 tps = tps.exclude(language__code='templates').order_by('language__code') 35 36 if projects: 37 tps = tps.filter(project__code__in=projects) 38 39 for lang in tps.values_list('language__code', flat=True): 40 self.stdout.write(lang) 41 [end of pootle/apps/pootle_app/management/commands/list_languages.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pootle/apps/pootle_app/management/commands/list_languages.py b/pootle/apps/pootle_app/management/commands/list_languages.py --- a/pootle/apps/pootle_app/management/commands/list_languages.py +++ b/pootle/apps/pootle_app/management/commands/list_languages.py @@ -19,6 +19,9 @@ option_list = NoArgsCommand.option_list + ( make_option('--project', action='append', dest='projects', help='Limit to PROJECTS'), + make_option("--modified-since", action="store", dest="modified_since", + type=int, + help="Only process translations newer than specified revision"), ) help = "List language codes." @@ -33,6 +36,10 @@ tps = TranslationProject.objects.distinct() tps = tps.exclude(language__code='templates').order_by('language__code') + revision = options.get("modified_since", 0) + if revision: + tps = tps.filter(submission__id__gt=revision) + if projects: tps = tps.filter(project__code__in=projects) diff --git a/pootle/apps/pootle_app/management/commands/list_projects.py b/pootle/apps/pootle_app/management/commands/list_projects.py --- a/pootle/apps/pootle_app/management/commands/list_projects.py +++ b/pootle/apps/pootle_app/management/commands/list_projects.py @@ -10,12 +10,20 @@ import os os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings' +from optparse import make_option + from django.core.management.base import NoArgsCommand from pootle_project.models import Project class Command(NoArgsCommand): + option_list = NoArgsCommand.option_list + ( + make_option("--modified-since", + action="store", dest="modified_since", type=int, + help="Only process translations newer than specified revision" + ), + ) def handle_noargs(self, **options): self.list_projects(**options) @@ -23,5 +31,15 @@ def list_projects(self, **options): """List all projects on the server.""" - for project in Project.objects.all(): - self.stdout.write(project.code) + revision = options.get("modified_since", 0) + if revision: + from pootle_translationproject.models import TranslationProject + tps = TranslationProject.objects.filter(submission__id__gt=revision) \ + .distinct().values("project__code") + + for tp in tps: + self.stdout.write(tp["project__code"]) + + else: + for project in Project.objects.all(): + self.stdout.write(project.code)
{"golden_diff": "diff --git a/pootle/apps/pootle_app/management/commands/list_languages.py b/pootle/apps/pootle_app/management/commands/list_languages.py\n--- a/pootle/apps/pootle_app/management/commands/list_languages.py\n+++ b/pootle/apps/pootle_app/management/commands/list_languages.py\n@@ -19,6 +19,9 @@\n option_list = NoArgsCommand.option_list + (\n make_option('--project', action='append', dest='projects',\n help='Limit to PROJECTS'),\n+ make_option(\"--modified-since\", action=\"store\", dest=\"modified_since\",\n+ type=int,\n+ help=\"Only process translations newer than specified revision\"),\n )\n help = \"List language codes.\"\n \n@@ -33,6 +36,10 @@\n tps = TranslationProject.objects.distinct()\n tps = tps.exclude(language__code='templates').order_by('language__code')\n \n+ revision = options.get(\"modified_since\", 0)\n+ if revision:\n+ tps = tps.filter(submission__id__gt=revision)\n+\n if projects:\n tps = tps.filter(project__code__in=projects)\n \ndiff --git a/pootle/apps/pootle_app/management/commands/list_projects.py b/pootle/apps/pootle_app/management/commands/list_projects.py\n--- a/pootle/apps/pootle_app/management/commands/list_projects.py\n+++ b/pootle/apps/pootle_app/management/commands/list_projects.py\n@@ -10,12 +10,20 @@\n import os\n os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'\n \n+from optparse import make_option\n+\n from django.core.management.base import NoArgsCommand\n \n from pootle_project.models import Project\n \n \n class Command(NoArgsCommand):\n+ option_list = NoArgsCommand.option_list + (\n+ make_option(\"--modified-since\",\n+ action=\"store\", dest=\"modified_since\", type=int,\n+ help=\"Only process translations newer than specified revision\"\n+ ),\n+ )\n \n def handle_noargs(self, **options):\n self.list_projects(**options)\n@@ -23,5 +31,15 @@\n def list_projects(self, **options):\n \"\"\"List all projects on the server.\"\"\"\n \n- for project in Project.objects.all():\n- self.stdout.write(project.code)\n+ revision = options.get(\"modified_since\", 0)\n+ if revision:\n+ from pootle_translationproject.models import TranslationProject\n+ tps = TranslationProject.objects.filter(submission__id__gt=revision) \\\n+ .distinct().values(\"project__code\")\n+\n+ for tp in tps:\n+ self.stdout.write(tp[\"project__code\"])\n+\n+ else:\n+ for project in Project.objects.all():\n+ self.stdout.write(project.code)\n", "issue": "list_language no longer has an idea of a changeid\nWe dropped change_id concept with 2.7.0 and now we have the idea of `revision`. Previously I could get a list of active languages since a `change_id` using `list_languages`. Now however I cannot. I used `change_id` in the past to allow me to decide which languages to sync. I also use it to record in a log which languages have been active since the last changeid.\n\nSo this is to either\n1. Bring back `--revision` to `list_languages` so we can see languages active since some revision\n2. Explain what other process could be used to gather this info\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport os\nos.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'\n\nfrom django.core.management.base import NoArgsCommand\n\nfrom pootle_project.models import Project\n\n\nclass Command(NoArgsCommand):\n\n def handle_noargs(self, **options):\n self.list_projects(**options)\n\n def list_projects(self, **options):\n \"\"\"List all projects on the server.\"\"\"\n\n for project in Project.objects.all():\n self.stdout.write(project.code)\n", "path": "pootle/apps/pootle_app/management/commands/list_projects.py"}, {"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport os\nos.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'\n\nfrom optparse import make_option\n\nfrom django.core.management.base import NoArgsCommand\n\n\nclass Command(NoArgsCommand):\n option_list = NoArgsCommand.option_list + (\n make_option('--project', action='append', dest='projects',\n help='Limit to PROJECTS'),\n )\n help = \"List language codes.\"\n\n def handle_noargs(self, **options):\n self.list_languages(**options)\n\n def list_languages(self, **options):\n \"\"\"List all languages on the server or the given projects.\"\"\"\n projects = options.get('projects', [])\n\n from pootle_translationproject.models import TranslationProject\n tps = TranslationProject.objects.distinct()\n tps = tps.exclude(language__code='templates').order_by('language__code')\n\n if projects:\n tps = tps.filter(project__code__in=projects)\n\n for lang in tps.values_list('language__code', flat=True):\n self.stdout.write(lang)\n", "path": "pootle/apps/pootle_app/management/commands/list_languages.py"}]}
1,338
628
gh_patches_debug_42065
rasdani/github-patches
git_diff
liqd__a4-opin-690
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Remove clash between adhocracy4 library names - prevents override of global var if same library name is used from different bundles fixes #687 </issue> <code> [start of euth/maps/fields.py] 1 from django.utils.translation import ugettext_lazy as _ 2 from jsonfield.fields import JSONField, JSONFormField 3 4 from .validators import GeoJSONFormFieldValidator 5 6 7 class GeoJSONFormField(JSONFormField): 8 9 def __init__(self, *args, **kwargs): 10 geom_type = kwargs.pop('geom_type') 11 required = kwargs.pop('required') 12 kwargs.setdefault( 13 'validators', [GeoJSONFormFieldValidator(geom_type, required)]) 14 super().__init__(*args, **kwargs) 15 16 17 class GeoJSONField(JSONField): 18 description = _("Geometry as GeoJSON") 19 form_class = GeoJSONFormField 20 dim = 2 21 geom_type = 'GEOMETRY' 22 23 ''' 24 def __init__(self, *args, **kwargs): 25 required = kwargs.pop('required', True) 26 kwargs.setdefault( 27 'validators', [GeoJSONFormFieldValidator( 28 self.geom_type, required)]) 29 super().__init__(*args, **kwargs) 30 ''' 31 32 def formfield(self, **kwargs): 33 kwargs.setdefault('geom_type', self.geom_type) 34 return super(GeoJSONField, self).formfield(**kwargs) 35 36 37 class GeometryField(GeoJSONField): 38 pass 39 40 41 class PointField(GeometryField): 42 geom_type = 'POINT' 43 44 45 class MultiPolygonField(GeoJSONField): 46 geom_type = 'MULTIPOLYGON' 47 [end of euth/maps/fields.py] [start of euth/maps/forms.py] 1 from django import forms 2 3 from . import models 4 from .widgets import MapChoosePointWidget 5 6 7 class MapIdeaForm(forms.ModelForm): 8 9 def __init__(self, *args, **kwargs): 10 self.settings = kwargs.pop('settings_instance') 11 super().__init__(*args, **kwargs) 12 self.fields['point'].widget = MapChoosePointWidget( 13 polygon=self.settings.polygon) 14 15 class Meta: 16 model = models.MapIdea 17 fields = ['name', 'description', 'image', 'point'] 18 [end of euth/maps/forms.py] [start of euth/maps/validators.py] 1 import json 2 3 from django.core.exceptions import ValidationError 4 from django.utils.translation import ugettext_lazy as _ 5 6 7 class GeoJSONFormFieldValidator(object): 8 9 def __init__(self, geom_type, required): 10 self.geom_type = geom_type 11 self.required = required 12 13 def __call__(self, value): 14 if isinstance(value, str): 15 value = json.loads(value) 16 err_msg = None 17 json_type = value.get('type') 18 if json_type == 'Feature' and self.geom_type == 'GEOMETRY': 19 geom_type = value.get('geometry').get('type') 20 is_geometry = geom_type in ( 21 "Point", "MultiPoint", "LineString", "MultiLineString", 22 "Polygon", "MultiPolygon", "GeometryCollection" 23 ) 24 if not is_geometry: 25 err_msg = '%s is not a valid GeoJSON geometry type' % geom_type 26 elif json_type == 'FeatureCollection': 27 if len(value.get('features')) == 0 and self.required: 28 err_msg = _('Field can not be empty') 29 30 if err_msg: 31 raise ValidationError(err_msg) 32 [end of euth/maps/validators.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/euth/maps/fields.py b/euth/maps/fields.py --- a/euth/maps/fields.py +++ b/euth/maps/fields.py @@ -1,17 +1,22 @@ +from django.core import validators as django_validators from django.utils.translation import ugettext_lazy as _ from jsonfield.fields import JSONField, JSONFormField -from .validators import GeoJSONFormFieldValidator - class GeoJSONFormField(JSONFormField): def __init__(self, *args, **kwargs): - geom_type = kwargs.pop('geom_type') - required = kwargs.pop('required') - kwargs.setdefault( - 'validators', [GeoJSONFormFieldValidator(geom_type, required)]) + required_message = kwargs.pop('required_message') super().__init__(*args, **kwargs) + self.default_error_messages['required'] = required_message + + def to_python(self, value): + empty_featureset = '{"type":"FeatureCollection","features":[]}' + if (value not in django_validators.EMPTY_VALUES and not + value == empty_featureset): + return super().to_python(value) + else: + return None class GeoJSONField(JSONField): @@ -20,17 +25,8 @@ dim = 2 geom_type = 'GEOMETRY' - ''' - def __init__(self, *args, **kwargs): - required = kwargs.pop('required', True) - kwargs.setdefault( - 'validators', [GeoJSONFormFieldValidator( - self.geom_type, required)]) - super().__init__(*args, **kwargs) - ''' - def formfield(self, **kwargs): - kwargs.setdefault('geom_type', self.geom_type) + kwargs.setdefault('required_message', self.required_message) return super(GeoJSONField, self).formfield(**kwargs) @@ -40,7 +36,9 @@ class PointField(GeometryField): geom_type = 'POINT' + required_message = _('Please add a Marker on the map') class MultiPolygonField(GeoJSONField): geom_type = 'MULTIPOLYGON' + required_message = _('Please draw a Polygon on the map') diff --git a/euth/maps/forms.py b/euth/maps/forms.py --- a/euth/maps/forms.py +++ b/euth/maps/forms.py @@ -1,4 +1,5 @@ from django import forms +from django.utils.translation import ugettext as _ from . import models from .widgets import MapChoosePointWidget @@ -11,6 +12,8 @@ super().__init__(*args, **kwargs) self.fields['point'].widget = MapChoosePointWidget( polygon=self.settings.polygon) + self.fields['point'].error_messages['required'] = _( + 'Please locate your proposal on the map.') class Meta: model = models.MapIdea diff --git a/euth/maps/validators.py b/euth/maps/validators.py deleted file mode 100644 --- a/euth/maps/validators.py +++ /dev/null @@ -1,31 +0,0 @@ -import json - -from django.core.exceptions import ValidationError -from django.utils.translation import ugettext_lazy as _ - - -class GeoJSONFormFieldValidator(object): - - def __init__(self, geom_type, required): - self.geom_type = geom_type - self.required = required - - def __call__(self, value): - if isinstance(value, str): - value = json.loads(value) - err_msg = None - json_type = value.get('type') - if json_type == 'Feature' and self.geom_type == 'GEOMETRY': - geom_type = value.get('geometry').get('type') - is_geometry = geom_type in ( - "Point", "MultiPoint", "LineString", "MultiLineString", - "Polygon", "MultiPolygon", "GeometryCollection" - ) - if not is_geometry: - err_msg = '%s is not a valid GeoJSON geometry type' % geom_type - elif json_type == 'FeatureCollection': - if len(value.get('features')) == 0 and self.required: - err_msg = _('Field can not be empty') - - if err_msg: - raise ValidationError(err_msg)
{"golden_diff": "diff --git a/euth/maps/fields.py b/euth/maps/fields.py\n--- a/euth/maps/fields.py\n+++ b/euth/maps/fields.py\n@@ -1,17 +1,22 @@\n+from django.core import validators as django_validators\n from django.utils.translation import ugettext_lazy as _\n from jsonfield.fields import JSONField, JSONFormField\n \n-from .validators import GeoJSONFormFieldValidator\n-\n \n class GeoJSONFormField(JSONFormField):\n \n def __init__(self, *args, **kwargs):\n- geom_type = kwargs.pop('geom_type')\n- required = kwargs.pop('required')\n- kwargs.setdefault(\n- 'validators', [GeoJSONFormFieldValidator(geom_type, required)])\n+ required_message = kwargs.pop('required_message')\n super().__init__(*args, **kwargs)\n+ self.default_error_messages['required'] = required_message\n+\n+ def to_python(self, value):\n+ empty_featureset = '{\"type\":\"FeatureCollection\",\"features\":[]}'\n+ if (value not in django_validators.EMPTY_VALUES and not\n+ value == empty_featureset):\n+ return super().to_python(value)\n+ else:\n+ return None\n \n \n class GeoJSONField(JSONField):\n@@ -20,17 +25,8 @@\n dim = 2\n geom_type = 'GEOMETRY'\n \n- '''\n- def __init__(self, *args, **kwargs):\n- required = kwargs.pop('required', True)\n- kwargs.setdefault(\n- 'validators', [GeoJSONFormFieldValidator(\n- self.geom_type, required)])\n- super().__init__(*args, **kwargs)\n- '''\n-\n def formfield(self, **kwargs):\n- kwargs.setdefault('geom_type', self.geom_type)\n+ kwargs.setdefault('required_message', self.required_message)\n return super(GeoJSONField, self).formfield(**kwargs)\n \n \n@@ -40,7 +36,9 @@\n \n class PointField(GeometryField):\n geom_type = 'POINT'\n+ required_message = _('Please add a Marker on the map')\n \n \n class MultiPolygonField(GeoJSONField):\n geom_type = 'MULTIPOLYGON'\n+ required_message = _('Please draw a Polygon on the map')\ndiff --git a/euth/maps/forms.py b/euth/maps/forms.py\n--- a/euth/maps/forms.py\n+++ b/euth/maps/forms.py\n@@ -1,4 +1,5 @@\n from django import forms\n+from django.utils.translation import ugettext as _\n \n from . import models\n from .widgets import MapChoosePointWidget\n@@ -11,6 +12,8 @@\n super().__init__(*args, **kwargs)\n self.fields['point'].widget = MapChoosePointWidget(\n polygon=self.settings.polygon)\n+ self.fields['point'].error_messages['required'] = _(\n+ 'Please locate your proposal on the map.')\n \n class Meta:\n model = models.MapIdea\ndiff --git a/euth/maps/validators.py b/euth/maps/validators.py\ndeleted file mode 100644\n--- a/euth/maps/validators.py\n+++ /dev/null\n@@ -1,31 +0,0 @@\n-import json\n-\n-from django.core.exceptions import ValidationError\n-from django.utils.translation import ugettext_lazy as _\n-\n-\n-class GeoJSONFormFieldValidator(object):\n-\n- def __init__(self, geom_type, required):\n- self.geom_type = geom_type\n- self.required = required\n-\n- def __call__(self, value):\n- if isinstance(value, str):\n- value = json.loads(value)\n- err_msg = None\n- json_type = value.get('type')\n- if json_type == 'Feature' and self.geom_type == 'GEOMETRY':\n- geom_type = value.get('geometry').get('type')\n- is_geometry = geom_type in (\n- \"Point\", \"MultiPoint\", \"LineString\", \"MultiLineString\",\n- \"Polygon\", \"MultiPolygon\", \"GeometryCollection\"\n- )\n- if not is_geometry:\n- err_msg = '%s is not a valid GeoJSON geometry type' % geom_type\n- elif json_type == 'FeatureCollection':\n- if len(value.get('features')) == 0 and self.required:\n- err_msg = _('Field can not be empty')\n-\n- if err_msg:\n- raise ValidationError(err_msg)\n", "issue": "Remove clash between adhocracy4 library names\n - prevents override of global var if same library name is used from\r\n different bundles\r\n\r\nfixes #687 \n", "before_files": [{"content": "from django.utils.translation import ugettext_lazy as _\nfrom jsonfield.fields import JSONField, JSONFormField\n\nfrom .validators import GeoJSONFormFieldValidator\n\n\nclass GeoJSONFormField(JSONFormField):\n\n def __init__(self, *args, **kwargs):\n geom_type = kwargs.pop('geom_type')\n required = kwargs.pop('required')\n kwargs.setdefault(\n 'validators', [GeoJSONFormFieldValidator(geom_type, required)])\n super().__init__(*args, **kwargs)\n\n\nclass GeoJSONField(JSONField):\n description = _(\"Geometry as GeoJSON\")\n form_class = GeoJSONFormField\n dim = 2\n geom_type = 'GEOMETRY'\n\n '''\n def __init__(self, *args, **kwargs):\n required = kwargs.pop('required', True)\n kwargs.setdefault(\n 'validators', [GeoJSONFormFieldValidator(\n self.geom_type, required)])\n super().__init__(*args, **kwargs)\n '''\n\n def formfield(self, **kwargs):\n kwargs.setdefault('geom_type', self.geom_type)\n return super(GeoJSONField, self).formfield(**kwargs)\n\n\nclass GeometryField(GeoJSONField):\n pass\n\n\nclass PointField(GeometryField):\n geom_type = 'POINT'\n\n\nclass MultiPolygonField(GeoJSONField):\n geom_type = 'MULTIPOLYGON'\n", "path": "euth/maps/fields.py"}, {"content": "from django import forms\n\nfrom . import models\nfrom .widgets import MapChoosePointWidget\n\n\nclass MapIdeaForm(forms.ModelForm):\n\n def __init__(self, *args, **kwargs):\n self.settings = kwargs.pop('settings_instance')\n super().__init__(*args, **kwargs)\n self.fields['point'].widget = MapChoosePointWidget(\n polygon=self.settings.polygon)\n\n class Meta:\n model = models.MapIdea\n fields = ['name', 'description', 'image', 'point']\n", "path": "euth/maps/forms.py"}, {"content": "import json\n\nfrom django.core.exceptions import ValidationError\nfrom django.utils.translation import ugettext_lazy as _\n\n\nclass GeoJSONFormFieldValidator(object):\n\n def __init__(self, geom_type, required):\n self.geom_type = geom_type\n self.required = required\n\n def __call__(self, value):\n if isinstance(value, str):\n value = json.loads(value)\n err_msg = None\n json_type = value.get('type')\n if json_type == 'Feature' and self.geom_type == 'GEOMETRY':\n geom_type = value.get('geometry').get('type')\n is_geometry = geom_type in (\n \"Point\", \"MultiPoint\", \"LineString\", \"MultiLineString\",\n \"Polygon\", \"MultiPolygon\", \"GeometryCollection\"\n )\n if not is_geometry:\n err_msg = '%s is not a valid GeoJSON geometry type' % geom_type\n elif json_type == 'FeatureCollection':\n if len(value.get('features')) == 0 and self.required:\n err_msg = _('Field can not be empty')\n\n if err_msg:\n raise ValidationError(err_msg)\n", "path": "euth/maps/validators.py"}]}
1,407
948
gh_patches_debug_24312
rasdani/github-patches
git_diff
mozilla__bugbug-864
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Add an option to the bug_classifier script to classify a specific bug </issue> <code> [start of scripts/bug_classifier.py] 1 # -*- coding: utf-8 -*- 2 3 import argparse 4 import os 5 from logging import INFO, basicConfig, getLogger 6 7 import numpy as np 8 import requests 9 10 from bugbug import bugzilla 11 from bugbug.models import get_model_class 12 from bugbug.utils import download_check_etag, zstd_decompress 13 14 MODELS_WITH_TYPE = ("component",) 15 16 basicConfig(level=INFO) 17 logger = getLogger(__name__) 18 19 20 def classify_bugs(model_name, classifier): 21 if classifier != "default": 22 assert ( 23 model_name in MODELS_WITH_TYPE 24 ), f"{classifier} is not a valid classifier type for {model_name}" 25 26 model_file_name = f"{model_name}{classifier}model" 27 model_name = f"{model_name}_{classifier}" 28 else: 29 model_file_name = f"{model_name}model" 30 31 if not os.path.exists(model_file_name): 32 logger.info(f"{model_file_name} does not exist. Downloading the model....") 33 try: 34 download_check_etag( 35 f"https://index.taskcluster.net/v1/task/project.relman.bugbug.train_{model_name}.latest/artifacts/public/{model_file_name}.zst", 36 f"{model_file_name}.zst", 37 ) 38 except requests.HTTPError: 39 logger.error( 40 f"A pre-trained model is not available, you will need to train it yourself using the trainer script" 41 ) 42 raise SystemExit(1) 43 44 zstd_decompress(model_file_name) 45 assert os.path.exists(model_file_name), "Decompressed file doesn't exist" 46 47 model_class = get_model_class(model_name) 48 model = model_class.load(model_file_name) 49 50 for bug in bugzilla.get_bugs(): 51 print( 52 f'https://bugzilla.mozilla.org/show_bug.cgi?id={bug["id"]} - {bug["summary"]} ' 53 ) 54 55 if model.calculate_importance: 56 probas, importance = model.classify( 57 bug, probabilities=True, importances=True 58 ) 59 60 feature_names = model.get_human_readable_feature_names() 61 62 model.print_feature_importances( 63 importance["importances"], feature_names, class_probabilities=probas 64 ) 65 else: 66 probas = model.classify(bug, probabilities=True, importances=False) 67 68 if np.argmax(probas) == 1: 69 print(f"Positive! {probas}") 70 else: 71 print(f"Negative! {probas}") 72 input() 73 74 75 def main(): 76 description = "Perform evaluation on bugs using the specified model" 77 parser = argparse.ArgumentParser(description=description) 78 79 parser.add_argument("model", help="Which model to use for evaluation") 80 parser.add_argument( 81 "--classifier", 82 help="Type of the classifier. Only used for component classification.", 83 choices=["default", "nn"], 84 default="default", 85 ) 86 87 args = parser.parse_args() 88 89 classify_bugs(args.model, args.classifier) 90 [end of scripts/bug_classifier.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/scripts/bug_classifier.py b/scripts/bug_classifier.py --- a/scripts/bug_classifier.py +++ b/scripts/bug_classifier.py @@ -17,7 +17,7 @@ logger = getLogger(__name__) -def classify_bugs(model_name, classifier): +def classify_bugs(model_name, classifier, bug_id): if classifier != "default": assert ( model_name in MODELS_WITH_TYPE @@ -47,7 +47,13 @@ model_class = get_model_class(model_name) model = model_class.load(model_file_name) - for bug in bugzilla.get_bugs(): + if bug_id: + bugs = bugzilla.get(bug_id).values() + assert bugs, f"A bug with a bug id of {bug_id} was not found" + else: + bugs = bugzilla.get_bugs() + + for bug in bugs: print( f'https://bugzilla.mozilla.org/show_bug.cgi?id={bug["id"]} - {bug["summary"]} ' ) @@ -83,7 +89,8 @@ choices=["default", "nn"], default="default", ) + parser.add_argument("--bug-id", help="Classify the given bug id") args = parser.parse_args() - classify_bugs(args.model, args.classifier) + classify_bugs(args.model, args.classifier, args.bug_id)
{"golden_diff": "diff --git a/scripts/bug_classifier.py b/scripts/bug_classifier.py\n--- a/scripts/bug_classifier.py\n+++ b/scripts/bug_classifier.py\n@@ -17,7 +17,7 @@\n logger = getLogger(__name__)\n \n \n-def classify_bugs(model_name, classifier):\n+def classify_bugs(model_name, classifier, bug_id):\n if classifier != \"default\":\n assert (\n model_name in MODELS_WITH_TYPE\n@@ -47,7 +47,13 @@\n model_class = get_model_class(model_name)\n model = model_class.load(model_file_name)\n \n- for bug in bugzilla.get_bugs():\n+ if bug_id:\n+ bugs = bugzilla.get(bug_id).values()\n+ assert bugs, f\"A bug with a bug id of {bug_id} was not found\"\n+ else:\n+ bugs = bugzilla.get_bugs()\n+\n+ for bug in bugs:\n print(\n f'https://bugzilla.mozilla.org/show_bug.cgi?id={bug[\"id\"]} - {bug[\"summary\"]} '\n )\n@@ -83,7 +89,8 @@\n choices=[\"default\", \"nn\"],\n default=\"default\",\n )\n+ parser.add_argument(\"--bug-id\", help=\"Classify the given bug id\")\n \n args = parser.parse_args()\n \n- classify_bugs(args.model, args.classifier)\n+ classify_bugs(args.model, args.classifier, args.bug_id)\n", "issue": "Add an option to the bug_classifier script to classify a specific bug\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nimport argparse\nimport os\nfrom logging import INFO, basicConfig, getLogger\n\nimport numpy as np\nimport requests\n\nfrom bugbug import bugzilla\nfrom bugbug.models import get_model_class\nfrom bugbug.utils import download_check_etag, zstd_decompress\n\nMODELS_WITH_TYPE = (\"component\",)\n\nbasicConfig(level=INFO)\nlogger = getLogger(__name__)\n\n\ndef classify_bugs(model_name, classifier):\n if classifier != \"default\":\n assert (\n model_name in MODELS_WITH_TYPE\n ), f\"{classifier} is not a valid classifier type for {model_name}\"\n\n model_file_name = f\"{model_name}{classifier}model\"\n model_name = f\"{model_name}_{classifier}\"\n else:\n model_file_name = f\"{model_name}model\"\n\n if not os.path.exists(model_file_name):\n logger.info(f\"{model_file_name} does not exist. Downloading the model....\")\n try:\n download_check_etag(\n f\"https://index.taskcluster.net/v1/task/project.relman.bugbug.train_{model_name}.latest/artifacts/public/{model_file_name}.zst\",\n f\"{model_file_name}.zst\",\n )\n except requests.HTTPError:\n logger.error(\n f\"A pre-trained model is not available, you will need to train it yourself using the trainer script\"\n )\n raise SystemExit(1)\n\n zstd_decompress(model_file_name)\n assert os.path.exists(model_file_name), \"Decompressed file doesn't exist\"\n\n model_class = get_model_class(model_name)\n model = model_class.load(model_file_name)\n\n for bug in bugzilla.get_bugs():\n print(\n f'https://bugzilla.mozilla.org/show_bug.cgi?id={bug[\"id\"]} - {bug[\"summary\"]} '\n )\n\n if model.calculate_importance:\n probas, importance = model.classify(\n bug, probabilities=True, importances=True\n )\n\n feature_names = model.get_human_readable_feature_names()\n\n model.print_feature_importances(\n importance[\"importances\"], feature_names, class_probabilities=probas\n )\n else:\n probas = model.classify(bug, probabilities=True, importances=False)\n\n if np.argmax(probas) == 1:\n print(f\"Positive! {probas}\")\n else:\n print(f\"Negative! {probas}\")\n input()\n\n\ndef main():\n description = \"Perform evaluation on bugs using the specified model\"\n parser = argparse.ArgumentParser(description=description)\n\n parser.add_argument(\"model\", help=\"Which model to use for evaluation\")\n parser.add_argument(\n \"--classifier\",\n help=\"Type of the classifier. Only used for component classification.\",\n choices=[\"default\", \"nn\"],\n default=\"default\",\n )\n\n args = parser.parse_args()\n\n classify_bugs(args.model, args.classifier)\n", "path": "scripts/bug_classifier.py"}]}
1,341
312
gh_patches_debug_25031
rasdani/github-patches
git_diff
mampfes__hacs_waste_collection_schedule-223
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> CalendarEventDevice is deprecated Hi there, Latest HA beta `core-2022.5.0` shows this within the logs: ``` CalendarEventDevice is deprecated, modify WasteCollectionCalendar to extend CalendarEntity ``` Just wanted to let you know 🙂 More information can be found here: https://github.com/home-assistant/core/pull/68843 And here: https://developers.home-assistant.io/docs/core/entity/calendar/ </issue> <code> [start of custom_components/waste_collection_schedule/calendar.py] 1 """Calendar platform support for Waste Collection Schedule.""" 2 3 import logging 4 from datetime import timedelta 5 6 from homeassistant.components.calendar import CalendarEventDevice 7 8 _LOGGER = logging.getLogger(__name__) 9 10 11 async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): 12 """Set up calendar platform.""" 13 # We only want this platform to be set up via discovery. 14 if discovery_info is None: 15 return 16 17 entities = [] 18 19 api = discovery_info["api"] 20 21 for scraper in api.scrapers: 22 entities.append(WasteCollectionCalendar(api, scraper)) 23 24 async_add_entities(entities) 25 26 27 class WasteCollectionCalendar(CalendarEventDevice): 28 """Calendar entity class.""" 29 30 def __init__(self, api, scraper): 31 self._api = api 32 self._scraper = scraper 33 self._attr_unique_id = scraper.unique_id + "_calendar" 34 35 @property 36 def name(self): 37 """Return entity name.""" 38 return self._scraper.calendar_title 39 40 @property 41 def event(self): 42 """Return next collection event.""" 43 collections = self._scraper.get_upcoming(count=1, include_today=True) 44 if len(collections) == 0: 45 return None 46 else: 47 return self._convert(collections[0]) 48 49 async def async_get_events(self, hass, start_date, end_date): 50 """Return all events within specified time span.""" 51 collections = [] 52 for a in self._scraper.get_upcoming(include_today=True): 53 if a.date >= start_date.date() and a.date <= end_date.date(): 54 collections.append(self._convert(a)) 55 return collections 56 57 def _convert(self, collection): 58 """Convert an collection into a Home Assistant calendar event.""" 59 return { 60 "uid": f"{self._scraper.calendar_title}-{collection.date.isoformat()}-{collection.type}", 61 "summary": collection.type, 62 "start": {"date": collection.date.isoformat()}, 63 "end": {"date": (collection.date + timedelta(days=1)).isoformat()}, 64 "allDay": True, 65 } 66 [end of custom_components/waste_collection_schedule/calendar.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/custom_components/waste_collection_schedule/calendar.py b/custom_components/waste_collection_schedule/calendar.py --- a/custom_components/waste_collection_schedule/calendar.py +++ b/custom_components/waste_collection_schedule/calendar.py @@ -3,7 +3,7 @@ import logging from datetime import timedelta -from homeassistant.components.calendar import CalendarEventDevice +from homeassistant.components.calendar import CalendarEntity, CalendarEvent _LOGGER = logging.getLogger(__name__) @@ -24,7 +24,7 @@ async_add_entities(entities) -class WasteCollectionCalendar(CalendarEventDevice): +class WasteCollectionCalendar(CalendarEntity): """Calendar entity class.""" def __init__(self, api, scraper): @@ -56,10 +56,8 @@ def _convert(self, collection): """Convert an collection into a Home Assistant calendar event.""" - return { - "uid": f"{self._scraper.calendar_title}-{collection.date.isoformat()}-{collection.type}", - "summary": collection.type, - "start": {"date": collection.date.isoformat()}, - "end": {"date": (collection.date + timedelta(days=1)).isoformat()}, - "allDay": True, - } + return CalendarEvent( + summary=collection.type, + start=collection.date, + end=collection.date + timedelta(days=1), + )
{"golden_diff": "diff --git a/custom_components/waste_collection_schedule/calendar.py b/custom_components/waste_collection_schedule/calendar.py\n--- a/custom_components/waste_collection_schedule/calendar.py\n+++ b/custom_components/waste_collection_schedule/calendar.py\n@@ -3,7 +3,7 @@\n import logging\n from datetime import timedelta\n \n-from homeassistant.components.calendar import CalendarEventDevice\n+from homeassistant.components.calendar import CalendarEntity, CalendarEvent\n \n _LOGGER = logging.getLogger(__name__)\n \n@@ -24,7 +24,7 @@\n async_add_entities(entities)\n \n \n-class WasteCollectionCalendar(CalendarEventDevice):\n+class WasteCollectionCalendar(CalendarEntity):\n \"\"\"Calendar entity class.\"\"\"\n \n def __init__(self, api, scraper):\n@@ -56,10 +56,8 @@\n \n def _convert(self, collection):\n \"\"\"Convert an collection into a Home Assistant calendar event.\"\"\"\n- return {\n- \"uid\": f\"{self._scraper.calendar_title}-{collection.date.isoformat()}-{collection.type}\",\n- \"summary\": collection.type,\n- \"start\": {\"date\": collection.date.isoformat()},\n- \"end\": {\"date\": (collection.date + timedelta(days=1)).isoformat()},\n- \"allDay\": True,\n- }\n+ return CalendarEvent(\n+ summary=collection.type,\n+ start=collection.date,\n+ end=collection.date + timedelta(days=1),\n+ )\n", "issue": "CalendarEventDevice is deprecated\nHi there,\r\n\r\nLatest HA beta `core-2022.5.0` shows this within the logs:\r\n```\r\nCalendarEventDevice is deprecated, modify WasteCollectionCalendar to extend CalendarEntity\r\n```\r\n\r\nJust wanted to let you know \ud83d\ude42\r\n\r\nMore information can be found here:\r\nhttps://github.com/home-assistant/core/pull/68843\r\n\r\nAnd here:\r\nhttps://developers.home-assistant.io/docs/core/entity/calendar/\n", "before_files": [{"content": "\"\"\"Calendar platform support for Waste Collection Schedule.\"\"\"\n\nimport logging\nfrom datetime import timedelta\n\nfrom homeassistant.components.calendar import CalendarEventDevice\n\n_LOGGER = logging.getLogger(__name__)\n\n\nasync def async_setup_platform(hass, config, async_add_entities, discovery_info=None):\n \"\"\"Set up calendar platform.\"\"\"\n # We only want this platform to be set up via discovery.\n if discovery_info is None:\n return\n\n entities = []\n\n api = discovery_info[\"api\"]\n\n for scraper in api.scrapers:\n entities.append(WasteCollectionCalendar(api, scraper))\n\n async_add_entities(entities)\n\n\nclass WasteCollectionCalendar(CalendarEventDevice):\n \"\"\"Calendar entity class.\"\"\"\n\n def __init__(self, api, scraper):\n self._api = api\n self._scraper = scraper\n self._attr_unique_id = scraper.unique_id + \"_calendar\"\n\n @property\n def name(self):\n \"\"\"Return entity name.\"\"\"\n return self._scraper.calendar_title\n\n @property\n def event(self):\n \"\"\"Return next collection event.\"\"\"\n collections = self._scraper.get_upcoming(count=1, include_today=True)\n if len(collections) == 0:\n return None\n else:\n return self._convert(collections[0])\n\n async def async_get_events(self, hass, start_date, end_date):\n \"\"\"Return all events within specified time span.\"\"\"\n collections = []\n for a in self._scraper.get_upcoming(include_today=True):\n if a.date >= start_date.date() and a.date <= end_date.date():\n collections.append(self._convert(a))\n return collections\n\n def _convert(self, collection):\n \"\"\"Convert an collection into a Home Assistant calendar event.\"\"\"\n return {\n \"uid\": f\"{self._scraper.calendar_title}-{collection.date.isoformat()}-{collection.type}\",\n \"summary\": collection.type,\n \"start\": {\"date\": collection.date.isoformat()},\n \"end\": {\"date\": (collection.date + timedelta(days=1)).isoformat()},\n \"allDay\": True,\n }\n", "path": "custom_components/waste_collection_schedule/calendar.py"}]}
1,204
296
gh_patches_debug_17685
rasdani/github-patches
git_diff
DataDog__dd-trace-py-3906
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> mypy invalid syntax error with python 2 ### Which version of dd-trace-py are you using? v1.2.0 ### Which version of pip are you using? 22.1.2 ### Which version of the libraries are you using? ``` ddtrace==1.2.0 mypy[python2]==0.961 ``` ### How can we reproduce your problem? 1. Clone https://gist.github.com/majorgreys/720201c4bdd33131e4c231207ef716ce 2. Run `docker run --rm -it (docker build .)` ### What is the result that you get? ``` usr/local/lib/python3.9/site-packages/ddtrace/contrib/redis/asyncio_patch.py:12: error: invalid syntax Found 1 error in 1 file (errors prevented further checking) ``` ### What is the result that you expected? ``` Success: no issues found in 1 source file ``` </issue> <code> [start of ddtrace/contrib/redis/patch.py] 1 import redis 2 3 from ddtrace import config 4 from ddtrace.vendor import wrapt 5 6 from ...internal.utils.formats import stringify_cache_args 7 from ...pin import Pin 8 from ..trace_utils import unwrap 9 from .util import _trace_redis_cmd 10 from .util import _trace_redis_execute_pipeline 11 12 13 config._add("redis", dict(_default_service="redis")) 14 15 16 def patch(): 17 """Patch the instrumented methods 18 19 This duplicated doesn't look nice. The nicer alternative is to use an ObjectProxy on top 20 of Redis and StrictRedis. However, it means that any "import redis.Redis" won't be instrumented. 21 """ 22 if getattr(redis, "_datadog_patch", False): 23 return 24 setattr(redis, "_datadog_patch", True) 25 26 _w = wrapt.wrap_function_wrapper 27 28 if redis.VERSION < (3, 0, 0): 29 _w("redis", "StrictRedis.execute_command", traced_execute_command) 30 _w("redis", "StrictRedis.pipeline", traced_pipeline) 31 _w("redis", "Redis.pipeline", traced_pipeline) 32 _w("redis.client", "BasePipeline.execute", traced_execute_pipeline) 33 _w("redis.client", "BasePipeline.immediate_execute_command", traced_execute_command) 34 else: 35 _w("redis", "Redis.execute_command", traced_execute_command) 36 _w("redis", "Redis.pipeline", traced_pipeline) 37 _w("redis.client", "Pipeline.execute", traced_execute_pipeline) 38 _w("redis.client", "Pipeline.immediate_execute_command", traced_execute_command) 39 if redis.VERSION >= (4, 2, 0): 40 from .asyncio_patch import traced_async_execute_command 41 from .asyncio_patch import traced_async_execute_pipeline 42 43 _w("redis.asyncio.client", "Redis.execute_command", traced_async_execute_command) 44 _w("redis.asyncio.client", "Redis.pipeline", traced_pipeline) 45 _w("redis.asyncio.client", "Pipeline.execute", traced_async_execute_pipeline) 46 _w("redis.asyncio.client", "Pipeline.immediate_execute_command", traced_async_execute_command) 47 Pin(service=None).onto(redis.StrictRedis) 48 49 50 def unpatch(): 51 if getattr(redis, "_datadog_patch", False): 52 setattr(redis, "_datadog_patch", False) 53 54 if redis.VERSION < (3, 0, 0): 55 unwrap(redis.StrictRedis, "execute_command") 56 unwrap(redis.StrictRedis, "pipeline") 57 unwrap(redis.Redis, "pipeline") 58 unwrap(redis.client.BasePipeline, "execute") 59 unwrap(redis.client.BasePipeline, "immediate_execute_command") 60 else: 61 unwrap(redis.Redis, "execute_command") 62 unwrap(redis.Redis, "pipeline") 63 unwrap(redis.client.Pipeline, "execute") 64 unwrap(redis.client.Pipeline, "immediate_execute_command") 65 if redis.VERSION >= (4, 2, 0): 66 unwrap(redis.asyncio.client.Redis, "execute_command") 67 unwrap(redis.asyncio.client.Redis, "pipeline") 68 unwrap(redis.asyncio.client.Pipeline, "execute") 69 unwrap(redis.asyncio.client.Pipeline, "immediate_execute_command") 70 71 72 # 73 # tracing functions 74 # 75 def traced_execute_command(func, instance, args, kwargs): 76 pin = Pin.get_from(instance) 77 if not pin or not pin.enabled(): 78 return func(*args, **kwargs) 79 80 with _trace_redis_cmd(pin, config.redis, instance, args): 81 return func(*args, **kwargs) 82 83 84 def traced_pipeline(func, instance, args, kwargs): 85 pipeline = func(*args, **kwargs) 86 pin = Pin.get_from(instance) 87 if pin: 88 pin.onto(pipeline) 89 return pipeline 90 91 92 def traced_execute_pipeline(func, instance, args, kwargs): 93 pin = Pin.get_from(instance) 94 if not pin or not pin.enabled(): 95 return func(*args, **kwargs) 96 97 cmds = [stringify_cache_args(c) for c, _ in instance.command_stack] 98 resource = "\n".join(cmds) 99 with _trace_redis_execute_pipeline(pin, config.redis, resource, instance): 100 return func(*args, **kwargs) 101 [end of ddtrace/contrib/redis/patch.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ddtrace/contrib/redis/patch.py b/ddtrace/contrib/redis/patch.py --- a/ddtrace/contrib/redis/patch.py +++ b/ddtrace/contrib/redis/patch.py @@ -1,4 +1,5 @@ import redis +from six import PY3 from ddtrace import config from ddtrace.vendor import wrapt @@ -36,7 +37,8 @@ _w("redis", "Redis.pipeline", traced_pipeline) _w("redis.client", "Pipeline.execute", traced_execute_pipeline) _w("redis.client", "Pipeline.immediate_execute_command", traced_execute_command) - if redis.VERSION >= (4, 2, 0): + # Avoid mypy invalid syntax errors when parsing Python 2 files + if PY3 and redis.VERSION >= (4, 2, 0): from .asyncio_patch import traced_async_execute_command from .asyncio_patch import traced_async_execute_pipeline
{"golden_diff": "diff --git a/ddtrace/contrib/redis/patch.py b/ddtrace/contrib/redis/patch.py\n--- a/ddtrace/contrib/redis/patch.py\n+++ b/ddtrace/contrib/redis/patch.py\n@@ -1,4 +1,5 @@\n import redis\n+from six import PY3\n \n from ddtrace import config\n from ddtrace.vendor import wrapt\n@@ -36,7 +37,8 @@\n _w(\"redis\", \"Redis.pipeline\", traced_pipeline)\n _w(\"redis.client\", \"Pipeline.execute\", traced_execute_pipeline)\n _w(\"redis.client\", \"Pipeline.immediate_execute_command\", traced_execute_command)\n- if redis.VERSION >= (4, 2, 0):\n+ # Avoid mypy invalid syntax errors when parsing Python 2 files\n+ if PY3 and redis.VERSION >= (4, 2, 0):\n from .asyncio_patch import traced_async_execute_command\n from .asyncio_patch import traced_async_execute_pipeline\n", "issue": "mypy invalid syntax error with python 2\n### Which version of dd-trace-py are you using?\r\n\r\nv1.2.0\r\n\r\n### Which version of pip are you using?\r\n\r\n22.1.2\r\n\r\n### Which version of the libraries are you using?\r\n\r\n```\r\nddtrace==1.2.0\r\nmypy[python2]==0.961\r\n```\r\n\r\n### How can we reproduce your problem?\r\n\r\n1. Clone https://gist.github.com/majorgreys/720201c4bdd33131e4c231207ef716ce\r\n2. Run `docker run --rm -it (docker build .)`\r\n\r\n### What is the result that you get?\r\n\r\n```\r\nusr/local/lib/python3.9/site-packages/ddtrace/contrib/redis/asyncio_patch.py:12: error: invalid syntax\r\nFound 1 error in 1 file (errors prevented further checking)\r\n```\r\n\r\n### What is the result that you expected?\r\n\r\n```\r\nSuccess: no issues found in 1 source file\r\n```\r\n\n", "before_files": [{"content": "import redis\n\nfrom ddtrace import config\nfrom ddtrace.vendor import wrapt\n\nfrom ...internal.utils.formats import stringify_cache_args\nfrom ...pin import Pin\nfrom ..trace_utils import unwrap\nfrom .util import _trace_redis_cmd\nfrom .util import _trace_redis_execute_pipeline\n\n\nconfig._add(\"redis\", dict(_default_service=\"redis\"))\n\n\ndef patch():\n \"\"\"Patch the instrumented methods\n\n This duplicated doesn't look nice. The nicer alternative is to use an ObjectProxy on top\n of Redis and StrictRedis. However, it means that any \"import redis.Redis\" won't be instrumented.\n \"\"\"\n if getattr(redis, \"_datadog_patch\", False):\n return\n setattr(redis, \"_datadog_patch\", True)\n\n _w = wrapt.wrap_function_wrapper\n\n if redis.VERSION < (3, 0, 0):\n _w(\"redis\", \"StrictRedis.execute_command\", traced_execute_command)\n _w(\"redis\", \"StrictRedis.pipeline\", traced_pipeline)\n _w(\"redis\", \"Redis.pipeline\", traced_pipeline)\n _w(\"redis.client\", \"BasePipeline.execute\", traced_execute_pipeline)\n _w(\"redis.client\", \"BasePipeline.immediate_execute_command\", traced_execute_command)\n else:\n _w(\"redis\", \"Redis.execute_command\", traced_execute_command)\n _w(\"redis\", \"Redis.pipeline\", traced_pipeline)\n _w(\"redis.client\", \"Pipeline.execute\", traced_execute_pipeline)\n _w(\"redis.client\", \"Pipeline.immediate_execute_command\", traced_execute_command)\n if redis.VERSION >= (4, 2, 0):\n from .asyncio_patch import traced_async_execute_command\n from .asyncio_patch import traced_async_execute_pipeline\n\n _w(\"redis.asyncio.client\", \"Redis.execute_command\", traced_async_execute_command)\n _w(\"redis.asyncio.client\", \"Redis.pipeline\", traced_pipeline)\n _w(\"redis.asyncio.client\", \"Pipeline.execute\", traced_async_execute_pipeline)\n _w(\"redis.asyncio.client\", \"Pipeline.immediate_execute_command\", traced_async_execute_command)\n Pin(service=None).onto(redis.StrictRedis)\n\n\ndef unpatch():\n if getattr(redis, \"_datadog_patch\", False):\n setattr(redis, \"_datadog_patch\", False)\n\n if redis.VERSION < (3, 0, 0):\n unwrap(redis.StrictRedis, \"execute_command\")\n unwrap(redis.StrictRedis, \"pipeline\")\n unwrap(redis.Redis, \"pipeline\")\n unwrap(redis.client.BasePipeline, \"execute\")\n unwrap(redis.client.BasePipeline, \"immediate_execute_command\")\n else:\n unwrap(redis.Redis, \"execute_command\")\n unwrap(redis.Redis, \"pipeline\")\n unwrap(redis.client.Pipeline, \"execute\")\n unwrap(redis.client.Pipeline, \"immediate_execute_command\")\n if redis.VERSION >= (4, 2, 0):\n unwrap(redis.asyncio.client.Redis, \"execute_command\")\n unwrap(redis.asyncio.client.Redis, \"pipeline\")\n unwrap(redis.asyncio.client.Pipeline, \"execute\")\n unwrap(redis.asyncio.client.Pipeline, \"immediate_execute_command\")\n\n\n#\n# tracing functions\n#\ndef traced_execute_command(func, instance, args, kwargs):\n pin = Pin.get_from(instance)\n if not pin or not pin.enabled():\n return func(*args, **kwargs)\n\n with _trace_redis_cmd(pin, config.redis, instance, args):\n return func(*args, **kwargs)\n\n\ndef traced_pipeline(func, instance, args, kwargs):\n pipeline = func(*args, **kwargs)\n pin = Pin.get_from(instance)\n if pin:\n pin.onto(pipeline)\n return pipeline\n\n\ndef traced_execute_pipeline(func, instance, args, kwargs):\n pin = Pin.get_from(instance)\n if not pin or not pin.enabled():\n return func(*args, **kwargs)\n\n cmds = [stringify_cache_args(c) for c, _ in instance.command_stack]\n resource = \"\\n\".join(cmds)\n with _trace_redis_execute_pipeline(pin, config.redis, resource, instance):\n return func(*args, **kwargs)\n", "path": "ddtrace/contrib/redis/patch.py"}]}
1,845
214
gh_patches_debug_23956
rasdani/github-patches
git_diff
xorbitsai__inference-407
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> BUG: Asking to pad but the tokenizer does not have a padding token ### Describe the bug When using Llama-2 PyTorch model for embedding creation, there's an issue: ----> 1 model.create_embedding("Once upon a time, there was a very old computer") File ~/inference/xinference/client.py:324, in RESTfulGenerateModelHandle.create_embedding(self, input) 322 response = requests.post(url, json=request_body) 323 if response.status_code != 200: --> 324 raise RuntimeError( 325 f"Failed to create the embeddings, detail: {response.json()['detail']}" 326 ) 328 response_data = response.json() 329 return response_data RuntimeError: Failed to create the embeddings, detail: [address=127.0.0.1:35477, pid=951303] Asking to pad but the tokenizer does not have a padding token. Please select a token to use as `pad_token` `(tokenizer.pad_token = tokenizer.eos_token e.g.)` or add a new pad token via `tokenizer.add_special_tokens({'pad_token': '[PAD]'})`. </issue> <code> [start of xinference/model/llm/pytorch/llama_2.py] 1 # Copyright 2022-2023 XProbe Inc. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 from typing import Optional 16 17 from ..llm_family import LLMFamilyV1, LLMSpecV1 18 from .core import PytorchChatModel, PytorchModel, PytorchModelConfig 19 20 21 class LlamaPytorchModel(PytorchModel): 22 def __init__( 23 self, 24 model_uid: str, 25 model_family: "LLMFamilyV1", 26 model_spec: "LLMSpecV1", 27 quantization: str, 28 model_path: str, 29 pytorch_model_config: Optional[PytorchModelConfig] = None, 30 ): 31 super().__init__( 32 model_uid, 33 model_family, 34 model_spec, 35 quantization, 36 model_path, 37 pytorch_model_config=pytorch_model_config, 38 ) 39 40 def _load_model(self, kwargs: dict): 41 model, tokenizer = super()._load_model(kwargs) 42 model.config.eos_token_id = tokenizer.eos_token_id 43 model.config.pad_token_id = tokenizer.pad_token_id 44 return model, tokenizer 45 46 @classmethod 47 def match(cls, llm_family: "LLMFamilyV1", llm_spec: "LLMSpecV1") -> bool: 48 if llm_spec.model_format != "pytorch": 49 return False 50 if "llama-2" not in llm_family.model_name: 51 return False 52 if "generate" not in llm_family.model_ability: 53 return False 54 return True 55 56 57 class LlamaPytorchChatModel(PytorchChatModel): 58 def __init__( 59 self, 60 model_uid: str, 61 model_family: "LLMFamilyV1", 62 model_spec: "LLMSpecV1", 63 quantization: str, 64 model_path: str, 65 pytorch_model_config: Optional["PytorchModelConfig"] = None, 66 ): 67 super().__init__( 68 model_uid, 69 model_family, 70 model_spec, 71 quantization, 72 model_path, 73 pytorch_model_config=pytorch_model_config, 74 ) 75 self._use_fast_tokenizer = False 76 77 def _load_model(self, kwargs: dict): 78 model, tokenizer = super()._load_model(kwargs) 79 model.config.eos_token_id = tokenizer.eos_token_id 80 model.config.pad_token_id = tokenizer.pad_token_id 81 return model, tokenizer 82 83 @classmethod 84 def match(cls, llm_family: "LLMFamilyV1", llm_spec: "LLMSpecV1") -> bool: 85 if llm_spec.model_format != "pytorch": 86 return False 87 if "llama-2" not in llm_family.model_name: 88 return False 89 if "chat" not in llm_family.model_ability: 90 return False 91 return True 92 [end of xinference/model/llm/pytorch/llama_2.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/xinference/model/llm/pytorch/llama_2.py b/xinference/model/llm/pytorch/llama_2.py --- a/xinference/model/llm/pytorch/llama_2.py +++ b/xinference/model/llm/pytorch/llama_2.py @@ -39,6 +39,9 @@ def _load_model(self, kwargs: dict): model, tokenizer = super()._load_model(kwargs) + # Llama has no pad token by default + # https://github.com/huggingface/transformers/blob/07998ef39926b76d3f6667025535d0859eed61c3/docs/source/en/llm_tutorial.md?plain=1#L125 + tokenizer.pad_token = tokenizer.eos_token model.config.eos_token_id = tokenizer.eos_token_id model.config.pad_token_id = tokenizer.pad_token_id return model, tokenizer @@ -76,6 +79,9 @@ def _load_model(self, kwargs: dict): model, tokenizer = super()._load_model(kwargs) + # Llama has no pad token by default + # https://github.com/huggingface/transformers/blob/07998ef39926b76d3f6667025535d0859eed61c3/docs/source/en/llm_tutorial.md?plain=1#L125 + tokenizer.pad_token = tokenizer.eos_token model.config.eos_token_id = tokenizer.eos_token_id model.config.pad_token_id = tokenizer.pad_token_id return model, tokenizer
{"golden_diff": "diff --git a/xinference/model/llm/pytorch/llama_2.py b/xinference/model/llm/pytorch/llama_2.py\n--- a/xinference/model/llm/pytorch/llama_2.py\n+++ b/xinference/model/llm/pytorch/llama_2.py\n@@ -39,6 +39,9 @@\n \n def _load_model(self, kwargs: dict):\n model, tokenizer = super()._load_model(kwargs)\n+ # Llama has no pad token by default\n+ # https://github.com/huggingface/transformers/blob/07998ef39926b76d3f6667025535d0859eed61c3/docs/source/en/llm_tutorial.md?plain=1#L125\n+ tokenizer.pad_token = tokenizer.eos_token\n model.config.eos_token_id = tokenizer.eos_token_id\n model.config.pad_token_id = tokenizer.pad_token_id\n return model, tokenizer\n@@ -76,6 +79,9 @@\n \n def _load_model(self, kwargs: dict):\n model, tokenizer = super()._load_model(kwargs)\n+ # Llama has no pad token by default\n+ # https://github.com/huggingface/transformers/blob/07998ef39926b76d3f6667025535d0859eed61c3/docs/source/en/llm_tutorial.md?plain=1#L125\n+ tokenizer.pad_token = tokenizer.eos_token\n model.config.eos_token_id = tokenizer.eos_token_id\n model.config.pad_token_id = tokenizer.pad_token_id\n return model, tokenizer\n", "issue": "BUG: Asking to pad but the tokenizer does not have a padding token\n### Describe the bug\r\nWhen using Llama-2 PyTorch model for embedding creation, there's an issue:\r\n\r\n----> 1 model.create_embedding(\"Once upon a time, there was a very old computer\")\r\n\r\nFile ~/inference/xinference/client.py:324, in RESTfulGenerateModelHandle.create_embedding(self, input)\r\n 322 response = requests.post(url, json=request_body)\r\n 323 if response.status_code != 200:\r\n--> 324 raise RuntimeError(\r\n 325 f\"Failed to create the embeddings, detail: {response.json()['detail']}\"\r\n 326 )\r\n 328 response_data = response.json()\r\n 329 return response_data\r\n\r\nRuntimeError: Failed to create the embeddings, detail: [address=127.0.0.1:35477, pid=951303] Asking to pad but the tokenizer does not have a padding token. Please select a token to use as `pad_token` `(tokenizer.pad_token = tokenizer.eos_token e.g.)` or add a new pad token via `tokenizer.add_special_tokens({'pad_token': '[PAD]'})`.\r\n\n", "before_files": [{"content": "# Copyright 2022-2023 XProbe Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Optional\n\nfrom ..llm_family import LLMFamilyV1, LLMSpecV1\nfrom .core import PytorchChatModel, PytorchModel, PytorchModelConfig\n\n\nclass LlamaPytorchModel(PytorchModel):\n def __init__(\n self,\n model_uid: str,\n model_family: \"LLMFamilyV1\",\n model_spec: \"LLMSpecV1\",\n quantization: str,\n model_path: str,\n pytorch_model_config: Optional[PytorchModelConfig] = None,\n ):\n super().__init__(\n model_uid,\n model_family,\n model_spec,\n quantization,\n model_path,\n pytorch_model_config=pytorch_model_config,\n )\n\n def _load_model(self, kwargs: dict):\n model, tokenizer = super()._load_model(kwargs)\n model.config.eos_token_id = tokenizer.eos_token_id\n model.config.pad_token_id = tokenizer.pad_token_id\n return model, tokenizer\n\n @classmethod\n def match(cls, llm_family: \"LLMFamilyV1\", llm_spec: \"LLMSpecV1\") -> bool:\n if llm_spec.model_format != \"pytorch\":\n return False\n if \"llama-2\" not in llm_family.model_name:\n return False\n if \"generate\" not in llm_family.model_ability:\n return False\n return True\n\n\nclass LlamaPytorchChatModel(PytorchChatModel):\n def __init__(\n self,\n model_uid: str,\n model_family: \"LLMFamilyV1\",\n model_spec: \"LLMSpecV1\",\n quantization: str,\n model_path: str,\n pytorch_model_config: Optional[\"PytorchModelConfig\"] = None,\n ):\n super().__init__(\n model_uid,\n model_family,\n model_spec,\n quantization,\n model_path,\n pytorch_model_config=pytorch_model_config,\n )\n self._use_fast_tokenizer = False\n\n def _load_model(self, kwargs: dict):\n model, tokenizer = super()._load_model(kwargs)\n model.config.eos_token_id = tokenizer.eos_token_id\n model.config.pad_token_id = tokenizer.pad_token_id\n return model, tokenizer\n\n @classmethod\n def match(cls, llm_family: \"LLMFamilyV1\", llm_spec: \"LLMSpecV1\") -> bool:\n if llm_spec.model_format != \"pytorch\":\n return False\n if \"llama-2\" not in llm_family.model_name:\n return False\n if \"chat\" not in llm_family.model_ability:\n return False\n return True\n", "path": "xinference/model/llm/pytorch/llama_2.py"}]}
1,721
395
gh_patches_debug_9221
rasdani/github-patches
git_diff
pre-commit__pre-commit-2030
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> multiple mutable rev warnings issued on `autoupdate` when running `pre-commit autoupdate` I get 2 warnings per mutable rev, when I expected 0 see #974 ```sh ~/projects/pytest-cov   pre-commit-autoupdate  pipx run pre-commit autoupdate [WARNING] The 'rev' field of repo 'https://github.com/pre-commit/pre-commit-hooks' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this. [WARNING] The 'rev' field of repo 'https://github.com/timothycrosley/isort' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this. [WARNING] The 'rev' field of repo 'https://gitlab.com/pycqa/flake8' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this. [WARNING] The 'rev' field of repo 'https://github.com/pre-commit/pre-commit-hooks' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this. [WARNING] The 'rev' field of repo 'https://github.com/timothycrosley/isort' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this. [WARNING] The 'rev' field of repo 'https://gitlab.com/pycqa/flake8' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this. Updating https://github.com/pre-commit/pre-commit-hooks ... updating master -> v4.0.1. Updating https://github.com/timothycrosley/isort ... updating master -> 5.9.3. Updating https://gitlab.com/pycqa/flake8 ... updating master -> 3.9.2. ``` </issue> <code> [start of pre_commit/commands/migrate_config.py] 1 import re 2 import textwrap 3 4 import yaml 5 6 from pre_commit.clientlib import load_config 7 from pre_commit.util import yaml_load 8 9 10 def _is_header_line(line: str) -> bool: 11 return line.startswith(('#', '---')) or not line.strip() 12 13 14 def _migrate_map(contents: str) -> str: 15 if isinstance(yaml_load(contents), list): 16 # Find the first non-header line 17 lines = contents.splitlines(True) 18 i = 0 19 # Only loop on non empty configuration file 20 while i < len(lines) and _is_header_line(lines[i]): 21 i += 1 22 23 header = ''.join(lines[:i]) 24 rest = ''.join(lines[i:]) 25 26 # If they are using the "default" flow style of yaml, this operation 27 # will yield a valid configuration 28 try: 29 trial_contents = f'{header}repos:\n{rest}' 30 yaml_load(trial_contents) 31 contents = trial_contents 32 except yaml.YAMLError: 33 contents = f'{header}repos:\n{textwrap.indent(rest, " " * 4)}' 34 35 return contents 36 37 38 def _migrate_sha_to_rev(contents: str) -> str: 39 return re.sub(r'(\n\s+)sha:', r'\1rev:', contents) 40 41 42 def migrate_config(config_file: str, quiet: bool = False) -> int: 43 # ensure that the configuration is a valid pre-commit configuration 44 load_config(config_file) 45 46 with open(config_file) as f: 47 orig_contents = contents = f.read() 48 49 contents = _migrate_map(contents) 50 contents = _migrate_sha_to_rev(contents) 51 52 if contents != orig_contents: 53 with open(config_file, 'w') as f: 54 f.write(contents) 55 56 print('Configuration has been migrated.') 57 elif not quiet: 58 print('Configuration is already migrated.') 59 return 0 60 [end of pre_commit/commands/migrate_config.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pre_commit/commands/migrate_config.py b/pre_commit/commands/migrate_config.py --- a/pre_commit/commands/migrate_config.py +++ b/pre_commit/commands/migrate_config.py @@ -3,7 +3,6 @@ import yaml -from pre_commit.clientlib import load_config from pre_commit.util import yaml_load @@ -40,9 +39,6 @@ def migrate_config(config_file: str, quiet: bool = False) -> int: - # ensure that the configuration is a valid pre-commit configuration - load_config(config_file) - with open(config_file) as f: orig_contents = contents = f.read()
{"golden_diff": "diff --git a/pre_commit/commands/migrate_config.py b/pre_commit/commands/migrate_config.py\n--- a/pre_commit/commands/migrate_config.py\n+++ b/pre_commit/commands/migrate_config.py\n@@ -3,7 +3,6 @@\n \n import yaml\n \n-from pre_commit.clientlib import load_config\n from pre_commit.util import yaml_load\n \n \n@@ -40,9 +39,6 @@\n \n \n def migrate_config(config_file: str, quiet: bool = False) -> int:\n- # ensure that the configuration is a valid pre-commit configuration\n- load_config(config_file)\n-\n with open(config_file) as f:\n orig_contents = contents = f.read()\n", "issue": "multiple mutable rev warnings issued on `autoupdate`\nwhen running `pre-commit autoupdate` I get 2 warnings per mutable rev, when I expected 0 see #974\r\n\r\n```sh\r\n~/projects/pytest-cov \ue0b0 \ue0a0 pre-commit-autoupdate \ue0b0 pipx run pre-commit autoupdate \r\n[WARNING] The 'rev' field of repo 'https://github.com/pre-commit/pre-commit-hooks' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this.\r\n[WARNING] The 'rev' field of repo 'https://github.com/timothycrosley/isort' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this.\r\n[WARNING] The 'rev' field of repo 'https://gitlab.com/pycqa/flake8' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this.\r\n[WARNING] The 'rev' field of repo 'https://github.com/pre-commit/pre-commit-hooks' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this.\r\n[WARNING] The 'rev' field of repo 'https://github.com/timothycrosley/isort' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this.\r\n[WARNING] The 'rev' field of repo 'https://gitlab.com/pycqa/flake8' appears to be a mutable reference (moving tag / branch). Mutable references are never updated after first install and are not supported. See https://pre-commit.com/#using-the-latest-version-for-a-repository for more details. Hint: `pre-commit autoupdate` often fixes this.\r\nUpdating https://github.com/pre-commit/pre-commit-hooks ... updating master -> v4.0.1.\r\nUpdating https://github.com/timothycrosley/isort ... updating master -> 5.9.3.\r\nUpdating https://gitlab.com/pycqa/flake8 ... updating master -> 3.9.2.\r\n```\n", "before_files": [{"content": "import re\nimport textwrap\n\nimport yaml\n\nfrom pre_commit.clientlib import load_config\nfrom pre_commit.util import yaml_load\n\n\ndef _is_header_line(line: str) -> bool:\n return line.startswith(('#', '---')) or not line.strip()\n\n\ndef _migrate_map(contents: str) -> str:\n if isinstance(yaml_load(contents), list):\n # Find the first non-header line\n lines = contents.splitlines(True)\n i = 0\n # Only loop on non empty configuration file\n while i < len(lines) and _is_header_line(lines[i]):\n i += 1\n\n header = ''.join(lines[:i])\n rest = ''.join(lines[i:])\n\n # If they are using the \"default\" flow style of yaml, this operation\n # will yield a valid configuration\n try:\n trial_contents = f'{header}repos:\\n{rest}'\n yaml_load(trial_contents)\n contents = trial_contents\n except yaml.YAMLError:\n contents = f'{header}repos:\\n{textwrap.indent(rest, \" \" * 4)}'\n\n return contents\n\n\ndef _migrate_sha_to_rev(contents: str) -> str:\n return re.sub(r'(\\n\\s+)sha:', r'\\1rev:', contents)\n\n\ndef migrate_config(config_file: str, quiet: bool = False) -> int:\n # ensure that the configuration is a valid pre-commit configuration\n load_config(config_file)\n\n with open(config_file) as f:\n orig_contents = contents = f.read()\n\n contents = _migrate_map(contents)\n contents = _migrate_sha_to_rev(contents)\n\n if contents != orig_contents:\n with open(config_file, 'w') as f:\n f.write(contents)\n\n print('Configuration has been migrated.')\n elif not quiet:\n print('Configuration is already migrated.')\n return 0\n", "path": "pre_commit/commands/migrate_config.py"}]}
1,710
147
gh_patches_debug_26064
rasdani/github-patches
git_diff
holoviz__holoviews-671
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Dependencies missing Hi, I think that the holoviews pip package does not correctly state its dependencies. These are packages that holoviews complained about not finding when I tried importing it: - jinja2 - nbformat - nbconvert - matplotlib After installing them manually via pip, I can import holoviews fine. </issue> <code> [start of setup.py] 1 #!/usr/bin/env python 2 3 import sys, os 4 try: 5 from setuptools import setup 6 except ImportError: 7 from distutils.core import setup 8 9 10 setup_args = {} 11 install_requires = ['param>=1.3.2', 'numpy>=1.0'] 12 extras_require={} 13 14 # Notebook dependencies of IPython 3 15 extras_require['notebook-dependencies'] = ['ipython', 'pyzmq', 'jinja2', 'tornado', 16 'jsonschema', 'ipython', 'pygments'] 17 # IPython Notebook + matplotlib + Lancet 18 extras_require['recommended'] = (extras_require['notebook-dependencies'] 19 + ['matplotlib', 'lancet-ioam']) 20 # Additional, useful third-party packages 21 extras_require['extras'] = (['pandas', 'seaborn', 'mpld3', 'bokeh'] 22 + extras_require['recommended']) 23 # Everything including cyordereddict (optimization) and nosetests 24 extras_require['all'] = (extras_require['recommended'] 25 + extras_require['extras'] 26 + ['cyordereddict', 'nose']) 27 28 setup_args.update(dict( 29 name='holoviews', 30 version="1.4.3", 31 install_requires = install_requires, 32 extras_require = extras_require, 33 description='Stop plotting your data - annotate your data and let it visualize itself.', 34 long_description=open('README.rst').read() if os.path.isfile('README.rst') else 'Consult README.rst', 35 author= "Jean-Luc Stevens and Philipp Rudiger", 36 author_email= "[email protected]", 37 maintainer= "IOAM", 38 maintainer_email= "[email protected]", 39 platforms=['Windows', 'Mac OS X', 'Linux'], 40 license='BSD', 41 url='http://ioam.github.com/holoviews/', 42 packages = ["holoviews", 43 "holoviews.core", 44 "holoviews.core.data", 45 "holoviews.element", 46 "holoviews.interface", 47 "holoviews.ipython", 48 "holoviews.operation", 49 "holoviews.plotting", 50 "holoviews.plotting.mpl", 51 "holoviews.plotting.bokeh", 52 "holoviews.plotting.widgets"], 53 package_data={'holoviews.ipython': ['*.html'], 54 'holoviews.plotting.mpl': ['*.mplstyle', '*.jinja', '*.js'], 55 'holoviews.plotting.bokeh': ['*.js', '*.css'], 56 'holoviews.plotting.widgets': ['*.jinja', '*.js', '*.css']}, 57 classifiers = [ 58 "License :: OSI Approved :: BSD License", 59 "Development Status :: 5 - Production/Stable", 60 "Programming Language :: Python :: 2.7", 61 "Programming Language :: Python :: 3.3", 62 "Programming Language :: Python :: 3.4", 63 "Operating System :: OS Independent", 64 "Intended Audience :: Science/Research", 65 "Intended Audience :: Developers", 66 "Natural Language :: English", 67 "Topic :: Scientific/Engineering", 68 "Topic :: Software Development :: Libraries"] 69 )) 70 71 def check_pseudo_package(path): 72 """ 73 Verifies that a fake subpackage path for assets (notebooks, svgs, 74 pngs etc) both exists and is populated with files. 75 """ 76 if not os.path.isdir(path): 77 raise Exception("Please make sure pseudo-package %s exists." % path) 78 else: 79 assets = os.listdir(path) 80 if len(assets) == 0: 81 raise Exception("Please make sure pseudo-package %s is populated." % path) 82 83 84 if __name__=="__main__": 85 86 if 'HOLOVIEWS_RELEASE' in os.environ: 87 # Make sure to create these directories and populate them before upload 88 setup_args['packages'] += ["holoviews.assets", 'holoviews.notebooks'] 89 90 # Add unit tests 91 setup_args['packages'].append('holoviews.tests') 92 93 setup_args['package_data']['holoviews.assets'] = ['*.png', '*.svg', '*.rst'] 94 setup_args['package_data']['holoviews.notebooks'] = ['*.ipynb', '*.npy'] 95 96 if ('upload' in sys.argv) or ('sdist' in sys.argv): 97 check_pseudo_package(os.path.join('.', 'holoviews', 'tests')) 98 check_pseudo_package(os.path.join('.', 'holoviews', 'assets')) 99 check_pseudo_package(os.path.join('.', 'holoviews', 'notebooks')) 100 101 import holoviews 102 holoviews.__version__.verify(setup_args['version']) 103 104 setup(**setup_args) 105 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ # Notebook dependencies of IPython 3 extras_require['notebook-dependencies'] = ['ipython', 'pyzmq', 'jinja2', 'tornado', - 'jsonschema', 'ipython', 'pygments'] + 'jsonschema', 'notebook', 'pygments'] # IPython Notebook + matplotlib + Lancet extras_require['recommended'] = (extras_require['notebook-dependencies'] + ['matplotlib', 'lancet-ioam']) @@ -101,4 +101,22 @@ import holoviews holoviews.__version__.verify(setup_args['version']) + + if 'install' in sys.argv: + header = "HOLOVIEWS INSTALLATION INFORMATION" + bars = "="*len(header) + + extras = '\n'.join('holoviews[%s]' % e for e in setup_args['extras_require']) + + print("%s\n%s\n%s" % (bars, header, bars)) + + print("\nHoloViews supports the following installation types:\n") + print("%s\n" % extras) + print("Users should consider using one of these options.\n") + print("By default only a core installation is performed and ") + print("only the minimal set of dependencies are fetched.\n\n") + print("For more information please visit http://holoviews.org/install.html\n") + print(bars+'\n') + + setup(**setup_args)
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -13,7 +13,7 @@\n \n # Notebook dependencies of IPython 3\n extras_require['notebook-dependencies'] = ['ipython', 'pyzmq', 'jinja2', 'tornado',\n- 'jsonschema', 'ipython', 'pygments']\n+ 'jsonschema', 'notebook', 'pygments']\n # IPython Notebook + matplotlib + Lancet\n extras_require['recommended'] = (extras_require['notebook-dependencies']\n + ['matplotlib', 'lancet-ioam'])\n@@ -101,4 +101,22 @@\n import holoviews\n holoviews.__version__.verify(setup_args['version'])\n \n+\n+ if 'install' in sys.argv:\n+ header = \"HOLOVIEWS INSTALLATION INFORMATION\"\n+ bars = \"=\"*len(header)\n+\n+ extras = '\\n'.join('holoviews[%s]' % e for e in setup_args['extras_require'])\n+\n+ print(\"%s\\n%s\\n%s\" % (bars, header, bars))\n+\n+ print(\"\\nHoloViews supports the following installation types:\\n\")\n+ print(\"%s\\n\" % extras)\n+ print(\"Users should consider using one of these options.\\n\")\n+ print(\"By default only a core installation is performed and \")\n+ print(\"only the minimal set of dependencies are fetched.\\n\\n\")\n+ print(\"For more information please visit http://holoviews.org/install.html\\n\")\n+ print(bars+'\\n')\n+\n+\n setup(**setup_args)\n", "issue": "Dependencies missing\nHi,\n\nI think that the holoviews pip package does not correctly state its dependencies. These are packages that holoviews complained about not finding when I tried importing it:\n- jinja2 \n- nbformat \n- nbconvert \n- matplotlib\n\nAfter installing them manually via pip, I can import holoviews fine.\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\nimport sys, os\ntry:\n from setuptools import setup\nexcept ImportError:\n from distutils.core import setup\n\n\nsetup_args = {}\ninstall_requires = ['param>=1.3.2', 'numpy>=1.0']\nextras_require={}\n\n# Notebook dependencies of IPython 3\nextras_require['notebook-dependencies'] = ['ipython', 'pyzmq', 'jinja2', 'tornado',\n 'jsonschema', 'ipython', 'pygments']\n# IPython Notebook + matplotlib + Lancet\nextras_require['recommended'] = (extras_require['notebook-dependencies']\n + ['matplotlib', 'lancet-ioam'])\n# Additional, useful third-party packages\nextras_require['extras'] = (['pandas', 'seaborn', 'mpld3', 'bokeh']\n + extras_require['recommended'])\n# Everything including cyordereddict (optimization) and nosetests\nextras_require['all'] = (extras_require['recommended']\n + extras_require['extras']\n + ['cyordereddict', 'nose'])\n\nsetup_args.update(dict(\n name='holoviews',\n version=\"1.4.3\",\n install_requires = install_requires,\n extras_require = extras_require,\n description='Stop plotting your data - annotate your data and let it visualize itself.',\n long_description=open('README.rst').read() if os.path.isfile('README.rst') else 'Consult README.rst',\n author= \"Jean-Luc Stevens and Philipp Rudiger\",\n author_email= \"[email protected]\",\n maintainer= \"IOAM\",\n maintainer_email= \"[email protected]\",\n platforms=['Windows', 'Mac OS X', 'Linux'],\n license='BSD',\n url='http://ioam.github.com/holoviews/',\n packages = [\"holoviews\",\n \"holoviews.core\",\n \"holoviews.core.data\",\n \"holoviews.element\",\n \"holoviews.interface\",\n \"holoviews.ipython\",\n \"holoviews.operation\",\n \"holoviews.plotting\",\n \"holoviews.plotting.mpl\",\n \"holoviews.plotting.bokeh\",\n \"holoviews.plotting.widgets\"],\n package_data={'holoviews.ipython': ['*.html'],\n 'holoviews.plotting.mpl': ['*.mplstyle', '*.jinja', '*.js'],\n 'holoviews.plotting.bokeh': ['*.js', '*.css'],\n 'holoviews.plotting.widgets': ['*.jinja', '*.js', '*.css']},\n classifiers = [\n \"License :: OSI Approved :: BSD License\",\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Operating System :: OS Independent\",\n \"Intended Audience :: Science/Research\",\n \"Intended Audience :: Developers\",\n \"Natural Language :: English\",\n \"Topic :: Scientific/Engineering\",\n \"Topic :: Software Development :: Libraries\"]\n))\n\ndef check_pseudo_package(path):\n \"\"\"\n Verifies that a fake subpackage path for assets (notebooks, svgs,\n pngs etc) both exists and is populated with files.\n \"\"\"\n if not os.path.isdir(path):\n raise Exception(\"Please make sure pseudo-package %s exists.\" % path)\n else:\n assets = os.listdir(path)\n if len(assets) == 0:\n raise Exception(\"Please make sure pseudo-package %s is populated.\" % path)\n\n\nif __name__==\"__main__\":\n\n if 'HOLOVIEWS_RELEASE' in os.environ:\n # Make sure to create these directories and populate them before upload\n setup_args['packages'] += [\"holoviews.assets\", 'holoviews.notebooks']\n\n # Add unit tests\n setup_args['packages'].append('holoviews.tests')\n\n setup_args['package_data']['holoviews.assets'] = ['*.png', '*.svg', '*.rst']\n setup_args['package_data']['holoviews.notebooks'] = ['*.ipynb', '*.npy']\n\n if ('upload' in sys.argv) or ('sdist' in sys.argv):\n check_pseudo_package(os.path.join('.', 'holoviews', 'tests'))\n check_pseudo_package(os.path.join('.', 'holoviews', 'assets'))\n check_pseudo_package(os.path.join('.', 'holoviews', 'notebooks'))\n\n import holoviews\n holoviews.__version__.verify(setup_args['version'])\n\n setup(**setup_args)\n", "path": "setup.py"}]}
1,833
363
gh_patches_debug_6961
rasdani/github-patches
git_diff
nextcloud__appstore-186
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Email confirmation email should be improved I couldn't find the text for the email, so I suspect it's from some library. Here is the content: --- Hello from **apps.nextcloud.com**! You're receiving this e-mail because user oparoz at apps.nextcloud.com has given **yours as an e-mail address to connect their account**. To confirm this is correct, go to https://apps.nextcloud.com/confirm-email/Mzc:1bZksL:Y8YI3zMQ0fOllevi3VhZ-dmiSMU/ Thank you from **apps.nextcloud.com**! **apps.nextcloud.com** --- I've highlighted what should be altered. </issue> <code> [start of nextcloudappstore/core/management/commands/setupsocial.py] 1 from allauth.socialaccount.models import SocialApp 2 from django.contrib.sites.models import Site 3 from django.core.management import BaseCommand 4 5 6 class Command(BaseCommand): 7 help = ('Updates the first site with the given domain and creates or ' 8 'updates the GitHub social login application') 9 10 def add_arguments(self, parser): 11 social_meta = SocialApp._meta 12 parser.add_argument('--github-secret', required=True, 13 help=social_meta.get_field('secret').help_text) 14 parser.add_argument('--github-client-id', required=True, 15 help=social_meta.get_field('client_id').help_text) 16 site_meta = Site._meta 17 parser.add_argument('--domain', required=True, 18 help=site_meta.get_field('domain').help_text) 19 20 def handle(self, *args, **options): 21 # set up site which is required for social login 22 site = Site.objects.all()[0] 23 site.domain = options['domain'] 24 site.name = options['domain'] 25 site.save() 26 # set up github 27 app, created = SocialApp.objects.get_or_create(provider='github') 28 app.name = 'GitHub' 29 app.secret = options['github_secret'] 30 app.client_id = options['github_client_id'] 31 app.sites.add(site) 32 app.save() 33 34 msg = 'Successfully initialized social accounts' 35 self.stdout.write(self.style.SUCCESS(msg)) 36 [end of nextcloudappstore/core/management/commands/setupsocial.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/nextcloudappstore/core/management/commands/setupsocial.py b/nextcloudappstore/core/management/commands/setupsocial.py --- a/nextcloudappstore/core/management/commands/setupsocial.py +++ b/nextcloudappstore/core/management/commands/setupsocial.py @@ -21,7 +21,7 @@ # set up site which is required for social login site = Site.objects.all()[0] site.domain = options['domain'] - site.name = options['domain'] + site.name = 'Nextcloud App Store' site.save() # set up github app, created = SocialApp.objects.get_or_create(provider='github')
{"golden_diff": "diff --git a/nextcloudappstore/core/management/commands/setupsocial.py b/nextcloudappstore/core/management/commands/setupsocial.py\n--- a/nextcloudappstore/core/management/commands/setupsocial.py\n+++ b/nextcloudappstore/core/management/commands/setupsocial.py\n@@ -21,7 +21,7 @@\n # set up site which is required for social login\n site = Site.objects.all()[0]\n site.domain = options['domain']\n- site.name = options['domain']\n+ site.name = 'Nextcloud App Store'\n site.save()\n # set up github\n app, created = SocialApp.objects.get_or_create(provider='github')\n", "issue": "Email confirmation email should be improved\nI couldn't find the text for the email, so I suspect it's from some library.\n\nHere is the content:\n\n---\n\nHello from **apps.nextcloud.com**!\n\nYou're receiving this e-mail because user oparoz at apps.nextcloud.com has given **yours as an e-mail address to connect their account**.\n\nTo confirm this is correct, go to https://apps.nextcloud.com/confirm-email/Mzc:1bZksL:Y8YI3zMQ0fOllevi3VhZ-dmiSMU/\n\nThank you from **apps.nextcloud.com**!\n**apps.nextcloud.com**\n\n---\n\nI've highlighted what should be altered.\n\n", "before_files": [{"content": "from allauth.socialaccount.models import SocialApp\nfrom django.contrib.sites.models import Site\nfrom django.core.management import BaseCommand\n\n\nclass Command(BaseCommand):\n help = ('Updates the first site with the given domain and creates or '\n 'updates the GitHub social login application')\n\n def add_arguments(self, parser):\n social_meta = SocialApp._meta\n parser.add_argument('--github-secret', required=True,\n help=social_meta.get_field('secret').help_text)\n parser.add_argument('--github-client-id', required=True,\n help=social_meta.get_field('client_id').help_text)\n site_meta = Site._meta\n parser.add_argument('--domain', required=True,\n help=site_meta.get_field('domain').help_text)\n\n def handle(self, *args, **options):\n # set up site which is required for social login\n site = Site.objects.all()[0]\n site.domain = options['domain']\n site.name = options['domain']\n site.save()\n # set up github\n app, created = SocialApp.objects.get_or_create(provider='github')\n app.name = 'GitHub'\n app.secret = options['github_secret']\n app.client_id = options['github_client_id']\n app.sites.add(site)\n app.save()\n\n msg = 'Successfully initialized social accounts'\n self.stdout.write(self.style.SUCCESS(msg))\n", "path": "nextcloudappstore/core/management/commands/setupsocial.py"}]}
1,049
155
gh_patches_debug_23246
rasdani/github-patches
git_diff
docarray__docarray-85
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> fix(hashing): ignore casting to float Fixes #83 ```python >>> x = Document(text="float test 2.56") >>> x.get_vocabulary() Counter({'float': 1, 'test': 1, '2': 1, '56': 1}) ``` </issue> <code> [start of docarray/document/mixins/featurehash.py] 1 import hashlib 2 import json 3 from typing import Tuple, TYPE_CHECKING 4 5 import numpy as np 6 7 if TYPE_CHECKING: 8 from ...types import T 9 10 11 class FeatureHashMixin: 12 """Provide helper functions for feature hashing.""" 13 14 def embed_feature_hashing( 15 self: 'T', 16 n_dim: int = 256, 17 sparse: bool = False, 18 fields: Tuple[str, ...] = ('text', 'tags'), 19 max_value: int = 1_000_000, 20 ) -> 'T': 21 """Convert an arbitrary set of attributes into a fixed-dimensional matrix using the hashing trick. 22 23 :param n_dim: the dimensionality of each document in the output embedding. 24 Small numbers of features are likely to cause hash collisions, 25 but large numbers will cause larger overall parameter dimensions. 26 :param sparse: whether the resulting feature matrix should be a sparse csr_matrix or dense ndarray. 27 Note that this feature requires ``scipy`` 28 :param fields: which attributes to be considered as for feature hashing. 29 """ 30 if sparse: 31 from scipy.sparse import csr_matrix 32 33 idxs, data = [], [] # sparse 34 table = np.zeros(n_dim) # dense 35 36 for f in fields: 37 if 'text' in fields: 38 all_tokens = self.get_vocabulary(('text',)) 39 for f_id, val in all_tokens.items(): 40 _hash_column(f_id, val, n_dim, max_value, idxs, data, table) 41 42 if 'tags' in fields: 43 for k, v in self.tags.items(): 44 _hash_column(k, v, n_dim, max_value, idxs, data, table) 45 46 v = getattr(self, f, None) 47 if v: 48 _hash_column(f, v, n_dim, max_value, idxs, data, table) 49 50 if sparse: 51 self.embedding = csr_matrix((data, zip(*idxs)), shape=(1, n_dim)) 52 else: 53 self.embedding = table 54 return self 55 56 57 def _hash_column(col_name, col_val, n_dim, max_value, idxs, data, table): 58 h = _any_hash(col_name) 59 col_val = _any_hash(col_val) % max_value 60 col = h % n_dim 61 idxs.append((0, col)) 62 data.append(np.sign(h) * col_val) 63 table[col] += np.sign(h) * col_val 64 65 66 def _any_hash(v): 67 try: 68 return int(v) # parse int parameter 69 except ValueError: 70 try: 71 return float(v) # parse float parameter 72 except ValueError: 73 if not v: 74 # ignore it when the parameter is empty 75 return 0 76 if isinstance(v, str): 77 v = v.strip() 78 if v.lower() in {'true', 'yes'}: # parse boolean parameter 79 return 1 80 if v.lower() in {'false', 'no'}: 81 return 0 82 if isinstance(v, (tuple, dict, list)): 83 v = json.dumps(v, sort_keys=True) 84 85 return int(hashlib.md5(str(v).encode('utf-8')).hexdigest(), base=16) 86 [end of docarray/document/mixins/featurehash.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/docarray/document/mixins/featurehash.py b/docarray/document/mixins/featurehash.py --- a/docarray/document/mixins/featurehash.py +++ b/docarray/document/mixins/featurehash.py @@ -64,22 +64,24 @@ def _any_hash(v): - try: - return int(v) # parse int parameter - except ValueError: + if not v: + # ignore it when the parameter is empty + return 0 + elif isinstance(v, (tuple, dict, list, str)): + if isinstance(v, str): + v = v.strip() + if v.lower() in {'true', 'yes'}: # parse boolean parameter + return 1 + if v.lower() in {'false', 'no'}: + return 0 + else: + v = json.dumps(v, sort_keys=True) + return int(hashlib.md5(str(v).encode('utf-8')).hexdigest(), base=16) + else: try: - return float(v) # parse float parameter + return int(v) # parse int parameter except ValueError: - if not v: - # ignore it when the parameter is empty - return 0 - if isinstance(v, str): - v = v.strip() - if v.lower() in {'true', 'yes'}: # parse boolean parameter - return 1 - if v.lower() in {'false', 'no'}: - return 0 - if isinstance(v, (tuple, dict, list)): - v = json.dumps(v, sort_keys=True) - - return int(hashlib.md5(str(v).encode('utf-8')).hexdigest(), base=16) + try: + return float(v) # parse float parameter + except ValueError: + return 0 # unable to hash
{"golden_diff": "diff --git a/docarray/document/mixins/featurehash.py b/docarray/document/mixins/featurehash.py\n--- a/docarray/document/mixins/featurehash.py\n+++ b/docarray/document/mixins/featurehash.py\n@@ -64,22 +64,24 @@\n \n \n def _any_hash(v):\n- try:\n- return int(v) # parse int parameter\n- except ValueError:\n+ if not v:\n+ # ignore it when the parameter is empty\n+ return 0\n+ elif isinstance(v, (tuple, dict, list, str)):\n+ if isinstance(v, str):\n+ v = v.strip()\n+ if v.lower() in {'true', 'yes'}: # parse boolean parameter\n+ return 1\n+ if v.lower() in {'false', 'no'}:\n+ return 0\n+ else:\n+ v = json.dumps(v, sort_keys=True)\n+ return int(hashlib.md5(str(v).encode('utf-8')).hexdigest(), base=16)\n+ else:\n try:\n- return float(v) # parse float parameter\n+ return int(v) # parse int parameter\n except ValueError:\n- if not v:\n- # ignore it when the parameter is empty\n- return 0\n- if isinstance(v, str):\n- v = v.strip()\n- if v.lower() in {'true', 'yes'}: # parse boolean parameter\n- return 1\n- if v.lower() in {'false', 'no'}:\n- return 0\n- if isinstance(v, (tuple, dict, list)):\n- v = json.dumps(v, sort_keys=True)\n-\n- return int(hashlib.md5(str(v).encode('utf-8')).hexdigest(), base=16)\n+ try:\n+ return float(v) # parse float parameter\n+ except ValueError:\n+ return 0 # unable to hash\n", "issue": "fix(hashing): ignore casting to float\nFixes #83 \r\n\r\n```python\r\n>>> x = Document(text=\"float test 2.56\")\r\n>>> x.get_vocabulary()\r\nCounter({'float': 1, 'test': 1, '2': 1, '56': 1})\r\n```\n", "before_files": [{"content": "import hashlib\nimport json\nfrom typing import Tuple, TYPE_CHECKING\n\nimport numpy as np\n\nif TYPE_CHECKING:\n from ...types import T\n\n\nclass FeatureHashMixin:\n \"\"\"Provide helper functions for feature hashing.\"\"\"\n\n def embed_feature_hashing(\n self: 'T',\n n_dim: int = 256,\n sparse: bool = False,\n fields: Tuple[str, ...] = ('text', 'tags'),\n max_value: int = 1_000_000,\n ) -> 'T':\n \"\"\"Convert an arbitrary set of attributes into a fixed-dimensional matrix using the hashing trick.\n\n :param n_dim: the dimensionality of each document in the output embedding.\n Small numbers of features are likely to cause hash collisions,\n but large numbers will cause larger overall parameter dimensions.\n :param sparse: whether the resulting feature matrix should be a sparse csr_matrix or dense ndarray.\n Note that this feature requires ``scipy``\n :param fields: which attributes to be considered as for feature hashing.\n \"\"\"\n if sparse:\n from scipy.sparse import csr_matrix\n\n idxs, data = [], [] # sparse\n table = np.zeros(n_dim) # dense\n\n for f in fields:\n if 'text' in fields:\n all_tokens = self.get_vocabulary(('text',))\n for f_id, val in all_tokens.items():\n _hash_column(f_id, val, n_dim, max_value, idxs, data, table)\n\n if 'tags' in fields:\n for k, v in self.tags.items():\n _hash_column(k, v, n_dim, max_value, idxs, data, table)\n\n v = getattr(self, f, None)\n if v:\n _hash_column(f, v, n_dim, max_value, idxs, data, table)\n\n if sparse:\n self.embedding = csr_matrix((data, zip(*idxs)), shape=(1, n_dim))\n else:\n self.embedding = table\n return self\n\n\ndef _hash_column(col_name, col_val, n_dim, max_value, idxs, data, table):\n h = _any_hash(col_name)\n col_val = _any_hash(col_val) % max_value\n col = h % n_dim\n idxs.append((0, col))\n data.append(np.sign(h) * col_val)\n table[col] += np.sign(h) * col_val\n\n\ndef _any_hash(v):\n try:\n return int(v) # parse int parameter\n except ValueError:\n try:\n return float(v) # parse float parameter\n except ValueError:\n if not v:\n # ignore it when the parameter is empty\n return 0\n if isinstance(v, str):\n v = v.strip()\n if v.lower() in {'true', 'yes'}: # parse boolean parameter\n return 1\n if v.lower() in {'false', 'no'}:\n return 0\n if isinstance(v, (tuple, dict, list)):\n v = json.dumps(v, sort_keys=True)\n\n return int(hashlib.md5(str(v).encode('utf-8')).hexdigest(), base=16)\n", "path": "docarray/document/mixins/featurehash.py"}]}
1,468
435
gh_patches_debug_29060
rasdani/github-patches
git_diff
falconry__falcon-1399
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Tox (including Travis) not testing cythonized variants As described in ``README.rst``, Falcon can be cythonized for ~20% performance gain (or actually even more). Installing Falcon from *sdist* into an environment with Cython does the trick: ```python >>> import falcon >>> falcon.api # As we can see, falcon.api is coming from the dynamically-linked (cythonized) library api.so <module 'falcon.api' from '/home/vytas/.virtualenvs/fresh/local/lib/python2.7/site-packages/falcon/api.so'> ``` However, this does not hold under Tox ``py27_cython`` and ``py36_cython`` environments, including runs in Travis, as the properly cythonized Falcon is shadowed by the local source directory. This could potentially be worked around by changing dir in Tox, but apparently pytest is even more stubborn as it is correctly determining the root dir of tests, and changing to it. See also discussions here: * https://github.com/tox-dev/tox/issues/54 * https://github.com/tox-dev/tox/issues/514 The last comment on the latter also explains the possible patterns to work this around: https://github.com/tox-dev/tox/issues/514#issuecomment-327779367 (links to the useful https://docs.pytest.org/en/latest/goodpractices.html#choosing-a-test-layout-import-rules ). </issue> <code> [start of falcon/cmd/print_routes.py] 1 #!/usr/bin/env python 2 # Copyright 2013 by Rackspace Hosting, Inc. 3 # 4 # Licensed under the Apache License, Version 2.0 (the "License"); 5 # you may not use this file except in compliance with the License. 6 # You may obtain a copy of the License at 7 # 8 # http://www.apache.org/licenses/LICENSE-2.0 9 # 10 # Unless required by applicable law or agreed to in writing, software 11 # distributed under the License is distributed on an "AS IS" BASIS, 12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 # See the License for the specific language governing permissions and 14 # limitations under the License. 15 """ 16 Script that prints out the routes of an API instance. 17 """ 18 19 from __future__ import print_function 20 21 from functools import partial 22 import inspect 23 24 import falcon 25 26 27 def print_routes(api, verbose=False): # pragma: no cover 28 """ 29 Initial call. 30 31 :param api: The falcon.API or callable that returns an instance to look at. 32 :type api: falcon.API or callable 33 :param verbose: If the output should be verbose. 34 :type verbose: bool 35 """ 36 traverse(api._router._roots, verbose=verbose) 37 38 39 def traverse(roots, parent='', verbose=False): 40 """ 41 Recursive call which also handles printing output. 42 43 :param api: The falcon.API or callable that returns an instance to look at. 44 :type api: falcon.API or callable 45 :param parent: The parent uri path to the current iteration. 46 :type parent: str 47 :param verbose: If the output should be verbose. 48 :type verbose: bool 49 """ 50 for root in roots: 51 if root.method_map: 52 print('->', parent + '/' + root.raw_segment) 53 if verbose: 54 for method, func in root.method_map.items(): 55 if func.__name__ != 'method_not_allowed': 56 if isinstance(func, partial): 57 real_func = func.func 58 else: 59 real_func = func 60 61 source_file = inspect.getsourcefile(real_func) 62 63 print('-->{0} {1}:{2}'.format( 64 method, 65 source_file, 66 source_file[1] 67 )) 68 69 if root.children: 70 traverse(root.children, parent + '/' + root.raw_segment, verbose) 71 72 73 def main(): 74 """ 75 Main entrypoint. 76 """ 77 import argparse 78 79 parser = argparse.ArgumentParser( 80 description='Example: print-api-routes myprogram:app') 81 parser.add_argument( 82 '-v', '--verbose', action='store_true', 83 help='Prints out information for each method.') 84 parser.add_argument( 85 'api_module', 86 help='The module and api to inspect. Example: myapp.somemodule:api', 87 ) 88 args = parser.parse_args() 89 90 try: 91 module, instance = args.api_module.split(':', 1) 92 except ValueError: 93 parser.error( 94 'The api_module must include a colon between ' 95 'the module and instnace') 96 api = getattr(__import__(module, fromlist=[True]), instance) 97 if not isinstance(api, falcon.API): 98 if callable(api): 99 api = api() 100 if not isinstance(api, falcon.API): 101 parser.error( 102 '{0} did not return a falcon.API instance'.format( 103 args.api_module)) 104 else: 105 parser.error( 106 'The instance must be of falcon.API or be ' 107 'a callable without args that returns falcon.API') 108 print_routes(api, verbose=args.verbose) 109 110 111 if __name__ == '__main__': 112 main() 113 [end of falcon/cmd/print_routes.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/falcon/cmd/print_routes.py b/falcon/cmd/print_routes.py --- a/falcon/cmd/print_routes.py +++ b/falcon/cmd/print_routes.py @@ -58,13 +58,19 @@ else: real_func = func - source_file = inspect.getsourcefile(real_func) - - print('-->{0} {1}:{2}'.format( - method, - source_file, - source_file[1] - )) + try: + source_file = inspect.getsourcefile(real_func) + source_lines = inspect.getsourcelines(real_func) + source_info = '{}:{}'.format(source_file, + source_lines[1]) + except TypeError: + # NOTE(vytas): If Falcon is cythonized, all default + # responders coming from cythonized modules will + # appear as built-in functions, and raise a + # TypeError when trying to locate the source file. + source_info = '[unknown file]' + + print('-->' + method, source_info) if root.children: traverse(root.children, parent + '/' + root.raw_segment, verbose) @@ -92,7 +98,7 @@ except ValueError: parser.error( 'The api_module must include a colon between ' - 'the module and instnace') + 'the module and instance') api = getattr(__import__(module, fromlist=[True]), instance) if not isinstance(api, falcon.API): if callable(api):
{"golden_diff": "diff --git a/falcon/cmd/print_routes.py b/falcon/cmd/print_routes.py\n--- a/falcon/cmd/print_routes.py\n+++ b/falcon/cmd/print_routes.py\n@@ -58,13 +58,19 @@\n else:\n real_func = func\n \n- source_file = inspect.getsourcefile(real_func)\n-\n- print('-->{0} {1}:{2}'.format(\n- method,\n- source_file,\n- source_file[1]\n- ))\n+ try:\n+ source_file = inspect.getsourcefile(real_func)\n+ source_lines = inspect.getsourcelines(real_func)\n+ source_info = '{}:{}'.format(source_file,\n+ source_lines[1])\n+ except TypeError:\n+ # NOTE(vytas): If Falcon is cythonized, all default\n+ # responders coming from cythonized modules will\n+ # appear as built-in functions, and raise a\n+ # TypeError when trying to locate the source file.\n+ source_info = '[unknown file]'\n+\n+ print('-->' + method, source_info)\n \n if root.children:\n traverse(root.children, parent + '/' + root.raw_segment, verbose)\n@@ -92,7 +98,7 @@\n except ValueError:\n parser.error(\n 'The api_module must include a colon between '\n- 'the module and instnace')\n+ 'the module and instance')\n api = getattr(__import__(module, fromlist=[True]), instance)\n if not isinstance(api, falcon.API):\n if callable(api):\n", "issue": "Tox (including Travis) not testing cythonized variants\nAs described in ``README.rst``, Falcon can be cythonized for ~20% performance gain (or actually even more). Installing Falcon from *sdist* into an environment with Cython does the trick:\r\n\r\n```python\r\n>>> import falcon\r\n>>> falcon.api # As we can see, falcon.api is coming from the dynamically-linked (cythonized) library api.so\r\n<module 'falcon.api' from '/home/vytas/.virtualenvs/fresh/local/lib/python2.7/site-packages/falcon/api.so'>\r\n```\r\n\r\nHowever, this does not hold under Tox ``py27_cython`` and ``py36_cython`` environments, including runs in Travis, as the properly cythonized Falcon is shadowed by the local source directory. This could potentially be worked around by changing dir in Tox, but apparently pytest is even more stubborn as it is correctly determining the root dir of tests, and changing to it.\r\n\r\nSee also discussions here:\r\n* https://github.com/tox-dev/tox/issues/54\r\n* https://github.com/tox-dev/tox/issues/514\r\n\r\nThe last comment on the latter also explains the possible patterns to work this around: https://github.com/tox-dev/tox/issues/514#issuecomment-327779367 (links to the useful https://docs.pytest.org/en/latest/goodpractices.html#choosing-a-test-layout-import-rules ).\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# Copyright 2013 by Rackspace Hosting, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nScript that prints out the routes of an API instance.\n\"\"\"\n\nfrom __future__ import print_function\n\nfrom functools import partial\nimport inspect\n\nimport falcon\n\n\ndef print_routes(api, verbose=False): # pragma: no cover\n \"\"\"\n Initial call.\n\n :param api: The falcon.API or callable that returns an instance to look at.\n :type api: falcon.API or callable\n :param verbose: If the output should be verbose.\n :type verbose: bool\n \"\"\"\n traverse(api._router._roots, verbose=verbose)\n\n\ndef traverse(roots, parent='', verbose=False):\n \"\"\"\n Recursive call which also handles printing output.\n\n :param api: The falcon.API or callable that returns an instance to look at.\n :type api: falcon.API or callable\n :param parent: The parent uri path to the current iteration.\n :type parent: str\n :param verbose: If the output should be verbose.\n :type verbose: bool\n \"\"\"\n for root in roots:\n if root.method_map:\n print('->', parent + '/' + root.raw_segment)\n if verbose:\n for method, func in root.method_map.items():\n if func.__name__ != 'method_not_allowed':\n if isinstance(func, partial):\n real_func = func.func\n else:\n real_func = func\n\n source_file = inspect.getsourcefile(real_func)\n\n print('-->{0} {1}:{2}'.format(\n method,\n source_file,\n source_file[1]\n ))\n\n if root.children:\n traverse(root.children, parent + '/' + root.raw_segment, verbose)\n\n\ndef main():\n \"\"\"\n Main entrypoint.\n \"\"\"\n import argparse\n\n parser = argparse.ArgumentParser(\n description='Example: print-api-routes myprogram:app')\n parser.add_argument(\n '-v', '--verbose', action='store_true',\n help='Prints out information for each method.')\n parser.add_argument(\n 'api_module',\n help='The module and api to inspect. Example: myapp.somemodule:api',\n )\n args = parser.parse_args()\n\n try:\n module, instance = args.api_module.split(':', 1)\n except ValueError:\n parser.error(\n 'The api_module must include a colon between '\n 'the module and instnace')\n api = getattr(__import__(module, fromlist=[True]), instance)\n if not isinstance(api, falcon.API):\n if callable(api):\n api = api()\n if not isinstance(api, falcon.API):\n parser.error(\n '{0} did not return a falcon.API instance'.format(\n args.api_module))\n else:\n parser.error(\n 'The instance must be of falcon.API or be '\n 'a callable without args that returns falcon.API')\n print_routes(api, verbose=args.verbose)\n\n\nif __name__ == '__main__':\n main()\n", "path": "falcon/cmd/print_routes.py"}]}
1,863
337
gh_patches_debug_10000
rasdani/github-patches
git_diff
kubeflow__pipelines-5290
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [Bug] KeyValueStore fails to check the cached data with new data Typo bug. ``` def store_value_bytes(self, key: str, data: bytes) -> str: ... if cache_value_file_path.exists(): old_data = cache_value_file_path.write_bytes() ... ``` should be: ``` def store_value_bytes(self, key: str, data: bytes) -> str: ... if cache_value_file_path.exists(): old_data = cache_value_file_path.read_bytes() ... ``` </issue> <code> [start of sdk/python/kfp/components/_key_value_store.py] 1 import hashlib 2 from pathlib import Path 3 4 5 class KeyValueStore: 6 KEY_FILE_SUFFIX = '.key' 7 VALUE_FILE_SUFFIX = '.value' 8 9 def __init__( 10 self, 11 cache_dir: str, 12 ): 13 cache_dir = Path(cache_dir) 14 hash_func = (lambda text: hashlib.sha256(text.encode('utf-8')).hexdigest()) 15 self.cache_dir = cache_dir 16 self.hash_func = hash_func 17 18 def store_value_text(self, key: str, text: str) -> str: 19 return self.store_value_bytes(key, text.encode('utf-8')) 20 21 def store_value_bytes(self, key: str, data: bytes) -> str: 22 cache_id = self.hash_func(key) 23 self.cache_dir.mkdir(parents=True, exist_ok=True) 24 cache_key_file_path = self.cache_dir / (cache_id + KeyValueStore.KEY_FILE_SUFFIX) 25 cache_value_file_path = self.cache_dir / (cache_id + KeyValueStore.VALUE_FILE_SUFFIX) 26 if cache_key_file_path.exists(): 27 old_key = cache_key_file_path.read_text() 28 if key != old_key: 29 raise RuntimeError( 30 'Cache is corrupted: File "{}" contains existing key ' 31 '"{}" != new key "{}"'.format(cache_key_file_path, old_key, key) 32 ) 33 if cache_value_file_path.exists(): 34 old_data = cache_value_file_path.write_bytes() 35 if data != old_data: 36 # TODO: Add options to raise error when overwriting the value. 37 pass 38 cache_value_file_path.write_bytes(data) 39 cache_key_file_path.write_text(key) 40 return cache_id 41 42 def try_get_value_text(self, key: str) -> str: 43 result = self.try_get_value_bytes(key) 44 if result is None: 45 return None 46 return result.decode('utf-8') 47 48 def try_get_value_bytes(self, key: str) -> bytes: 49 cache_id = self.hash_func(key) 50 cache_value_file_path = self.cache_dir / (cache_id + KeyValueStore.VALUE_FILE_SUFFIX) 51 if cache_value_file_path.exists(): 52 return cache_value_file_path.read_bytes() 53 return None 54 55 def exists(self, key: str) -> bool: 56 cache_id = self.hash_func(key) 57 cache_key_file_path = self.cache_dir / (cache_id + KeyValueStore.KEY_FILE_SUFFIX) 58 return cache_key_file_path.exists() 59 60 def keys(self): 61 for cache_key_file_path in self.cache_dir.glob('*' + KeyValueStore.KEY_FILE_SUFFIX): 62 yield Path(cache_key_file_path).read_text() 63 [end of sdk/python/kfp/components/_key_value_store.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/sdk/python/kfp/components/_key_value_store.py b/sdk/python/kfp/components/_key_value_store.py --- a/sdk/python/kfp/components/_key_value_store.py +++ b/sdk/python/kfp/components/_key_value_store.py @@ -31,7 +31,7 @@ '"{}" != new key "{}"'.format(cache_key_file_path, old_key, key) ) if cache_value_file_path.exists(): - old_data = cache_value_file_path.write_bytes() + old_data = cache_value_file_path.read_bytes() if data != old_data: # TODO: Add options to raise error when overwriting the value. pass
{"golden_diff": "diff --git a/sdk/python/kfp/components/_key_value_store.py b/sdk/python/kfp/components/_key_value_store.py\n--- a/sdk/python/kfp/components/_key_value_store.py\n+++ b/sdk/python/kfp/components/_key_value_store.py\n@@ -31,7 +31,7 @@\n '\"{}\" != new key \"{}\"'.format(cache_key_file_path, old_key, key)\n )\n if cache_value_file_path.exists():\n- old_data = cache_value_file_path.write_bytes()\n+ old_data = cache_value_file_path.read_bytes()\n if data != old_data:\n # TODO: Add options to raise error when overwriting the value.\n pass\n", "issue": "[Bug] KeyValueStore fails to check the cached data with new data\nTypo bug.\r\n\r\n```\r\ndef store_value_bytes(self, key: str, data: bytes) -> str:\r\n ... \r\n if cache_value_file_path.exists():\r\n old_data = cache_value_file_path.write_bytes()\r\n ... \r\n```\r\nshould be:\r\n```\r\ndef store_value_bytes(self, key: str, data: bytes) -> str:\r\n ... \r\n if cache_value_file_path.exists():\r\n old_data = cache_value_file_path.read_bytes()\r\n ... \r\n```\n", "before_files": [{"content": "import hashlib\nfrom pathlib import Path\n\n\nclass KeyValueStore:\n KEY_FILE_SUFFIX = '.key'\n VALUE_FILE_SUFFIX = '.value'\n\n def __init__(\n self,\n cache_dir: str,\n ):\n cache_dir = Path(cache_dir)\n hash_func = (lambda text: hashlib.sha256(text.encode('utf-8')).hexdigest())\n self.cache_dir = cache_dir\n self.hash_func = hash_func\n\n def store_value_text(self, key: str, text: str) -> str:\n return self.store_value_bytes(key, text.encode('utf-8'))\n\n def store_value_bytes(self, key: str, data: bytes) -> str:\n cache_id = self.hash_func(key)\n self.cache_dir.mkdir(parents=True, exist_ok=True)\n cache_key_file_path = self.cache_dir / (cache_id + KeyValueStore.KEY_FILE_SUFFIX)\n cache_value_file_path = self.cache_dir / (cache_id + KeyValueStore.VALUE_FILE_SUFFIX)\n if cache_key_file_path.exists():\n old_key = cache_key_file_path.read_text()\n if key != old_key:\n raise RuntimeError(\n 'Cache is corrupted: File \"{}\" contains existing key '\n '\"{}\" != new key \"{}\"'.format(cache_key_file_path, old_key, key)\n )\n if cache_value_file_path.exists():\n old_data = cache_value_file_path.write_bytes()\n if data != old_data:\n # TODO: Add options to raise error when overwriting the value.\n pass\n cache_value_file_path.write_bytes(data)\n cache_key_file_path.write_text(key)\n return cache_id\n\n def try_get_value_text(self, key: str) -> str:\n result = self.try_get_value_bytes(key)\n if result is None:\n return None\n return result.decode('utf-8')\n\n def try_get_value_bytes(self, key: str) -> bytes:\n cache_id = self.hash_func(key)\n cache_value_file_path = self.cache_dir / (cache_id + KeyValueStore.VALUE_FILE_SUFFIX)\n if cache_value_file_path.exists():\n return cache_value_file_path.read_bytes()\n return None\n\n def exists(self, key: str) -> bool:\n cache_id = self.hash_func(key)\n cache_key_file_path = self.cache_dir / (cache_id + KeyValueStore.KEY_FILE_SUFFIX)\n return cache_key_file_path.exists()\n\n def keys(self):\n for cache_key_file_path in self.cache_dir.glob('*' + KeyValueStore.KEY_FILE_SUFFIX):\n yield Path(cache_key_file_path).read_text()\n", "path": "sdk/python/kfp/components/_key_value_store.py"}]}
1,316
145
gh_patches_debug_29566
rasdani/github-patches
git_diff
mathesar-foundation__mathesar-2361
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Saving existing explorations is broken: UIQuery names need to be unique ## Description https://github.com/centerofci/mathesar/pull/2315 modified query names to be unique per schema. It does not ignore the current name of the query while checking the condition. To reproduce: Try saving an existing query after making changes. </issue> <code> [start of mathesar/api/serializers/queries.py] 1 from django.core.exceptions import ValidationError 2 from django.urls import reverse 3 from rest_access_policy import PermittedPkRelatedField 4 from rest_framework import serializers 5 6 from mathesar.api.db.permissions.query_table import QueryTableAccessPolicy 7 from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin 8 from mathesar.api.exceptions.validation_exceptions.exceptions import DuplicateUIQueryInSchemaAPIException 9 from mathesar.models.base import Table 10 from mathesar.models.query import UIQuery 11 12 13 class BaseQuerySerializer(MathesarErrorMessageMixin, serializers.ModelSerializer): 14 schema = serializers.SerializerMethodField('get_schema') 15 base_table = PermittedPkRelatedField( 16 access_policy=QueryTableAccessPolicy, 17 queryset=Table.current_objects.all() 18 ) 19 20 class Meta: 21 model = UIQuery 22 fields = ['schema', 'initial_columns', 'transformations', 'base_table', 'display_names'] 23 24 def get_schema(self, uiquery): 25 base_table = uiquery.base_table 26 if base_table: 27 return base_table.schema.id 28 29 def validate(self, attrs): 30 unexpected_fields = set(self.initial_data) - set(self.fields) 31 if unexpected_fields: 32 raise ValidationError(f"Unexpected field(s): {unexpected_fields}") 33 self._validate_uniqueness(attrs) 34 return attrs 35 36 def _validate_uniqueness(self, attrs): 37 """ 38 Uniqueness is only defined when both name and base_table are defined. 39 40 Would be nice to define this in terms of Django's UniqueConstraint, but that doesn't seem 41 possible, due to schema being a child property of base_table. 42 """ 43 name = attrs.get('name') 44 if name: 45 base_table = attrs.get('base_table') 46 if base_table: 47 schema = base_table.schema 48 queries_with_same_name = UIQuery.objects.filter(name=name) 49 duplicate_in_schema_exists = \ 50 queries_with_same_name\ 51 .filter(base_table__schema=schema)\ 52 .exists() 53 if duplicate_in_schema_exists: 54 raise DuplicateUIQueryInSchemaAPIException(field='name') 55 56 57 class QuerySerializer(BaseQuerySerializer): 58 results_url = serializers.SerializerMethodField('get_results_url') 59 records_url = serializers.SerializerMethodField('get_records_url') 60 columns_url = serializers.SerializerMethodField('get_columns_url') 61 62 class Meta: 63 model = UIQuery 64 fields = '__all__' 65 66 def get_records_url(self, obj): 67 if isinstance(obj, UIQuery) and obj.pk is not None: 68 # Only get records_url if we are serializing an existing persisted UIQuery 69 request = self.context['request'] 70 return request.build_absolute_uri(reverse('query-records', kwargs={'pk': obj.pk})) 71 else: 72 return None 73 74 def get_columns_url(self, obj): 75 if isinstance(obj, UIQuery) and obj.pk is not None: 76 # Only get columns_url if we are serializing an existing persisted UIQuery 77 request = self.context['request'] 78 return request.build_absolute_uri(reverse('query-columns', kwargs={'pk': obj.pk})) 79 else: 80 return None 81 82 def get_results_url(self, obj): 83 if isinstance(obj, UIQuery) and obj.pk is not None: 84 # Only get records_url if we are serializing an existing persisted UIQuery 85 request = self.context['request'] 86 return request.build_absolute_uri(reverse('query-results', kwargs={'pk': obj.pk})) 87 else: 88 return None 89 [end of mathesar/api/serializers/queries.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/mathesar/api/serializers/queries.py b/mathesar/api/serializers/queries.py --- a/mathesar/api/serializers/queries.py +++ b/mathesar/api/serializers/queries.py @@ -1,5 +1,7 @@ from django.core.exceptions import ValidationError from django.urls import reverse +from django.db.models import Q + from rest_access_policy import PermittedPkRelatedField from rest_framework import serializers @@ -45,14 +47,23 @@ base_table = attrs.get('base_table') if base_table: schema = base_table.schema - queries_with_same_name = UIQuery.objects.filter(name=name) - duplicate_in_schema_exists = \ - queries_with_same_name\ - .filter(base_table__schema=schema)\ - .exists() - if duplicate_in_schema_exists: + is_duplicate_q = self._get_is_duplicate_q(name, schema) + duplicates = UIQuery.objects.filter(is_duplicate_q) + if duplicates.exists(): raise DuplicateUIQueryInSchemaAPIException(field='name') + def _get_is_duplicate_q(self, name, schema): + has_same_name_q = Q(name=name) + has_same_schema_q = Q(base_table__schema=schema) + is_duplicate_q = has_same_name_q & has_same_schema_q + is_update = self.instance is not None + if is_update: + # If this is an update, filter self out of found duplicates + id = self.instance.id + is_not_this_instance_q = ~Q(id=id) + is_duplicate_q = is_duplicate_q & is_not_this_instance_q + return is_duplicate_q + class QuerySerializer(BaseQuerySerializer): results_url = serializers.SerializerMethodField('get_results_url')
{"golden_diff": "diff --git a/mathesar/api/serializers/queries.py b/mathesar/api/serializers/queries.py\n--- a/mathesar/api/serializers/queries.py\n+++ b/mathesar/api/serializers/queries.py\n@@ -1,5 +1,7 @@\n from django.core.exceptions import ValidationError\n from django.urls import reverse\n+from django.db.models import Q\n+\n from rest_access_policy import PermittedPkRelatedField\n from rest_framework import serializers\n \n@@ -45,14 +47,23 @@\n base_table = attrs.get('base_table')\n if base_table:\n schema = base_table.schema\n- queries_with_same_name = UIQuery.objects.filter(name=name)\n- duplicate_in_schema_exists = \\\n- queries_with_same_name\\\n- .filter(base_table__schema=schema)\\\n- .exists()\n- if duplicate_in_schema_exists:\n+ is_duplicate_q = self._get_is_duplicate_q(name, schema)\n+ duplicates = UIQuery.objects.filter(is_duplicate_q)\n+ if duplicates.exists():\n raise DuplicateUIQueryInSchemaAPIException(field='name')\n \n+ def _get_is_duplicate_q(self, name, schema):\n+ has_same_name_q = Q(name=name)\n+ has_same_schema_q = Q(base_table__schema=schema)\n+ is_duplicate_q = has_same_name_q & has_same_schema_q\n+ is_update = self.instance is not None\n+ if is_update:\n+ # If this is an update, filter self out of found duplicates\n+ id = self.instance.id\n+ is_not_this_instance_q = ~Q(id=id)\n+ is_duplicate_q = is_duplicate_q & is_not_this_instance_q\n+ return is_duplicate_q\n+\n \n class QuerySerializer(BaseQuerySerializer):\n results_url = serializers.SerializerMethodField('get_results_url')\n", "issue": "Saving existing explorations is broken: UIQuery names need to be unique\n## Description\r\nhttps://github.com/centerofci/mathesar/pull/2315 modified query names to be unique per schema.\r\nIt does not ignore the current name of the query while checking the condition.\r\n\r\nTo reproduce: Try saving an existing query after making changes.\r\n\n", "before_files": [{"content": "from django.core.exceptions import ValidationError\nfrom django.urls import reverse\nfrom rest_access_policy import PermittedPkRelatedField\nfrom rest_framework import serializers\n\nfrom mathesar.api.db.permissions.query_table import QueryTableAccessPolicy\nfrom mathesar.api.exceptions.mixins import MathesarErrorMessageMixin\nfrom mathesar.api.exceptions.validation_exceptions.exceptions import DuplicateUIQueryInSchemaAPIException\nfrom mathesar.models.base import Table\nfrom mathesar.models.query import UIQuery\n\n\nclass BaseQuerySerializer(MathesarErrorMessageMixin, serializers.ModelSerializer):\n schema = serializers.SerializerMethodField('get_schema')\n base_table = PermittedPkRelatedField(\n access_policy=QueryTableAccessPolicy,\n queryset=Table.current_objects.all()\n )\n\n class Meta:\n model = UIQuery\n fields = ['schema', 'initial_columns', 'transformations', 'base_table', 'display_names']\n\n def get_schema(self, uiquery):\n base_table = uiquery.base_table\n if base_table:\n return base_table.schema.id\n\n def validate(self, attrs):\n unexpected_fields = set(self.initial_data) - set(self.fields)\n if unexpected_fields:\n raise ValidationError(f\"Unexpected field(s): {unexpected_fields}\")\n self._validate_uniqueness(attrs)\n return attrs\n\n def _validate_uniqueness(self, attrs):\n \"\"\"\n Uniqueness is only defined when both name and base_table are defined.\n\n Would be nice to define this in terms of Django's UniqueConstraint, but that doesn't seem\n possible, due to schema being a child property of base_table.\n \"\"\"\n name = attrs.get('name')\n if name:\n base_table = attrs.get('base_table')\n if base_table:\n schema = base_table.schema\n queries_with_same_name = UIQuery.objects.filter(name=name)\n duplicate_in_schema_exists = \\\n queries_with_same_name\\\n .filter(base_table__schema=schema)\\\n .exists()\n if duplicate_in_schema_exists:\n raise DuplicateUIQueryInSchemaAPIException(field='name')\n\n\nclass QuerySerializer(BaseQuerySerializer):\n results_url = serializers.SerializerMethodField('get_results_url')\n records_url = serializers.SerializerMethodField('get_records_url')\n columns_url = serializers.SerializerMethodField('get_columns_url')\n\n class Meta:\n model = UIQuery\n fields = '__all__'\n\n def get_records_url(self, obj):\n if isinstance(obj, UIQuery) and obj.pk is not None:\n # Only get records_url if we are serializing an existing persisted UIQuery\n request = self.context['request']\n return request.build_absolute_uri(reverse('query-records', kwargs={'pk': obj.pk}))\n else:\n return None\n\n def get_columns_url(self, obj):\n if isinstance(obj, UIQuery) and obj.pk is not None:\n # Only get columns_url if we are serializing an existing persisted UIQuery\n request = self.context['request']\n return request.build_absolute_uri(reverse('query-columns', kwargs={'pk': obj.pk}))\n else:\n return None\n\n def get_results_url(self, obj):\n if isinstance(obj, UIQuery) and obj.pk is not None:\n # Only get records_url if we are serializing an existing persisted UIQuery\n request = self.context['request']\n return request.build_absolute_uri(reverse('query-results', kwargs={'pk': obj.pk}))\n else:\n return None\n", "path": "mathesar/api/serializers/queries.py"}]}
1,503
390
gh_patches_debug_4230
rasdani/github-patches
git_diff
numba__numba-873
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> PR #856 introduced regression in macro expansion of more than one block PR #856 caused macro expansion to effectively cease after performing macro expansion in one block, due to the logic in `numba/macro.py`: ``` python for blk in blocks.values(): module_getattr_folding(constants, blk) expanded = expanded or expand_macros_in_block(constants, blk) ``` </issue> <code> [start of numba/macro.py] 1 """ 2 Macro handling passes 3 4 Macros are expanded on block-by-block 5 """ 6 from __future__ import absolute_import, print_function, division 7 from numba import ir 8 9 10 class MacroError(Exception): 11 ''' 12 An exception thrown during macro expansion 13 ''' 14 pass 15 16 17 def expand_macros(blocks): 18 ''' 19 Performs macro expansion on blocks 20 21 Args 22 ---- 23 blocks: list 24 the blocks to macro-expand 25 return: bool 26 True if any macros were expanded 27 ''' 28 constants = {} 29 expanded = False 30 for blk in blocks.values(): 31 module_getattr_folding(constants, blk) 32 expanded = expanded or expand_macros_in_block(constants, blk) 33 return expanded 34 35 def module_getattr_folding(constants, block): 36 ''' 37 Performs constant-folding of getattr instructions within a block. Any 38 constants defined within the block are also added to the constant pool. 39 40 Args 41 ---- 42 constants: dict 43 The pool of constants to use, which will be updated with any new 44 constants in this block 45 block: ir.Block 46 The block to perform constant folding on 47 ''' 48 for inst in block.body: 49 if isinstance(inst, ir.Assign): 50 rhs = inst.value 51 52 if isinstance(rhs, ir.Global): 53 constants[inst.target.name] = rhs.value 54 55 elif isinstance(rhs, ir.Expr) and rhs.op == 'getattr': 56 if rhs.value.name in constants: 57 base = constants[rhs.value.name] 58 constants[inst.target.name] = getattr(base, rhs.attr) 59 60 elif isinstance(rhs, ir.Const): 61 constants[inst.target.name] = rhs.value 62 63 elif isinstance(rhs, ir.Var) and rhs.name in constants: 64 constants[inst.target.name] = constants[rhs.name] 65 66 elif isinstance(rhs, ir.FreeVar): 67 constants[inst.target.name] = rhs.value 68 69 def expand_macros_in_block(constants, block): 70 ''' 71 Performs macro expansion on a block. 72 73 Args 74 ---- 75 constants: dict 76 The pool of constants which contains the values which contains mappings 77 from variable names to callee names 78 block: ir.Block 79 The block to perform macro expansion on 80 return: bool 81 True if any macros were expanded 82 ''' 83 expanded = False 84 for inst in block.body: 85 if isinstance(inst, ir.Assign): 86 rhs = inst.value 87 if isinstance(rhs, ir.Expr) and rhs.op == 'call': 88 callee = rhs.func 89 macro = constants.get(callee.name) 90 if isinstance(macro, Macro): 91 # Rewrite calling macro 92 assert macro.callable 93 args = [constants[arg.name] for arg in rhs.args] 94 kws = dict((k, constants[v.name]) for k, v in rhs.kws) 95 try: 96 result = macro.func(*args, **kws) 97 except BaseException as e: 98 msg = str(e) 99 headfmt = "Macro expansion failed at {line}" 100 head = headfmt.format(line=inst.loc) 101 newmsg = "{0}:\n{1}".format(head, msg) 102 raise MacroError(newmsg) 103 if result: 104 # Insert a new function 105 result.loc = rhs.loc 106 inst.value = ir.Expr.call(func=result, args=rhs.args, 107 kws=rhs.kws, loc=rhs.loc) 108 expanded = True 109 elif isinstance(rhs, ir.Expr) and rhs.op == 'getattr': 110 # Rewrite get attribute to macro call 111 # Non-calling macro must be triggered by get attribute 112 base = constants.get(rhs.value.name) 113 if base is not None: 114 value = getattr(base, rhs.attr) 115 if isinstance(value, Macro): 116 macro = value 117 if not macro.callable: 118 intr = ir.Intrinsic(macro.name, macro.func, args=()) 119 inst.value = ir.Expr.call(func=intr, args=(), 120 kws=(), loc=rhs.loc) 121 expanded = True 122 return expanded 123 124 125 class Macro(object): 126 ''' 127 A macro object is expanded to a function call 128 129 Args 130 ---- 131 name: str 132 Name of this Macro 133 func: function 134 Function that evaluates the macro expansion 135 callable: bool 136 True if the Macro represents a callable function. 137 False if it is represents some other type. 138 argnames: list 139 If ``callable`` is True, this holds a list of the names of arguments 140 to the function. 141 ''' 142 143 __slots__ = 'name', 'func', 'callable', 'argnames' 144 145 def __init__(self, name, func, callable=False, argnames=None): 146 self.name = name 147 self.func = func 148 self.callable = callable 149 self.argnames = argnames 150 151 def __repr__(self): 152 return '<macro %s -> %s>' % (self.name, self.func) 153 154 [end of numba/macro.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/numba/macro.py b/numba/macro.py --- a/numba/macro.py +++ b/numba/macro.py @@ -29,7 +29,8 @@ expanded = False for blk in blocks.values(): module_getattr_folding(constants, blk) - expanded = expanded or expand_macros_in_block(constants, blk) + block_expanded = expand_macros_in_block(constants, blk) + expanded = expanded or block_expanded return expanded def module_getattr_folding(constants, block):
{"golden_diff": "diff --git a/numba/macro.py b/numba/macro.py\n--- a/numba/macro.py\n+++ b/numba/macro.py\n@@ -29,7 +29,8 @@\n expanded = False\n for blk in blocks.values():\n module_getattr_folding(constants, blk)\n- expanded = expanded or expand_macros_in_block(constants, blk)\n+ block_expanded = expand_macros_in_block(constants, blk)\n+ expanded = expanded or block_expanded\n return expanded\n \n def module_getattr_folding(constants, block):\n", "issue": "PR #856 introduced regression in macro expansion of more than one block\nPR #856 caused macro expansion to effectively cease after performing macro expansion in one block, due to the logic in `numba/macro.py`:\n\n``` python\nfor blk in blocks.values():\n module_getattr_folding(constants, blk)\n expanded = expanded or expand_macros_in_block(constants, blk)\n```\n\n", "before_files": [{"content": "\"\"\"\nMacro handling passes\n\nMacros are expanded on block-by-block\n\"\"\"\nfrom __future__ import absolute_import, print_function, division\nfrom numba import ir\n\n\nclass MacroError(Exception):\n '''\n An exception thrown during macro expansion\n '''\n pass\n\n\ndef expand_macros(blocks):\n '''\n Performs macro expansion on blocks\n\n Args\n ----\n blocks: list\n the blocks to macro-expand\n return: bool\n True if any macros were expanded\n '''\n constants = {}\n expanded = False\n for blk in blocks.values():\n module_getattr_folding(constants, blk)\n expanded = expanded or expand_macros_in_block(constants, blk)\n return expanded\n\ndef module_getattr_folding(constants, block):\n '''\n Performs constant-folding of getattr instructions within a block. Any\n constants defined within the block are also added to the constant pool.\n\n Args\n ----\n constants: dict\n The pool of constants to use, which will be updated with any new\n constants in this block\n block: ir.Block\n The block to perform constant folding on\n '''\n for inst in block.body:\n if isinstance(inst, ir.Assign):\n rhs = inst.value\n\n if isinstance(rhs, ir.Global):\n constants[inst.target.name] = rhs.value\n\n elif isinstance(rhs, ir.Expr) and rhs.op == 'getattr':\n if rhs.value.name in constants:\n base = constants[rhs.value.name]\n constants[inst.target.name] = getattr(base, rhs.attr)\n\n elif isinstance(rhs, ir.Const):\n constants[inst.target.name] = rhs.value\n\n elif isinstance(rhs, ir.Var) and rhs.name in constants:\n constants[inst.target.name] = constants[rhs.name]\n\n elif isinstance(rhs, ir.FreeVar):\n constants[inst.target.name] = rhs.value\n\ndef expand_macros_in_block(constants, block):\n '''\n Performs macro expansion on a block.\n\n Args\n ----\n constants: dict\n The pool of constants which contains the values which contains mappings\n from variable names to callee names\n block: ir.Block\n The block to perform macro expansion on\n return: bool\n True if any macros were expanded\n '''\n expanded = False\n for inst in block.body:\n if isinstance(inst, ir.Assign):\n rhs = inst.value\n if isinstance(rhs, ir.Expr) and rhs.op == 'call':\n callee = rhs.func\n macro = constants.get(callee.name)\n if isinstance(macro, Macro):\n # Rewrite calling macro\n assert macro.callable\n args = [constants[arg.name] for arg in rhs.args]\n kws = dict((k, constants[v.name]) for k, v in rhs.kws)\n try:\n result = macro.func(*args, **kws)\n except BaseException as e:\n msg = str(e)\n headfmt = \"Macro expansion failed at {line}\"\n head = headfmt.format(line=inst.loc)\n newmsg = \"{0}:\\n{1}\".format(head, msg)\n raise MacroError(newmsg)\n if result:\n # Insert a new function\n result.loc = rhs.loc\n inst.value = ir.Expr.call(func=result, args=rhs.args,\n kws=rhs.kws, loc=rhs.loc)\n expanded = True\n elif isinstance(rhs, ir.Expr) and rhs.op == 'getattr':\n # Rewrite get attribute to macro call\n # Non-calling macro must be triggered by get attribute\n base = constants.get(rhs.value.name)\n if base is not None:\n value = getattr(base, rhs.attr)\n if isinstance(value, Macro):\n macro = value\n if not macro.callable:\n intr = ir.Intrinsic(macro.name, macro.func, args=())\n inst.value = ir.Expr.call(func=intr, args=(),\n kws=(), loc=rhs.loc)\n expanded = True\n return expanded\n\n\nclass Macro(object):\n '''\n A macro object is expanded to a function call\n\n Args\n ----\n name: str\n Name of this Macro\n func: function\n Function that evaluates the macro expansion\n callable: bool\n True if the Macro represents a callable function.\n False if it is represents some other type.\n argnames: list\n If ``callable`` is True, this holds a list of the names of arguments\n to the function.\n '''\n\n __slots__ = 'name', 'func', 'callable', 'argnames'\n\n def __init__(self, name, func, callable=False, argnames=None):\n self.name = name\n self.func = func\n self.callable = callable\n self.argnames = argnames\n\n def __repr__(self):\n return '<macro %s -> %s>' % (self.name, self.func)\n\n", "path": "numba/macro.py"}]}
2,019
123
gh_patches_debug_7749
rasdani/github-patches
git_diff
cloudtools__troposphere-1696
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> implement AWS::Synthetics changes from May 14, 2020 update </issue> <code> [start of troposphere/synthetics.py] 1 # Copyright (c) 2020, Mark Peek <[email protected]> 2 # All rights reserved. 3 # 4 # See LICENSE file for full license. 5 6 from . import AWSObject, AWSProperty, Tags 7 from .validators import (integer, boolean, canary_runtime_version) 8 9 10 class VPCConfig(AWSProperty): 11 props = { 12 'SecurityGroupIds': ([basestring], True), 13 'SubnetIds': ([basestring], True), 14 'VpcId': (basestring, False), 15 } 16 17 18 class Schedule(AWSProperty): 19 props = { 20 'DurationInSeconds': (basestring, True), 21 'Expression': (basestring, True), 22 } 23 24 25 class RunConfig(AWSProperty): 26 props = { 27 'TimeoutInSeconds': (integer, True), 28 } 29 30 31 class Code(AWSProperty): 32 props = { 33 'Handler': (basestring, False), 34 'S3Bucket': (basestring, False), 35 'S3Key': (basestring, False), 36 'S3ObjectVersion': (basestring, False), 37 'Script': (basestring, False), 38 } 39 40 41 class Canary(AWSObject): 42 resource_type = "AWS::Synthetics::Canary" 43 44 props = { 45 'ArtifactS3Location': (basestring, True), 46 'Code': (Code, True), 47 'ExecutionRoleArn': (basestring, True), 48 'FailureRetentionPeriod': (integer, False), 49 'Name': (basestring, True), 50 'RunConfig': (RunConfig, False), 51 'RuntimeVersion': (canary_runtime_version, True), 52 'Schedule': (Schedule, True), 53 'StartCanaryAfterCreation': (boolean, True), 54 'SuccessRetentionPeriod': (integer, False), 55 'Tags': (Tags, False), 56 'VPCConfig': (VPCConfig, False), 57 } 58 [end of troposphere/synthetics.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/troposphere/synthetics.py b/troposphere/synthetics.py --- a/troposphere/synthetics.py +++ b/troposphere/synthetics.py @@ -47,7 +47,7 @@ 'ExecutionRoleArn': (basestring, True), 'FailureRetentionPeriod': (integer, False), 'Name': (basestring, True), - 'RunConfig': (RunConfig, False), + 'RunConfig': (RunConfig, True), 'RuntimeVersion': (canary_runtime_version, True), 'Schedule': (Schedule, True), 'StartCanaryAfterCreation': (boolean, True),
{"golden_diff": "diff --git a/troposphere/synthetics.py b/troposphere/synthetics.py\n--- a/troposphere/synthetics.py\n+++ b/troposphere/synthetics.py\n@@ -47,7 +47,7 @@\n 'ExecutionRoleArn': (basestring, True),\n 'FailureRetentionPeriod': (integer, False),\n 'Name': (basestring, True),\n- 'RunConfig': (RunConfig, False),\n+ 'RunConfig': (RunConfig, True),\n 'RuntimeVersion': (canary_runtime_version, True),\n 'Schedule': (Schedule, True),\n 'StartCanaryAfterCreation': (boolean, True),\n", "issue": "implement AWS::Synthetics changes from May 14, 2020 update\n\n", "before_files": [{"content": "# Copyright (c) 2020, Mark Peek <[email protected]>\n# All rights reserved.\n#\n# See LICENSE file for full license.\n\nfrom . import AWSObject, AWSProperty, Tags\nfrom .validators import (integer, boolean, canary_runtime_version)\n\n\nclass VPCConfig(AWSProperty):\n props = {\n 'SecurityGroupIds': ([basestring], True),\n 'SubnetIds': ([basestring], True),\n 'VpcId': (basestring, False),\n }\n\n\nclass Schedule(AWSProperty):\n props = {\n 'DurationInSeconds': (basestring, True),\n 'Expression': (basestring, True),\n }\n\n\nclass RunConfig(AWSProperty):\n props = {\n 'TimeoutInSeconds': (integer, True),\n }\n\n\nclass Code(AWSProperty):\n props = {\n 'Handler': (basestring, False),\n 'S3Bucket': (basestring, False),\n 'S3Key': (basestring, False),\n 'S3ObjectVersion': (basestring, False),\n 'Script': (basestring, False),\n }\n\n\nclass Canary(AWSObject):\n resource_type = \"AWS::Synthetics::Canary\"\n\n props = {\n 'ArtifactS3Location': (basestring, True),\n 'Code': (Code, True),\n 'ExecutionRoleArn': (basestring, True),\n 'FailureRetentionPeriod': (integer, False),\n 'Name': (basestring, True),\n 'RunConfig': (RunConfig, False),\n 'RuntimeVersion': (canary_runtime_version, True),\n 'Schedule': (Schedule, True),\n 'StartCanaryAfterCreation': (boolean, True),\n 'SuccessRetentionPeriod': (integer, False),\n 'Tags': (Tags, False),\n 'VPCConfig': (VPCConfig, False),\n }\n", "path": "troposphere/synthetics.py"}]}
1,070
145
gh_patches_debug_38141
rasdani/github-patches
git_diff
apache__tvm-2921
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [TEST][TENSORFLOW] Cache the Downloaded File So far the model files used in the TF end to end tests re-download the file in each test run. This causes the test execution to be slow. Eventually, the test server can be blocked by the place that hosts the data-source. We need to change the implementation to cache to local and only re-download the file if necessary. cc @srkreddy1238 @icemelon9 </issue> <code> [start of python/tvm/contrib/download.py] 1 """Helper utility for downloading""" 2 from __future__ import print_function 3 from __future__ import absolute_import as _abs 4 5 import os 6 import sys 7 import time 8 9 def download(url, path, overwrite=False, size_compare=False, verbose=1): 10 """Downloads the file from the internet. 11 Set the input options correctly to overwrite or do the size comparison 12 13 Parameters 14 ---------- 15 url : str 16 Download url. 17 18 path : str 19 Local file path to save downloaded file 20 21 overwrite : bool, optional 22 Whether to overwrite existing file 23 24 size_compare : bool, optional 25 Whether to do size compare to check downloaded file. 26 27 verbose: int, optional 28 Verbose level 29 """ 30 if sys.version_info >= (3,): 31 import urllib.request as urllib2 32 else: 33 import urllib2 34 35 if os.path.isfile(path) and not overwrite: 36 if size_compare: 37 import requests 38 file_size = os.path.getsize(path) 39 res_head = requests.head(url) 40 res_get = requests.get(url, stream=True) 41 if 'Content-Length' not in res_head.headers: 42 res_get = urllib2.urlopen(url) 43 url_file_size = int(res_get.headers['Content-Length']) 44 if url_file_size != file_size: 45 print("exist file got corrupted, downloading %s file freshly..." % path) 46 download(url, path, True, False) 47 return 48 print('File {} exists, skip.'.format(path)) 49 return 50 51 if verbose >= 1: 52 print('Downloading from url {} to {}'.format(url, path)) 53 54 # Stateful start time 55 start_time = time.time() 56 57 def _download_progress(count, block_size, total_size): 58 #pylint: disable=unused-argument 59 """Show the download progress. 60 """ 61 if count == 0: 62 return 63 duration = time.time() - start_time 64 progress_size = int(count * block_size) 65 speed = int(progress_size / (1024 * duration)) 66 percent = min(int(count * block_size * 100 / total_size), 100) 67 sys.stdout.write("\r...%d%%, %.2f MB, %d KB/s, %d seconds passed" % 68 (percent, progress_size / (1024.0 * 1024), speed, duration)) 69 sys.stdout.flush() 70 71 if sys.version_info >= (3,): 72 urllib2.urlretrieve(url, path, reporthook=_download_progress) 73 print("") 74 else: 75 f = urllib2.urlopen(url) 76 data = f.read() 77 with open(path, "wb") as code: 78 code.write(data) 79 [end of python/tvm/contrib/download.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/python/tvm/contrib/download.py b/python/tvm/contrib/download.py --- a/python/tvm/contrib/download.py +++ b/python/tvm/contrib/download.py @@ -5,8 +5,10 @@ import os import sys import time +import uuid +import shutil -def download(url, path, overwrite=False, size_compare=False, verbose=1): +def download(url, path, overwrite=False, size_compare=False, verbose=1, retries=3): """Downloads the file from the internet. Set the input options correctly to overwrite or do the size comparison @@ -53,6 +55,11 @@ # Stateful start time start_time = time.time() + dirpath = os.path.dirname(path) + if not os.path.isdir(dirpath): + os.makedirs(dirpath) + random_uuid = str(uuid.uuid4()) + tempfile = os.path.join(dirpath, random_uuid) def _download_progress(count, block_size, total_size): #pylint: disable=unused-argument @@ -68,11 +75,62 @@ (percent, progress_size / (1024.0 * 1024), speed, duration)) sys.stdout.flush() - if sys.version_info >= (3,): - urllib2.urlretrieve(url, path, reporthook=_download_progress) - print("") + while retries >= 0: + # Disable pyling too broad Exception + # pylint: disable=W0703 + try: + if sys.version_info >= (3,): + urllib2.urlretrieve(url, tempfile, reporthook=_download_progress) + print("") + else: + f = urllib2.urlopen(url) + data = f.read() + with open(tempfile, "wb") as code: + code.write(data) + shutil.move(tempfile, path) + break + except Exception as err: + retries -= 1 + if retries == 0: + os.remove(tempfile) + raise err + else: + print("download failed due to {}, retrying, {} attempt{} left" + .format(repr(err), retries, 's' if retries > 1 else '')) + + +TEST_DATA_ROOT_PATH = os.path.join(os.path.expanduser('~'), '.tvm_test_data') +if not os.path.exists(TEST_DATA_ROOT_PATH): + os.mkdir(TEST_DATA_ROOT_PATH) + +def download_testdata(url, relpath, module=None): + """Downloads the test data from the internet. + + Parameters + ---------- + url : str + Download url. + + relpath : str + Relative file path. + + module : Union[str, list, tuple], optional + Subdirectory paths under test data folder. + + Returns + ------- + abspath : str + Absolute file path of downloaded file + """ + global TEST_DATA_ROOT_PATH + if module is None: + module_path = '' + elif isinstance(module, str): + module_path = module + elif isinstance(module, (list, tuple)): + module_path = os.path.join(*module) else: - f = urllib2.urlopen(url) - data = f.read() - with open(path, "wb") as code: - code.write(data) + raise ValueError("Unsupported module: " + module) + abspath = os.path.join(TEST_DATA_ROOT_PATH, module_path, relpath) + download(url, abspath, overwrite=False, size_compare=True) + return abspath
{"golden_diff": "diff --git a/python/tvm/contrib/download.py b/python/tvm/contrib/download.py\n--- a/python/tvm/contrib/download.py\n+++ b/python/tvm/contrib/download.py\n@@ -5,8 +5,10 @@\n import os\n import sys\n import time\n+import uuid\n+import shutil\n \n-def download(url, path, overwrite=False, size_compare=False, verbose=1):\n+def download(url, path, overwrite=False, size_compare=False, verbose=1, retries=3):\n \"\"\"Downloads the file from the internet.\n Set the input options correctly to overwrite or do the size comparison\n \n@@ -53,6 +55,11 @@\n \n # Stateful start time\n start_time = time.time()\n+ dirpath = os.path.dirname(path)\n+ if not os.path.isdir(dirpath):\n+ os.makedirs(dirpath)\n+ random_uuid = str(uuid.uuid4())\n+ tempfile = os.path.join(dirpath, random_uuid)\n \n def _download_progress(count, block_size, total_size):\n #pylint: disable=unused-argument\n@@ -68,11 +75,62 @@\n (percent, progress_size / (1024.0 * 1024), speed, duration))\n sys.stdout.flush()\n \n- if sys.version_info >= (3,):\n- urllib2.urlretrieve(url, path, reporthook=_download_progress)\n- print(\"\")\n+ while retries >= 0:\n+ # Disable pyling too broad Exception\n+ # pylint: disable=W0703\n+ try:\n+ if sys.version_info >= (3,):\n+ urllib2.urlretrieve(url, tempfile, reporthook=_download_progress)\n+ print(\"\")\n+ else:\n+ f = urllib2.urlopen(url)\n+ data = f.read()\n+ with open(tempfile, \"wb\") as code:\n+ code.write(data)\n+ shutil.move(tempfile, path)\n+ break\n+ except Exception as err:\n+ retries -= 1\n+ if retries == 0:\n+ os.remove(tempfile)\n+ raise err\n+ else:\n+ print(\"download failed due to {}, retrying, {} attempt{} left\"\n+ .format(repr(err), retries, 's' if retries > 1 else ''))\n+\n+\n+TEST_DATA_ROOT_PATH = os.path.join(os.path.expanduser('~'), '.tvm_test_data')\n+if not os.path.exists(TEST_DATA_ROOT_PATH):\n+ os.mkdir(TEST_DATA_ROOT_PATH)\n+\n+def download_testdata(url, relpath, module=None):\n+ \"\"\"Downloads the test data from the internet.\n+\n+ Parameters\n+ ----------\n+ url : str\n+ Download url.\n+\n+ relpath : str\n+ Relative file path.\n+\n+ module : Union[str, list, tuple], optional\n+ Subdirectory paths under test data folder.\n+\n+ Returns\n+ -------\n+ abspath : str\n+ Absolute file path of downloaded file\n+ \"\"\"\n+ global TEST_DATA_ROOT_PATH\n+ if module is None:\n+ module_path = ''\n+ elif isinstance(module, str):\n+ module_path = module\n+ elif isinstance(module, (list, tuple)):\n+ module_path = os.path.join(*module)\n else:\n- f = urllib2.urlopen(url)\n- data = f.read()\n- with open(path, \"wb\") as code:\n- code.write(data)\n+ raise ValueError(\"Unsupported module: \" + module)\n+ abspath = os.path.join(TEST_DATA_ROOT_PATH, module_path, relpath)\n+ download(url, abspath, overwrite=False, size_compare=True)\n+ return abspath\n", "issue": "[TEST][TENSORFLOW] Cache the Downloaded File\nSo far the model files used in the TF end to end tests re-download the file in each test run. \r\nThis causes the test execution to be slow. Eventually, the test server can be blocked by the place that hosts the data-source.\r\n\r\nWe need to change the implementation to cache to local and only re-download the file if necessary.\r\n\r\ncc @srkreddy1238 @icemelon9 \n", "before_files": [{"content": "\"\"\"Helper utility for downloading\"\"\"\nfrom __future__ import print_function\nfrom __future__ import absolute_import as _abs\n\nimport os\nimport sys\nimport time\n\ndef download(url, path, overwrite=False, size_compare=False, verbose=1):\n \"\"\"Downloads the file from the internet.\n Set the input options correctly to overwrite or do the size comparison\n\n Parameters\n ----------\n url : str\n Download url.\n\n path : str\n Local file path to save downloaded file\n\n overwrite : bool, optional\n Whether to overwrite existing file\n\n size_compare : bool, optional\n Whether to do size compare to check downloaded file.\n\n verbose: int, optional\n Verbose level\n \"\"\"\n if sys.version_info >= (3,):\n import urllib.request as urllib2\n else:\n import urllib2\n\n if os.path.isfile(path) and not overwrite:\n if size_compare:\n import requests\n file_size = os.path.getsize(path)\n res_head = requests.head(url)\n res_get = requests.get(url, stream=True)\n if 'Content-Length' not in res_head.headers:\n res_get = urllib2.urlopen(url)\n url_file_size = int(res_get.headers['Content-Length'])\n if url_file_size != file_size:\n print(\"exist file got corrupted, downloading %s file freshly...\" % path)\n download(url, path, True, False)\n return\n print('File {} exists, skip.'.format(path))\n return\n\n if verbose >= 1:\n print('Downloading from url {} to {}'.format(url, path))\n\n # Stateful start time\n start_time = time.time()\n\n def _download_progress(count, block_size, total_size):\n #pylint: disable=unused-argument\n \"\"\"Show the download progress.\n \"\"\"\n if count == 0:\n return\n duration = time.time() - start_time\n progress_size = int(count * block_size)\n speed = int(progress_size / (1024 * duration))\n percent = min(int(count * block_size * 100 / total_size), 100)\n sys.stdout.write(\"\\r...%d%%, %.2f MB, %d KB/s, %d seconds passed\" %\n (percent, progress_size / (1024.0 * 1024), speed, duration))\n sys.stdout.flush()\n\n if sys.version_info >= (3,):\n urllib2.urlretrieve(url, path, reporthook=_download_progress)\n print(\"\")\n else:\n f = urllib2.urlopen(url)\n data = f.read()\n with open(path, \"wb\") as code:\n code.write(data)\n", "path": "python/tvm/contrib/download.py"}]}
1,365
804
gh_patches_debug_1585
rasdani/github-patches
git_diff
scipy__scipy-10447
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Build adds the user folder inside the code base After building the SciPy on Windows 10, the base folder has a copy of the following folder ``` C:\Users\<user>\Documents\GitHub\scipy\Users\<user>\AppData\Local\Temp\tmpuvtg6i4i\main.obj ``` From the look of the structure, it seems like a relative folder is used instead of an absolute one hence it recreates the temp folder within the codebase. <strike>I think this might be related to the pocketfft development as I recently started to see it but might also be another C++ source change.</strike> Happens at the `cluster._optimal_leaf_ordering` compilation </issue> <code> [start of scipy/fft/_pocketfft/setup.py] 1 2 def try_compile(compiler, code=None, flags=[], ext='.cpp'): 3 """Returns True if the compiler is able to compile the given code""" 4 import tempfile 5 from distutils.errors import CompileError 6 import os 7 8 code = code or 'int main (int argc, char **argv) { return 0; }' 9 10 with tempfile.TemporaryDirectory() as temp_dir: 11 fname = os.path.join(temp_dir, 'main'+ext) 12 with open(fname, 'w') as f: 13 f.write(code) 14 15 try: 16 compiler.compile([fname], extra_postargs=flags) 17 except CompileError: 18 return False 19 return True 20 21 22 def has_flag(compiler, flag): 23 return try_compile(compiler, flags=[flag]) 24 25 26 def get_std_flag(compiler): 27 # Test the compiler for the highest available c++ standard flag 28 gnu_flags = ['--std=c++14', '--std=c++11'] 29 flags_by_cc = { 30 'msvc': ['/std:c++14', None], 31 'intelw': ['/Qstd=c++14', '/Qstd=c++11'] 32 } 33 flags = flags_by_cc.get(compiler.compiler_type, gnu_flags) 34 35 for flag in flags: 36 if flag is None: 37 return None 38 39 if has_flag(compiler, flag): 40 return flag 41 42 from numpy.distutils import log 43 log.warn('Could not detect c++ standard flag') 44 return None 45 46 47 def try_add_flag(args, compiler, flag): 48 """Appends flag to the list of arguments if supported by the compiler""" 49 if try_compile(compiler, flags=args+[flag]): 50 args.append(flag) 51 52 53 def pre_build_hook(build_ext, ext): 54 cc = build_ext._cxx_compiler 55 args = ext.extra_compile_args 56 57 std_flag = get_std_flag(build_ext._cxx_compiler) 58 if std_flag is not None: 59 args.append(std_flag) 60 61 if cc.compiler_type == 'msvc': 62 args.append('/EHsc') 63 else: 64 try_add_flag(args, cc, '-fvisibility=hidden') 65 66 import sys 67 if sys.platform == 'darwin': 68 args.append('-mmacosx-version-min=10.7') 69 try_add_flag(args, cc, '-stdlib=libc++') 70 71 72 def configuration(parent_package='', top_path=None): 73 from numpy.distutils.misc_util import Configuration 74 import pybind11 75 include_dirs = [pybind11.get_include(True), pybind11.get_include(False)] 76 77 config = Configuration('_pocketfft', parent_package, top_path) 78 ext = config.add_extension('pypocketfft', 79 sources=['pypocketfft.cxx'], 80 depends=['pocketfft_hdronly.h'], 81 include_dirs=include_dirs, 82 language='c++') 83 ext._pre_build_hook = pre_build_hook 84 85 config.add_data_files('LICENSE.md') 86 config.add_data_dir('tests') 87 return config 88 89 90 if __name__ == '__main__': 91 from numpy.distutils.core import setup 92 setup(**configuration(top_path='').todict()) 93 [end of scipy/fft/_pocketfft/setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/scipy/fft/_pocketfft/setup.py b/scipy/fft/_pocketfft/setup.py --- a/scipy/fft/_pocketfft/setup.py +++ b/scipy/fft/_pocketfft/setup.py @@ -13,7 +13,7 @@ f.write(code) try: - compiler.compile([fname], extra_postargs=flags) + compiler.compile([fname], output_dir=temp_dir, extra_postargs=flags) except CompileError: return False return True
{"golden_diff": "diff --git a/scipy/fft/_pocketfft/setup.py b/scipy/fft/_pocketfft/setup.py\n--- a/scipy/fft/_pocketfft/setup.py\n+++ b/scipy/fft/_pocketfft/setup.py\n@@ -13,7 +13,7 @@\n f.write(code)\n \n try:\n- compiler.compile([fname], extra_postargs=flags)\n+ compiler.compile([fname], output_dir=temp_dir, extra_postargs=flags)\n except CompileError:\n return False\n return True\n", "issue": "Build adds the user folder inside the code base\nAfter building the SciPy on Windows 10, the base folder has a copy of the following folder\r\n\r\n```\r\nC:\\Users\\<user>\\Documents\\GitHub\\scipy\\Users\\<user>\\AppData\\Local\\Temp\\tmpuvtg6i4i\\main.obj\r\n```\r\nFrom the look of the structure, it seems like a relative folder is used instead of an absolute one hence it recreates the temp folder within the codebase. \r\n\r\n<strike>I think this might be related to the pocketfft development as I recently started to see it but might also be another C++ source change.</strike> Happens at the `cluster._optimal_leaf_ordering` compilation\r\n\r\n\n", "before_files": [{"content": "\ndef try_compile(compiler, code=None, flags=[], ext='.cpp'):\n \"\"\"Returns True if the compiler is able to compile the given code\"\"\"\n import tempfile\n from distutils.errors import CompileError\n import os\n\n code = code or 'int main (int argc, char **argv) { return 0; }'\n\n with tempfile.TemporaryDirectory() as temp_dir:\n fname = os.path.join(temp_dir, 'main'+ext)\n with open(fname, 'w') as f:\n f.write(code)\n\n try:\n compiler.compile([fname], extra_postargs=flags)\n except CompileError:\n return False\n return True\n\n\ndef has_flag(compiler, flag):\n return try_compile(compiler, flags=[flag])\n\n\ndef get_std_flag(compiler):\n # Test the compiler for the highest available c++ standard flag\n gnu_flags = ['--std=c++14', '--std=c++11']\n flags_by_cc = {\n 'msvc': ['/std:c++14', None],\n 'intelw': ['/Qstd=c++14', '/Qstd=c++11']\n }\n flags = flags_by_cc.get(compiler.compiler_type, gnu_flags)\n\n for flag in flags:\n if flag is None:\n return None\n\n if has_flag(compiler, flag):\n return flag\n\n from numpy.distutils import log\n log.warn('Could not detect c++ standard flag')\n return None\n\n\ndef try_add_flag(args, compiler, flag):\n \"\"\"Appends flag to the list of arguments if supported by the compiler\"\"\"\n if try_compile(compiler, flags=args+[flag]):\n args.append(flag)\n\n\ndef pre_build_hook(build_ext, ext):\n cc = build_ext._cxx_compiler\n args = ext.extra_compile_args\n\n std_flag = get_std_flag(build_ext._cxx_compiler)\n if std_flag is not None:\n args.append(std_flag)\n\n if cc.compiler_type == 'msvc':\n args.append('/EHsc')\n else:\n try_add_flag(args, cc, '-fvisibility=hidden')\n\n import sys\n if sys.platform == 'darwin':\n args.append('-mmacosx-version-min=10.7')\n try_add_flag(args, cc, '-stdlib=libc++')\n\n\ndef configuration(parent_package='', top_path=None):\n from numpy.distutils.misc_util import Configuration\n import pybind11\n include_dirs = [pybind11.get_include(True), pybind11.get_include(False)]\n\n config = Configuration('_pocketfft', parent_package, top_path)\n ext = config.add_extension('pypocketfft',\n sources=['pypocketfft.cxx'],\n depends=['pocketfft_hdronly.h'],\n include_dirs=include_dirs,\n language='c++')\n ext._pre_build_hook = pre_build_hook\n\n config.add_data_files('LICENSE.md')\n config.add_data_dir('tests')\n return config\n\n\nif __name__ == '__main__':\n from numpy.distutils.core import setup\n setup(**configuration(top_path='').todict())\n", "path": "scipy/fft/_pocketfft/setup.py"}]}
1,549
118
gh_patches_debug_15992
rasdani/github-patches
git_diff
pymodbus-dev__pymodbus-1339
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Can't run pymodbus.simulator --help <!-- Before opening a new issue, make sure you do the following: * check that your issue isn't already filed: https://github.com/pymodbus-dev/pymodbus/issues * check the discussions forum https://github.com/pymodbus-dev/pymodbus/discussions * prepare a short, runnable example that reproduce the issue with the latest development version of Pymodbus Before opening a new issue, make sure you do the following --> ### Versions * Python: 3.10.6 * OS: Linux * Pymodbus: 3.1.3 * Modbus Hardware (if used): ### Description Trying to run `pymodbus.simulator --help` fails: ``` <coroutine object main at 0x7efcc073cf90> sys:1: RuntimeWarning: coroutine 'main' was never awaited RuntimeWarning: Enable tracemalloc to get the object allocation traceback ``` The `main` function used as entry point for the simulator is an async function: https://github.com/pymodbus-dev/pymodbus/blob/12859d0b82cc215a18ac757fe9319cdf1f9ec890/pymodbus/server/simulator/main.py#L113 It can't be used directly as an entry point. The entry point should be a function using `asyncio.run`. </issue> <code> [start of pymodbus/server/simulator/main.py] 1 #!/usr/bin/env python3 2 """HTTP server for modbus simulator. 3 4 The modbus simulator contain 3 distint parts: 5 6 - Datastore simulator, to define registers and their behaviour including actions: (simulator)(../../datastore/simulator.py) 7 - Modbus server: (server)(./http_server.py) 8 - HTTP server with REST API and web pages providing an online console in your browser 9 10 Multiple setups for different server types and/or devices are prepared in a (json file)(./setup.json), the detailed configuration is explained in (doc)(README.md) 11 12 The command line parameters are kept to a minimum: 13 14 usage: main.py [-h] [--modbus_server MODBUS_SERVER] 15 [--modbus_device MODBUS_DEVICE] [--http_host HTTP_HOST] 16 [--http_port HTTP_PORT] 17 [--log {critical,error,warning,info,debug}] 18 [--json_file JSON_FILE] 19 [--custom_actions_module CUSTOM_ACTIONS_MODULE] 20 21 Modbus server with REST-API and web server 22 23 options: 24 -h, --help show this help message and exit 25 --modbus_server MODBUS_SERVER 26 use <modbus_server> from server_list in json file 27 --modbus_device MODBUS_DEVICE 28 use <modbus_device> from device_list in json file 29 --http_host HTTP_HOST 30 use <http_host> as host to bind http listen 31 --http_port HTTP_PORT 32 use <http_port> as port to bind http listen 33 --log {critical,error,warning,info,debug} 34 set log level, default is info 35 --log_file LOG_FILE 36 name of server log file, default is "server.log" 37 --json_file JSON_FILE 38 name of json_file, default is "setup.json" 39 --custom_actions_module CUSTOM_ACTIONS_MODULE 40 python file with custom actions, default is none 41 """ 42 import argparse 43 import asyncio 44 45 from pymodbus import pymodbus_apply_logging_config 46 from pymodbus.logging import Log 47 from pymodbus.server.simulator.http_server import ModbusSimulatorServer 48 49 50 async def run(): 51 """Run simulator.""" 52 53 54 def get_commandline(): 55 """Get command line arguments.""" 56 parser = argparse.ArgumentParser( 57 description="Modbus server with REST-API and web server" 58 ) 59 parser.add_argument( 60 "--modbus_server", 61 help="use <modbus_server> from server_list in json file", 62 type=str, 63 ) 64 parser.add_argument( 65 "--modbus_device", 66 help="use <modbus_device> from device_list in json file", 67 type=str, 68 ) 69 parser.add_argument( 70 "--http_host", 71 help="use <http_host> as host to bind http listen", 72 type=str, 73 ) 74 parser.add_argument( 75 "--http_port", 76 help="use <http_port> as port to bind http listen", 77 type=str, 78 ) 79 parser.add_argument( 80 "--log", 81 choices=["critical", "error", "warning", "info", "debug"], 82 help="set log level, default is info", 83 default="info", 84 type=str, 85 ) 86 parser.add_argument( 87 "--json_file", 88 help='name of json file, default is "setup.json"', 89 type=str, 90 ) 91 parser.add_argument( 92 "--log_file", 93 help='name of server log file, default is "server.log"', 94 type=str, 95 ) 96 parser.add_argument( 97 "--custom_actions_module", 98 help="python file with custom actions, default is none", 99 type=str, 100 ) 101 args = parser.parse_args() 102 pymodbus_apply_logging_config(args.log.upper()) 103 Log.info("Start simulator") 104 cmd_args = {} 105 for argument in args.__dict__: 106 if argument == "log": 107 continue 108 if args.__dict__[argument] is not None: 109 cmd_args[argument] = args.__dict__[argument] 110 return cmd_args 111 112 113 async def main(): 114 """Run server.""" 115 cmd_args = get_commandline() 116 task = ModbusSimulatorServer(**cmd_args) 117 118 await task.run_forever() 119 120 121 if __name__ == "__main__": 122 asyncio.run(main(), debug=True) 123 [end of pymodbus/server/simulator/main.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pymodbus/server/simulator/main.py b/pymodbus/server/simulator/main.py --- a/pymodbus/server/simulator/main.py +++ b/pymodbus/server/simulator/main.py @@ -47,10 +47,6 @@ from pymodbus.server.simulator.http_server import ModbusSimulatorServer -async def run(): - """Run simulator.""" - - def get_commandline(): """Get command line arguments.""" parser = argparse.ArgumentParser( @@ -110,13 +106,12 @@ return cmd_args -async def main(): +def main(): """Run server.""" cmd_args = get_commandline() task = ModbusSimulatorServer(**cmd_args) - - await task.run_forever() + asyncio.run(task.run_forever(), debug=True) if __name__ == "__main__": - asyncio.run(main(), debug=True) + main()
{"golden_diff": "diff --git a/pymodbus/server/simulator/main.py b/pymodbus/server/simulator/main.py\n--- a/pymodbus/server/simulator/main.py\n+++ b/pymodbus/server/simulator/main.py\n@@ -47,10 +47,6 @@\n from pymodbus.server.simulator.http_server import ModbusSimulatorServer\n \n \n-async def run():\n- \"\"\"Run simulator.\"\"\"\n-\n-\n def get_commandline():\n \"\"\"Get command line arguments.\"\"\"\n parser = argparse.ArgumentParser(\n@@ -110,13 +106,12 @@\n return cmd_args\n \n \n-async def main():\n+def main():\n \"\"\"Run server.\"\"\"\n cmd_args = get_commandline()\n task = ModbusSimulatorServer(**cmd_args)\n-\n- await task.run_forever()\n+ asyncio.run(task.run_forever(), debug=True)\n \n \n if __name__ == \"__main__\":\n- asyncio.run(main(), debug=True)\n+ main()\n", "issue": "Can't run pymodbus.simulator --help\n<!--\r\n\r\nBefore opening a new issue, make sure you do the following:\r\n * check that your issue isn't already filed: https://github.com/pymodbus-dev/pymodbus/issues\r\n * check the discussions forum https://github.com/pymodbus-dev/pymodbus/discussions\r\n * prepare a short, runnable example that reproduce the issue with the latest development version of Pymodbus\r\n\r\n Before opening a new issue, make sure you do the following\r\n-->\r\n\r\n### Versions\r\n\r\n* Python: 3.10.6\r\n* OS: Linux\r\n* Pymodbus: 3.1.3\r\n* Modbus Hardware (if used):\r\n\r\n### Description\r\n\r\nTrying to run `pymodbus.simulator --help` fails:\r\n\r\n```\r\n<coroutine object main at 0x7efcc073cf90>\r\nsys:1: RuntimeWarning: coroutine 'main' was never awaited\r\nRuntimeWarning: Enable tracemalloc to get the object allocation traceback\r\n```\r\n\r\nThe `main` function used as entry point for the simulator is an async function: https://github.com/pymodbus-dev/pymodbus/blob/12859d0b82cc215a18ac757fe9319cdf1f9ec890/pymodbus/server/simulator/main.py#L113\r\n\r\nIt can't be used directly as an entry point. The entry point should be a function using `asyncio.run`.\r\n\n", "before_files": [{"content": "#!/usr/bin/env python3\n\"\"\"HTTP server for modbus simulator.\n\nThe modbus simulator contain 3 distint parts:\n\n- Datastore simulator, to define registers and their behaviour including actions: (simulator)(../../datastore/simulator.py)\n- Modbus server: (server)(./http_server.py)\n- HTTP server with REST API and web pages providing an online console in your browser\n\nMultiple setups for different server types and/or devices are prepared in a (json file)(./setup.json), the detailed configuration is explained in (doc)(README.md)\n\nThe command line parameters are kept to a minimum:\n\nusage: main.py [-h] [--modbus_server MODBUS_SERVER]\n [--modbus_device MODBUS_DEVICE] [--http_host HTTP_HOST]\n [--http_port HTTP_PORT]\n [--log {critical,error,warning,info,debug}]\n [--json_file JSON_FILE]\n [--custom_actions_module CUSTOM_ACTIONS_MODULE]\n\nModbus server with REST-API and web server\n\noptions:\n -h, --help show this help message and exit\n --modbus_server MODBUS_SERVER\n use <modbus_server> from server_list in json file\n --modbus_device MODBUS_DEVICE\n use <modbus_device> from device_list in json file\n --http_host HTTP_HOST\n use <http_host> as host to bind http listen\n --http_port HTTP_PORT\n use <http_port> as port to bind http listen\n --log {critical,error,warning,info,debug}\n set log level, default is info\n --log_file LOG_FILE\n name of server log file, default is \"server.log\"\n --json_file JSON_FILE\n name of json_file, default is \"setup.json\"\n --custom_actions_module CUSTOM_ACTIONS_MODULE\n python file with custom actions, default is none\n\"\"\"\nimport argparse\nimport asyncio\n\nfrom pymodbus import pymodbus_apply_logging_config\nfrom pymodbus.logging import Log\nfrom pymodbus.server.simulator.http_server import ModbusSimulatorServer\n\n\nasync def run():\n \"\"\"Run simulator.\"\"\"\n\n\ndef get_commandline():\n \"\"\"Get command line arguments.\"\"\"\n parser = argparse.ArgumentParser(\n description=\"Modbus server with REST-API and web server\"\n )\n parser.add_argument(\n \"--modbus_server\",\n help=\"use <modbus_server> from server_list in json file\",\n type=str,\n )\n parser.add_argument(\n \"--modbus_device\",\n help=\"use <modbus_device> from device_list in json file\",\n type=str,\n )\n parser.add_argument(\n \"--http_host\",\n help=\"use <http_host> as host to bind http listen\",\n type=str,\n )\n parser.add_argument(\n \"--http_port\",\n help=\"use <http_port> as port to bind http listen\",\n type=str,\n )\n parser.add_argument(\n \"--log\",\n choices=[\"critical\", \"error\", \"warning\", \"info\", \"debug\"],\n help=\"set log level, default is info\",\n default=\"info\",\n type=str,\n )\n parser.add_argument(\n \"--json_file\",\n help='name of json file, default is \"setup.json\"',\n type=str,\n )\n parser.add_argument(\n \"--log_file\",\n help='name of server log file, default is \"server.log\"',\n type=str,\n )\n parser.add_argument(\n \"--custom_actions_module\",\n help=\"python file with custom actions, default is none\",\n type=str,\n )\n args = parser.parse_args()\n pymodbus_apply_logging_config(args.log.upper())\n Log.info(\"Start simulator\")\n cmd_args = {}\n for argument in args.__dict__:\n if argument == \"log\":\n continue\n if args.__dict__[argument] is not None:\n cmd_args[argument] = args.__dict__[argument]\n return cmd_args\n\n\nasync def main():\n \"\"\"Run server.\"\"\"\n cmd_args = get_commandline()\n task = ModbusSimulatorServer(**cmd_args)\n\n await task.run_forever()\n\n\nif __name__ == \"__main__\":\n asyncio.run(main(), debug=True)\n", "path": "pymodbus/server/simulator/main.py"}]}
2,017
209
gh_patches_debug_650
rasdani/github-patches
git_diff
pex-tool__pex-1942
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Release 2.1.109 On the docket: + [x] pex does not support musllinux wheels #1933 + [x] Empty string PEX_PATH="" env var causes CWD (.) to be added bootstrapped pex_path #1936 </issue> <code> [start of pex/version.py] 1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). 2 # Licensed under the Apache License, Version 2.0 (see LICENSE). 3 4 __version__ = "2.1.108" 5 [end of pex/version.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pex/version.py b/pex/version.py --- a/pex/version.py +++ b/pex/version.py @@ -1,4 +1,4 @@ # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). -__version__ = "2.1.108" +__version__ = "2.1.109"
{"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.108\"\n+__version__ = \"2.1.109\"\n", "issue": "Release 2.1.109\nOn the docket:\r\n+ [x] pex does not support musllinux wheels #1933\r\n+ [x] Empty string PEX_PATH=\"\" env var causes CWD (.) to be added bootstrapped pex_path #1936\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.108\"\n", "path": "pex/version.py"}]}
650
99
gh_patches_debug_25401
rasdani/github-patches
git_diff
GeotrekCE__Geotrek-admin-1307
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Infrastructure list is filtered on "Ouvrage" by default J'ai créé des points d'aménagements (8 au total), cependant la plupart ne s'affichent pas (ni dans la liste, ni sur la carte)... ![1](https://cloud.githubusercontent.com/assets/7872468/3261864/b950187a-f25e-11e3-90ce-49701ae4aa36.JPG) Lorsque je rentre dans une fiche aménagement et que je reclique sur le bouton liste, là ils apparaissent tous mais seulement sur la carte. ![2](https://cloud.githubusercontent.com/assets/7872468/3261921/4d0614a2-f25f-11e3-8234-d1b44a293728.JPG) Par contre, si je touche au zoom, ils disparaissent et je n'ai plus que les trois du début. </issue> <code> [start of geotrek/infrastructure/filters.py] 1 from django.utils.translation import ugettext_lazy as _ 2 3 from geotrek.common.filters import StructureRelatedFilterSet, YearFilter 4 from geotrek.maintenance.filters import InterventionYearSelect 5 6 from .models import INFRASTRUCTURE_TYPES, Infrastructure, Signage 7 8 9 class InfrastructureYearSelect(InterventionYearSelect): 10 label = _(u"Intervention year") 11 12 13 class InfrastructureFilterSet(StructureRelatedFilterSet): 14 intervention_year = YearFilter(name='interventions_set__date', 15 widget=InfrastructureYearSelect, 16 label=_(u"Intervention year")) 17 18 def __init__(self, *args, **kwargs): 19 super(InfrastructureFilterSet, self).__init__(*args, **kwargs) 20 field = self.form.fields['type'] 21 field.queryset = field.queryset.exclude(type=INFRASTRUCTURE_TYPES.SIGNAGE) 22 23 class Meta(StructureRelatedFilterSet.Meta): 24 model = Infrastructure 25 fields = StructureRelatedFilterSet.Meta.fields + ['type__type', 'type'] 26 27 28 class SignageFilterSet(StructureRelatedFilterSet): 29 intervention_year = YearFilter(name='interventions_set__date', 30 widget=InfrastructureYearSelect) 31 32 def __init__(self, *args, **kwargs): 33 super(SignageFilterSet, self).__init__(*args, **kwargs) 34 field = self.form.fields['type'] 35 field.queryset = field.queryset.filter(type=INFRASTRUCTURE_TYPES.SIGNAGE) 36 37 class Meta(StructureRelatedFilterSet.Meta): 38 model = Signage 39 fields = StructureRelatedFilterSet.Meta.fields + ['type'] 40 [end of geotrek/infrastructure/filters.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/geotrek/infrastructure/filters.py b/geotrek/infrastructure/filters.py --- a/geotrek/infrastructure/filters.py +++ b/geotrek/infrastructure/filters.py @@ -20,6 +20,11 @@ field = self.form.fields['type'] field.queryset = field.queryset.exclude(type=INFRASTRUCTURE_TYPES.SIGNAGE) + field = self.form.fields['type__type'] + all_choices = field.widget.choices + all_choices = [c for c in all_choices if c[0] != INFRASTRUCTURE_TYPES.SIGNAGE] + field.widget.choices = [('', _(u"Category"))] + all_choices + class Meta(StructureRelatedFilterSet.Meta): model = Infrastructure fields = StructureRelatedFilterSet.Meta.fields + ['type__type', 'type'] @@ -29,11 +34,6 @@ intervention_year = YearFilter(name='interventions_set__date', widget=InfrastructureYearSelect) - def __init__(self, *args, **kwargs): - super(SignageFilterSet, self).__init__(*args, **kwargs) - field = self.form.fields['type'] - field.queryset = field.queryset.filter(type=INFRASTRUCTURE_TYPES.SIGNAGE) - class Meta(StructureRelatedFilterSet.Meta): model = Signage - fields = StructureRelatedFilterSet.Meta.fields + ['type'] + fields = StructureRelatedFilterSet.Meta.fields
{"golden_diff": "diff --git a/geotrek/infrastructure/filters.py b/geotrek/infrastructure/filters.py\n--- a/geotrek/infrastructure/filters.py\n+++ b/geotrek/infrastructure/filters.py\n@@ -20,6 +20,11 @@\n field = self.form.fields['type']\n field.queryset = field.queryset.exclude(type=INFRASTRUCTURE_TYPES.SIGNAGE)\n \n+ field = self.form.fields['type__type']\n+ all_choices = field.widget.choices\n+ all_choices = [c for c in all_choices if c[0] != INFRASTRUCTURE_TYPES.SIGNAGE]\n+ field.widget.choices = [('', _(u\"Category\"))] + all_choices\n+\n class Meta(StructureRelatedFilterSet.Meta):\n model = Infrastructure\n fields = StructureRelatedFilterSet.Meta.fields + ['type__type', 'type']\n@@ -29,11 +34,6 @@\n intervention_year = YearFilter(name='interventions_set__date',\n widget=InfrastructureYearSelect)\n \n- def __init__(self, *args, **kwargs):\n- super(SignageFilterSet, self).__init__(*args, **kwargs)\n- field = self.form.fields['type']\n- field.queryset = field.queryset.filter(type=INFRASTRUCTURE_TYPES.SIGNAGE)\n-\n class Meta(StructureRelatedFilterSet.Meta):\n model = Signage\n- fields = StructureRelatedFilterSet.Meta.fields + ['type']\n+ fields = StructureRelatedFilterSet.Meta.fields\n", "issue": "Infrastructure list is filtered on \"Ouvrage\" by default\nJ'ai cr\u00e9\u00e9 des points d'am\u00e9nagements (8 au total), cependant la plupart ne s'affichent pas (ni dans la liste, ni sur la carte)...\n![1](https://cloud.githubusercontent.com/assets/7872468/3261864/b950187a-f25e-11e3-90ce-49701ae4aa36.JPG)\nLorsque je rentre dans une fiche am\u00e9nagement et que je reclique sur le bouton liste, l\u00e0 ils apparaissent tous mais seulement sur la carte.\n![2](https://cloud.githubusercontent.com/assets/7872468/3261921/4d0614a2-f25f-11e3-8234-d1b44a293728.JPG)\nPar contre, si je touche au zoom, ils disparaissent et je n'ai plus que les trois du d\u00e9but.\n\n", "before_files": [{"content": "from django.utils.translation import ugettext_lazy as _\n\nfrom geotrek.common.filters import StructureRelatedFilterSet, YearFilter\nfrom geotrek.maintenance.filters import InterventionYearSelect\n\nfrom .models import INFRASTRUCTURE_TYPES, Infrastructure, Signage\n\n\nclass InfrastructureYearSelect(InterventionYearSelect):\n label = _(u\"Intervention year\")\n\n\nclass InfrastructureFilterSet(StructureRelatedFilterSet):\n intervention_year = YearFilter(name='interventions_set__date',\n widget=InfrastructureYearSelect,\n label=_(u\"Intervention year\"))\n\n def __init__(self, *args, **kwargs):\n super(InfrastructureFilterSet, self).__init__(*args, **kwargs)\n field = self.form.fields['type']\n field.queryset = field.queryset.exclude(type=INFRASTRUCTURE_TYPES.SIGNAGE)\n\n class Meta(StructureRelatedFilterSet.Meta):\n model = Infrastructure\n fields = StructureRelatedFilterSet.Meta.fields + ['type__type', 'type']\n\n\nclass SignageFilterSet(StructureRelatedFilterSet):\n intervention_year = YearFilter(name='interventions_set__date',\n widget=InfrastructureYearSelect)\n\n def __init__(self, *args, **kwargs):\n super(SignageFilterSet, self).__init__(*args, **kwargs)\n field = self.form.fields['type']\n field.queryset = field.queryset.filter(type=INFRASTRUCTURE_TYPES.SIGNAGE)\n\n class Meta(StructureRelatedFilterSet.Meta):\n model = Signage\n fields = StructureRelatedFilterSet.Meta.fields + ['type']\n", "path": "geotrek/infrastructure/filters.py"}]}
1,186
333
gh_patches_debug_11644
rasdani/github-patches
git_diff
pyqtgraph__pyqtgraph-954
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [CI-fail] GLScatterPlotItem failing on Windows Builds ``` Traceback (most recent call last): File "c:\hostedtoolcache\windows\python\3.7.3\x64\lib\site-packages\OpenGL\latebind.py", line 41, in __call__ return self._finalCall( *args, **named ) TypeError: 'NoneType' object is not callable During handling of the above exception, another exception occurred: Traceback (most recent call last): File "D:\a\1\s\pyqtgraph\opengl\GLViewWidget.py", line 60, in addItem item.initializeGL() File "D:\a\1\s\pyqtgraph\opengl\items\GLScatterPlotItem.py", line 70, in initializeGL self.pointTexture = glGenTextures(1) File "c:\hostedtoolcache\windows\python\3.7.3\x64\lib\site-packages\OpenGL\latebind.py", line 61, in __call__ return self.wrapperFunction( self.baseFunction, *args, **named ) File "c:\hostedtoolcache\windows\python\3.7.3\x64\lib\site-packages\OpenGL\GL\exceptional.py", line 178, in glGenTextures baseFunction( count, textures) File "c:\hostedtoolcache\windows\python\3.7.3\x64\lib\site-packages\OpenGL\latebind.py", line 45, in __call__ return self._finalCall( *args, **named ) File "c:\hostedtoolcache\windows\python\3.7.3\x64\lib\site-packages\OpenGL\wrapper.py", line 664, in wrapperCall raise err File "c:\hostedtoolcache\windows\python\3.7.3\x64\lib\site-packages\OpenGL\wrapper.py", line 657, in wrapperCall result = wrappedOperation( *cArguments ) File "c:\hostedtoolcache\windows\python\3.7.3\x64\lib\site-packages\OpenGL\platform\baseplatform.py", line 402, in __call__ return self( *args, **named ) File "c:\hostedtoolcache\windows\python\3.7.3\x64\lib\site-packages\OpenGL\error.py", line 232, in glCheckError baseOperation = baseOperation, OpenGL.error.GLError: GLError( err = 1282, description = b'invalid operation', baseOperation = glGenTextures, pyArgs = (1, c_ulong(0)), cArgs = (1, <cparam 'P' (00000158BE5A9310)>), cArguments = (1, <cparam 'P' (00000158BE5A9310)>) ) During handling of the above exception, another exception occurred: Traceback (most recent call last): File "<stdin>", line 7, in <module> File "D:\a\1\s\examples\GLScatterPlotItem.py", line 46, in <module> w.addItem(sp1) File "D:\a\1\s\pyqtgraph\opengl\GLViewWidget.py", line 62, in addItem self.checkOpenGLVersion('Error while adding item %s to GLViewWidget.' % str(item)) File "D:\a\1\s\pyqtgraph\opengl\GLViewWidget.py", line 429, in checkOpenGLVersion ver = glGetString(GL_VERSION).split()[0] File "c:\hostedtoolcache\windows\python\3.7.3\x64\lib\site-packages\OpenGL\platform\baseplatform.py", line 402, in __call__ return self( *args, **named ) File "c:\hostedtoolcache\windows\python\3.7.3\x64\lib\site-packages\OpenGL\error.py", line 232, in glCheckError baseOperation = baseOperation, OpenGL.error.GLError: GLError( err = 1282, description = b'invalid operation', baseOperation = glGetString, cArguments = (GL_VERSION,) ) Failed Scatter Plot Example Test Located in GLScatterPlotItem.py ``` Likely related to #928 </issue> <code> [start of pyqtgraph/opengl/glInfo.py] 1 from ..Qt import QtCore, QtGui, QtOpenGL 2 from OpenGL.GL import * 3 app = QtGui.QApplication([]) 4 5 class GLTest(QtOpenGL.QGLWidget): 6 def __init__(self): 7 QtOpenGL.QGLWidget.__init__(self) 8 self.makeCurrent() 9 print("GL version:" + glGetString(GL_VERSION)) 10 print("MAX_TEXTURE_SIZE: %d" % glGetIntegerv(GL_MAX_TEXTURE_SIZE)) 11 print("MAX_3D_TEXTURE_SIZE: %d" % glGetIntegerv(GL_MAX_3D_TEXTURE_SIZE)) 12 print("Extensions: " + glGetString(GL_EXTENSIONS)) 13 14 GLTest() 15 16 17 [end of pyqtgraph/opengl/glInfo.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pyqtgraph/opengl/glInfo.py b/pyqtgraph/opengl/glInfo.py --- a/pyqtgraph/opengl/glInfo.py +++ b/pyqtgraph/opengl/glInfo.py @@ -6,10 +6,10 @@ def __init__(self): QtOpenGL.QGLWidget.__init__(self) self.makeCurrent() - print("GL version:" + glGetString(GL_VERSION)) + print("GL version:" + glGetString(GL_VERSION).decode("utf-8")) print("MAX_TEXTURE_SIZE: %d" % glGetIntegerv(GL_MAX_TEXTURE_SIZE)) print("MAX_3D_TEXTURE_SIZE: %d" % glGetIntegerv(GL_MAX_3D_TEXTURE_SIZE)) - print("Extensions: " + glGetString(GL_EXTENSIONS)) + print("Extensions: " + glGetString(GL_EXTENSIONS).decode("utf-8").replace(" ", "\n")) GLTest()
{"golden_diff": "diff --git a/pyqtgraph/opengl/glInfo.py b/pyqtgraph/opengl/glInfo.py\n--- a/pyqtgraph/opengl/glInfo.py\n+++ b/pyqtgraph/opengl/glInfo.py\n@@ -6,10 +6,10 @@\n def __init__(self):\n QtOpenGL.QGLWidget.__init__(self)\n self.makeCurrent()\n- print(\"GL version:\" + glGetString(GL_VERSION))\n+ print(\"GL version:\" + glGetString(GL_VERSION).decode(\"utf-8\"))\n print(\"MAX_TEXTURE_SIZE: %d\" % glGetIntegerv(GL_MAX_TEXTURE_SIZE))\n print(\"MAX_3D_TEXTURE_SIZE: %d\" % glGetIntegerv(GL_MAX_3D_TEXTURE_SIZE))\n- print(\"Extensions: \" + glGetString(GL_EXTENSIONS))\n+ print(\"Extensions: \" + glGetString(GL_EXTENSIONS).decode(\"utf-8\").replace(\" \", \"\\n\"))\n \n GLTest()\n", "issue": "[CI-fail] GLScatterPlotItem failing on Windows Builds\n```\r\nTraceback (most recent call last):\r\n\r\n File \"c:\\hostedtoolcache\\windows\\python\\3.7.3\\x64\\lib\\site-packages\\OpenGL\\latebind.py\", line 41, in __call__\r\n\r\n return self._finalCall( *args, **named )\r\n\r\nTypeError: 'NoneType' object is not callable\r\n\r\n\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\n\r\n\r\nTraceback (most recent call last):\r\n\r\n File \"D:\\a\\1\\s\\pyqtgraph\\opengl\\GLViewWidget.py\", line 60, in addItem\r\n\r\n item.initializeGL()\r\n\r\n File \"D:\\a\\1\\s\\pyqtgraph\\opengl\\items\\GLScatterPlotItem.py\", line 70, in initializeGL\r\n\r\n self.pointTexture = glGenTextures(1)\r\n\r\n File \"c:\\hostedtoolcache\\windows\\python\\3.7.3\\x64\\lib\\site-packages\\OpenGL\\latebind.py\", line 61, in __call__\r\n\r\n return self.wrapperFunction( self.baseFunction, *args, **named )\r\n\r\n File \"c:\\hostedtoolcache\\windows\\python\\3.7.3\\x64\\lib\\site-packages\\OpenGL\\GL\\exceptional.py\", line 178, in glGenTextures\r\n\r\n baseFunction( count, textures)\r\n\r\n File \"c:\\hostedtoolcache\\windows\\python\\3.7.3\\x64\\lib\\site-packages\\OpenGL\\latebind.py\", line 45, in __call__\r\n\r\n return self._finalCall( *args, **named )\r\n\r\n File \"c:\\hostedtoolcache\\windows\\python\\3.7.3\\x64\\lib\\site-packages\\OpenGL\\wrapper.py\", line 664, in wrapperCall\r\n\r\n raise err\r\n\r\n File \"c:\\hostedtoolcache\\windows\\python\\3.7.3\\x64\\lib\\site-packages\\OpenGL\\wrapper.py\", line 657, in wrapperCall\r\n\r\n result = wrappedOperation( *cArguments )\r\n\r\n File \"c:\\hostedtoolcache\\windows\\python\\3.7.3\\x64\\lib\\site-packages\\OpenGL\\platform\\baseplatform.py\", line 402, in __call__\r\n\r\n return self( *args, **named )\r\n\r\n File \"c:\\hostedtoolcache\\windows\\python\\3.7.3\\x64\\lib\\site-packages\\OpenGL\\error.py\", line 232, in glCheckError\r\n\r\n baseOperation = baseOperation,\r\n\r\nOpenGL.error.GLError: GLError(\r\n\r\n\terr = 1282,\r\n\r\n\tdescription = b'invalid operation',\r\n\r\n\tbaseOperation = glGenTextures,\r\n\r\n\tpyArgs = (1, c_ulong(0)),\r\n\r\n\tcArgs = (1, <cparam 'P' (00000158BE5A9310)>),\r\n\r\n\tcArguments = (1, <cparam 'P' (00000158BE5A9310)>)\r\n\r\n)\r\n\r\n\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\n\r\n\r\nTraceback (most recent call last):\r\n\r\n File \"<stdin>\", line 7, in <module>\r\n\r\n File \"D:\\a\\1\\s\\examples\\GLScatterPlotItem.py\", line 46, in <module>\r\n\r\n w.addItem(sp1)\r\n\r\n File \"D:\\a\\1\\s\\pyqtgraph\\opengl\\GLViewWidget.py\", line 62, in addItem\r\n\r\n self.checkOpenGLVersion('Error while adding item %s to GLViewWidget.' % str(item))\r\n\r\n File \"D:\\a\\1\\s\\pyqtgraph\\opengl\\GLViewWidget.py\", line 429, in checkOpenGLVersion\r\n\r\n ver = glGetString(GL_VERSION).split()[0]\r\n\r\n File \"c:\\hostedtoolcache\\windows\\python\\3.7.3\\x64\\lib\\site-packages\\OpenGL\\platform\\baseplatform.py\", line 402, in __call__\r\n\r\n return self( *args, **named )\r\n\r\n File \"c:\\hostedtoolcache\\windows\\python\\3.7.3\\x64\\lib\\site-packages\\OpenGL\\error.py\", line 232, in glCheckError\r\n\r\n baseOperation = baseOperation,\r\n\r\nOpenGL.error.GLError: GLError(\r\n\r\n\terr = 1282,\r\n\r\n\tdescription = b'invalid operation',\r\n\r\n\tbaseOperation = glGetString,\r\n\r\n\tcArguments = (GL_VERSION,)\r\n\r\n)\r\n\r\n\r\nFailed Scatter Plot Example Test Located in GLScatterPlotItem.py \r\n```\r\n\r\nLikely related to #928 \n", "before_files": [{"content": "from ..Qt import QtCore, QtGui, QtOpenGL\nfrom OpenGL.GL import *\napp = QtGui.QApplication([])\n\nclass GLTest(QtOpenGL.QGLWidget):\n def __init__(self):\n QtOpenGL.QGLWidget.__init__(self)\n self.makeCurrent()\n print(\"GL version:\" + glGetString(GL_VERSION))\n print(\"MAX_TEXTURE_SIZE: %d\" % glGetIntegerv(GL_MAX_TEXTURE_SIZE))\n print(\"MAX_3D_TEXTURE_SIZE: %d\" % glGetIntegerv(GL_MAX_3D_TEXTURE_SIZE))\n print(\"Extensions: \" + glGetString(GL_EXTENSIONS))\n\nGLTest()\n\n\n", "path": "pyqtgraph/opengl/glInfo.py"}]}
1,711
200
gh_patches_debug_27636
rasdani/github-patches
git_diff
graspologic-org__graspologic-829
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [BUG] Inaccurracy in how to use autokmeans ## Expected Behavior https://github.com/microsoft/graspologic/blob/10de2bf17b972decbab318568154af226dcd71fa/graspologic/cluster/kclust.py#L16 This line is false; higher silhouette score is better, to my knowledge? https://scikit-learn.org/stable/modules/generated/sklearn.metrics.silhouette_score.html ## Actual Behavior Documentation correctly reports how to use the package. </issue> <code> [start of graspologic/cluster/kclust.py] 1 # Copyright (c) Microsoft Corporation and contributors. 2 # Licensed under the MIT License. 3 4 from typing import Optional, Union 5 6 import numpy as np 7 from sklearn.cluster import KMeans 8 from sklearn.metrics import adjusted_rand_score, silhouette_score 9 10 from graspologic.types import List 11 12 from .base import BaseCluster 13 14 15 class KMeansCluster(BaseCluster): 16 ari_: Optional[List[float]] 17 18 """ 19 KMeans Cluster. 20 21 It computes all possible models from one component to 22 ``max_clusters``. The best model is given by the lowest silhouette score. 23 24 Parameters 25 ---------- 26 max_clusters : int, defaults to 1. 27 The maximum number of mixture components to consider. 28 29 random_state : int, RandomState instance or None, optional (default=None) 30 If int, ``random_state`` is the seed used by the random number generator; 31 If RandomState instance, ``random_state`` is the random number generator; 32 If None, the random number generator is the RandomState instance used 33 by ``np.random``. 34 35 Attributes 36 ---------- 37 n_clusters_ : int 38 Optimal number of components. If y is given, it is based on largest 39 ARI. Otherwise, it is based on smallest loss. 40 41 model_ : KMeans object 42 Fitted KMeans object fitted with optimal n_components. 43 44 silhouette_ : list 45 List of silhouette scores computed for all possible number 46 of clusters given by ``range(2, max_clusters)``. 47 48 ari_ : list 49 Only computed when y is given. List of ARI values computed for 50 all possible number of clusters given by ``range(2, max_clusters)``. 51 """ 52 53 def __init__( 54 self, 55 max_clusters: int = 2, 56 random_state: Optional[Union[int, np.random.RandomState]] = None, 57 ): 58 if isinstance(max_clusters, int): 59 if max_clusters <= 1: 60 msg = "n_components must be >= 2 or None." 61 raise ValueError(msg) 62 else: 63 self.max_clusters = max_clusters 64 else: 65 msg = "max_clusters must be an integer, not {}.".format(type(max_clusters)) 66 raise TypeError(msg) 67 self.random_state = random_state 68 69 def fit(self, X: np.ndarray, y: Optional[np.ndarray] = None) -> "KMeansCluster": 70 """ 71 Fits kmeans model to the data. 72 73 Parameters 74 ---------- 75 X : array-like, shape (n_samples, n_features) 76 List of n_features-dimensional data points. Each row 77 corresponds to a single data point. 78 79 y : array-like, shape (n_samples,), optional (default=None) 80 List of labels for `X` if available. Used to compute ARI scores. 81 82 Returns 83 ------- 84 self 85 """ 86 # Deal with number of clusters 87 if self.max_clusters > X.shape[0]: 88 msg = "n_components must be >= n_samples, but got \ 89 n_components = {}, n_samples = {}".format( 90 self.max_clusters, X.shape[0] 91 ) 92 raise ValueError(msg) 93 else: 94 max_clusters = self.max_clusters 95 96 # Get parameters 97 random_state = self.random_state 98 99 # Compute all models 100 models = [] 101 silhouettes = [] 102 aris = [] 103 for n in range(2, max_clusters + 1): 104 model = KMeans(n_clusters=n, random_state=random_state) 105 106 # Fit and compute values 107 predictions = model.fit_predict(X) 108 models.append(model) 109 silhouettes.append(silhouette_score(X, predictions)) 110 if y is not None: 111 aris.append(adjusted_rand_score(y, predictions)) 112 113 if y is not None: 114 self.ari_ = aris 115 self.silhouette_ = silhouettes 116 self.n_clusters_ = np.argmax(aris) + 1 117 self.model_ = models[np.argmax(aris)] 118 else: 119 self.ari_ = None 120 self.silhouette_ = silhouettes 121 self.n_clusters_ = np.argmax(silhouettes) + 1 122 self.model_ = models[np.argmax(silhouettes)] 123 124 return self 125 [end of graspologic/cluster/kclust.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/graspologic/cluster/kclust.py b/graspologic/cluster/kclust.py --- a/graspologic/cluster/kclust.py +++ b/graspologic/cluster/kclust.py @@ -18,13 +18,15 @@ """ KMeans Cluster. - It computes all possible models from one component to - ``max_clusters``. The best model is given by the lowest silhouette score. + It computes all possible models from one component to ``max_clusters``. + When the true labels are known, the best model is given by the model with highest + adjusted Rand index (ARI). + Otherwise, the best model is given by the model with highest silhouette score. Parameters ---------- - max_clusters : int, defaults to 1. - The maximum number of mixture components to consider. + max_clusters : int, default=2. + The maximum number of clusters to consider. Must be ``>=2``. random_state : int, RandomState instance or None, optional (default=None) If int, ``random_state`` is the seed used by the random number generator; @@ -35,11 +37,11 @@ Attributes ---------- n_clusters_ : int - Optimal number of components. If y is given, it is based on largest - ARI. Otherwise, it is based on smallest loss. + Optimal number of clusters. If y is given, it is based on largest + ARI. Otherwise, it is based on highest silhouette score. model_ : KMeans object - Fitted KMeans object fitted with optimal n_components. + Fitted KMeans object fitted with ``n_clusters_``. silhouette_ : list List of silhouette scores computed for all possible number
{"golden_diff": "diff --git a/graspologic/cluster/kclust.py b/graspologic/cluster/kclust.py\n--- a/graspologic/cluster/kclust.py\n+++ b/graspologic/cluster/kclust.py\n@@ -18,13 +18,15 @@\n \"\"\"\n KMeans Cluster.\n \n- It computes all possible models from one component to\n- ``max_clusters``. The best model is given by the lowest silhouette score.\n+ It computes all possible models from one component to ``max_clusters``.\n+ When the true labels are known, the best model is given by the model with highest\n+ adjusted Rand index (ARI).\n+ Otherwise, the best model is given by the model with highest silhouette score.\n \n Parameters\n ----------\n- max_clusters : int, defaults to 1.\n- The maximum number of mixture components to consider.\n+ max_clusters : int, default=2.\n+ The maximum number of clusters to consider. Must be ``>=2``.\n \n random_state : int, RandomState instance or None, optional (default=None)\n If int, ``random_state`` is the seed used by the random number generator;\n@@ -35,11 +37,11 @@\n Attributes\n ----------\n n_clusters_ : int\n- Optimal number of components. If y is given, it is based on largest\n- ARI. Otherwise, it is based on smallest loss.\n+ Optimal number of clusters. If y is given, it is based on largest\n+ ARI. Otherwise, it is based on highest silhouette score.\n \n model_ : KMeans object\n- Fitted KMeans object fitted with optimal n_components.\n+ Fitted KMeans object fitted with ``n_clusters_``.\n \n silhouette_ : list\n List of silhouette scores computed for all possible number\n", "issue": "[BUG] Inaccurracy in how to use autokmeans\n## Expected Behavior\r\n\r\nhttps://github.com/microsoft/graspologic/blob/10de2bf17b972decbab318568154af226dcd71fa/graspologic/cluster/kclust.py#L16\r\n\r\nThis line is false; higher silhouette score is better, to my knowledge? https://scikit-learn.org/stable/modules/generated/sklearn.metrics.silhouette_score.html\r\n\r\n## Actual Behavior\r\n\r\nDocumentation correctly reports how to use the package.\r\n\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation and contributors.\n# Licensed under the MIT License.\n\nfrom typing import Optional, Union\n\nimport numpy as np\nfrom sklearn.cluster import KMeans\nfrom sklearn.metrics import adjusted_rand_score, silhouette_score\n\nfrom graspologic.types import List\n\nfrom .base import BaseCluster\n\n\nclass KMeansCluster(BaseCluster):\n ari_: Optional[List[float]]\n\n \"\"\"\n KMeans Cluster.\n\n It computes all possible models from one component to\n ``max_clusters``. The best model is given by the lowest silhouette score.\n\n Parameters\n ----------\n max_clusters : int, defaults to 1.\n The maximum number of mixture components to consider.\n\n random_state : int, RandomState instance or None, optional (default=None)\n If int, ``random_state`` is the seed used by the random number generator;\n If RandomState instance, ``random_state`` is the random number generator;\n If None, the random number generator is the RandomState instance used\n by ``np.random``.\n\n Attributes\n ----------\n n_clusters_ : int\n Optimal number of components. If y is given, it is based on largest\n ARI. Otherwise, it is based on smallest loss.\n\n model_ : KMeans object\n Fitted KMeans object fitted with optimal n_components.\n\n silhouette_ : list\n List of silhouette scores computed for all possible number\n of clusters given by ``range(2, max_clusters)``.\n\n ari_ : list\n Only computed when y is given. List of ARI values computed for\n all possible number of clusters given by ``range(2, max_clusters)``.\n \"\"\"\n\n def __init__(\n self,\n max_clusters: int = 2,\n random_state: Optional[Union[int, np.random.RandomState]] = None,\n ):\n if isinstance(max_clusters, int):\n if max_clusters <= 1:\n msg = \"n_components must be >= 2 or None.\"\n raise ValueError(msg)\n else:\n self.max_clusters = max_clusters\n else:\n msg = \"max_clusters must be an integer, not {}.\".format(type(max_clusters))\n raise TypeError(msg)\n self.random_state = random_state\n\n def fit(self, X: np.ndarray, y: Optional[np.ndarray] = None) -> \"KMeansCluster\":\n \"\"\"\n Fits kmeans model to the data.\n\n Parameters\n ----------\n X : array-like, shape (n_samples, n_features)\n List of n_features-dimensional data points. Each row\n corresponds to a single data point.\n\n y : array-like, shape (n_samples,), optional (default=None)\n List of labels for `X` if available. Used to compute ARI scores.\n\n Returns\n -------\n self\n \"\"\"\n # Deal with number of clusters\n if self.max_clusters > X.shape[0]:\n msg = \"n_components must be >= n_samples, but got \\\n n_components = {}, n_samples = {}\".format(\n self.max_clusters, X.shape[0]\n )\n raise ValueError(msg)\n else:\n max_clusters = self.max_clusters\n\n # Get parameters\n random_state = self.random_state\n\n # Compute all models\n models = []\n silhouettes = []\n aris = []\n for n in range(2, max_clusters + 1):\n model = KMeans(n_clusters=n, random_state=random_state)\n\n # Fit and compute values\n predictions = model.fit_predict(X)\n models.append(model)\n silhouettes.append(silhouette_score(X, predictions))\n if y is not None:\n aris.append(adjusted_rand_score(y, predictions))\n\n if y is not None:\n self.ari_ = aris\n self.silhouette_ = silhouettes\n self.n_clusters_ = np.argmax(aris) + 1\n self.model_ = models[np.argmax(aris)]\n else:\n self.ari_ = None\n self.silhouette_ = silhouettes\n self.n_clusters_ = np.argmax(silhouettes) + 1\n self.model_ = models[np.argmax(silhouettes)]\n\n return self\n", "path": "graspologic/cluster/kclust.py"}]}
1,840
403
gh_patches_debug_3066
rasdani/github-patches
git_diff
searx__searx-200
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> bing_news can't parse other languages date When searching for french article, the time is noted as "Il y a 5 minutes", and so, doesn't match for the regex `"^[0-9]+ minute(s|) ago$"`. Do you see a way to internationalize this detection ? </issue> <code> [start of searx/engines/bing_news.py] 1 ## Bing (News) 2 # 3 # @website https://www.bing.com/news 4 # @provide-api yes (http://datamarket.azure.com/dataset/bing/search), 5 # max. 5000 query/month 6 # 7 # @using-api no (because of query limit) 8 # @results HTML (using search portal) 9 # @stable no (HTML can change) 10 # @parse url, title, content, publishedDate 11 12 from urllib import urlencode 13 from cgi import escape 14 from lxml import html 15 from datetime import datetime, timedelta 16 from dateutil import parser 17 import re 18 19 # engine dependent config 20 categories = ['news'] 21 paging = True 22 language_support = True 23 24 # search-url 25 base_url = 'https://www.bing.com/' 26 search_string = 'news/search?{query}&first={offset}' 27 28 29 # do search-request 30 def request(query, params): 31 offset = (params['pageno'] - 1) * 10 + 1 32 33 if params['language'] == 'all': 34 language = 'en-US' 35 else: 36 language = params['language'].replace('_', '-') 37 38 search_path = search_string.format( 39 query=urlencode({'q': query, 'setmkt': language}), 40 offset=offset) 41 42 params['cookies']['SRCHHPGUSR'] = \ 43 'NEWWND=0&NRSLT=-1&SRCHLANG=' + language.split('-')[0] 44 45 params['url'] = base_url + search_path 46 return params 47 48 49 # get response from search-request 50 def response(resp): 51 results = [] 52 53 dom = html.fromstring(resp.content) 54 55 # parse results 56 for result in dom.xpath('//div[@class="sn_r"]'): 57 link = result.xpath('.//div[@class="newstitle"]/a')[0] 58 url = link.attrib.get('href') 59 title = ' '.join(link.xpath('.//text()')) 60 contentXPath = result.xpath('.//div[@class="sn_txt"]/div' 61 '//span[@class="sn_snip"]//text()') 62 if contentXPath is not None: 63 content = escape(' '.join(contentXPath)) 64 65 # parse publishedDate 66 publishedDateXPath = result.xpath('.//div[@class="sn_txt"]/div' 67 '//span[contains(@class,"sn_ST")]' 68 '//span[contains(@class,"sn_tm")]' 69 '//text()') 70 if publishedDateXPath is not None: 71 publishedDate = escape(' '.join(publishedDateXPath)) 72 73 if re.match("^[0-9]+ minute(s|) ago$", publishedDate): 74 timeNumbers = re.findall(r'\d+', publishedDate) 75 publishedDate = datetime.now()\ 76 - timedelta(minutes=int(timeNumbers[0])) 77 elif re.match("^[0-9]+ hour(s|) ago$", publishedDate): 78 timeNumbers = re.findall(r'\d+', publishedDate) 79 publishedDate = datetime.now()\ 80 - timedelta(hours=int(timeNumbers[0])) 81 elif re.match("^[0-9]+ hour(s|)," 82 " [0-9]+ minute(s|) ago$", publishedDate): 83 timeNumbers = re.findall(r'\d+', publishedDate) 84 publishedDate = datetime.now()\ 85 - timedelta(hours=int(timeNumbers[0]))\ 86 - timedelta(minutes=int(timeNumbers[1])) 87 elif re.match("^[0-9]+ day(s|) ago$", publishedDate): 88 timeNumbers = re.findall(r'\d+', publishedDate) 89 publishedDate = datetime.now()\ 90 - timedelta(days=int(timeNumbers[0])) 91 else: 92 try: 93 # FIXME use params['language'] to parse either mm/dd or dd/mm 94 publishedDate = parser.parse(publishedDate, dayfirst=False) 95 except TypeError: 96 # FIXME 97 publishedDate = datetime.now() 98 99 # append result 100 results.append({'url': url, 101 'title': title, 102 'publishedDate': publishedDate, 103 'content': content}) 104 105 # return results 106 return results 107 [end of searx/engines/bing_news.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/searx/engines/bing_news.py b/searx/engines/bing_news.py --- a/searx/engines/bing_news.py +++ b/searx/engines/bing_news.py @@ -39,8 +39,7 @@ query=urlencode({'q': query, 'setmkt': language}), offset=offset) - params['cookies']['SRCHHPGUSR'] = \ - 'NEWWND=0&NRSLT=-1&SRCHLANG=' + language.split('-')[0] + params['cookies']['_FP'] = "ui=en-US" params['url'] = base_url + search_path return params
{"golden_diff": "diff --git a/searx/engines/bing_news.py b/searx/engines/bing_news.py\n--- a/searx/engines/bing_news.py\n+++ b/searx/engines/bing_news.py\n@@ -39,8 +39,7 @@\n query=urlencode({'q': query, 'setmkt': language}),\n offset=offset)\n \n- params['cookies']['SRCHHPGUSR'] = \\\n- 'NEWWND=0&NRSLT=-1&SRCHLANG=' + language.split('-')[0]\n+ params['cookies']['_FP'] = \"ui=en-US\"\n \n params['url'] = base_url + search_path\n return params\n", "issue": "bing_news can't parse other languages date\nWhen searching for french article, the time is noted as \"Il y a 5 minutes\", and so, doesn't match for the regex `\"^[0-9]+ minute(s|) ago$\"`.\n\nDo you see a way to internationalize this detection ?\n\n", "before_files": [{"content": "## Bing (News)\n#\n# @website https://www.bing.com/news\n# @provide-api yes (http://datamarket.azure.com/dataset/bing/search),\n# max. 5000 query/month\n#\n# @using-api no (because of query limit)\n# @results HTML (using search portal)\n# @stable no (HTML can change)\n# @parse url, title, content, publishedDate\n\nfrom urllib import urlencode\nfrom cgi import escape\nfrom lxml import html\nfrom datetime import datetime, timedelta\nfrom dateutil import parser\nimport re\n\n# engine dependent config\ncategories = ['news']\npaging = True\nlanguage_support = True\n\n# search-url\nbase_url = 'https://www.bing.com/'\nsearch_string = 'news/search?{query}&first={offset}'\n\n\n# do search-request\ndef request(query, params):\n offset = (params['pageno'] - 1) * 10 + 1\n\n if params['language'] == 'all':\n language = 'en-US'\n else:\n language = params['language'].replace('_', '-')\n\n search_path = search_string.format(\n query=urlencode({'q': query, 'setmkt': language}),\n offset=offset)\n\n params['cookies']['SRCHHPGUSR'] = \\\n 'NEWWND=0&NRSLT=-1&SRCHLANG=' + language.split('-')[0]\n\n params['url'] = base_url + search_path\n return params\n\n\n# get response from search-request\ndef response(resp):\n results = []\n\n dom = html.fromstring(resp.content)\n\n # parse results\n for result in dom.xpath('//div[@class=\"sn_r\"]'):\n link = result.xpath('.//div[@class=\"newstitle\"]/a')[0]\n url = link.attrib.get('href')\n title = ' '.join(link.xpath('.//text()'))\n contentXPath = result.xpath('.//div[@class=\"sn_txt\"]/div'\n '//span[@class=\"sn_snip\"]//text()')\n if contentXPath is not None:\n content = escape(' '.join(contentXPath))\n\n # parse publishedDate\n publishedDateXPath = result.xpath('.//div[@class=\"sn_txt\"]/div'\n '//span[contains(@class,\"sn_ST\")]'\n '//span[contains(@class,\"sn_tm\")]'\n '//text()')\n if publishedDateXPath is not None:\n publishedDate = escape(' '.join(publishedDateXPath))\n\n if re.match(\"^[0-9]+ minute(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now()\\\n - timedelta(minutes=int(timeNumbers[0]))\n elif re.match(\"^[0-9]+ hour(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now()\\\n - timedelta(hours=int(timeNumbers[0]))\n elif re.match(\"^[0-9]+ hour(s|),\"\n \" [0-9]+ minute(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now()\\\n - timedelta(hours=int(timeNumbers[0]))\\\n - timedelta(minutes=int(timeNumbers[1]))\n elif re.match(\"^[0-9]+ day(s|) ago$\", publishedDate):\n timeNumbers = re.findall(r'\\d+', publishedDate)\n publishedDate = datetime.now()\\\n - timedelta(days=int(timeNumbers[0]))\n else:\n try:\n # FIXME use params['language'] to parse either mm/dd or dd/mm\n publishedDate = parser.parse(publishedDate, dayfirst=False)\n except TypeError:\n # FIXME\n publishedDate = datetime.now()\n\n # append result\n results.append({'url': url,\n 'title': title,\n 'publishedDate': publishedDate,\n 'content': content})\n\n # return results\n return results\n", "path": "searx/engines/bing_news.py"}]}
1,682
159
gh_patches_debug_3814
rasdani/github-patches
git_diff
cookiecutter__cookiecutter-573
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Problem with --checkout reclone The message should ask me about recloning `/Users/audreyr/.cookiecutters/cookiecutter-pypackage`, not `/Users/audreyr/.cookiecutters`. ``` $ cookiecutter https://github.com/eliasdorneles/cookiecutter-pypackage/ -c adding-travis-setup-for-pypi-deployment You've cloned /Users/audreyr/.cookiecutters before. Is it okay to delete and re-clone it? [yes]: ``` </issue> <code> [start of cookiecutter/vcs.py] 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 4 """ 5 cookiecutter.vcs 6 ---------------- 7 8 Helper functions for working with version control systems. 9 """ 10 11 from __future__ import unicode_literals 12 import logging 13 import os 14 import subprocess 15 import sys 16 17 from whichcraft import which 18 19 from .exceptions import UnknownRepoType, VCSNotInstalled 20 from .prompt import read_user_yes_no 21 from .utils import make_sure_path_exists, rmtree 22 23 24 def prompt_and_delete_repo(repo_dir, no_input=False): 25 """ 26 Asks the user whether it's okay to delete the previously-cloned repo. 27 If yes, deletes it. Otherwise, Cookiecutter exits. 28 29 :param repo_dir: Directory of previously-cloned repo. 30 :param no_input: Suppress prompt to delete repo and just delete it. 31 """ 32 33 # Suppress prompt if called via API 34 if no_input: 35 ok_to_delete = True 36 else: 37 question = ( 38 "You've cloned {0} before. " 39 'Is it okay to delete and re-clone it?' 40 ).format(repo_dir) 41 42 ok_to_delete = read_user_yes_no(question, 'yes') 43 44 if ok_to_delete: 45 rmtree(repo_dir) 46 else: 47 sys.exit() 48 49 50 def identify_repo(repo_url): 51 """ 52 Determines if `repo_url` should be treated as a URL to a git or hg repo. 53 Repos can be identified prepeding "hg+" or "git+" to repo URL. 54 55 :param repo_url: Repo URL of unknown type. 56 :returns: ("git", repo_url), ("hg", repo_url), or None. 57 """ 58 repo_url_values = repo_url.split('+') 59 if len(repo_url_values) == 2: 60 repo_type = repo_url_values[0] 61 if repo_type in ["git", "hg"]: 62 return repo_type, repo_url_values[1] 63 else: 64 raise UnknownRepoType 65 else: 66 if "git" in repo_url: 67 return "git", repo_url 68 elif "bitbucket" in repo_url: 69 return "hg", repo_url 70 else: 71 raise UnknownRepoType 72 73 74 def is_vcs_installed(repo_type): 75 """ 76 Check if the version control system for a repo type is installed. 77 78 :param repo_type: 79 """ 80 return bool(which(repo_type)) 81 82 83 def clone(repo_url, checkout=None, clone_to_dir=".", no_input=False): 84 """ 85 Clone a repo to the current directory. 86 87 :param repo_url: Repo URL of unknown type. 88 :param checkout: The branch, tag or commit ID to checkout after clone. 89 :param clone_to_dir: The directory to clone to. 90 Defaults to the current directory. 91 :param no_input: Suppress all user prompts when calling via API. 92 """ 93 94 # Ensure that clone_to_dir exists 95 clone_to_dir = os.path.expanduser(clone_to_dir) 96 make_sure_path_exists(clone_to_dir) 97 98 # identify the repo_type 99 repo_type, repo_url = identify_repo(repo_url) 100 101 # check that the appropriate VCS for the repo_type is installed 102 if not is_vcs_installed(repo_type): 103 msg = "'{0}' is not installed.".format(repo_type) 104 raise VCSNotInstalled(msg) 105 106 tail = os.path.split(repo_url)[1] 107 if repo_type == 'git': 108 repo_dir = os.path.normpath(os.path.join(clone_to_dir, 109 tail.rsplit('.git')[0])) 110 elif repo_type == 'hg': 111 repo_dir = os.path.normpath(os.path.join(clone_to_dir, tail)) 112 logging.debug('repo_dir is {0}'.format(repo_dir)) 113 114 if os.path.isdir(repo_dir): 115 prompt_and_delete_repo(repo_dir, no_input=no_input) 116 117 if repo_type in ['git', 'hg']: 118 subprocess.check_call([repo_type, 'clone', repo_url], cwd=clone_to_dir) 119 if checkout is not None: 120 subprocess.check_call([repo_type, 'checkout', checkout], 121 cwd=repo_dir) 122 123 return repo_dir 124 [end of cookiecutter/vcs.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/cookiecutter/vcs.py b/cookiecutter/vcs.py --- a/cookiecutter/vcs.py +++ b/cookiecutter/vcs.py @@ -103,6 +103,7 @@ msg = "'{0}' is not installed.".format(repo_type) raise VCSNotInstalled(msg) + repo_url = repo_url.rstrip('/') tail = os.path.split(repo_url)[1] if repo_type == 'git': repo_dir = os.path.normpath(os.path.join(clone_to_dir,
{"golden_diff": "diff --git a/cookiecutter/vcs.py b/cookiecutter/vcs.py\n--- a/cookiecutter/vcs.py\n+++ b/cookiecutter/vcs.py\n@@ -103,6 +103,7 @@\n msg = \"'{0}' is not installed.\".format(repo_type)\n raise VCSNotInstalled(msg)\n \n+ repo_url = repo_url.rstrip('/')\n tail = os.path.split(repo_url)[1]\n if repo_type == 'git':\n repo_dir = os.path.normpath(os.path.join(clone_to_dir,\n", "issue": "Problem with --checkout reclone\nThe message should ask me about recloning `/Users/audreyr/.cookiecutters/cookiecutter-pypackage`, not `/Users/audreyr/.cookiecutters`.\n\n```\n$ cookiecutter https://github.com/eliasdorneles/cookiecutter-pypackage/ -c adding-travis-setup-for-pypi-deployment\nYou've cloned /Users/audreyr/.cookiecutters before. Is it okay to delete and re-clone it? [yes]: \n```\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\ncookiecutter.vcs\n----------------\n\nHelper functions for working with version control systems.\n\"\"\"\n\nfrom __future__ import unicode_literals\nimport logging\nimport os\nimport subprocess\nimport sys\n\nfrom whichcraft import which\n\nfrom .exceptions import UnknownRepoType, VCSNotInstalled\nfrom .prompt import read_user_yes_no\nfrom .utils import make_sure_path_exists, rmtree\n\n\ndef prompt_and_delete_repo(repo_dir, no_input=False):\n \"\"\"\n Asks the user whether it's okay to delete the previously-cloned repo.\n If yes, deletes it. Otherwise, Cookiecutter exits.\n\n :param repo_dir: Directory of previously-cloned repo.\n :param no_input: Suppress prompt to delete repo and just delete it.\n \"\"\"\n\n # Suppress prompt if called via API\n if no_input:\n ok_to_delete = True\n else:\n question = (\n \"You've cloned {0} before. \"\n 'Is it okay to delete and re-clone it?'\n ).format(repo_dir)\n\n ok_to_delete = read_user_yes_no(question, 'yes')\n\n if ok_to_delete:\n rmtree(repo_dir)\n else:\n sys.exit()\n\n\ndef identify_repo(repo_url):\n \"\"\"\n Determines if `repo_url` should be treated as a URL to a git or hg repo.\n Repos can be identified prepeding \"hg+\" or \"git+\" to repo URL.\n\n :param repo_url: Repo URL of unknown type.\n :returns: (\"git\", repo_url), (\"hg\", repo_url), or None.\n \"\"\"\n repo_url_values = repo_url.split('+')\n if len(repo_url_values) == 2:\n repo_type = repo_url_values[0]\n if repo_type in [\"git\", \"hg\"]:\n return repo_type, repo_url_values[1]\n else:\n raise UnknownRepoType\n else:\n if \"git\" in repo_url:\n return \"git\", repo_url\n elif \"bitbucket\" in repo_url:\n return \"hg\", repo_url\n else:\n raise UnknownRepoType\n\n\ndef is_vcs_installed(repo_type):\n \"\"\"\n Check if the version control system for a repo type is installed.\n\n :param repo_type:\n \"\"\"\n return bool(which(repo_type))\n\n\ndef clone(repo_url, checkout=None, clone_to_dir=\".\", no_input=False):\n \"\"\"\n Clone a repo to the current directory.\n\n :param repo_url: Repo URL of unknown type.\n :param checkout: The branch, tag or commit ID to checkout after clone.\n :param clone_to_dir: The directory to clone to.\n Defaults to the current directory.\n :param no_input: Suppress all user prompts when calling via API.\n \"\"\"\n\n # Ensure that clone_to_dir exists\n clone_to_dir = os.path.expanduser(clone_to_dir)\n make_sure_path_exists(clone_to_dir)\n\n # identify the repo_type\n repo_type, repo_url = identify_repo(repo_url)\n\n # check that the appropriate VCS for the repo_type is installed\n if not is_vcs_installed(repo_type):\n msg = \"'{0}' is not installed.\".format(repo_type)\n raise VCSNotInstalled(msg)\n\n tail = os.path.split(repo_url)[1]\n if repo_type == 'git':\n repo_dir = os.path.normpath(os.path.join(clone_to_dir,\n tail.rsplit('.git')[0]))\n elif repo_type == 'hg':\n repo_dir = os.path.normpath(os.path.join(clone_to_dir, tail))\n logging.debug('repo_dir is {0}'.format(repo_dir))\n\n if os.path.isdir(repo_dir):\n prompt_and_delete_repo(repo_dir, no_input=no_input)\n\n if repo_type in ['git', 'hg']:\n subprocess.check_call([repo_type, 'clone', repo_url], cwd=clone_to_dir)\n if checkout is not None:\n subprocess.check_call([repo_type, 'checkout', checkout],\n cwd=repo_dir)\n\n return repo_dir\n", "path": "cookiecutter/vcs.py"}]}
1,790
122
gh_patches_debug_22116
rasdani/github-patches
git_diff
pyload__pyload-1381
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> uplea plugin (still) broken Hi again, sorry but inspite of #1369 and #1375, uplea is still not working; now it's back with downloading the HTML download page... 24 26.04.2015 23:29:20 INFO Download finished: ***** 23 26.04.2015 23:29:02 INFO Download starts: **** The resulting file has correct name but is 14KB big; expected size if around 350MB </issue> <code> [start of module/plugins/hoster/UpleaCom.py] 1 # -*- coding: utf-8 -*- 2 3 import re 4 5 from urlparse import urljoin 6 7 from module.plugins.internal.XFSHoster import XFSHoster, create_getInfo 8 9 10 class UpleaCom(XFSHoster): 11 __name__ = "UpleaCom" 12 __type__ = "hoster" 13 __version__ = "0.08" 14 15 __pattern__ = r'https?://(?:www\.)?uplea\.com/dl/\w{15}' 16 17 __description__ = """Uplea.com hoster plugin""" 18 __license__ = "GPLv3" 19 __authors__ = [("Redleon", None), 20 ("GammaC0de", None)] 21 22 23 NAME_PATTERN = r'class="agmd size18">(?P<N>.+?)<' 24 SIZE_PATTERN = r'size14">(?P<S>[\d.,]+) (?P<U>[\w^_]+?)</span>' 25 SIZE_REPLACEMENTS = [('Ko','KB'), ('Mo','MB'), ('Go','GB')] 26 27 OFFLINE_PATTERN = r'>You followed an invalid or expired link' 28 PREMIUM_PATTERN = r'You need to have a Premium subscription to download this file' 29 30 LINK_PATTERN = r'"(https?://\w+\.uplea\.com/anonym/.*?)"' 31 HOSTER_DOMAIN = "uplea.com" 32 33 WAIT_PATTERN = r'timeText: ?([\d.]+),' 34 STEP_PATTERN = r'<a href="(/step/.+)">' 35 36 37 def setup(self): 38 self.multiDL = False 39 self.chunkLimit = 1 40 self.resumeDownload = True 41 42 43 def handleFree(self, pyfile): 44 m = re.search(self.STEP_PATTERN, self.html) 45 if m is None: 46 self.error(_("STEP_PATTERN not found")) 47 48 self.html = self.load(urljoin("http://uplea.com/", m.group(1))) 49 50 m = re.search(self.WAIT_PATTERN, self.html) 51 if m: 52 self.logDebug(_("Waiting %s seconds") % m.group(1)) 53 self.wait(m.group(1), True) 54 self.retry() 55 56 m = re.search(self.PREMIUM_PATTERN, self.html) 57 if m: 58 self.error(_("This URL requires a premium account")) 59 60 m = re.search(self.LINK_PATTERN, self.html) 61 if m is None: 62 self.error(_("LINK_PATTERN not found")) 63 64 self.link = m.group(1) 65 self.wait(15) 66 67 68 getInfo = create_getInfo(UpleaCom) 69 [end of module/plugins/hoster/UpleaCom.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/module/plugins/hoster/UpleaCom.py b/module/plugins/hoster/UpleaCom.py --- a/module/plugins/hoster/UpleaCom.py +++ b/module/plugins/hoster/UpleaCom.py @@ -10,7 +10,7 @@ class UpleaCom(XFSHoster): __name__ = "UpleaCom" __type__ = "hoster" - __version__ = "0.08" + __version__ = "0.10" __pattern__ = r'https?://(?:www\.)?uplea\.com/dl/\w{15}' @@ -20,9 +20,11 @@ ("GammaC0de", None)] - NAME_PATTERN = r'class="agmd size18">(?P<N>.+?)<' - SIZE_PATTERN = r'size14">(?P<S>[\d.,]+) (?P<U>[\w^_]+?)</span>' - SIZE_REPLACEMENTS = [('Ko','KB'), ('Mo','MB'), ('Go','GB')] + DISPOSITION = False #@TODO: Remove in 0.4.10 + + NAME_PATTERN = r'<span class="gold-text">(?P<N>.+?)</span>' + SIZE_PATTERN = r'<span class="label label-info agmd">(?P<S>[\d.,]+) (?P<U>[\w^_]+?)</span>' + SIZE_REPLACEMENTS = [('ko','KB'), ('mo','MB'), ('go','GB'), ('Ko','KB'), ('Mo','MB'), ('Go','GB')] OFFLINE_PATTERN = r'>You followed an invalid or expired link' PREMIUM_PATTERN = r'You need to have a Premium subscription to download this file'
{"golden_diff": "diff --git a/module/plugins/hoster/UpleaCom.py b/module/plugins/hoster/UpleaCom.py\n--- a/module/plugins/hoster/UpleaCom.py\n+++ b/module/plugins/hoster/UpleaCom.py\n@@ -10,7 +10,7 @@\n class UpleaCom(XFSHoster):\n __name__ = \"UpleaCom\"\n __type__ = \"hoster\"\n- __version__ = \"0.08\"\n+ __version__ = \"0.10\"\n \n __pattern__ = r'https?://(?:www\\.)?uplea\\.com/dl/\\w{15}'\n \n@@ -20,9 +20,11 @@\n (\"GammaC0de\", None)]\n \n \n- NAME_PATTERN = r'class=\"agmd size18\">(?P<N>.+?)<'\n- SIZE_PATTERN = r'size14\">(?P<S>[\\d.,]+) (?P<U>[\\w^_]+?)</span>'\n- SIZE_REPLACEMENTS = [('Ko','KB'), ('Mo','MB'), ('Go','GB')]\n+ DISPOSITION = False #@TODO: Remove in 0.4.10\n+\n+ NAME_PATTERN = r'<span class=\"gold-text\">(?P<N>.+?)</span>'\n+ SIZE_PATTERN = r'<span class=\"label label-info agmd\">(?P<S>[\\d.,]+) (?P<U>[\\w^_]+?)</span>'\n+ SIZE_REPLACEMENTS = [('ko','KB'), ('mo','MB'), ('go','GB'), ('Ko','KB'), ('Mo','MB'), ('Go','GB')]\n \n OFFLINE_PATTERN = r'>You followed an invalid or expired link'\n PREMIUM_PATTERN = r'You need to have a Premium subscription to download this file'\n", "issue": "uplea plugin (still) broken\nHi again,\n\nsorry but inspite of #1369 and #1375, uplea is still not working; now it's back with downloading the HTML download page...\n24 26.04.2015 23:29:20 INFO Download finished: *****\n23 26.04.2015 23:29:02 INFO Download starts: ****\n\nThe resulting file has correct name but is 14KB big; expected size if around 350MB\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nimport re\n\nfrom urlparse import urljoin\n\nfrom module.plugins.internal.XFSHoster import XFSHoster, create_getInfo\n\n\nclass UpleaCom(XFSHoster):\n __name__ = \"UpleaCom\"\n __type__ = \"hoster\"\n __version__ = \"0.08\"\n\n __pattern__ = r'https?://(?:www\\.)?uplea\\.com/dl/\\w{15}'\n\n __description__ = \"\"\"Uplea.com hoster plugin\"\"\"\n __license__ = \"GPLv3\"\n __authors__ = [(\"Redleon\", None),\n (\"GammaC0de\", None)]\n\n\n NAME_PATTERN = r'class=\"agmd size18\">(?P<N>.+?)<'\n SIZE_PATTERN = r'size14\">(?P<S>[\\d.,]+) (?P<U>[\\w^_]+?)</span>'\n SIZE_REPLACEMENTS = [('Ko','KB'), ('Mo','MB'), ('Go','GB')]\n\n OFFLINE_PATTERN = r'>You followed an invalid or expired link'\n PREMIUM_PATTERN = r'You need to have a Premium subscription to download this file'\n\n LINK_PATTERN = r'\"(https?://\\w+\\.uplea\\.com/anonym/.*?)\"'\n HOSTER_DOMAIN = \"uplea.com\"\n\n WAIT_PATTERN = r'timeText: ?([\\d.]+),'\n STEP_PATTERN = r'<a href=\"(/step/.+)\">'\n\n\n def setup(self):\n self.multiDL = False\n self.chunkLimit = 1\n self.resumeDownload = True\n\n\n def handleFree(self, pyfile):\n m = re.search(self.STEP_PATTERN, self.html)\n if m is None:\n self.error(_(\"STEP_PATTERN not found\"))\n\n self.html = self.load(urljoin(\"http://uplea.com/\", m.group(1)))\n\n m = re.search(self.WAIT_PATTERN, self.html)\n if m:\n self.logDebug(_(\"Waiting %s seconds\") % m.group(1))\n self.wait(m.group(1), True)\n self.retry()\n\n m = re.search(self.PREMIUM_PATTERN, self.html)\n if m:\n self.error(_(\"This URL requires a premium account\"))\n\n m = re.search(self.LINK_PATTERN, self.html)\n if m is None:\n self.error(_(\"LINK_PATTERN not found\"))\n\n self.link = m.group(1)\n self.wait(15)\n\n\ngetInfo = create_getInfo(UpleaCom)\n", "path": "module/plugins/hoster/UpleaCom.py"}]}
1,367
403
gh_patches_debug_42531
rasdani/github-patches
git_diff
plone__Products.CMFPlone-1763
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> CSS bundles generation breaks background images relative urls This is a bug related to PR #1300. </issue> <code> [start of Products/CMFPlone/resources/browser/combine.py] 1 from zExceptions import NotFound 2 from Acquisition import aq_base 3 from datetime import datetime 4 from plone.registry.interfaces import IRegistry 5 from plone.resource.file import FilesystemFile 6 from plone.resource.interfaces import IResourceDirectory 7 from Products.CMFPlone.interfaces import IBundleRegistry 8 from Products.CMFPlone.interfaces.resources import ( 9 OVERRIDE_RESOURCE_DIRECTORY_NAME, 10 ) 11 from StringIO import StringIO 12 from zope.component import getUtility 13 from zope.component import queryUtility 14 15 PRODUCTION_RESOURCE_DIRECTORY = "production" 16 17 18 def get_production_resource_directory(): 19 persistent_directory = queryUtility(IResourceDirectory, name="persistent") 20 if persistent_directory is None: 21 return '' 22 container = persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME] 23 try: 24 production_folder = container[PRODUCTION_RESOURCE_DIRECTORY] 25 except NotFound: 26 return "%s/++unique++1" % PRODUCTION_RESOURCE_DIRECTORY 27 timestamp = production_folder.readFile('timestamp.txt') 28 return "%s/++unique++%s" % ( 29 PRODUCTION_RESOURCE_DIRECTORY, timestamp) 30 31 32 def get_resource(context, path): 33 if path.startswith('++plone++'): 34 # ++plone++ resources can be customized, we return their override 35 # value if any 36 overrides = get_override_directory(context) 37 filepath = path[9:] 38 if overrides.isFile(filepath): 39 return overrides.readFile(filepath) 40 41 resource = context.unrestrictedTraverse(path) 42 if isinstance(resource, FilesystemFile): 43 (directory, sep, filename) = path.rpartition('/') 44 return context.unrestrictedTraverse(directory).readFile(filename) 45 else: 46 if hasattr(aq_base(resource), 'GET'): 47 # for FileResource 48 return resource.GET() 49 else: 50 # any BrowserView 51 return resource() 52 53 54 def write_js(context, folder, meta_bundle): 55 registry = getUtility(IRegistry) 56 resources = [] 57 58 # default resources 59 if meta_bundle == 'default' and registry.records.get( 60 'plone.resources/jquery.js' 61 ): 62 resources.append(get_resource(context, 63 registry.records['plone.resources/jquery.js'].value)) 64 resources.append(get_resource(context, 65 registry.records['plone.resources.requirejs'].value)) 66 resources.append(get_resource(context, 67 registry.records['plone.resources.configjs'].value)) 68 69 # bundles 70 bundles = registry.collectionOfInterface( 71 IBundleRegistry, prefix="plone.bundles", check=False) 72 for bundle in bundles.values(): 73 if bundle.merge_with == meta_bundle and bundle.jscompilation: 74 resources.append(get_resource(context, bundle.jscompilation)) 75 76 fi = StringIO() 77 for script in resources: 78 fi.write(script + '\n') 79 folder.writeFile(meta_bundle + ".js", fi) 80 81 82 def write_css(context, folder, meta_bundle): 83 registry = getUtility(IRegistry) 84 resources = [] 85 86 bundles = registry.collectionOfInterface( 87 IBundleRegistry, prefix="plone.bundles", check=False) 88 for bundle in bundles.values(): 89 if bundle.merge_with == meta_bundle and bundle.csscompilation: 90 resources.append(get_resource(context, bundle.csscompilation)) 91 92 fi = StringIO() 93 for script in resources: 94 fi.write(script + '\n') 95 folder.writeFile(meta_bundle + ".css", fi) 96 97 98 def get_override_directory(context): 99 persistent_directory = queryUtility(IResourceDirectory, name="persistent") 100 if persistent_directory is None: 101 return 102 if OVERRIDE_RESOURCE_DIRECTORY_NAME not in persistent_directory: 103 persistent_directory.makeDirectory(OVERRIDE_RESOURCE_DIRECTORY_NAME) 104 return persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME] 105 106 107 def combine_bundles(context): 108 container = get_override_directory(context) 109 if PRODUCTION_RESOURCE_DIRECTORY not in container: 110 container.makeDirectory(PRODUCTION_RESOURCE_DIRECTORY) 111 production_folder = container[PRODUCTION_RESOURCE_DIRECTORY] 112 113 # store timestamp 114 fi = StringIO() 115 fi.write(datetime.now().isoformat()) 116 production_folder.writeFile("timestamp.txt", fi) 117 118 # generate new combined bundles 119 write_js(context, production_folder, 'default') 120 write_js(context, production_folder, 'logged-in') 121 write_css(context, production_folder, 'default') 122 write_css(context, production_folder, 'logged-in') 123 [end of Products/CMFPlone/resources/browser/combine.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/Products/CMFPlone/resources/browser/combine.py b/Products/CMFPlone/resources/browser/combine.py --- a/Products/CMFPlone/resources/browser/combine.py +++ b/Products/CMFPlone/resources/browser/combine.py @@ -1,18 +1,20 @@ -from zExceptions import NotFound from Acquisition import aq_base from datetime import datetime from plone.registry.interfaces import IRegistry from plone.resource.file import FilesystemFile from plone.resource.interfaces import IResourceDirectory from Products.CMFPlone.interfaces import IBundleRegistry -from Products.CMFPlone.interfaces.resources import ( - OVERRIDE_RESOURCE_DIRECTORY_NAME, -) +from Products.CMFPlone.interfaces.resources import OVERRIDE_RESOURCE_DIRECTORY_NAME # noqa from StringIO import StringIO +from zExceptions import NotFound from zope.component import getUtility from zope.component import queryUtility +import logging +import re + PRODUCTION_RESOURCE_DIRECTORY = "production" +logger = logging.getLogger(__name__) def get_production_resource_directory(): @@ -24,6 +26,8 @@ production_folder = container[PRODUCTION_RESOURCE_DIRECTORY] except NotFound: return "%s/++unique++1" % PRODUCTION_RESOURCE_DIRECTORY + if 'timestamp.txt' not in production_folder: + return "%s/++unique++1" % PRODUCTION_RESOURCE_DIRECTORY timestamp = production_folder.readFile('timestamp.txt') return "%s/++unique++%s" % ( PRODUCTION_RESOURCE_DIRECTORY, timestamp) @@ -38,7 +42,12 @@ if overrides.isFile(filepath): return overrides.readFile(filepath) - resource = context.unrestrictedTraverse(path) + try: + resource = context.unrestrictedTraverse(path) + except NotFound: + logger.warn(u"Could not find resource {0}. You may have to create it first.".format(path)) # noqa + return + if isinstance(resource, FilesystemFile): (directory, sep, filename) = path.rpartition('/') return context.unrestrictedTraverse(directory).readFile(filename) @@ -71,7 +80,10 @@ IBundleRegistry, prefix="plone.bundles", check=False) for bundle in bundles.values(): if bundle.merge_with == meta_bundle and bundle.jscompilation: - resources.append(get_resource(context, bundle.jscompilation)) + resource = get_resource(context, bundle.jscompilation) + if not resource: + continue + resources.append(resource) fi = StringIO() for script in resources: @@ -87,7 +99,18 @@ IBundleRegistry, prefix="plone.bundles", check=False) for bundle in bundles.values(): if bundle.merge_with == meta_bundle and bundle.csscompilation: - resources.append(get_resource(context, bundle.csscompilation)) + css = get_resource(context, bundle.csscompilation) + if not css: + continue + (path, sep, filename) = bundle.csscompilation.rpartition('/') + # Process relative urls: + # we prefix with current resource path any url not starting with + # '/' or http: or data: + css = re.sub( + r"""(url\(['"]?(?!['"]?([a-z]+:|\/)))""", + r'\1%s/' % path, + css) + resources.append(css) fi = StringIO() for script in resources:
{"golden_diff": "diff --git a/Products/CMFPlone/resources/browser/combine.py b/Products/CMFPlone/resources/browser/combine.py\n--- a/Products/CMFPlone/resources/browser/combine.py\n+++ b/Products/CMFPlone/resources/browser/combine.py\n@@ -1,18 +1,20 @@\n-from zExceptions import NotFound\n from Acquisition import aq_base\n from datetime import datetime\n from plone.registry.interfaces import IRegistry\n from plone.resource.file import FilesystemFile\n from plone.resource.interfaces import IResourceDirectory\n from Products.CMFPlone.interfaces import IBundleRegistry\n-from Products.CMFPlone.interfaces.resources import (\n- OVERRIDE_RESOURCE_DIRECTORY_NAME,\n-)\n+from Products.CMFPlone.interfaces.resources import OVERRIDE_RESOURCE_DIRECTORY_NAME # noqa\n from StringIO import StringIO\n+from zExceptions import NotFound\n from zope.component import getUtility\n from zope.component import queryUtility\n \n+import logging\n+import re\n+\n PRODUCTION_RESOURCE_DIRECTORY = \"production\"\n+logger = logging.getLogger(__name__)\n \n \n def get_production_resource_directory():\n@@ -24,6 +26,8 @@\n production_folder = container[PRODUCTION_RESOURCE_DIRECTORY]\n except NotFound:\n return \"%s/++unique++1\" % PRODUCTION_RESOURCE_DIRECTORY\n+ if 'timestamp.txt' not in production_folder:\n+ return \"%s/++unique++1\" % PRODUCTION_RESOURCE_DIRECTORY\n timestamp = production_folder.readFile('timestamp.txt')\n return \"%s/++unique++%s\" % (\n PRODUCTION_RESOURCE_DIRECTORY, timestamp)\n@@ -38,7 +42,12 @@\n if overrides.isFile(filepath):\n return overrides.readFile(filepath)\n \n- resource = context.unrestrictedTraverse(path)\n+ try:\n+ resource = context.unrestrictedTraverse(path)\n+ except NotFound:\n+ logger.warn(u\"Could not find resource {0}. You may have to create it first.\".format(path)) # noqa\n+ return\n+\n if isinstance(resource, FilesystemFile):\n (directory, sep, filename) = path.rpartition('/')\n return context.unrestrictedTraverse(directory).readFile(filename)\n@@ -71,7 +80,10 @@\n IBundleRegistry, prefix=\"plone.bundles\", check=False)\n for bundle in bundles.values():\n if bundle.merge_with == meta_bundle and bundle.jscompilation:\n- resources.append(get_resource(context, bundle.jscompilation))\n+ resource = get_resource(context, bundle.jscompilation)\n+ if not resource:\n+ continue\n+ resources.append(resource)\n \n fi = StringIO()\n for script in resources:\n@@ -87,7 +99,18 @@\n IBundleRegistry, prefix=\"plone.bundles\", check=False)\n for bundle in bundles.values():\n if bundle.merge_with == meta_bundle and bundle.csscompilation:\n- resources.append(get_resource(context, bundle.csscompilation))\n+ css = get_resource(context, bundle.csscompilation)\n+ if not css:\n+ continue\n+ (path, sep, filename) = bundle.csscompilation.rpartition('/')\n+ # Process relative urls:\n+ # we prefix with current resource path any url not starting with\n+ # '/' or http: or data:\n+ css = re.sub(\n+ r\"\"\"(url\\(['\"]?(?!['\"]?([a-z]+:|\\/)))\"\"\",\n+ r'\\1%s/' % path,\n+ css)\n+ resources.append(css)\n \n fi = StringIO()\n for script in resources:\n", "issue": "CSS bundles generation breaks background images relative urls\nThis is a bug related to PR #1300.\n\n", "before_files": [{"content": "from zExceptions import NotFound\nfrom Acquisition import aq_base\nfrom datetime import datetime\nfrom plone.registry.interfaces import IRegistry\nfrom plone.resource.file import FilesystemFile\nfrom plone.resource.interfaces import IResourceDirectory\nfrom Products.CMFPlone.interfaces import IBundleRegistry\nfrom Products.CMFPlone.interfaces.resources import (\n OVERRIDE_RESOURCE_DIRECTORY_NAME,\n)\nfrom StringIO import StringIO\nfrom zope.component import getUtility\nfrom zope.component import queryUtility\n\nPRODUCTION_RESOURCE_DIRECTORY = \"production\"\n\n\ndef get_production_resource_directory():\n persistent_directory = queryUtility(IResourceDirectory, name=\"persistent\")\n if persistent_directory is None:\n return ''\n container = persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME]\n try:\n production_folder = container[PRODUCTION_RESOURCE_DIRECTORY]\n except NotFound:\n return \"%s/++unique++1\" % PRODUCTION_RESOURCE_DIRECTORY\n timestamp = production_folder.readFile('timestamp.txt')\n return \"%s/++unique++%s\" % (\n PRODUCTION_RESOURCE_DIRECTORY, timestamp)\n\n\ndef get_resource(context, path):\n if path.startswith('++plone++'):\n # ++plone++ resources can be customized, we return their override\n # value if any\n overrides = get_override_directory(context)\n filepath = path[9:]\n if overrides.isFile(filepath):\n return overrides.readFile(filepath)\n\n resource = context.unrestrictedTraverse(path)\n if isinstance(resource, FilesystemFile):\n (directory, sep, filename) = path.rpartition('/')\n return context.unrestrictedTraverse(directory).readFile(filename)\n else:\n if hasattr(aq_base(resource), 'GET'):\n # for FileResource\n return resource.GET()\n else:\n # any BrowserView\n return resource()\n\n\ndef write_js(context, folder, meta_bundle):\n registry = getUtility(IRegistry)\n resources = []\n\n # default resources\n if meta_bundle == 'default' and registry.records.get(\n 'plone.resources/jquery.js'\n ):\n resources.append(get_resource(context,\n registry.records['plone.resources/jquery.js'].value))\n resources.append(get_resource(context,\n registry.records['plone.resources.requirejs'].value))\n resources.append(get_resource(context,\n registry.records['plone.resources.configjs'].value))\n\n # bundles\n bundles = registry.collectionOfInterface(\n IBundleRegistry, prefix=\"plone.bundles\", check=False)\n for bundle in bundles.values():\n if bundle.merge_with == meta_bundle and bundle.jscompilation:\n resources.append(get_resource(context, bundle.jscompilation))\n\n fi = StringIO()\n for script in resources:\n fi.write(script + '\\n')\n folder.writeFile(meta_bundle + \".js\", fi)\n\n\ndef write_css(context, folder, meta_bundle):\n registry = getUtility(IRegistry)\n resources = []\n\n bundles = registry.collectionOfInterface(\n IBundleRegistry, prefix=\"plone.bundles\", check=False)\n for bundle in bundles.values():\n if bundle.merge_with == meta_bundle and bundle.csscompilation:\n resources.append(get_resource(context, bundle.csscompilation))\n\n fi = StringIO()\n for script in resources:\n fi.write(script + '\\n')\n folder.writeFile(meta_bundle + \".css\", fi)\n\n\ndef get_override_directory(context):\n persistent_directory = queryUtility(IResourceDirectory, name=\"persistent\")\n if persistent_directory is None:\n return\n if OVERRIDE_RESOURCE_DIRECTORY_NAME not in persistent_directory:\n persistent_directory.makeDirectory(OVERRIDE_RESOURCE_DIRECTORY_NAME)\n return persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME]\n\n\ndef combine_bundles(context):\n container = get_override_directory(context)\n if PRODUCTION_RESOURCE_DIRECTORY not in container:\n container.makeDirectory(PRODUCTION_RESOURCE_DIRECTORY)\n production_folder = container[PRODUCTION_RESOURCE_DIRECTORY]\n\n # store timestamp\n fi = StringIO()\n fi.write(datetime.now().isoformat())\n production_folder.writeFile(\"timestamp.txt\", fi)\n\n # generate new combined bundles\n write_js(context, production_folder, 'default')\n write_js(context, production_folder, 'logged-in')\n write_css(context, production_folder, 'default')\n write_css(context, production_folder, 'logged-in')\n", "path": "Products/CMFPlone/resources/browser/combine.py"}]}
1,707
756
gh_patches_debug_658
rasdani/github-patches
git_diff
pex-tool__pex-2258
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Release 2.1.148 On the docket: + [x] The Pex CLI should warn when it creates a PEX zip that requires zip64. #2247 </issue> <code> [start of pex/version.py] 1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). 2 # Licensed under the Apache License, Version 2.0 (see LICENSE). 3 4 __version__ = "2.1.147" 5 [end of pex/version.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pex/version.py b/pex/version.py --- a/pex/version.py +++ b/pex/version.py @@ -1,4 +1,4 @@ # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). -__version__ = "2.1.147" +__version__ = "2.1.148"
{"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.147\"\n+__version__ = \"2.1.148\"\n", "issue": "Release 2.1.148\nOn the docket:\r\n+ [x] The Pex CLI should warn when it creates a PEX zip that requires zip64. #2247\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.147\"\n", "path": "pex/version.py"}]}
628
99
gh_patches_debug_2942
rasdani/github-patches
git_diff
docker__docker-py-3004
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> installing latest 5.0.3 on windows machines is still using pywin32==227 but not pywin32==301 [Bump pywin32 from 227 to 301 ]( https://github.com/docker/docker-py/commit/e0d186d754693feb7d27c2352e455c5febb4a5cd) was already merged in to bump pywin32 from 227 to 301. But, when installing latest 5.0.3 on windows machines is resulting in install of pywin32==227 Most likely extras_require needs updated https://github.com/docker/docker-py/blob/a48a5a9647761406d66e8271f19fab7fa0c5f582/setup.py#L19 Pywin32 upgrade Fix issue #2902 @aiordache @ulyssessouza, please, accept this PR to fix this annoying bug Don't pin to pywin32 227 The hard pin to 227 is keeping us from using docker with other projects that depend on a newer version of pywin32. </issue> <code> [start of setup.py] 1 #!/usr/bin/env python 2 3 import codecs 4 import os 5 6 from setuptools import find_packages 7 from setuptools import setup 8 9 ROOT_DIR = os.path.dirname(__file__) 10 SOURCE_DIR = os.path.join(ROOT_DIR) 11 12 requirements = [ 13 'websocket-client >= 0.32.0', 14 'requests >= 2.14.2, != 2.18.0', 15 ] 16 17 extras_require = { 18 # win32 APIs if on Windows (required for npipe support) 19 ':sys_platform == "win32"': 'pywin32==227', 20 21 # If using docker-py over TLS, highly recommend this option is 22 # pip-installed or pinned. 23 24 # TODO: if pip installing both "requests" and "requests[security]", the 25 # extra package from the "security" option are not installed (see 26 # https://github.com/pypa/pip/issues/4391). Once that's fixed, instead of 27 # installing the extra dependencies, install the following instead: 28 # 'requests[security] >= 2.5.2, != 2.11.0, != 2.12.2' 29 'tls': ['pyOpenSSL>=17.5.0', 'cryptography>=3.4.7', 'idna>=2.0.0'], 30 31 # Only required when connecting using the ssh:// protocol 32 'ssh': ['paramiko>=2.4.3'], 33 34 } 35 36 version = None 37 exec(open('docker/version.py').read()) 38 39 with open('./test-requirements.txt') as test_reqs_txt: 40 test_requirements = [line for line in test_reqs_txt] 41 42 43 long_description = '' 44 with codecs.open('./README.md', encoding='utf-8') as readme_md: 45 long_description = readme_md.read() 46 47 setup( 48 name="docker", 49 version=version, 50 description="A Python library for the Docker Engine API.", 51 long_description=long_description, 52 long_description_content_type='text/markdown', 53 url='https://github.com/docker/docker-py', 54 project_urls={ 55 'Documentation': 'https://docker-py.readthedocs.io', 56 'Changelog': 'https://docker-py.readthedocs.io/en/stable/change-log.html', # noqa: E501 57 'Source': 'https://github.com/docker/docker-py', 58 'Tracker': 'https://github.com/docker/docker-py/issues', 59 }, 60 packages=find_packages(exclude=["tests.*", "tests"]), 61 install_requires=requirements, 62 tests_require=test_requirements, 63 extras_require=extras_require, 64 python_requires='>=3.6', 65 zip_safe=False, 66 test_suite='tests', 67 classifiers=[ 68 'Development Status :: 5 - Production/Stable', 69 'Environment :: Other Environment', 70 'Intended Audience :: Developers', 71 'Operating System :: OS Independent', 72 'Programming Language :: Python', 73 'Programming Language :: Python :: 3', 74 'Programming Language :: Python :: 3.6', 75 'Programming Language :: Python :: 3.7', 76 'Programming Language :: Python :: 3.8', 77 'Programming Language :: Python :: 3.9', 78 'Programming Language :: Python :: 3.10', 79 'Topic :: Software Development', 80 'Topic :: Utilities', 81 'License :: OSI Approved :: Apache Software License', 82 ], 83 maintainer='Ulysses Souza', 84 maintainer_email='[email protected]', 85 ) 86 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ extras_require = { # win32 APIs if on Windows (required for npipe support) - ':sys_platform == "win32"': 'pywin32==227', + ':sys_platform == "win32"': 'pywin32>=304', # If using docker-py over TLS, highly recommend this option is # pip-installed or pinned.
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -16,7 +16,7 @@\n \n extras_require = {\n # win32 APIs if on Windows (required for npipe support)\n- ':sys_platform == \"win32\"': 'pywin32==227',\n+ ':sys_platform == \"win32\"': 'pywin32>=304',\n \n # If using docker-py over TLS, highly recommend this option is\n # pip-installed or pinned.\n", "issue": "installing latest 5.0.3 on windows machines is still using pywin32==227 but not pywin32==301\n[Bump pywin32 from 227 to 301 ]( https://github.com/docker/docker-py/commit/e0d186d754693feb7d27c2352e455c5febb4a5cd) was already merged in to bump pywin32 from 227 to 301. But, when installing latest 5.0.3 on windows machines is resulting in install of pywin32==227\r\n\r\nMost likely extras_require needs updated\r\nhttps://github.com/docker/docker-py/blob/a48a5a9647761406d66e8271f19fab7fa0c5f582/setup.py#L19\r\n\r\n\r\n\r\n\nPywin32 upgrade\nFix issue #2902\r\n\r\n@aiordache @ulyssessouza, please, accept this PR to fix this annoying bug\r\n\nDon't pin to pywin32 227\nThe hard pin to 227 is keeping us from using docker with other projects that depend on a newer version of pywin32.\n", "before_files": [{"content": "#!/usr/bin/env python\n\nimport codecs\nimport os\n\nfrom setuptools import find_packages\nfrom setuptools import setup\n\nROOT_DIR = os.path.dirname(__file__)\nSOURCE_DIR = os.path.join(ROOT_DIR)\n\nrequirements = [\n 'websocket-client >= 0.32.0',\n 'requests >= 2.14.2, != 2.18.0',\n]\n\nextras_require = {\n # win32 APIs if on Windows (required for npipe support)\n ':sys_platform == \"win32\"': 'pywin32==227',\n\n # If using docker-py over TLS, highly recommend this option is\n # pip-installed or pinned.\n\n # TODO: if pip installing both \"requests\" and \"requests[security]\", the\n # extra package from the \"security\" option are not installed (see\n # https://github.com/pypa/pip/issues/4391). Once that's fixed, instead of\n # installing the extra dependencies, install the following instead:\n # 'requests[security] >= 2.5.2, != 2.11.0, != 2.12.2'\n 'tls': ['pyOpenSSL>=17.5.0', 'cryptography>=3.4.7', 'idna>=2.0.0'],\n\n # Only required when connecting using the ssh:// protocol\n 'ssh': ['paramiko>=2.4.3'],\n\n}\n\nversion = None\nexec(open('docker/version.py').read())\n\nwith open('./test-requirements.txt') as test_reqs_txt:\n test_requirements = [line for line in test_reqs_txt]\n\n\nlong_description = ''\nwith codecs.open('./README.md', encoding='utf-8') as readme_md:\n long_description = readme_md.read()\n\nsetup(\n name=\"docker\",\n version=version,\n description=\"A Python library for the Docker Engine API.\",\n long_description=long_description,\n long_description_content_type='text/markdown',\n url='https://github.com/docker/docker-py',\n project_urls={\n 'Documentation': 'https://docker-py.readthedocs.io',\n 'Changelog': 'https://docker-py.readthedocs.io/en/stable/change-log.html', # noqa: E501\n 'Source': 'https://github.com/docker/docker-py',\n 'Tracker': 'https://github.com/docker/docker-py/issues',\n },\n packages=find_packages(exclude=[\"tests.*\", \"tests\"]),\n install_requires=requirements,\n tests_require=test_requirements,\n extras_require=extras_require,\n python_requires='>=3.6',\n zip_safe=False,\n test_suite='tests',\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Other Environment',\n 'Intended Audience :: Developers',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: 3.10',\n 'Topic :: Software Development',\n 'Topic :: Utilities',\n 'License :: OSI Approved :: Apache Software License',\n ],\n maintainer='Ulysses Souza',\n maintainer_email='[email protected]',\n)\n", "path": "setup.py"}]}
1,733
123
gh_patches_debug_12597
rasdani/github-patches
git_diff
sublimelsp__LSP-1110
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Double requests for documentHighlight I've noticed that setting a cursor on some symbol makes the `documentHighlight` underline blink once. Checked logs and saw the request being made twice on each cursor movement: ``` :: --> pyls textDocument/documentHighlight(12): {'textDocument': {'uri': 'file:////LSP/plugin/highlights.py'}, 'position': {'character': 8, 'line': 38}} :: --> pyls textDocument/documentHighlight(13): {'textDocument': {'uri': 'file:////LSP/plugin/highlights.py'}, 'position': {'character': 8, 'line': 38}} ``` Then added log in `DocumentHighlightListener` class, inside `on_selection_modified_async` method and that listener seems to be triggered twice on cursor movement. Tested with `print(self.view.file_name())`. </issue> <code> [start of plugin/highlights.py] 1 import sublime 2 from .core.protocol import Request, Range, DocumentHighlightKind 3 from .core.registry import LSPViewEventListener 4 from .core.settings import settings 5 from .core.typing import List, Dict, Optional 6 from .core.views import range_to_region, text_document_position_params 7 from .core.windows import debounced 8 9 SUBLIME_WORD_MASK = 515 10 NO_HIGHLIGHT_SCOPES = 'comment, string' 11 12 _kind2name = { 13 DocumentHighlightKind.Unknown: "unknown", 14 DocumentHighlightKind.Text: "text", 15 DocumentHighlightKind.Read: "read", 16 DocumentHighlightKind.Write: "write" 17 } 18 19 20 def remove_highlights(view: sublime.View) -> None: 21 for kind in settings.document_highlight_scopes.keys(): 22 view.erase_regions("lsp_highlight_{}".format(kind)) 23 24 25 class DocumentHighlightListener(LSPViewEventListener): 26 def __init__(self, view: sublime.View) -> None: 27 super().__init__(view) 28 self._initialized = False 29 self._enabled = False 30 self._stored_point = -1 31 32 @classmethod 33 def is_applicable(cls, view_settings: dict) -> bool: 34 if 'documentHighlight' in settings.disabled_capabilities: 35 return False 36 return cls.has_supported_syntax(view_settings) 37 38 def on_selection_modified_async(self) -> None: 39 if not self._initialized: 40 self._initialize() 41 if self._enabled and settings.document_highlight_style: 42 try: 43 current_point = self.view.sel()[0].begin() 44 except IndexError: 45 return 46 self._stored_point = current_point 47 self._clear_regions() 48 debounced(self._on_document_highlight, 500, lambda: self._stored_point == current_point, async_thread=True) 49 50 def _initialize(self) -> None: 51 self._initialized = True 52 session = self.session("documentHighlightProvider") 53 if session: 54 self._enabled = True 55 56 def _clear_regions(self) -> None: 57 for kind in settings.document_highlight_scopes.keys(): 58 self.view.erase_regions("lsp_highlight_{}".format(kind)) 59 60 def _on_document_highlight(self) -> None: 61 self._clear_regions() 62 if len(self.view.sel()) != 1: 63 return 64 point = self.view.sel()[0].begin() 65 word_at_sel = self.view.classify(point) 66 if word_at_sel & SUBLIME_WORD_MASK: 67 if self.view.match_selector(point, NO_HIGHLIGHT_SCOPES): 68 return 69 session = self.session("documentHighlightProvider", point) 70 if session: 71 params = text_document_position_params(self.view, point) 72 request = Request.documentHighlight(params) 73 session.send_request(request, self._handle_response) 74 75 def _handle_response(self, response: Optional[List]) -> None: 76 if not response: 77 return 78 kind2regions = {} # type: Dict[str, List[sublime.Region]] 79 for kind in range(0, 4): 80 kind2regions[_kind2name[kind]] = [] 81 for highlight in response: 82 r = range_to_region(Range.from_lsp(highlight["range"]), self.view) 83 kind = highlight.get("kind", DocumentHighlightKind.Unknown) 84 if kind is not None: 85 kind2regions[_kind2name[kind]].append(r) 86 if settings.document_highlight_style == "fill": 87 flags = 0 88 elif settings.document_highlight_style == "box": 89 flags = sublime.DRAW_NO_FILL 90 else: 91 flags = sublime.DRAW_NO_FILL | sublime.DRAW_NO_OUTLINE 92 if settings.document_highlight_style == "underline": 93 flags |= sublime.DRAW_SOLID_UNDERLINE 94 elif settings.document_highlight_style == "stippled": 95 flags |= sublime.DRAW_STIPPLED_UNDERLINE 96 elif settings.document_highlight_style == "squiggly": 97 flags |= sublime.DRAW_SQUIGGLY_UNDERLINE 98 99 self._clear_regions() 100 for kind_str, regions in kind2regions.items(): 101 if regions: 102 scope = settings.document_highlight_scopes.get(kind_str, None) 103 if scope: 104 self.view.add_regions("lsp_highlight_{}".format(kind_str), 105 regions, scope=scope, flags=flags) 106 [end of plugin/highlights.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/plugin/highlights.py b/plugin/highlights.py --- a/plugin/highlights.py +++ b/plugin/highlights.py @@ -43,9 +43,11 @@ current_point = self.view.sel()[0].begin() except IndexError: return - self._stored_point = current_point self._clear_regions() - debounced(self._on_document_highlight, 500, lambda: self._stored_point == current_point, async_thread=True) + if self._stored_point != current_point: + self._stored_point = current_point + debounced(self._on_document_highlight, 500, lambda: self._stored_point == current_point, + async_thread=True) def _initialize(self) -> None: self._initialized = True
{"golden_diff": "diff --git a/plugin/highlights.py b/plugin/highlights.py\n--- a/plugin/highlights.py\n+++ b/plugin/highlights.py\n@@ -43,9 +43,11 @@\n current_point = self.view.sel()[0].begin()\n except IndexError:\n return\n- self._stored_point = current_point\n self._clear_regions()\n- debounced(self._on_document_highlight, 500, lambda: self._stored_point == current_point, async_thread=True)\n+ if self._stored_point != current_point:\n+ self._stored_point = current_point\n+ debounced(self._on_document_highlight, 500, lambda: self._stored_point == current_point,\n+ async_thread=True)\n \n def _initialize(self) -> None:\n self._initialized = True\n", "issue": "Double requests for documentHighlight\nI've noticed that setting a cursor on some symbol makes the `documentHighlight` underline blink once.\r\n\r\nChecked logs and saw the request being made twice on each cursor movement:\r\n```\r\n:: --> pyls textDocument/documentHighlight(12): {'textDocument': {'uri': 'file:////LSP/plugin/highlights.py'}, 'position': {'character': 8, 'line': 38}}\r\n:: --> pyls textDocument/documentHighlight(13): {'textDocument': {'uri': 'file:////LSP/plugin/highlights.py'}, 'position': {'character': 8, 'line': 38}}\r\n```\r\n\r\nThen added log in `DocumentHighlightListener` class, inside `on_selection_modified_async` method and that listener seems to be triggered twice on cursor movement. Tested with `print(self.view.file_name())`.\n", "before_files": [{"content": "import sublime\nfrom .core.protocol import Request, Range, DocumentHighlightKind\nfrom .core.registry import LSPViewEventListener\nfrom .core.settings import settings\nfrom .core.typing import List, Dict, Optional\nfrom .core.views import range_to_region, text_document_position_params\nfrom .core.windows import debounced\n\nSUBLIME_WORD_MASK = 515\nNO_HIGHLIGHT_SCOPES = 'comment, string'\n\n_kind2name = {\n DocumentHighlightKind.Unknown: \"unknown\",\n DocumentHighlightKind.Text: \"text\",\n DocumentHighlightKind.Read: \"read\",\n DocumentHighlightKind.Write: \"write\"\n}\n\n\ndef remove_highlights(view: sublime.View) -> None:\n for kind in settings.document_highlight_scopes.keys():\n view.erase_regions(\"lsp_highlight_{}\".format(kind))\n\n\nclass DocumentHighlightListener(LSPViewEventListener):\n def __init__(self, view: sublime.View) -> None:\n super().__init__(view)\n self._initialized = False\n self._enabled = False\n self._stored_point = -1\n\n @classmethod\n def is_applicable(cls, view_settings: dict) -> bool:\n if 'documentHighlight' in settings.disabled_capabilities:\n return False\n return cls.has_supported_syntax(view_settings)\n\n def on_selection_modified_async(self) -> None:\n if not self._initialized:\n self._initialize()\n if self._enabled and settings.document_highlight_style:\n try:\n current_point = self.view.sel()[0].begin()\n except IndexError:\n return\n self._stored_point = current_point\n self._clear_regions()\n debounced(self._on_document_highlight, 500, lambda: self._stored_point == current_point, async_thread=True)\n\n def _initialize(self) -> None:\n self._initialized = True\n session = self.session(\"documentHighlightProvider\")\n if session:\n self._enabled = True\n\n def _clear_regions(self) -> None:\n for kind in settings.document_highlight_scopes.keys():\n self.view.erase_regions(\"lsp_highlight_{}\".format(kind))\n\n def _on_document_highlight(self) -> None:\n self._clear_regions()\n if len(self.view.sel()) != 1:\n return\n point = self.view.sel()[0].begin()\n word_at_sel = self.view.classify(point)\n if word_at_sel & SUBLIME_WORD_MASK:\n if self.view.match_selector(point, NO_HIGHLIGHT_SCOPES):\n return\n session = self.session(\"documentHighlightProvider\", point)\n if session:\n params = text_document_position_params(self.view, point)\n request = Request.documentHighlight(params)\n session.send_request(request, self._handle_response)\n\n def _handle_response(self, response: Optional[List]) -> None:\n if not response:\n return\n kind2regions = {} # type: Dict[str, List[sublime.Region]]\n for kind in range(0, 4):\n kind2regions[_kind2name[kind]] = []\n for highlight in response:\n r = range_to_region(Range.from_lsp(highlight[\"range\"]), self.view)\n kind = highlight.get(\"kind\", DocumentHighlightKind.Unknown)\n if kind is not None:\n kind2regions[_kind2name[kind]].append(r)\n if settings.document_highlight_style == \"fill\":\n flags = 0\n elif settings.document_highlight_style == \"box\":\n flags = sublime.DRAW_NO_FILL\n else:\n flags = sublime.DRAW_NO_FILL | sublime.DRAW_NO_OUTLINE\n if settings.document_highlight_style == \"underline\":\n flags |= sublime.DRAW_SOLID_UNDERLINE\n elif settings.document_highlight_style == \"stippled\":\n flags |= sublime.DRAW_STIPPLED_UNDERLINE\n elif settings.document_highlight_style == \"squiggly\":\n flags |= sublime.DRAW_SQUIGGLY_UNDERLINE\n\n self._clear_regions()\n for kind_str, regions in kind2regions.items():\n if regions:\n scope = settings.document_highlight_scopes.get(kind_str, None)\n if scope:\n self.view.add_regions(\"lsp_highlight_{}\".format(kind_str),\n regions, scope=scope, flags=flags)\n", "path": "plugin/highlights.py"}]}
1,819
175
gh_patches_debug_15724
rasdani/github-patches
git_diff
pyscript__pyscript-1902
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> js_modules not behaving like in Polyscript ### Checklist - [X] I added a descriptive title - [X] I searched for other issues and couldn't find a solution or duplication - [X] I already searched in Google and didn't find any good information or help ### What happened? Apparently `from pyscript.js_modules import Thing` doesn't work in *PyScript* the same way it does on *Polyscript*. The main difference is that in *PyScript* that's exported within the Python code, as opposite of being registered as JS module like it is for *Polyscript* where *js_modules* use `registerJSModule` utility instead. ### What browsers are you seeing the problem on? (if applicable) _No response_ ### Console info _No response_ ### Additional Context _No response_ </issue> <code> [start of pyscript.core/src/stdlib/pyscript/magic_js.py] 1 import js as globalThis 2 from polyscript import js_modules 3 from pyscript.util import NotSupported 4 5 RUNNING_IN_WORKER = not hasattr(globalThis, "document") 6 7 if RUNNING_IN_WORKER: 8 import js 9 import polyscript 10 11 PyWorker = NotSupported( 12 "pyscript.PyWorker", 13 "pyscript.PyWorker works only when running in the main thread", 14 ) 15 window = polyscript.xworker.window 16 document = window.document 17 js.document = document 18 sync = polyscript.xworker.sync 19 20 # in workers the display does not have a default ID 21 # but there is a sync utility from xworker 22 def current_target(): 23 return polyscript.target 24 25 else: 26 import _pyscript 27 from _pyscript import PyWorker 28 29 window = globalThis 30 document = globalThis.document 31 sync = NotSupported( 32 "pyscript.sync", "pyscript.sync works only when running in a worker" 33 ) 34 35 # in MAIN the current element target exist, just use it 36 def current_target(): 37 return _pyscript.target 38 [end of pyscript.core/src/stdlib/pyscript/magic_js.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pyscript.core/src/stdlib/pyscript/magic_js.py b/pyscript.core/src/stdlib/pyscript/magic_js.py --- a/pyscript.core/src/stdlib/pyscript/magic_js.py +++ b/pyscript.core/src/stdlib/pyscript/magic_js.py @@ -1,9 +1,28 @@ +import sys + import js as globalThis from polyscript import js_modules from pyscript.util import NotSupported RUNNING_IN_WORKER = not hasattr(globalThis, "document") + +# allow `from pyscript.js_modules.xxx import yyy` +class JSModule(object): + def __init__(self, name): + self.name = name + + def __getattr__(self, field): + # avoid pyodide looking for non existent fields + if not field.startswith("_"): + return getattr(getattr(js_modules, self.name), field) + + +# generate N modules in the system that will proxy the real value +for name in globalThis.Reflect.ownKeys(js_modules): + sys.modules[f"pyscript.js_modules.{name}"] = JSModule(name) +sys.modules["pyscript.js_modules"] = js_modules + if RUNNING_IN_WORKER: import js import polyscript
{"golden_diff": "diff --git a/pyscript.core/src/stdlib/pyscript/magic_js.py b/pyscript.core/src/stdlib/pyscript/magic_js.py\n--- a/pyscript.core/src/stdlib/pyscript/magic_js.py\n+++ b/pyscript.core/src/stdlib/pyscript/magic_js.py\n@@ -1,9 +1,28 @@\n+import sys\n+\n import js as globalThis\n from polyscript import js_modules\n from pyscript.util import NotSupported\n \n RUNNING_IN_WORKER = not hasattr(globalThis, \"document\")\n \n+\n+# allow `from pyscript.js_modules.xxx import yyy`\n+class JSModule(object):\n+ def __init__(self, name):\n+ self.name = name\n+\n+ def __getattr__(self, field):\n+ # avoid pyodide looking for non existent fields\n+ if not field.startswith(\"_\"):\n+ return getattr(getattr(js_modules, self.name), field)\n+\n+\n+# generate N modules in the system that will proxy the real value\n+for name in globalThis.Reflect.ownKeys(js_modules):\n+ sys.modules[f\"pyscript.js_modules.{name}\"] = JSModule(name)\n+sys.modules[\"pyscript.js_modules\"] = js_modules\n+\n if RUNNING_IN_WORKER:\n import js\n import polyscript\n", "issue": "js_modules not behaving like in Polyscript\n### Checklist\n\n- [X] I added a descriptive title\n- [X] I searched for other issues and couldn't find a solution or duplication\n- [X] I already searched in Google and didn't find any good information or help\n\n### What happened?\n\nApparently `from pyscript.js_modules import Thing` doesn't work in *PyScript* the same way it does on *Polyscript*.\r\n\r\nThe main difference is that in *PyScript* that's exported within the Python code, as opposite of being registered as JS module like it is for *Polyscript* where *js_modules* use `registerJSModule` utility instead.\n\n### What browsers are you seeing the problem on? (if applicable)\n\n_No response_\n\n### Console info\n\n_No response_\n\n### Additional Context\n\n_No response_\n", "before_files": [{"content": "import js as globalThis\nfrom polyscript import js_modules\nfrom pyscript.util import NotSupported\n\nRUNNING_IN_WORKER = not hasattr(globalThis, \"document\")\n\nif RUNNING_IN_WORKER:\n import js\n import polyscript\n\n PyWorker = NotSupported(\n \"pyscript.PyWorker\",\n \"pyscript.PyWorker works only when running in the main thread\",\n )\n window = polyscript.xworker.window\n document = window.document\n js.document = document\n sync = polyscript.xworker.sync\n\n # in workers the display does not have a default ID\n # but there is a sync utility from xworker\n def current_target():\n return polyscript.target\n\nelse:\n import _pyscript\n from _pyscript import PyWorker\n\n window = globalThis\n document = globalThis.document\n sync = NotSupported(\n \"pyscript.sync\", \"pyscript.sync works only when running in a worker\"\n )\n\n # in MAIN the current element target exist, just use it\n def current_target():\n return _pyscript.target\n", "path": "pyscript.core/src/stdlib/pyscript/magic_js.py"}]}
1,037
284
gh_patches_debug_5283
rasdani/github-patches
git_diff
azavea__raster-vision-469
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Move to using master in azavea/models We've been using a branch of azavea/models, which can cause confusion. Before release, merge the `upgrade-sept-2018` branch into the main branch and update our install_deps script accordingly. </issue> <code> [start of rastervision/runner/command_dag.py] 1 import networkx as nx 2 3 import rastervision as rv 4 from rastervision.utils.files import file_exists 5 6 import click 7 8 9 class CommandDAG: 10 """ A directed acyclic graph of command definitions. 11 """ 12 13 def __init__(self, 14 command_definitions, 15 rerun_commands=False, 16 skip_file_check=False): 17 """Generates a CommandDAG from a list of CommandDefinitions 18 19 This logic checks if there are any non-exsiting URIs that are 20 not produced as outputs by some command in the set. If so, 21 it raises a ConfigError stating the missing files. 22 """ 23 # Create a set of edges, from input_uri to command_config and 24 # from command_config to output_uri. Nodes for commands are their 25 # index into command_definitions. 26 27 uri_dag = nx.DiGraph() 28 29 for idx, command_def in enumerate(command_definitions): 30 uri_dag.add_node(idx) 31 for input_uri in command_def.io_def.input_uris: 32 uri_dag.add_edge(input_uri, idx) 33 34 for output_uri in command_def.io_def.output_uris: 35 uri_dag.add_edge(idx, output_uri) 36 37 # Find all source input_uris, and ensure they exist. 38 if not skip_file_check: 39 unsolved_sources = [ 40 uri for uri in uri_dag.nodes 41 if (type(uri) == str and len(uri_dag.in_edges(uri)) == 0) 42 ] 43 44 missing_files = [] 45 46 with click.progressbar( 47 unsolved_sources, 48 label='Ensuring input files exists ') as uris: 49 for uri in uris: 50 if not file_exists(uri): 51 missing_files.append(uri) 52 53 if any(missing_files): 54 raise rv.ConfigError( 55 'Files do not exist and are not supplied by commands:\n' 56 '\t{}\n'.format(',\b\t'.join(missing_files))) 57 58 # If we are not rerunning, remove commands that have existing outputs. 59 self.skipped_commands = [] 60 if not rerun_commands: 61 commands_to_outputs = [(idx, edge[1]) for idx in uri_dag.nodes 62 if type(idx) == int 63 for edge in uri_dag.out_edges(idx)] 64 with click.progressbar( 65 commands_to_outputs, 66 label='Checking for existing output') as lst: 67 for idx, output_uri in lst: 68 if file_exists(output_uri): 69 uri_dag.remove_edge(idx, output_uri) 70 71 for idx in set(map(lambda x: x[0], commands_to_outputs)): 72 if len(uri_dag.out_edges(idx)) == 0: 73 self.skipped_commands.append(command_definitions[idx]) 74 uri_dag.remove_node(idx) 75 76 # Collapse the graph to create edges from command to command. 77 command_id_dag = nx.DiGraph() 78 79 for idx in [idx for idx in uri_dag.nodes if (type(idx) == int)]: 80 command_id_dag.add_node(idx) 81 for upstream_idx in [ 82 edge2[0] for edge1 in uri_dag.in_edges(idx) 83 for edge2 in uri_dag.in_edges(edge1[0]) 84 ]: 85 command_id_dag.add_edge(upstream_idx, idx) 86 87 # Feed this digraph of commands to the child runner. 88 self.command_definitions = command_definitions 89 self.command_id_dag = command_id_dag 90 91 def get_sorted_commands(self): 92 """Return a topologically sorted list of commands configurations. 93 94 Returns a list of command configurations that are sorted such that every 95 command that depends on some other parent command appears later 96 than that parent command. 97 """ 98 return [ 99 self.command_definitions[idx].command_config 100 for idx in self.get_sorted_command_ids() 101 ] 102 103 def get_sorted_command_ids(self): 104 """Return a topologically sorted list of commands ids. 105 106 Returns a list of command IDs that can be used to retrieve 107 specific commands out of this DAG. These are sorted such that every 108 command that depends on some other parent command appears later 109 than that parent command. 110 """ 111 return [idx for idx in nx.topological_sort(self.command_id_dag)] 112 113 def get_command(self, command_id): 114 """Retrieves a command configuration for the given ID""" 115 return self.get_command_definition(command_id).command_config 116 117 def get_command_definition(self, command_id): 118 """Retrieves a command definition for the given ID""" 119 return self.command_definitions[command_id] 120 121 def get_upstream_command_ids(self, command_id): 122 """Returns the command ids for upstream commands for the command 123 with the given id. 124 """ 125 return list( 126 map(lambda x: x[0], self.command_id_dag.in_edges(command_id))) 127 128 def get_command_definitions(self): 129 """Returns the command definitions that will be run in this DAG.""" 130 return [ 131 self.command_definitions[idx] for idx in self.command_id_dag.nodes 132 ] 133 [end of rastervision/runner/command_dag.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/rastervision/runner/command_dag.py b/rastervision/runner/command_dag.py --- a/rastervision/runner/command_dag.py +++ b/rastervision/runner/command_dag.py @@ -45,7 +45,7 @@ with click.progressbar( unsolved_sources, - label='Ensuring input files exists ') as uris: + label='Ensuring input files exist ') as uris: for uri in uris: if not file_exists(uri): missing_files.append(uri)
{"golden_diff": "diff --git a/rastervision/runner/command_dag.py b/rastervision/runner/command_dag.py\n--- a/rastervision/runner/command_dag.py\n+++ b/rastervision/runner/command_dag.py\n@@ -45,7 +45,7 @@\n \n with click.progressbar(\n unsolved_sources,\n- label='Ensuring input files exists ') as uris:\n+ label='Ensuring input files exist ') as uris:\n for uri in uris:\n if not file_exists(uri):\n missing_files.append(uri)\n", "issue": "Move to using master in azavea/models\nWe've been using a branch of azavea/models, which can cause confusion.\r\n\r\nBefore release, merge the `upgrade-sept-2018` branch into the main branch and update our install_deps script accordingly.\n", "before_files": [{"content": "import networkx as nx\n\nimport rastervision as rv\nfrom rastervision.utils.files import file_exists\n\nimport click\n\n\nclass CommandDAG:\n \"\"\" A directed acyclic graph of command definitions.\n \"\"\"\n\n def __init__(self,\n command_definitions,\n rerun_commands=False,\n skip_file_check=False):\n \"\"\"Generates a CommandDAG from a list of CommandDefinitions\n\n This logic checks if there are any non-exsiting URIs that are\n not produced as outputs by some command in the set. If so,\n it raises a ConfigError stating the missing files.\n \"\"\"\n # Create a set of edges, from input_uri to command_config and\n # from command_config to output_uri. Nodes for commands are their\n # index into command_definitions.\n\n uri_dag = nx.DiGraph()\n\n for idx, command_def in enumerate(command_definitions):\n uri_dag.add_node(idx)\n for input_uri in command_def.io_def.input_uris:\n uri_dag.add_edge(input_uri, idx)\n\n for output_uri in command_def.io_def.output_uris:\n uri_dag.add_edge(idx, output_uri)\n\n # Find all source input_uris, and ensure they exist.\n if not skip_file_check:\n unsolved_sources = [\n uri for uri in uri_dag.nodes\n if (type(uri) == str and len(uri_dag.in_edges(uri)) == 0)\n ]\n\n missing_files = []\n\n with click.progressbar(\n unsolved_sources,\n label='Ensuring input files exists ') as uris:\n for uri in uris:\n if not file_exists(uri):\n missing_files.append(uri)\n\n if any(missing_files):\n raise rv.ConfigError(\n 'Files do not exist and are not supplied by commands:\\n'\n '\\t{}\\n'.format(',\\b\\t'.join(missing_files)))\n\n # If we are not rerunning, remove commands that have existing outputs.\n self.skipped_commands = []\n if not rerun_commands:\n commands_to_outputs = [(idx, edge[1]) for idx in uri_dag.nodes\n if type(idx) == int\n for edge in uri_dag.out_edges(idx)]\n with click.progressbar(\n commands_to_outputs,\n label='Checking for existing output') as lst:\n for idx, output_uri in lst:\n if file_exists(output_uri):\n uri_dag.remove_edge(idx, output_uri)\n\n for idx in set(map(lambda x: x[0], commands_to_outputs)):\n if len(uri_dag.out_edges(idx)) == 0:\n self.skipped_commands.append(command_definitions[idx])\n uri_dag.remove_node(idx)\n\n # Collapse the graph to create edges from command to command.\n command_id_dag = nx.DiGraph()\n\n for idx in [idx for idx in uri_dag.nodes if (type(idx) == int)]:\n command_id_dag.add_node(idx)\n for upstream_idx in [\n edge2[0] for edge1 in uri_dag.in_edges(idx)\n for edge2 in uri_dag.in_edges(edge1[0])\n ]:\n command_id_dag.add_edge(upstream_idx, idx)\n\n # Feed this digraph of commands to the child runner.\n self.command_definitions = command_definitions\n self.command_id_dag = command_id_dag\n\n def get_sorted_commands(self):\n \"\"\"Return a topologically sorted list of commands configurations.\n\n Returns a list of command configurations that are sorted such that every\n command that depends on some other parent command appears later\n than that parent command.\n \"\"\"\n return [\n self.command_definitions[idx].command_config\n for idx in self.get_sorted_command_ids()\n ]\n\n def get_sorted_command_ids(self):\n \"\"\"Return a topologically sorted list of commands ids.\n\n Returns a list of command IDs that can be used to retrieve\n specific commands out of this DAG. These are sorted such that every\n command that depends on some other parent command appears later\n than that parent command.\n \"\"\"\n return [idx for idx in nx.topological_sort(self.command_id_dag)]\n\n def get_command(self, command_id):\n \"\"\"Retrieves a command configuration for the given ID\"\"\"\n return self.get_command_definition(command_id).command_config\n\n def get_command_definition(self, command_id):\n \"\"\"Retrieves a command definition for the given ID\"\"\"\n return self.command_definitions[command_id]\n\n def get_upstream_command_ids(self, command_id):\n \"\"\"Returns the command ids for upstream commands for the command\n with the given id.\n \"\"\"\n return list(\n map(lambda x: x[0], self.command_id_dag.in_edges(command_id)))\n\n def get_command_definitions(self):\n \"\"\"Returns the command definitions that will be run in this DAG.\"\"\"\n return [\n self.command_definitions[idx] for idx in self.command_id_dag.nodes\n ]\n", "path": "rastervision/runner/command_dag.py"}]}
1,952
126
gh_patches_debug_1363
rasdani/github-patches
git_diff
ManageIQ__integration_tests-7728
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> cfme.log only showing on first test in a run. cfme.log link only appears on the first test from a selection but shows all logs from all tests in that run. Expected to have a separate log link for each test specific to that test. See attached ![screenshot from 2018-08-14 15-50-11](https://user-images.githubusercontent.com/18352403/44099182-fca60de6-9fd9-11e8-8525-27ca6032229f.png) </issue> <code> [start of artifactor/plugins/logger.py] 1 """ Logger plugin for Artifactor 2 3 Add a stanza to the artifactor config like this, 4 artifactor: 5 log_dir: /home/username/outdir 6 per_run: test #test, run, None 7 overwrite: True 8 plugins: 9 logger: 10 enabled: True 11 plugin: logger 12 level: DEBUG 13 """ 14 import os 15 from logging import makeLogRecord 16 from artifactor import ArtifactorBasePlugin 17 from cfme.utils.log import make_file_handler 18 19 20 class Logger(ArtifactorBasePlugin): 21 22 class Test(object): 23 def __init__(self, ident): 24 self.ident = ident 25 self.in_progress = False 26 self.handler = None 27 28 def close(self): 29 if self.handle is not None: 30 self.handler.close() 31 self.handler = None 32 33 def plugin_initialize(self): 34 self.register_plugin_hook('start_test', self.start_test) 35 self.register_plugin_hook('finish_test', self.finish_test) 36 self.register_plugin_hook('log_message', self.log_message) 37 38 def configure(self): 39 self.configured = True 40 self.level = self.data.get('level', 'DEBUG') 41 42 @ArtifactorBasePlugin.check_configured 43 def start_test(self, artifact_path, test_name, test_location, slaveid): 44 if not slaveid: 45 slaveid = "Master" 46 test_ident = "{}/{}".format(test_location, test_name) 47 if slaveid in self.store: 48 if self.store[slaveid].in_progress: 49 print("Test already running, can't start another, logger") 50 return None 51 self.store[slaveid].close() 52 self.store[slaveid] = self.Test(test_ident) 53 self.store[slaveid].in_progress = True 54 filename = "{ident}-cfme.log".format(ident=self.ident) 55 self.store[slaveid].handler = make_file_handler( 56 filename, 57 root=artifact_path, 58 # we overwrite 59 mode='w', 60 level=self.level) 61 62 self.fire_hook('filedump', test_location=test_location, test_name=test_name, 63 description="cfme.log", slaveid=slaveid, contents="", file_type="log", 64 display_glyph="align-justify", dont_write=True, 65 os_filename=os.path.join(artifact_path, filename), 66 group_id="pytest-logfile") 67 68 @ArtifactorBasePlugin.check_configured 69 def finish_test(self, artifact_path, test_name, test_location, slaveid): 70 if not slaveid: 71 slaveid = "Master" 72 self.store[slaveid].in_progress = False 73 self.store[slaveid].close() 74 75 @ArtifactorBasePlugin.check_configured 76 def log_message(self, log_record, slaveid): 77 # json transport fallout: args must be a dict or a tuple, json makes a tuple into a list 78 args = log_record['args'] 79 log_record['args'] = tuple(args) if isinstance(args, list) else args 80 record = makeLogRecord(log_record) 81 if not slaveid: 82 slaveid = "Master" 83 if slaveid in self.store: 84 handler = self.store[slaveid].handler 85 if handler and record.levelno >= handler.level: 86 handler.handle(record) 87 [end of artifactor/plugins/logger.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/artifactor/plugins/logger.py b/artifactor/plugins/logger.py --- a/artifactor/plugins/logger.py +++ b/artifactor/plugins/logger.py @@ -26,7 +26,7 @@ self.handler = None def close(self): - if self.handle is not None: + if self.handler is not None: self.handler.close() self.handler = None
{"golden_diff": "diff --git a/artifactor/plugins/logger.py b/artifactor/plugins/logger.py\n--- a/artifactor/plugins/logger.py\n+++ b/artifactor/plugins/logger.py\n@@ -26,7 +26,7 @@\n self.handler = None\n \n def close(self):\n- if self.handle is not None:\n+ if self.handler is not None:\n self.handler.close()\n self.handler = None\n", "issue": "cfme.log only showing on first test in a run.\ncfme.log link only appears on the first test from a selection but shows all logs from all tests in that run. Expected to have a separate log link for each test specific to that test. See attached\r\n![screenshot from 2018-08-14 15-50-11](https://user-images.githubusercontent.com/18352403/44099182-fca60de6-9fd9-11e8-8525-27ca6032229f.png)\r\n\n", "before_files": [{"content": "\"\"\" Logger plugin for Artifactor\n\nAdd a stanza to the artifactor config like this,\nartifactor:\n log_dir: /home/username/outdir\n per_run: test #test, run, None\n overwrite: True\n plugins:\n logger:\n enabled: True\n plugin: logger\n level: DEBUG\n\"\"\"\nimport os\nfrom logging import makeLogRecord\nfrom artifactor import ArtifactorBasePlugin\nfrom cfme.utils.log import make_file_handler\n\n\nclass Logger(ArtifactorBasePlugin):\n\n class Test(object):\n def __init__(self, ident):\n self.ident = ident\n self.in_progress = False\n self.handler = None\n\n def close(self):\n if self.handle is not None:\n self.handler.close()\n self.handler = None\n\n def plugin_initialize(self):\n self.register_plugin_hook('start_test', self.start_test)\n self.register_plugin_hook('finish_test', self.finish_test)\n self.register_plugin_hook('log_message', self.log_message)\n\n def configure(self):\n self.configured = True\n self.level = self.data.get('level', 'DEBUG')\n\n @ArtifactorBasePlugin.check_configured\n def start_test(self, artifact_path, test_name, test_location, slaveid):\n if not slaveid:\n slaveid = \"Master\"\n test_ident = \"{}/{}\".format(test_location, test_name)\n if slaveid in self.store:\n if self.store[slaveid].in_progress:\n print(\"Test already running, can't start another, logger\")\n return None\n self.store[slaveid].close()\n self.store[slaveid] = self.Test(test_ident)\n self.store[slaveid].in_progress = True\n filename = \"{ident}-cfme.log\".format(ident=self.ident)\n self.store[slaveid].handler = make_file_handler(\n filename,\n root=artifact_path,\n # we overwrite\n mode='w',\n level=self.level)\n\n self.fire_hook('filedump', test_location=test_location, test_name=test_name,\n description=\"cfme.log\", slaveid=slaveid, contents=\"\", file_type=\"log\",\n display_glyph=\"align-justify\", dont_write=True,\n os_filename=os.path.join(artifact_path, filename),\n group_id=\"pytest-logfile\")\n\n @ArtifactorBasePlugin.check_configured\n def finish_test(self, artifact_path, test_name, test_location, slaveid):\n if not slaveid:\n slaveid = \"Master\"\n self.store[slaveid].in_progress = False\n self.store[slaveid].close()\n\n @ArtifactorBasePlugin.check_configured\n def log_message(self, log_record, slaveid):\n # json transport fallout: args must be a dict or a tuple, json makes a tuple into a list\n args = log_record['args']\n log_record['args'] = tuple(args) if isinstance(args, list) else args\n record = makeLogRecord(log_record)\n if not slaveid:\n slaveid = \"Master\"\n if slaveid in self.store:\n handler = self.store[slaveid].handler\n if handler and record.levelno >= handler.level:\n handler.handle(record)\n", "path": "artifactor/plugins/logger.py"}]}
1,535
89
gh_patches_debug_613
rasdani/github-patches
git_diff
pex-tool__pex-1314
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Release 2.1.38 On the docket: + [ ] PEX direct requirement metadata for resolves via Pip is incorrect. #1311 </issue> <code> [start of pex/version.py] 1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). 2 # Licensed under the Apache License, Version 2.0 (see LICENSE). 3 4 __version__ = "2.1.37" 5 [end of pex/version.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pex/version.py b/pex/version.py --- a/pex/version.py +++ b/pex/version.py @@ -1,4 +1,4 @@ # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). -__version__ = "2.1.37" +__version__ = "2.1.38"
{"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.37\"\n+__version__ = \"2.1.38\"\n", "issue": "Release 2.1.38\nOn the docket:\r\n+ [ ] PEX direct requirement metadata for resolves via Pip is incorrect. #1311\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.37\"\n", "path": "pex/version.py"}]}
618
97
gh_patches_debug_8086
rasdani/github-patches
git_diff
lutris__lutris-1904
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Higher resolution icons are still saved in 32x32 directory Despite Lutris bumping its icon size to 128x128 (currently it's still 64x64 as bump to 128x128 hasn't been deployed yet), it still saves the icons into `icons/hicolor/32x32`. It should probably not do that and save it in proper 128x128 location instead. </issue> <code> [start of lutris/settings.py] 1 """Internal settings.""" 2 import os 3 from gi.repository import GLib 4 from lutris.util.settings import SettingsIO 5 from lutris import __version__ 6 7 PROJECT = "Lutris" 8 VERSION = __version__ 9 COPYRIGHT = "(c) 2010-2019 Lutris Gaming Platform" 10 AUTHORS = [ 11 "The Lutris team" 12 ] 13 14 # Paths 15 CONFIG_DIR = os.path.join(GLib.get_user_config_dir(), "lutris") 16 CONFIG_FILE = os.path.join(CONFIG_DIR, "lutris.conf") 17 DATA_DIR = os.path.join(GLib.get_user_data_dir(), "lutris") 18 RUNNER_DIR = os.path.join(DATA_DIR, "runners") 19 RUNTIME_DIR = os.path.join(DATA_DIR, "runtime") 20 CACHE_DIR = os.path.join(GLib.get_user_cache_dir(), "lutris") 21 GAME_CONFIG_DIR = os.path.join(CONFIG_DIR, "games") 22 23 TMP_PATH = os.path.join(CACHE_DIR, "tmp") 24 BANNER_PATH = os.path.join(DATA_DIR, "banners") 25 COVERART_PATH = os.path.join(DATA_DIR, "coverart") 26 ICON_PATH = os.path.join(GLib.get_user_data_dir(), "icons", "hicolor", "32x32", "apps") 27 28 sio = SettingsIO(CONFIG_FILE) 29 PGA_DB = sio.read_setting("pga_path") or os.path.join(DATA_DIR, "pga.db") 30 SITE_URL = sio.read_setting("website") or "https://lutris.net" 31 32 INSTALLER_URL = SITE_URL + "/api/installers/%s" 33 # XXX change this, should query on the installer, not the game. 34 INSTALLER_REVISION_URL = SITE_URL + "/api/installers/games/%s/revisions/%s" 35 GAME_URL = SITE_URL + "/games/%s/" 36 ICON_URL = SITE_URL + "/games/icon/%s.png" 37 BANNER_URL = SITE_URL + "/games/banner/%s.jpg" 38 RUNTIME_URL = "https://lutris.net/api/runtime" 39 40 read_setting = sio.read_setting 41 write_setting = sio.write_setting 42 [end of lutris/settings.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/lutris/settings.py b/lutris/settings.py --- a/lutris/settings.py +++ b/lutris/settings.py @@ -23,7 +23,7 @@ TMP_PATH = os.path.join(CACHE_DIR, "tmp") BANNER_PATH = os.path.join(DATA_DIR, "banners") COVERART_PATH = os.path.join(DATA_DIR, "coverart") -ICON_PATH = os.path.join(GLib.get_user_data_dir(), "icons", "hicolor", "32x32", "apps") +ICON_PATH = os.path.join(GLib.get_user_data_dir(), "icons", "hicolor", "128x128", "apps") sio = SettingsIO(CONFIG_FILE) PGA_DB = sio.read_setting("pga_path") or os.path.join(DATA_DIR, "pga.db")
{"golden_diff": "diff --git a/lutris/settings.py b/lutris/settings.py\n--- a/lutris/settings.py\n+++ b/lutris/settings.py\n@@ -23,7 +23,7 @@\n TMP_PATH = os.path.join(CACHE_DIR, \"tmp\")\n BANNER_PATH = os.path.join(DATA_DIR, \"banners\")\n COVERART_PATH = os.path.join(DATA_DIR, \"coverart\")\n-ICON_PATH = os.path.join(GLib.get_user_data_dir(), \"icons\", \"hicolor\", \"32x32\", \"apps\")\n+ICON_PATH = os.path.join(GLib.get_user_data_dir(), \"icons\", \"hicolor\", \"128x128\", \"apps\")\n \n sio = SettingsIO(CONFIG_FILE)\n PGA_DB = sio.read_setting(\"pga_path\") or os.path.join(DATA_DIR, \"pga.db\")\n", "issue": "Higher resolution icons are still saved in 32x32 directory\nDespite Lutris bumping its icon size to 128x128 (currently it's still 64x64 as bump to 128x128 hasn't been deployed yet), it still saves the icons into `icons/hicolor/32x32`.\r\nIt should probably not do that and save it in proper 128x128 location instead.\n", "before_files": [{"content": "\"\"\"Internal settings.\"\"\"\nimport os\nfrom gi.repository import GLib\nfrom lutris.util.settings import SettingsIO\nfrom lutris import __version__\n\nPROJECT = \"Lutris\"\nVERSION = __version__\nCOPYRIGHT = \"(c) 2010-2019 Lutris Gaming Platform\"\nAUTHORS = [\n \"The Lutris team\"\n]\n\n# Paths\nCONFIG_DIR = os.path.join(GLib.get_user_config_dir(), \"lutris\")\nCONFIG_FILE = os.path.join(CONFIG_DIR, \"lutris.conf\")\nDATA_DIR = os.path.join(GLib.get_user_data_dir(), \"lutris\")\nRUNNER_DIR = os.path.join(DATA_DIR, \"runners\")\nRUNTIME_DIR = os.path.join(DATA_DIR, \"runtime\")\nCACHE_DIR = os.path.join(GLib.get_user_cache_dir(), \"lutris\")\nGAME_CONFIG_DIR = os.path.join(CONFIG_DIR, \"games\")\n\nTMP_PATH = os.path.join(CACHE_DIR, \"tmp\")\nBANNER_PATH = os.path.join(DATA_DIR, \"banners\")\nCOVERART_PATH = os.path.join(DATA_DIR, \"coverart\")\nICON_PATH = os.path.join(GLib.get_user_data_dir(), \"icons\", \"hicolor\", \"32x32\", \"apps\")\n\nsio = SettingsIO(CONFIG_FILE)\nPGA_DB = sio.read_setting(\"pga_path\") or os.path.join(DATA_DIR, \"pga.db\")\nSITE_URL = sio.read_setting(\"website\") or \"https://lutris.net\"\n\nINSTALLER_URL = SITE_URL + \"/api/installers/%s\"\n# XXX change this, should query on the installer, not the game.\nINSTALLER_REVISION_URL = SITE_URL + \"/api/installers/games/%s/revisions/%s\"\nGAME_URL = SITE_URL + \"/games/%s/\"\nICON_URL = SITE_URL + \"/games/icon/%s.png\"\nBANNER_URL = SITE_URL + \"/games/banner/%s.jpg\"\nRUNTIME_URL = \"https://lutris.net/api/runtime\"\n\nread_setting = sio.read_setting\nwrite_setting = sio.write_setting\n", "path": "lutris/settings.py"}]}
1,135
181
gh_patches_debug_3319
rasdani/github-patches
git_diff
spack__spack-3825
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> `scorep` does not work on Darwin The Score-P package requires a case-sensitive file system. This is described in the install notes, and I confirmed with the developers. I suggest to disable Score-P on Darwin to avoid others having to track down this problem in the same way I had to. Alternatively, we can add an install-time test whether the build or install directories are on a case-insensitive file system. </issue> <code> [start of var/spack/repos/builtin/packages/scorep/package.py] 1 ############################################################################## 2 # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. 3 # Produced at the Lawrence Livermore National Laboratory. 4 # 5 # This file is part of Spack. 6 # Created by Todd Gamblin, [email protected], All rights reserved. 7 # LLNL-CODE-647188 8 # 9 # For details, see https://github.com/llnl/spack 10 # Please also see the LICENSE file for our notice and the LGPL. 11 # 12 # This program is free software; you can redistribute it and/or modify 13 # it under the terms of the GNU Lesser General Public License (as 14 # published by the Free Software Foundation) version 2.1, February 1999. 15 # 16 # This program is distributed in the hope that it will be useful, but 17 # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF 18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and 19 # conditions of the GNU Lesser General Public License for more details. 20 # 21 # You should have received a copy of the GNU Lesser General Public 22 # License along with this program; if not, write to the Free Software 23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA 24 ############################################################################## 25 from spack import * 26 27 28 class Scorep(AutotoolsPackage): 29 """The Score-P measurement infrastructure is a highly scalable and 30 easy-to-use tool suite for profiling, event tracing, and online analysis 31 of HPC applications. 32 """ 33 34 homepage = "http://www.vi-hps.org/projects/score-p" 35 url = "http://www.vi-hps.org/upload/packages/scorep/scorep-2.0.2.tar.gz" 36 37 version('3.0', '44da8beaa3f71436a5f6fe51938aab2f') 38 version('2.0.2', '8f00e79e1b5b96e511c5ebecd10b2888') 39 version('1.4.2', '3b9a042b13bdd5836452354e6567f71e') 40 version('1.3', '9db6f957b7f51fa01377a9537867a55c') 41 42 ########## 43 # Dependencies for SCORE-P are quite tight. See the homepage for more 44 # information. 45 # SCOREP 3 46 depends_on('otf2@2:', when='@3:') 47 depends_on('opari2@2:', when='@3:') 48 depends_on('[email protected]:', when='@3:') 49 # SCOREP 2.0.2 50 depends_on('[email protected]', when='@2.0.2') 51 depends_on('[email protected]', when='@2.0.2') 52 depends_on('[email protected]:4.4', when='@2.0.2') 53 # SCOREP 1.4.2 54 depends_on('[email protected]:1.6', when='@1.4.2') 55 depends_on('[email protected]', when='@1.4.2') 56 depends_on('[email protected]:4.4', when='@1.4.2') 57 # SCOREP 1.3 58 depends_on("[email protected]", when='@1.3') 59 depends_on("[email protected]", when='@1.3') 60 depends_on("[email protected]", when='@1.3') 61 ########## 62 63 depends_on("mpi") 64 depends_on("papi") 65 66 variant('shmem', default=False, description='Enable shmem tracing') 67 68 def configure_args(self): 69 spec = self.spec 70 71 config_args = [ 72 "--with-otf2=%s" % spec['otf2'].prefix.bin, 73 "--with-opari2=%s" % spec['opari2'].prefix.bin, 74 "--with-cube=%s" % spec['cube'].prefix.bin, 75 "--with-papi-header=%s" % spec['papi'].prefix.include, 76 "--with-papi-lib=%s" % spec['papi'].prefix.lib, 77 "--enable-shared", 78 ] 79 80 if '~shmem' in spec: 81 config_args.append("--without-shmem") 82 83 config_args.extend(["CFLAGS=-fPIC", "CXXFLAGS=-fPIC"]) 84 return config_args 85 [end of var/spack/repos/builtin/packages/scorep/package.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py --- a/var/spack/repos/builtin/packages/scorep/package.py +++ b/var/spack/repos/builtin/packages/scorep/package.py @@ -65,6 +65,11 @@ variant('shmem', default=False, description='Enable shmem tracing') + # Score-P requires a case-sensitive file system, and therefore + # does not work on macOS + # https://github.com/LLNL/spack/issues/1609 + conflicts('platform=darwin') + def configure_args(self): spec = self.spec
{"golden_diff": "diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py\n--- a/var/spack/repos/builtin/packages/scorep/package.py\n+++ b/var/spack/repos/builtin/packages/scorep/package.py\n@@ -65,6 +65,11 @@\n \n variant('shmem', default=False, description='Enable shmem tracing')\n \n+ # Score-P requires a case-sensitive file system, and therefore\n+ # does not work on macOS\n+ # https://github.com/LLNL/spack/issues/1609\n+ conflicts('platform=darwin')\n+\n def configure_args(self):\n spec = self.spec\n", "issue": "`scorep` does not work on Darwin\nThe Score-P package requires a case-sensitive file system. This is described in the install notes, and I confirmed with the developers. I suggest to disable Score-P on Darwin to avoid others having to track down this problem in the same way I had to. Alternatively, we can add an install-time test whether the build or install directories are on a case-insensitive file system.\n\n", "before_files": [{"content": "##############################################################################\n# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.\n# Produced at the Lawrence Livermore National Laboratory.\n#\n# This file is part of Spack.\n# Created by Todd Gamblin, [email protected], All rights reserved.\n# LLNL-CODE-647188\n#\n# For details, see https://github.com/llnl/spack\n# Please also see the LICENSE file for our notice and the LGPL.\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License (as\n# published by the Free Software Foundation) version 2.1, February 1999.\n#\n# This program is distributed in the hope that it will be useful, but\n# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and\n# conditions of the GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public\n# License along with this program; if not, write to the Free Software\n# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA\n##############################################################################\nfrom spack import *\n\n\nclass Scorep(AutotoolsPackage):\n \"\"\"The Score-P measurement infrastructure is a highly scalable and\n easy-to-use tool suite for profiling, event tracing, and online analysis\n of HPC applications.\n \"\"\"\n\n homepage = \"http://www.vi-hps.org/projects/score-p\"\n url = \"http://www.vi-hps.org/upload/packages/scorep/scorep-2.0.2.tar.gz\"\n\n version('3.0', '44da8beaa3f71436a5f6fe51938aab2f')\n version('2.0.2', '8f00e79e1b5b96e511c5ebecd10b2888')\n version('1.4.2', '3b9a042b13bdd5836452354e6567f71e')\n version('1.3', '9db6f957b7f51fa01377a9537867a55c')\n\n ##########\n # Dependencies for SCORE-P are quite tight. See the homepage for more\n # information.\n # SCOREP 3\n depends_on('otf2@2:', when='@3:')\n depends_on('opari2@2:', when='@3:')\n depends_on('[email protected]:', when='@3:')\n # SCOREP 2.0.2\n depends_on('[email protected]', when='@2.0.2')\n depends_on('[email protected]', when='@2.0.2')\n depends_on('[email protected]:4.4', when='@2.0.2')\n # SCOREP 1.4.2\n depends_on('[email protected]:1.6', when='@1.4.2')\n depends_on('[email protected]', when='@1.4.2')\n depends_on('[email protected]:4.4', when='@1.4.2')\n # SCOREP 1.3\n depends_on(\"[email protected]\", when='@1.3')\n depends_on(\"[email protected]\", when='@1.3')\n depends_on(\"[email protected]\", when='@1.3')\n ##########\n\n depends_on(\"mpi\")\n depends_on(\"papi\")\n\n variant('shmem', default=False, description='Enable shmem tracing')\n\n def configure_args(self):\n spec = self.spec\n\n config_args = [\n \"--with-otf2=%s\" % spec['otf2'].prefix.bin,\n \"--with-opari2=%s\" % spec['opari2'].prefix.bin,\n \"--with-cube=%s\" % spec['cube'].prefix.bin,\n \"--with-papi-header=%s\" % spec['papi'].prefix.include,\n \"--with-papi-lib=%s\" % spec['papi'].prefix.lib,\n \"--enable-shared\",\n ]\n\n if '~shmem' in spec:\n config_args.append(\"--without-shmem\")\n\n config_args.extend([\"CFLAGS=-fPIC\", \"CXXFLAGS=-fPIC\"])\n return config_args\n", "path": "var/spack/repos/builtin/packages/scorep/package.py"}]}
1,836
157
gh_patches_debug_29093
rasdani/github-patches
git_diff
TheAlgorithms__Python-1093
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> decimal_to_binary() should return identical values as bin() https://github.com/TheAlgorithms/Python/blob/7b267e5e4f8ccb72dd58fcf0057642fd62a36bdf/conversions/decimal_to_binary.py#L4 Please change __decimal_to_binary()__ to return identical values as the Python builtin [__bin()__](https://docs.python.org/3/library/functions.html#bin). With doctests to prove it please. @PatOnTheBack @Corruption13 </issue> <code> [start of conversions/decimal_to_binary.py] 1 """Convert a Decimal Number to a Binary Number.""" 2 3 4 def decimal_to_binary(num): 5 """Convert a Decimal Number to a Binary Number.""" 6 binary = [] 7 while num > 0: 8 binary.insert(0, num % 2) 9 num >>= 1 10 return "".join(str(e) for e in binary) 11 12 13 def main(): 14 """Print binary equivelents of decimal numbers.""" 15 print("\n2 in binary is:") 16 print(decimal_to_binary(2)) # = 10 17 print("\n7 in binary is:") 18 print(decimal_to_binary(7)) # = 111 19 print("\n35 in binary is:") 20 print(decimal_to_binary(35)) # = 100011 21 print("\n") 22 23 24 if __name__ == '__main__': 25 main() 26 [end of conversions/decimal_to_binary.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/conversions/decimal_to_binary.py b/conversions/decimal_to_binary.py --- a/conversions/decimal_to_binary.py +++ b/conversions/decimal_to_binary.py @@ -2,24 +2,57 @@ def decimal_to_binary(num): - """Convert a Decimal Number to a Binary Number.""" + + """ + Convert a Integer Decimal Number to a Binary Number as str. + >>> decimal_to_binary(0) + '0b0' + >>> decimal_to_binary(2) + '0b10' + >>> decimal_to_binary(7) + '0b111' + >>> decimal_to_binary(35) + '0b100011' + >>> # negatives work too + >>> decimal_to_binary(-2) + '-0b10' + >>> # other floats will error + >>> decimal_to_binary(16.16) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + TypeError: 'float' object cannot be interpreted as an integer + >>> # strings will error as well + >>> decimal_to_binary('0xfffff') # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + TypeError: 'str' object cannot be interpreted as an integer + """ + + if type(num) == float: + raise TypeError("'float' object cannot be interpreted as an integer") + if type(num) == str: + raise TypeError("'str' object cannot be interpreted as an integer") + + if num == 0: + return "0b0" + + negative = False + + if num < 0: + negative = True + num = -num + binary = [] while num > 0: binary.insert(0, num % 2) num >>= 1 - return "".join(str(e) for e in binary) + if negative: + return "-0b" + "".join(str(e) for e in binary) -def main(): - """Print binary equivelents of decimal numbers.""" - print("\n2 in binary is:") - print(decimal_to_binary(2)) # = 10 - print("\n7 in binary is:") - print(decimal_to_binary(7)) # = 111 - print("\n35 in binary is:") - print(decimal_to_binary(35)) # = 100011 - print("\n") + return "0b" + "".join(str(e) for e in binary) -if __name__ == '__main__': - main() +if __name__ == "__main__": + import doctest + doctest.testmod()
{"golden_diff": "diff --git a/conversions/decimal_to_binary.py b/conversions/decimal_to_binary.py\n--- a/conversions/decimal_to_binary.py\n+++ b/conversions/decimal_to_binary.py\n@@ -2,24 +2,57 @@\n \n \n def decimal_to_binary(num):\n- \"\"\"Convert a Decimal Number to a Binary Number.\"\"\"\n+\n+ \"\"\"\n+ Convert a Integer Decimal Number to a Binary Number as str.\n+ >>> decimal_to_binary(0)\n+ '0b0'\n+ >>> decimal_to_binary(2)\n+ '0b10'\n+ >>> decimal_to_binary(7)\n+ '0b111'\n+ >>> decimal_to_binary(35)\n+ '0b100011'\n+ >>> # negatives work too\n+ >>> decimal_to_binary(-2)\n+ '-0b10'\n+ >>> # other floats will error\n+ >>> decimal_to_binary(16.16) # doctest: +ELLIPSIS\n+ Traceback (most recent call last):\n+ ...\n+ TypeError: 'float' object cannot be interpreted as an integer\n+ >>> # strings will error as well\n+ >>> decimal_to_binary('0xfffff') # doctest: +ELLIPSIS\n+ Traceback (most recent call last):\n+ ...\n+ TypeError: 'str' object cannot be interpreted as an integer\n+ \"\"\"\n+\n+ if type(num) == float:\n+ raise TypeError(\"'float' object cannot be interpreted as an integer\")\n+ if type(num) == str:\n+ raise TypeError(\"'str' object cannot be interpreted as an integer\")\n+\n+ if num == 0:\n+ return \"0b0\"\n+\n+ negative = False\n+\n+ if num < 0:\n+ negative = True\n+ num = -num\n+\n binary = []\n while num > 0:\n binary.insert(0, num % 2)\n num >>= 1\n- return \"\".join(str(e) for e in binary)\n \n+ if negative:\n+ return \"-0b\" + \"\".join(str(e) for e in binary)\n \n-def main():\n- \"\"\"Print binary equivelents of decimal numbers.\"\"\"\n- print(\"\\n2 in binary is:\")\n- print(decimal_to_binary(2)) # = 10\n- print(\"\\n7 in binary is:\")\n- print(decimal_to_binary(7)) # = 111\n- print(\"\\n35 in binary is:\")\n- print(decimal_to_binary(35)) # = 100011\n- print(\"\\n\")\n+ return \"0b\" + \"\".join(str(e) for e in binary)\n \n \n-if __name__ == '__main__':\n- main()\n+if __name__ == \"__main__\":\n+ import doctest\n+ doctest.testmod()\n", "issue": "decimal_to_binary() should return identical values as bin()\nhttps://github.com/TheAlgorithms/Python/blob/7b267e5e4f8ccb72dd58fcf0057642fd62a36bdf/conversions/decimal_to_binary.py#L4\r\n\r\nPlease change __decimal_to_binary()__ to return identical values as the Python builtin [__bin()__](https://docs.python.org/3/library/functions.html#bin). With doctests to prove it please.\r\n\r\n@PatOnTheBack @Corruption13\n", "before_files": [{"content": "\"\"\"Convert a Decimal Number to a Binary Number.\"\"\"\n\n\ndef decimal_to_binary(num):\n \"\"\"Convert a Decimal Number to a Binary Number.\"\"\"\n binary = []\n while num > 0:\n binary.insert(0, num % 2)\n num >>= 1\n return \"\".join(str(e) for e in binary)\n\n\ndef main():\n \"\"\"Print binary equivelents of decimal numbers.\"\"\"\n print(\"\\n2 in binary is:\")\n print(decimal_to_binary(2)) # = 10\n print(\"\\n7 in binary is:\")\n print(decimal_to_binary(7)) # = 111\n print(\"\\n35 in binary is:\")\n print(decimal_to_binary(35)) # = 100011\n print(\"\\n\")\n\n\nif __name__ == '__main__':\n main()\n", "path": "conversions/decimal_to_binary.py"}]}
886
629
gh_patches_debug_1651
rasdani/github-patches
git_diff
deeppavlov__DeepPavlov-76
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> What is "'Chainer' object has no attribute 'infer' 2018-03-04 14:09:23,638 (util.py:64 WorkerThread2) ERROR - TeleBot: "AttributeError occurred, args=("'Chainer' object has no attribute 'infer'",) Traceback (most recent call last): File "/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/util.py", line 58, in run task(*args, **kwargs) File "/Users/developer/Project/DeepPavlov/telegram_utils/telegram_ui.py", line 48, in handle_inference pred = model.infer(context) AttributeError: 'Chainer' object has no attribute 'infer' " 2018-03-04 14:09:23.638 ERROR in 'TeleBot'['util'] at line 64: AttributeError occurred, args=("'Chainer' object has no attribute 'infer'",) Traceback (most recent call last): File "/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/util.py", line 58, in run task(*args, **kwargs) File "/Users/developer/Project/DeepPavlov/telegram_utils/telegram_ui.py", line 48, in handle_inference pred = model.infer(context) AttributeError: 'Chainer' object has no attribute 'infer' Traceback (most recent call last): File "deep.py", line 60, in <module> main() File "deep.py", line 56, in main interact_model_by_telegram(pipeline_config_path, token) File "/Users/developer/Project/DeepPavlov/telegram_utils/telegram_ui.py", line 58, in interact_model_by_telegram init_bot_for_model(token, model) File "/Users/developer/Project/DeepPavlov/telegram_utils/telegram_ui.py", line 52, in init_bot_for_model bot.polling() File "/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/__init__.py", line 264, in polling self.__threaded_polling(none_stop, interval, timeout) File "/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/__init__.py", line 288, in __threaded_polling self.worker_pool.raise_exceptions() File "/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/util.py", line 107, in raise_exceptions six.reraise(self.exc_info[0], self.exc_info[1], self.exc_info[2]) File "/Users/developer/DeepPavlov/lib/python3.6/site-packages/six.py", line 693, in reraise raise value File "/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/util.py", line 58, in run task(*args, **kwargs) File "/Users/developer/Project/DeepPavlov/telegram_utils/telegram_ui.py", line 48, in handle_inference pred = model.infer(context) AttributeError: 'Chainer' object has no attribute 'infer' </issue> <code> [start of telegram_utils/telegram_ui.py] 1 """ 2 Copyright 2017 Neural Networks and Deep Learning lab, MIPT 3 4 Licensed under the Apache License, Version 2.0 (the "License"); 5 you may not use this file except in compliance with the License. 6 You may obtain a copy of the License at 7 8 http://www.apache.org/licenses/LICENSE-2.0 9 10 Unless required by applicable law or agreed to in writing, software 11 distributed under the License is distributed on an "AS IS" BASIS, 12 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 See the License for the specific language governing permissions and 14 limitations under the License. 15 """ 16 import telebot 17 18 from deeppavlov.core.common.file import read_json 19 from deeppavlov.core.commands.infer import build_model_from_config 20 21 22 def init_bot_for_model(token, model): 23 bot = telebot.TeleBot(token) 24 25 model_name = type(model).__name__ 26 models_info = read_json('../telegram_utils/models_info.json') 27 model_info = models_info[model_name] if model_name in models_info else models_info['@default'] 28 29 @bot.message_handler(commands=['start']) 30 def send_start_message(message): 31 chat_id = message.chat.id 32 out_message = model_info['start_message'] 33 if hasattr(model, 'reset'): 34 model.reset() 35 bot.send_message(chat_id, out_message) 36 37 @bot.message_handler(commands=['help']) 38 def send_help_message(message): 39 chat_id = message.chat.id 40 out_message = model_info['help_message'] 41 bot.send_message(chat_id, out_message) 42 43 @bot.message_handler() 44 def handle_inference(message): 45 chat_id = message.chat.id 46 context = message.text 47 48 pred = model.infer(context) 49 reply_message = str(pred) 50 bot.send_message(chat_id, reply_message) 51 52 bot.polling() 53 54 55 def interact_model_by_telegram(config_path, token): 56 config = read_json(config_path) 57 model = build_model_from_config(config) 58 init_bot_for_model(token, model) 59 [end of telegram_utils/telegram_ui.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/telegram_utils/telegram_ui.py b/telegram_utils/telegram_ui.py --- a/telegram_utils/telegram_ui.py +++ b/telegram_utils/telegram_ui.py @@ -45,7 +45,7 @@ chat_id = message.chat.id context = message.text - pred = model.infer(context) + pred = model(context) reply_message = str(pred) bot.send_message(chat_id, reply_message)
{"golden_diff": "diff --git a/telegram_utils/telegram_ui.py b/telegram_utils/telegram_ui.py\n--- a/telegram_utils/telegram_ui.py\n+++ b/telegram_utils/telegram_ui.py\n@@ -45,7 +45,7 @@\n chat_id = message.chat.id\n context = message.text\n \n- pred = model.infer(context)\n+ pred = model(context)\n reply_message = str(pred)\n bot.send_message(chat_id, reply_message)\n", "issue": "What is \"'Chainer' object has no attribute 'infer'\n2018-03-04 14:09:23,638 (util.py:64 WorkerThread2) ERROR - TeleBot: \"AttributeError occurred, args=(\"'Chainer' object has no attribute 'infer'\",)\r\nTraceback (most recent call last):\r\n File \"/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/util.py\", line 58, in run\r\n task(*args, **kwargs)\r\n File \"/Users/developer/Project/DeepPavlov/telegram_utils/telegram_ui.py\", line 48, in handle_inference\r\n pred = model.infer(context)\r\nAttributeError: 'Chainer' object has no attribute 'infer'\r\n\"\r\n2018-03-04 14:09:23.638 ERROR in 'TeleBot'['util'] at line 64: AttributeError occurred, args=(\"'Chainer' object has no attribute 'infer'\",)\r\nTraceback (most recent call last):\r\n File \"/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/util.py\", line 58, in run\r\n task(*args, **kwargs)\r\n File \"/Users/developer/Project/DeepPavlov/telegram_utils/telegram_ui.py\", line 48, in handle_inference\r\n pred = model.infer(context)\r\nAttributeError: 'Chainer' object has no attribute 'infer'\r\n\r\nTraceback (most recent call last):\r\n File \"deep.py\", line 60, in <module>\r\n main()\r\n File \"deep.py\", line 56, in main\r\n interact_model_by_telegram(pipeline_config_path, token)\r\n File \"/Users/developer/Project/DeepPavlov/telegram_utils/telegram_ui.py\", line 58, in interact_model_by_telegram\r\n init_bot_for_model(token, model)\r\n File \"/Users/developer/Project/DeepPavlov/telegram_utils/telegram_ui.py\", line 52, in init_bot_for_model\r\n bot.polling()\r\n File \"/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/__init__.py\", line 264, in polling\r\n self.__threaded_polling(none_stop, interval, timeout)\r\n File \"/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/__init__.py\", line 288, in __threaded_polling\r\n self.worker_pool.raise_exceptions()\r\n File \"/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/util.py\", line 107, in raise_exceptions\r\n six.reraise(self.exc_info[0], self.exc_info[1], self.exc_info[2])\r\n File \"/Users/developer/DeepPavlov/lib/python3.6/site-packages/six.py\", line 693, in reraise\r\n raise value\r\n File \"/Users/developer/DeepPavlov/lib/python3.6/site-packages/telebot/util.py\", line 58, in run\r\n task(*args, **kwargs)\r\n File \"/Users/developer/Project/DeepPavlov/telegram_utils/telegram_ui.py\", line 48, in handle_inference\r\n pred = model.infer(context)\r\nAttributeError: 'Chainer' object has no attribute 'infer'\r\n\n", "before_files": [{"content": "\"\"\"\nCopyright 2017 Neural Networks and Deep Learning lab, MIPT\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n\"\"\"\nimport telebot\n\nfrom deeppavlov.core.common.file import read_json\nfrom deeppavlov.core.commands.infer import build_model_from_config\n\n\ndef init_bot_for_model(token, model):\n bot = telebot.TeleBot(token)\n\n model_name = type(model).__name__\n models_info = read_json('../telegram_utils/models_info.json')\n model_info = models_info[model_name] if model_name in models_info else models_info['@default']\n\n @bot.message_handler(commands=['start'])\n def send_start_message(message):\n chat_id = message.chat.id\n out_message = model_info['start_message']\n if hasattr(model, 'reset'):\n model.reset()\n bot.send_message(chat_id, out_message)\n\n @bot.message_handler(commands=['help'])\n def send_help_message(message):\n chat_id = message.chat.id\n out_message = model_info['help_message']\n bot.send_message(chat_id, out_message)\n\n @bot.message_handler()\n def handle_inference(message):\n chat_id = message.chat.id\n context = message.text\n\n pred = model.infer(context)\n reply_message = str(pred)\n bot.send_message(chat_id, reply_message)\n\n bot.polling()\n\n\ndef interact_model_by_telegram(config_path, token):\n config = read_json(config_path)\n model = build_model_from_config(config)\n init_bot_for_model(token, model)\n", "path": "telegram_utils/telegram_ui.py"}]}
1,819
100
gh_patches_debug_14730
rasdani/github-patches
git_diff
scrapy__scrapy-4775
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Document link extractor usage outside CrawlSpider rules https://docs.scrapy.org/en/latest/topics/link-extractors.html mentions that link extractors may be used outside `CrawlSpider`, but it does not go into detail on how to do that. Also, there are broken references to `scrapy.link.Link`, we should provide reference documentation for that class. </issue> <code> [start of scrapy/link.py] 1 """ 2 This module defines the Link object used in Link extractors. 3 4 For actual link extractors implementation see scrapy.linkextractors, or 5 its documentation in: docs/topics/link-extractors.rst 6 """ 7 8 9 class Link: 10 """Link objects represent an extracted link by the LinkExtractor.""" 11 12 __slots__ = ['url', 'text', 'fragment', 'nofollow'] 13 14 def __init__(self, url, text='', fragment='', nofollow=False): 15 if not isinstance(url, str): 16 got = url.__class__.__name__ 17 raise TypeError(f"Link urls must be str objects, got {got}") 18 self.url = url 19 self.text = text 20 self.fragment = fragment 21 self.nofollow = nofollow 22 23 def __eq__(self, other): 24 return ( 25 self.url == other.url 26 and self.text == other.text 27 and self.fragment == other.fragment 28 and self.nofollow == other.nofollow 29 ) 30 31 def __hash__(self): 32 return hash(self.url) ^ hash(self.text) ^ hash(self.fragment) ^ hash(self.nofollow) 33 34 def __repr__(self): 35 return ( 36 f'Link(url={self.url!r}, text={self.text!r}, ' 37 f'fragment={self.fragment!r}, nofollow={self.nofollow!r})' 38 ) 39 [end of scrapy/link.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/scrapy/link.py b/scrapy/link.py --- a/scrapy/link.py +++ b/scrapy/link.py @@ -7,7 +7,22 @@ class Link: - """Link objects represent an extracted link by the LinkExtractor.""" + """Link objects represent an extracted link by the LinkExtractor. + + Using the anchor tag sample below to illustrate the parameters:: + + <a href="https://example.com/nofollow.html#foo" rel="nofollow">Dont follow this one</a> + + :param url: the absolute url being linked to in the anchor tag. + From the sample, this is ``https://example.com/nofollow.html``. + + :param text: the text in the anchor tag. From the sample, this is ``Dont follow this one``. + + :param fragment: the part of the url after the hash symbol. From the sample, this is ``foo``. + + :param nofollow: an indication of the presence or absence of a nofollow value in the ``rel`` attribute + of the anchor tag. + """ __slots__ = ['url', 'text', 'fragment', 'nofollow']
{"golden_diff": "diff --git a/scrapy/link.py b/scrapy/link.py\n--- a/scrapy/link.py\n+++ b/scrapy/link.py\n@@ -7,7 +7,22 @@\n \n \n class Link:\n- \"\"\"Link objects represent an extracted link by the LinkExtractor.\"\"\"\n+ \"\"\"Link objects represent an extracted link by the LinkExtractor.\n+\n+ Using the anchor tag sample below to illustrate the parameters::\n+\n+ <a href=\"https://example.com/nofollow.html#foo\" rel=\"nofollow\">Dont follow this one</a>\n+\n+ :param url: the absolute url being linked to in the anchor tag.\n+ From the sample, this is ``https://example.com/nofollow.html``.\n+\n+ :param text: the text in the anchor tag. From the sample, this is ``Dont follow this one``.\n+\n+ :param fragment: the part of the url after the hash symbol. From the sample, this is ``foo``.\n+\n+ :param nofollow: an indication of the presence or absence of a nofollow value in the ``rel`` attribute\n+ of the anchor tag.\n+ \"\"\"\n \n __slots__ = ['url', 'text', 'fragment', 'nofollow']\n", "issue": "Document link extractor usage outside CrawlSpider rules\nhttps://docs.scrapy.org/en/latest/topics/link-extractors.html mentions that link extractors may be used outside `CrawlSpider`, but it does not go into detail on how to do that.\r\n\r\nAlso, there are broken references to `scrapy.link.Link`, we should provide reference documentation for that class.\n", "before_files": [{"content": "\"\"\"\nThis module defines the Link object used in Link extractors.\n\nFor actual link extractors implementation see scrapy.linkextractors, or\nits documentation in: docs/topics/link-extractors.rst\n\"\"\"\n\n\nclass Link:\n \"\"\"Link objects represent an extracted link by the LinkExtractor.\"\"\"\n\n __slots__ = ['url', 'text', 'fragment', 'nofollow']\n\n def __init__(self, url, text='', fragment='', nofollow=False):\n if not isinstance(url, str):\n got = url.__class__.__name__\n raise TypeError(f\"Link urls must be str objects, got {got}\")\n self.url = url\n self.text = text\n self.fragment = fragment\n self.nofollow = nofollow\n\n def __eq__(self, other):\n return (\n self.url == other.url\n and self.text == other.text\n and self.fragment == other.fragment\n and self.nofollow == other.nofollow\n )\n\n def __hash__(self):\n return hash(self.url) ^ hash(self.text) ^ hash(self.fragment) ^ hash(self.nofollow)\n\n def __repr__(self):\n return (\n f'Link(url={self.url!r}, text={self.text!r}, '\n f'fragment={self.fragment!r}, nofollow={self.nofollow!r})'\n )\n", "path": "scrapy/link.py"}]}
964
263
gh_patches_debug_38824
rasdani/github-patches
git_diff
liqd__a4-opin-399
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Use Generic Relation everywhere where generic fks are used </issue> <code> [start of euth/documents/serializers.py] 1 from rest_framework import serializers 2 3 from .models import Document, Paragraph 4 5 6 class ParagraphSerializer(serializers.Serializer): 7 id = serializers.IntegerField(required=False) 8 name = serializers.CharField( 9 required=False, 10 max_length=Paragraph._meta.get_field('name').max_length 11 ) 12 weight = serializers.IntegerField() 13 text = serializers.CharField() 14 15 16 class DocumentSerializer(serializers.ModelSerializer): 17 paragraphs = ParagraphSerializer(many=True, partial=True) 18 19 class Meta: 20 model = Document 21 exclude = ('creator',) 22 23 def create(self, validated_data): 24 paragraphs = validated_data.pop('paragraphs') 25 user = self.context['request'].user 26 document = Document.objects.create(creator=user, **validated_data) 27 28 for paragraph in paragraphs: 29 Paragraph.objects.create(document=document, **paragraph) 30 31 return document 32 33 def update(self, instance, validated_data): 34 instance.name = validated_data['name'] 35 instance.save() 36 paragraphs = validated_data.pop('paragraphs') 37 38 paragraph_ids = [item['id'] for item in paragraphs if 'id' in item] 39 instance.paragraphs.exclude(id__in=paragraph_ids).delete() 40 41 for paragraph in paragraphs: 42 paragraph['document'] = instance 43 if 'id' in paragraph: 44 instance.paragraphs.filter(id=paragraph['id'])\ 45 .update(**paragraph) 46 else: 47 instance.paragraphs.create(**paragraph) 48 49 return instance 50 [end of euth/documents/serializers.py] [start of euth/documents/models.py] 1 from ckeditor.fields import RichTextField 2 from django.contrib.contenttypes.models import ContentType 3 from django.core.exceptions import ObjectDoesNotExist, ValidationError 4 from django.db import models 5 from django.utils.functional import cached_property 6 from django.utils.translation import ugettext_lazy as _ 7 8 from contrib.transforms import html_transforms 9 from euth.comments import models as comment_models 10 from euth.contrib import base_models 11 from euth.modules import models as module_models 12 13 14 class Document(module_models.Item): 15 name = models.CharField(max_length=120) 16 17 def __str__(self): 18 return "{}_document_{}".format(str(self.module), self.pk) 19 20 def clean(self, *args, **kwargs): 21 if not self.pk: 22 try: 23 Document.objects.get(module=self.module) 24 raise ValidationError( 25 _('Document for that module already exists')) 26 except ObjectDoesNotExist: 27 super().clean(*args, **kwargs) 28 super().clean(*args, **kwargs) 29 30 @cached_property 31 def paragraphs_sorted(self): 32 return self.paragraphs.all().order_by('weight') 33 34 @cached_property 35 def comments(self): 36 contenttype = ContentType.objects.get_for_model(self) 37 pk = self.id 38 comments = comment_models.Comment.objects.all().filter( 39 content_type=contenttype, object_pk=pk) 40 return comments 41 42 43 class Paragraph(base_models.TimeStampedModel): 44 name = models.CharField(max_length=120, blank=True) 45 text = RichTextField() 46 weight = models.PositiveIntegerField() 47 document = models.ForeignKey(Document, 48 on_delete=models.CASCADE, 49 related_name='paragraphs') 50 51 class Meta: 52 ordering = ('weight',) 53 54 def __str__(self): 55 return "{}_paragraph_{}".format(str(self.document), self.weight) 56 57 def save(self, *args, **kwargs): 58 self.text = html_transforms.clean_html_field( 59 self.text) 60 super().save(*args, **kwargs) 61 62 @cached_property 63 def comments(self): 64 contenttype = ContentType.objects.get_for_model(self) 65 pk = self.id 66 comments = comment_models.Comment.objects.all().filter( 67 content_type=contenttype, object_pk=pk) 68 return comments 69 70 @cached_property 71 def project(self): 72 return self.document.project 73 [end of euth/documents/models.py] [start of euth/documents/phases.py] 1 from django.utils.translation import ugettext_lazy as _ 2 3 from euth import phases 4 5 from . import apps, models, views 6 7 8 class CreateDocumentPhase(phases.PhaseContent): 9 """ 10 Allows no interaction for participants, only 11 creation for moderators. 12 """ 13 app = apps.DocumentConfig.label 14 phase = 'create_document' 15 view = views.DocumentCreateView 16 weight = 30 17 18 name = _('Create document phase') 19 module_name = _('commenting text') 20 description = _('Create text for the project.') 21 22 features = {} 23 24 25 phases.content.register(CreateDocumentPhase()) 26 27 28 class CommentPhase(phases.PhaseContent): 29 """ 30 Allows only commenting of paragraphs. 31 """ 32 app = apps.DocumentConfig.label 33 phase = 'comment' 34 view = views.DocumentDetailView 35 weight = 40 36 37 name = _('Comment phase') 38 module_name = _('commenting text') 39 description = _('Collect comments for the text.') 40 41 features = { 42 'comment': (models.Paragraph,), 43 } 44 45 46 phases.content.register(CommentPhase()) 47 [end of euth/documents/phases.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/euth/documents/models.py b/euth/documents/models.py --- a/euth/documents/models.py +++ b/euth/documents/models.py @@ -1,5 +1,5 @@ from ckeditor.fields import RichTextField -from django.contrib.contenttypes.models import ContentType +from django.contrib.contenttypes.fields import GenericRelation from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.db import models from django.utils.functional import cached_property @@ -27,18 +27,6 @@ super().clean(*args, **kwargs) super().clean(*args, **kwargs) - @cached_property - def paragraphs_sorted(self): - return self.paragraphs.all().order_by('weight') - - @cached_property - def comments(self): - contenttype = ContentType.objects.get_for_model(self) - pk = self.id - comments = comment_models.Comment.objects.all().filter( - content_type=contenttype, object_pk=pk) - return comments - class Paragraph(base_models.TimeStampedModel): name = models.CharField(max_length=120, blank=True) @@ -47,6 +35,9 @@ document = models.ForeignKey(Document, on_delete=models.CASCADE, related_name='paragraphs') + comments = GenericRelation(comment_models.Comment, + related_query_name='paragraph', + object_id_field='object_pk') class Meta: ordering = ('weight',) @@ -59,14 +50,6 @@ self.text) super().save(*args, **kwargs) - @cached_property - def comments(self): - contenttype = ContentType.objects.get_for_model(self) - pk = self.id - comments = comment_models.Comment.objects.all().filter( - content_type=contenttype, object_pk=pk) - return comments - @cached_property def project(self): return self.document.project diff --git a/euth/documents/phases.py b/euth/documents/phases.py --- a/euth/documents/phases.py +++ b/euth/documents/phases.py @@ -39,7 +39,7 @@ description = _('Collect comments for the text.') features = { - 'comment': (models.Paragraph,), + 'comment': (models.Paragraph, models.Document), } diff --git a/euth/documents/serializers.py b/euth/documents/serializers.py --- a/euth/documents/serializers.py +++ b/euth/documents/serializers.py @@ -7,6 +7,7 @@ id = serializers.IntegerField(required=False) name = serializers.CharField( required=False, + allow_blank=True, max_length=Paragraph._meta.get_field('name').max_length ) weight = serializers.IntegerField()
{"golden_diff": "diff --git a/euth/documents/models.py b/euth/documents/models.py\n--- a/euth/documents/models.py\n+++ b/euth/documents/models.py\n@@ -1,5 +1,5 @@\n from ckeditor.fields import RichTextField\n-from django.contrib.contenttypes.models import ContentType\n+from django.contrib.contenttypes.fields import GenericRelation\n from django.core.exceptions import ObjectDoesNotExist, ValidationError\n from django.db import models\n from django.utils.functional import cached_property\n@@ -27,18 +27,6 @@\n super().clean(*args, **kwargs)\n super().clean(*args, **kwargs)\n \n- @cached_property\n- def paragraphs_sorted(self):\n- return self.paragraphs.all().order_by('weight')\n-\n- @cached_property\n- def comments(self):\n- contenttype = ContentType.objects.get_for_model(self)\n- pk = self.id\n- comments = comment_models.Comment.objects.all().filter(\n- content_type=contenttype, object_pk=pk)\n- return comments\n-\n \n class Paragraph(base_models.TimeStampedModel):\n name = models.CharField(max_length=120, blank=True)\n@@ -47,6 +35,9 @@\n document = models.ForeignKey(Document,\n on_delete=models.CASCADE,\n related_name='paragraphs')\n+ comments = GenericRelation(comment_models.Comment,\n+ related_query_name='paragraph',\n+ object_id_field='object_pk')\n \n class Meta:\n ordering = ('weight',)\n@@ -59,14 +50,6 @@\n self.text)\n super().save(*args, **kwargs)\n \n- @cached_property\n- def comments(self):\n- contenttype = ContentType.objects.get_for_model(self)\n- pk = self.id\n- comments = comment_models.Comment.objects.all().filter(\n- content_type=contenttype, object_pk=pk)\n- return comments\n-\n @cached_property\n def project(self):\n return self.document.project\ndiff --git a/euth/documents/phases.py b/euth/documents/phases.py\n--- a/euth/documents/phases.py\n+++ b/euth/documents/phases.py\n@@ -39,7 +39,7 @@\n description = _('Collect comments for the text.')\n \n features = {\n- 'comment': (models.Paragraph,),\n+ 'comment': (models.Paragraph, models.Document),\n }\n \n \ndiff --git a/euth/documents/serializers.py b/euth/documents/serializers.py\n--- a/euth/documents/serializers.py\n+++ b/euth/documents/serializers.py\n@@ -7,6 +7,7 @@\n id = serializers.IntegerField(required=False)\n name = serializers.CharField(\n required=False,\n+ allow_blank=True,\n max_length=Paragraph._meta.get_field('name').max_length\n )\n weight = serializers.IntegerField()\n", "issue": "Use Generic Relation everywhere where generic fks are used\n\n", "before_files": [{"content": "from rest_framework import serializers\n\nfrom .models import Document, Paragraph\n\n\nclass ParagraphSerializer(serializers.Serializer):\n id = serializers.IntegerField(required=False)\n name = serializers.CharField(\n required=False,\n max_length=Paragraph._meta.get_field('name').max_length\n )\n weight = serializers.IntegerField()\n text = serializers.CharField()\n\n\nclass DocumentSerializer(serializers.ModelSerializer):\n paragraphs = ParagraphSerializer(many=True, partial=True)\n\n class Meta:\n model = Document\n exclude = ('creator',)\n\n def create(self, validated_data):\n paragraphs = validated_data.pop('paragraphs')\n user = self.context['request'].user\n document = Document.objects.create(creator=user, **validated_data)\n\n for paragraph in paragraphs:\n Paragraph.objects.create(document=document, **paragraph)\n\n return document\n\n def update(self, instance, validated_data):\n instance.name = validated_data['name']\n instance.save()\n paragraphs = validated_data.pop('paragraphs')\n\n paragraph_ids = [item['id'] for item in paragraphs if 'id' in item]\n instance.paragraphs.exclude(id__in=paragraph_ids).delete()\n\n for paragraph in paragraphs:\n paragraph['document'] = instance\n if 'id' in paragraph:\n instance.paragraphs.filter(id=paragraph['id'])\\\n .update(**paragraph)\n else:\n instance.paragraphs.create(**paragraph)\n\n return instance\n", "path": "euth/documents/serializers.py"}, {"content": "from ckeditor.fields import RichTextField\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.core.exceptions import ObjectDoesNotExist, ValidationError\nfrom django.db import models\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom contrib.transforms import html_transforms\nfrom euth.comments import models as comment_models\nfrom euth.contrib import base_models\nfrom euth.modules import models as module_models\n\n\nclass Document(module_models.Item):\n name = models.CharField(max_length=120)\n\n def __str__(self):\n return \"{}_document_{}\".format(str(self.module), self.pk)\n\n def clean(self, *args, **kwargs):\n if not self.pk:\n try:\n Document.objects.get(module=self.module)\n raise ValidationError(\n _('Document for that module already exists'))\n except ObjectDoesNotExist:\n super().clean(*args, **kwargs)\n super().clean(*args, **kwargs)\n\n @cached_property\n def paragraphs_sorted(self):\n return self.paragraphs.all().order_by('weight')\n\n @cached_property\n def comments(self):\n contenttype = ContentType.objects.get_for_model(self)\n pk = self.id\n comments = comment_models.Comment.objects.all().filter(\n content_type=contenttype, object_pk=pk)\n return comments\n\n\nclass Paragraph(base_models.TimeStampedModel):\n name = models.CharField(max_length=120, blank=True)\n text = RichTextField()\n weight = models.PositiveIntegerField()\n document = models.ForeignKey(Document,\n on_delete=models.CASCADE,\n related_name='paragraphs')\n\n class Meta:\n ordering = ('weight',)\n\n def __str__(self):\n return \"{}_paragraph_{}\".format(str(self.document), self.weight)\n\n def save(self, *args, **kwargs):\n self.text = html_transforms.clean_html_field(\n self.text)\n super().save(*args, **kwargs)\n\n @cached_property\n def comments(self):\n contenttype = ContentType.objects.get_for_model(self)\n pk = self.id\n comments = comment_models.Comment.objects.all().filter(\n content_type=contenttype, object_pk=pk)\n return comments\n\n @cached_property\n def project(self):\n return self.document.project\n", "path": "euth/documents/models.py"}, {"content": "from django.utils.translation import ugettext_lazy as _\n\nfrom euth import phases\n\nfrom . import apps, models, views\n\n\nclass CreateDocumentPhase(phases.PhaseContent):\n \"\"\"\n Allows no interaction for participants, only\n creation for moderators.\n \"\"\"\n app = apps.DocumentConfig.label\n phase = 'create_document'\n view = views.DocumentCreateView\n weight = 30\n\n name = _('Create document phase')\n module_name = _('commenting text')\n description = _('Create text for the project.')\n\n features = {}\n\n\nphases.content.register(CreateDocumentPhase())\n\n\nclass CommentPhase(phases.PhaseContent):\n \"\"\"\n Allows only commenting of paragraphs.\n \"\"\"\n app = apps.DocumentConfig.label\n phase = 'comment'\n view = views.DocumentDetailView\n weight = 40\n\n name = _('Comment phase')\n module_name = _('commenting text')\n description = _('Collect comments for the text.')\n\n features = {\n 'comment': (models.Paragraph,),\n }\n\n\nphases.content.register(CommentPhase())\n", "path": "euth/documents/phases.py"}]}
1,900
604
gh_patches_debug_22070
rasdani/github-patches
git_diff
akvo__akvo-rsr-2290
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Invite existing users gives an IntegrityError See http://sentry.support.akvo-ops.org/rsr/live/group/797/. </issue> <code> [start of akvo/rest/views/employment.py] 1 # -*- coding: utf-8 -*- 2 """Akvo RSR is covered by the GNU Affero General Public License. 3 4 See more details in the license.txt file located at the root folder of the Akvo RSR module. 5 For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >. 6 """ 7 8 from django.contrib.auth.models import Group 9 from rest_framework.decorators import api_view, permission_classes 10 from rest_framework.exceptions import PermissionDenied 11 from rest_framework.permissions import IsAuthenticated 12 from rest_framework.response import Response 13 from akvo.rsr.models import Employment 14 from ..serializers import EmploymentSerializer 15 from ..viewsets import BaseRSRViewSet 16 17 18 class EmploymentViewSet(BaseRSRViewSet): 19 20 """Employment resource.""" 21 22 queryset = Employment.objects.select_related('organisation') 23 serializer_class = EmploymentSerializer 24 25 26 @api_view(['POST']) 27 @permission_classes((IsAuthenticated, )) 28 def approve_employment(request, pk=None): 29 employment = Employment.objects.get(pk=pk) 30 user = request.user 31 32 if not user.has_perm('rsr.change_employment', employment): 33 raise PermissionDenied 34 35 employment.approve(user) 36 37 return Response({'status': 'employment approved'}) 38 39 40 @api_view(['POST']) 41 @permission_classes((IsAuthenticated, )) 42 def set_group(request, pk=None, group_id=None): 43 employment = Employment.objects.get(pk=pk) 44 group = Group.objects.get(pk=group_id) 45 user = request.user 46 47 if not user.has_perm('rsr.change_employment', employment): 48 raise PermissionDenied 49 50 employment.group = group 51 employment.save() 52 53 return Response({'status': 'group set'}) 54 [end of akvo/rest/views/employment.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/akvo/rest/views/employment.py b/akvo/rest/views/employment.py --- a/akvo/rest/views/employment.py +++ b/akvo/rest/views/employment.py @@ -6,10 +6,12 @@ """ from django.contrib.auth.models import Group +from django.db import IntegrityError from rest_framework.decorators import api_view, permission_classes from rest_framework.exceptions import PermissionDenied from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response +from rest_framework import status from akvo.rsr.models import Employment from ..serializers import EmploymentSerializer from ..viewsets import BaseRSRViewSet @@ -48,6 +50,10 @@ raise PermissionDenied employment.group = group - employment.save() + try: + employment.save() + except IntegrityError: + return Response({'status': 'group not set', 'error': 'Employment already exists.'}, + status=status.HTTP_400_BAD_REQUEST) return Response({'status': 'group set'})
{"golden_diff": "diff --git a/akvo/rest/views/employment.py b/akvo/rest/views/employment.py\n--- a/akvo/rest/views/employment.py\n+++ b/akvo/rest/views/employment.py\n@@ -6,10 +6,12 @@\n \"\"\"\n \n from django.contrib.auth.models import Group\n+from django.db import IntegrityError\n from rest_framework.decorators import api_view, permission_classes\n from rest_framework.exceptions import PermissionDenied\n from rest_framework.permissions import IsAuthenticated\n from rest_framework.response import Response\n+from rest_framework import status\n from akvo.rsr.models import Employment\n from ..serializers import EmploymentSerializer\n from ..viewsets import BaseRSRViewSet\n@@ -48,6 +50,10 @@\n raise PermissionDenied\n \n employment.group = group\n- employment.save()\n+ try:\n+ employment.save()\n+ except IntegrityError:\n+ return Response({'status': 'group not set', 'error': 'Employment already exists.'},\n+ status=status.HTTP_400_BAD_REQUEST)\n \n return Response({'status': 'group set'})\n", "issue": "Invite existing users gives an IntegrityError\nSee http://sentry.support.akvo-ops.org/rsr/live/group/797/.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"Akvo RSR is covered by the GNU Affero General Public License.\n\nSee more details in the license.txt file located at the root folder of the Akvo RSR module.\nFor additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\"\"\"\n\nfrom django.contrib.auth.models import Group\nfrom rest_framework.decorators import api_view, permission_classes\nfrom rest_framework.exceptions import PermissionDenied\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework.response import Response\nfrom akvo.rsr.models import Employment\nfrom ..serializers import EmploymentSerializer\nfrom ..viewsets import BaseRSRViewSet\n\n\nclass EmploymentViewSet(BaseRSRViewSet):\n\n \"\"\"Employment resource.\"\"\"\n\n queryset = Employment.objects.select_related('organisation')\n serializer_class = EmploymentSerializer\n\n\n@api_view(['POST'])\n@permission_classes((IsAuthenticated, ))\ndef approve_employment(request, pk=None):\n employment = Employment.objects.get(pk=pk)\n user = request.user\n\n if not user.has_perm('rsr.change_employment', employment):\n raise PermissionDenied\n\n employment.approve(user)\n\n return Response({'status': 'employment approved'})\n\n\n@api_view(['POST'])\n@permission_classes((IsAuthenticated, ))\ndef set_group(request, pk=None, group_id=None):\n employment = Employment.objects.get(pk=pk)\n group = Group.objects.get(pk=group_id)\n user = request.user\n\n if not user.has_perm('rsr.change_employment', employment):\n raise PermissionDenied\n\n employment.group = group\n employment.save()\n\n return Response({'status': 'group set'})\n", "path": "akvo/rest/views/employment.py"}]}
1,016
232
gh_patches_debug_3886
rasdani/github-patches
git_diff
nilearn__nilearn-1169
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Dropping scikit-learn dependency < 0.14.1 It makes easier to set base for decoder object in nilearn which actually requires lot of backports for versions. #1148 I don't have great justifications though. Let me know if I miss something important in nilearn which we need to take into account dropping 0.13. FYI: https://packages.debian.org/jessie/python-sklearn Discussions are welcome. </issue> <code> [start of nilearn/version.py] 1 # *- encoding: utf-8 -*- 2 """ 3 nilearn version, required package versions, and utilities for checking 4 """ 5 # Author: Loïc Estève, Ben Cipollini 6 # License: simplified BSD 7 8 # PEP0440 compatible formatted version, see: 9 # https://www.python.org/dev/peps/pep-0440/ 10 # 11 # Generic release markers: 12 # X.Y 13 # X.Y.Z # For bugfix releases 14 # 15 # Admissible pre-release markers: 16 # X.YaN # Alpha release 17 # X.YbN # Beta release 18 # X.YrcN # Release Candidate 19 # X.Y # Final release 20 # 21 # Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer. 22 # 'X.Y.dev0' is the canonical version of 'X.Y.dev' 23 # 24 __version__ = '0.2.5' 25 26 _NILEARN_INSTALL_MSG = 'See %s for installation information.' % ( 27 'http://nilearn.github.io/introduction.html#installation') 28 29 # This is a tuple to preserve order, so that dependencies are checked 30 # in some meaningful order (more => less 'core'). We avoid using 31 # collections.OrderedDict to preserve Python 2.6 compatibility. 32 REQUIRED_MODULE_METADATA = ( 33 ('numpy', { 34 'min_version': '1.6.1', 35 'required_at_installation': True, 36 'install_info': _NILEARN_INSTALL_MSG}), 37 ('scipy', { 38 'min_version': '0.9.0', 39 'required_at_installation': True, 40 'install_info': _NILEARN_INSTALL_MSG}), 41 ('sklearn', { 42 'min_version': '0.13', 43 'required_at_installation': True, 44 'install_info': _NILEARN_INSTALL_MSG}), 45 ('nibabel', { 46 'min_version': '1.1.0', 47 'required_at_installation': False})) 48 49 OPTIONAL_MATPLOTLIB_MIN_VERSION = '1.1.1' 50 51 52 def _import_module_with_version_check( 53 module_name, 54 minimum_version, 55 install_info=None): 56 """Check that module is installed with a recent enough version 57 """ 58 from distutils.version import LooseVersion 59 60 try: 61 module = __import__(module_name) 62 except ImportError as exc: 63 user_friendly_info = ('Module "{0}" could not be found. {1}').format( 64 module_name, 65 install_info or 'Please install it properly to use nilearn.') 66 exc.args += (user_friendly_info,) 67 raise 68 69 # Avoid choking on modules with no __version__ attribute 70 module_version = getattr(module, '__version__', '0.0.0') 71 72 version_too_old = (not LooseVersion(module_version) >= 73 LooseVersion(minimum_version)) 74 75 if version_too_old: 76 message = ( 77 'A {module_name} version of at least {minimum_version} ' 78 'is required to use nilearn. {module_version} was found. ' 79 'Please upgrade {module_name}').format( 80 module_name=module_name, 81 minimum_version=minimum_version, 82 module_version=module_version) 83 84 raise ImportError(message) 85 86 return module 87 88 89 def _check_module_dependencies(is_nilearn_installing=False): 90 """Throw an exception if nilearn dependencies are not installed. 91 92 Parameters 93 ---------- 94 is_nilearn_installing: boolean 95 if True, only error on missing packages that cannot be auto-installed. 96 if False, error on any missing package. 97 98 Throws 99 ------- 100 ImportError 101 """ 102 103 for (module_name, module_metadata) in REQUIRED_MODULE_METADATA: 104 if not (is_nilearn_installing and 105 not module_metadata['required_at_installation']): 106 # Skip check only when installing and it's a module that 107 # will be auto-installed. 108 _import_module_with_version_check( 109 module_name=module_name, 110 minimum_version=module_metadata['min_version'], 111 install_info=module_metadata.get('install_info')) 112 [end of nilearn/version.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/nilearn/version.py b/nilearn/version.py --- a/nilearn/version.py +++ b/nilearn/version.py @@ -39,7 +39,7 @@ 'required_at_installation': True, 'install_info': _NILEARN_INSTALL_MSG}), ('sklearn', { - 'min_version': '0.13', + 'min_version': '0.14.1', 'required_at_installation': True, 'install_info': _NILEARN_INSTALL_MSG}), ('nibabel', {
{"golden_diff": "diff --git a/nilearn/version.py b/nilearn/version.py\n--- a/nilearn/version.py\n+++ b/nilearn/version.py\n@@ -39,7 +39,7 @@\n 'required_at_installation': True,\n 'install_info': _NILEARN_INSTALL_MSG}),\n ('sklearn', {\n- 'min_version': '0.13',\n+ 'min_version': '0.14.1',\n 'required_at_installation': True,\n 'install_info': _NILEARN_INSTALL_MSG}),\n ('nibabel', {\n", "issue": "Dropping scikit-learn dependency < 0.14.1\nIt makes easier to set base for decoder object in nilearn which actually requires lot of backports for versions. #1148 \n\nI don't have great justifications though. Let me know if I miss something important in nilearn which we need to take into account dropping 0.13.\n\nFYI: https://packages.debian.org/jessie/python-sklearn\n\nDiscussions are welcome.\n\n", "before_files": [{"content": "# *- encoding: utf-8 -*-\n\"\"\"\nnilearn version, required package versions, and utilities for checking\n\"\"\"\n# Author: Lo\u00efc Est\u00e8ve, Ben Cipollini\n# License: simplified BSD\n\n# PEP0440 compatible formatted version, see:\n# https://www.python.org/dev/peps/pep-0440/\n#\n# Generic release markers:\n# X.Y\n# X.Y.Z # For bugfix releases\n#\n# Admissible pre-release markers:\n# X.YaN # Alpha release\n# X.YbN # Beta release\n# X.YrcN # Release Candidate\n# X.Y # Final release\n#\n# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.\n# 'X.Y.dev0' is the canonical version of 'X.Y.dev'\n#\n__version__ = '0.2.5'\n\n_NILEARN_INSTALL_MSG = 'See %s for installation information.' % (\n 'http://nilearn.github.io/introduction.html#installation')\n\n# This is a tuple to preserve order, so that dependencies are checked\n# in some meaningful order (more => less 'core'). We avoid using\n# collections.OrderedDict to preserve Python 2.6 compatibility.\nREQUIRED_MODULE_METADATA = (\n ('numpy', {\n 'min_version': '1.6.1',\n 'required_at_installation': True,\n 'install_info': _NILEARN_INSTALL_MSG}),\n ('scipy', {\n 'min_version': '0.9.0',\n 'required_at_installation': True,\n 'install_info': _NILEARN_INSTALL_MSG}),\n ('sklearn', {\n 'min_version': '0.13',\n 'required_at_installation': True,\n 'install_info': _NILEARN_INSTALL_MSG}),\n ('nibabel', {\n 'min_version': '1.1.0',\n 'required_at_installation': False}))\n\nOPTIONAL_MATPLOTLIB_MIN_VERSION = '1.1.1'\n\n\ndef _import_module_with_version_check(\n module_name,\n minimum_version,\n install_info=None):\n \"\"\"Check that module is installed with a recent enough version\n \"\"\"\n from distutils.version import LooseVersion\n\n try:\n module = __import__(module_name)\n except ImportError as exc:\n user_friendly_info = ('Module \"{0}\" could not be found. {1}').format(\n module_name,\n install_info or 'Please install it properly to use nilearn.')\n exc.args += (user_friendly_info,)\n raise\n\n # Avoid choking on modules with no __version__ attribute\n module_version = getattr(module, '__version__', '0.0.0')\n\n version_too_old = (not LooseVersion(module_version) >=\n LooseVersion(minimum_version))\n\n if version_too_old:\n message = (\n 'A {module_name} version of at least {minimum_version} '\n 'is required to use nilearn. {module_version} was found. '\n 'Please upgrade {module_name}').format(\n module_name=module_name,\n minimum_version=minimum_version,\n module_version=module_version)\n\n raise ImportError(message)\n\n return module\n\n\ndef _check_module_dependencies(is_nilearn_installing=False):\n \"\"\"Throw an exception if nilearn dependencies are not installed.\n\n Parameters\n ----------\n is_nilearn_installing: boolean\n if True, only error on missing packages that cannot be auto-installed.\n if False, error on any missing package.\n\n Throws\n -------\n ImportError\n \"\"\"\n\n for (module_name, module_metadata) in REQUIRED_MODULE_METADATA:\n if not (is_nilearn_installing and\n not module_metadata['required_at_installation']):\n # Skip check only when installing and it's a module that\n # will be auto-installed.\n _import_module_with_version_check(\n module_name=module_name,\n minimum_version=module_metadata['min_version'],\n install_info=module_metadata.get('install_info'))\n", "path": "nilearn/version.py"}]}
1,745
126
gh_patches_debug_19787
rasdani/github-patches
git_diff
digitalfabrik__integreat-cms-538
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Pages endpoint: skip pages if parent is not visible If a parent page in the page tree is in any state that makes it invisible in the API, we do not want the children to appear in the pages endpoint. This should allow for easy deactivating of "full chapters". Pages endpoint: skip pages if parent is not visible If a parent page in the page tree is in any state that makes it invisible in the API, we do not want the children to appear in the pages endpoint. This should allow for easy deactivating of "full chapters". </issue> <code> [start of src/api/v3/pages.py] 1 from django.http import JsonResponse 2 3 from cms.models import Region 4 5 6 def transform_page(page_translation): 7 if page_translation.page.parent: 8 parent = { 9 "id": page_translation.page.parent.id, 10 "url": page_translation.page.parent.get_translation( 11 page_translation.language.code 12 ).permalink, 13 "path": page_translation.page.parent.get_translation( 14 page_translation.language.code 15 ).slug, 16 } 17 else: 18 parent = None 19 return { 20 "id": page_translation.id, 21 "url": page_translation.permalink, 22 "path": page_translation.slug, 23 "title": page_translation.title, 24 "modified_gmt": page_translation.last_updated, 25 "excerpt": page_translation.text, 26 "content": page_translation.combined_text, 27 "parent": parent, 28 "order": page_translation.page.lft, # use left edge indicator of mptt model for order 29 "available_languages": page_translation.available_languages, 30 "thumbnail": None, 31 "hash": None, 32 } 33 34 35 # pylint: disable=unused-argument 36 def pages(request, region_slug, language_code): 37 region = Region.get_current_region(request) 38 result = [] 39 for page in region.pages.all(): 40 page_translation = page.get_public_translation(language_code) 41 if page_translation: 42 result.append(transform_page(page_translation)) 43 return JsonResponse( 44 result, safe=False 45 ) # Turn off Safe-Mode to allow serializing arrays 46 [end of src/api/v3/pages.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/api/v3/pages.py b/src/api/v3/pages.py --- a/src/api/v3/pages.py +++ b/src/api/v3/pages.py @@ -36,10 +36,20 @@ def pages(request, region_slug, language_code): region = Region.get_current_region(request) result = [] - for page in region.pages.all(): + for page in region.pages.filter(archived=False, parent=None): # get main level page_translation = page.get_public_translation(language_code) if page_translation: result.append(transform_page(page_translation)) + result = get_children(page, language_code, result) return JsonResponse( result, safe=False ) # Turn off Safe-Mode to allow serializing arrays + + +def get_children(parent, language_code, result): + for page in parent.children.filter(archived=False): + page_translation = page.get_public_translation(language_code) + if page_translation: + result.append(transform_page(page_translation)) + result = get_children(page, language_code, result) + return result
{"golden_diff": "diff --git a/src/api/v3/pages.py b/src/api/v3/pages.py\n--- a/src/api/v3/pages.py\n+++ b/src/api/v3/pages.py\n@@ -36,10 +36,20 @@\n def pages(request, region_slug, language_code):\n region = Region.get_current_region(request)\n result = []\n- for page in region.pages.all():\n+ for page in region.pages.filter(archived=False, parent=None): # get main level\n page_translation = page.get_public_translation(language_code)\n if page_translation:\n result.append(transform_page(page_translation))\n+ result = get_children(page, language_code, result)\n return JsonResponse(\n result, safe=False\n ) # Turn off Safe-Mode to allow serializing arrays\n+\n+\n+def get_children(parent, language_code, result):\n+ for page in parent.children.filter(archived=False):\n+ page_translation = page.get_public_translation(language_code)\n+ if page_translation:\n+ result.append(transform_page(page_translation))\n+ result = get_children(page, language_code, result)\n+ return result\n", "issue": "Pages endpoint: skip pages if parent is not visible\nIf a parent page in the page tree is in any state that makes it invisible in the API, we do not want the children to appear in the pages endpoint. This should allow for easy deactivating of \"full chapters\". \nPages endpoint: skip pages if parent is not visible\nIf a parent page in the page tree is in any state that makes it invisible in the API, we do not want the children to appear in the pages endpoint. This should allow for easy deactivating of \"full chapters\". \n", "before_files": [{"content": "from django.http import JsonResponse\n\nfrom cms.models import Region\n\n\ndef transform_page(page_translation):\n if page_translation.page.parent:\n parent = {\n \"id\": page_translation.page.parent.id,\n \"url\": page_translation.page.parent.get_translation(\n page_translation.language.code\n ).permalink,\n \"path\": page_translation.page.parent.get_translation(\n page_translation.language.code\n ).slug,\n }\n else:\n parent = None\n return {\n \"id\": page_translation.id,\n \"url\": page_translation.permalink,\n \"path\": page_translation.slug,\n \"title\": page_translation.title,\n \"modified_gmt\": page_translation.last_updated,\n \"excerpt\": page_translation.text,\n \"content\": page_translation.combined_text,\n \"parent\": parent,\n \"order\": page_translation.page.lft, # use left edge indicator of mptt model for order\n \"available_languages\": page_translation.available_languages,\n \"thumbnail\": None,\n \"hash\": None,\n }\n\n\n# pylint: disable=unused-argument\ndef pages(request, region_slug, language_code):\n region = Region.get_current_region(request)\n result = []\n for page in region.pages.all():\n page_translation = page.get_public_translation(language_code)\n if page_translation:\n result.append(transform_page(page_translation))\n return JsonResponse(\n result, safe=False\n ) # Turn off Safe-Mode to allow serializing arrays\n", "path": "src/api/v3/pages.py"}]}
1,042
238
gh_patches_debug_15390
rasdani/github-patches
git_diff
pyodide__pyodide-1138
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Nested attribute access in JS->Python type conversion Currently the following code fails, ```js >>> from js import window >>> window.URL.createObjectURL Error: Traceback (most recent call last): File "/lib/python3.7/site-packages/pyodide.py", line 45, in eval_code return eval(compile(expr, '<eval>', mode='eval'), ns, ns) File "<eval>", line 1, in <module> AttributeError: 'JsBoundMethod' object has no attribute 'createObjectURL' ``` (while `window.URL.createObjectURL` is a valid JS object) because nested attributes (i.e. attribute of an attribute) don't seem to be supported. It would have been nice to make it work, though I have not looked at how difficult that would be. from js import fetch treats fetch as a free function `fetch` is a member function of `window`. However, using `from js import fetch` doesn't realize that and leads to the error: `TypeError: 'fetch' called on an object that does not implement interface Window.` For Reproducing the Error: ``` %%py from js import document, Request, fetch, URL img_tag = document.createElement('img') req = Request.new('https://i.ibb.co/3f4yJQS/face4.jpg') def func(response): return response.blob() def func2(blob): objURL = URL.createObjectURL(blob) img_tag.src = objURL fetch(req).then(func).then(func2) document.body.appendChild(img_tag) ``` </issue> <code> [start of src/pyodide-py/pyodide/_core.py] 1 # type: ignore 2 import platform 3 4 if platform.system() == "Emscripten": 5 from _pyodide_core import JsProxy, JsBoundMethod, JsException 6 else: 7 # Can add shims here if we are so inclined. 8 class JsException(Exception): 9 """ 10 A wrapper around a Javascript Error to allow the Error to be thrown in Python. 11 """ 12 13 # Defined in jsproxy.c 14 15 class JsProxy: 16 """A proxy to make a Javascript object behave like a Python object""" 17 18 # Defined in jsproxy.c 19 20 class JsBoundMethod: 21 """A proxy to make it possible to call Javascript bound methods from Python.""" 22 23 # Defined in jsproxy.c 24 25 26 __all__ = [JsProxy, JsBoundMethod, JsException] 27 [end of src/pyodide-py/pyodide/_core.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/pyodide-py/pyodide/_core.py b/src/pyodide-py/pyodide/_core.py --- a/src/pyodide-py/pyodide/_core.py +++ b/src/pyodide-py/pyodide/_core.py @@ -2,7 +2,7 @@ import platform if platform.system() == "Emscripten": - from _pyodide_core import JsProxy, JsBoundMethod, JsException + from _pyodide_core import JsProxy, JsMethod, JsException else: # Can add shims here if we are so inclined. class JsException(Exception): @@ -17,10 +17,10 @@ # Defined in jsproxy.c - class JsBoundMethod: + class JsMethod: """A proxy to make it possible to call Javascript bound methods from Python.""" # Defined in jsproxy.c -__all__ = [JsProxy, JsBoundMethod, JsException] +__all__ = [JsProxy, JsMethod, JsException]
{"golden_diff": "diff --git a/src/pyodide-py/pyodide/_core.py b/src/pyodide-py/pyodide/_core.py\n--- a/src/pyodide-py/pyodide/_core.py\n+++ b/src/pyodide-py/pyodide/_core.py\n@@ -2,7 +2,7 @@\n import platform\n \n if platform.system() == \"Emscripten\":\n- from _pyodide_core import JsProxy, JsBoundMethod, JsException\n+ from _pyodide_core import JsProxy, JsMethod, JsException\n else:\n # Can add shims here if we are so inclined.\n class JsException(Exception):\n@@ -17,10 +17,10 @@\n \n # Defined in jsproxy.c\n \n- class JsBoundMethod:\n+ class JsMethod:\n \"\"\"A proxy to make it possible to call Javascript bound methods from Python.\"\"\"\n \n # Defined in jsproxy.c\n \n \n-__all__ = [JsProxy, JsBoundMethod, JsException]\n+__all__ = [JsProxy, JsMethod, JsException]\n", "issue": "Nested attribute access in JS->Python type conversion\nCurrently the following code fails,\r\n```js\r\n>>> from js import window\r\n>>> window.URL.createObjectURL\r\nError: Traceback (most recent call last):\r\n File \"/lib/python3.7/site-packages/pyodide.py\", line 45, in eval_code\r\n return eval(compile(expr, '<eval>', mode='eval'), ns, ns)\r\n File \"<eval>\", line 1, in <module>\r\nAttributeError: 'JsBoundMethod' object has no attribute 'createObjectURL'\r\n```\r\n(while `window.URL.createObjectURL` is a valid JS object) because nested attributes (i.e. attribute of an attribute) don't seem to be supported. It would have been nice to make it work, though I have not looked at how difficult that would be.\nfrom js import fetch treats fetch as a free function\n`fetch` is a member function of `window`.\r\nHowever, using `from js import fetch` doesn't realize that and leads to the error:\r\n\r\n`TypeError: 'fetch' called on an object that does not implement interface Window.`\r\n\r\nFor Reproducing the Error:\r\n```\r\n%%py\r\n\r\nfrom js import document, Request, fetch, URL\r\nimg_tag = document.createElement('img')\r\nreq = Request.new('https://i.ibb.co/3f4yJQS/face4.jpg')\r\n\r\ndef func(response):\r\n return response.blob()\r\n\r\ndef func2(blob):\r\n objURL = URL.createObjectURL(blob)\r\n img_tag.src = objURL\r\n\r\nfetch(req).then(func).then(func2)\r\n\r\ndocument.body.appendChild(img_tag)\r\n```\n", "before_files": [{"content": "# type: ignore\nimport platform\n\nif platform.system() == \"Emscripten\":\n from _pyodide_core import JsProxy, JsBoundMethod, JsException\nelse:\n # Can add shims here if we are so inclined.\n class JsException(Exception):\n \"\"\"\n A wrapper around a Javascript Error to allow the Error to be thrown in Python.\n \"\"\"\n\n # Defined in jsproxy.c\n\n class JsProxy:\n \"\"\"A proxy to make a Javascript object behave like a Python object\"\"\"\n\n # Defined in jsproxy.c\n\n class JsBoundMethod:\n \"\"\"A proxy to make it possible to call Javascript bound methods from Python.\"\"\"\n\n # Defined in jsproxy.c\n\n\n__all__ = [JsProxy, JsBoundMethod, JsException]\n", "path": "src/pyodide-py/pyodide/_core.py"}]}
1,095
236
gh_patches_debug_5221
rasdani/github-patches
git_diff
saulpw__visidata-1011
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> vds: cannot export sheets containing a date column **Small description** It's not possible to save a sheet to a .vds file if said sheet contains a date column. This results in the error below. **Expected result** It should just work(tm). **Actual result with screenshot** ```stacktrace Traceback (most recent call last): File "/nix/store/srkr2wnwq95ylmgiadh28p3jiaadl5yw-visidata-2.4/lib/python3.8/site-packages/visidata/threads.py", line 215, in _toplevelTryFunc t.status = func(*args, **kwargs) File "/nix/store/srkr2wnwq95ylmgiadh28p3jiaadl5yw-visidata-2.4/lib/python3.8/site-packages/visidata/loaders/vds.py", line 32, in save_vds fp.write(json.dumps(d)+NL) File "/nix/store/4s0h5aawbap3xhldxhcijvl26751qrjr-python3-3.8.9/lib/python3.8/json/__init__.py", line 231, in dumps return _default_encoder.encode(obj) File "/nix/store/4s0h5aawbap3xhldxhcijvl26751qrjr-python3-3.8.9/lib/python3.8/json/encoder.py", line 199, in encode chunks = self.iterencode(o, _one_shot=True) File "/nix/store/4s0h5aawbap3xhldxhcijvl26751qrjr-python3-3.8.9/lib/python3.8/json/encoder.py", line 257, in iterencode return _iterencode(o, 0) File "/nix/store/4s0h5aawbap3xhldxhcijvl26751qrjr-python3-3.8.9/lib/python3.8/json/encoder.py", line 179, in default raise TypeError(f'Object of type {o.__class__.__name__} ' TypeError: Object of type date is not JSON serializable ``` **Steps to reproduce with sample data and a .vd** ❯ cat testsheet.vd ```log sheet col row longname input keystrokes comment visidata_menu sheets-stack S open Sheets Stack: join or jump between the active sheets on the current stack sheets キsheets add-row a append a blank row sheets name キ edit-cell testsheet e edit contents of current cell sheets キtestsheet open-row ^J open sheet referenced in current row testsheet 0 rename-col testcol ^ edit name of current column testsheet testcol type-date @ set type of current column to date testsheet add-row a append a blank row testsheet testcol 0 edit-cell 2021-06-14 e edit contents of current cell testsheet save-all test.vds g^S save all sheets to given file or directory) ``` **Additional context** Problem is present on v2.4 and on the develop branch (commit 3350d9fd8c9e64ebf409deae4b31085d12efeb7f) </issue> <code> [start of visidata/loaders/vds.py] 1 'Custom VisiData save format' 2 3 import json 4 from visidata import * 5 6 NL='\n' 7 8 @VisiData.api 9 def open_vds(vd, p): 10 return VdsIndexSheet(p.name, source=p) 11 12 13 @VisiData.api 14 def save_vds(vd, p, *sheets): 15 'Save in custom VisiData format, preserving columns and their attributes.' 16 17 with p.open_text(mode='w') as fp: 18 for vs in sheets: 19 # class and attrs for vs 20 d = { 'name': vs.name, } 21 fp.write('#'+json.dumps(d)+NL) 22 23 # class and attrs for each column in vs 24 for col in vs.visibleCols: 25 d = col.__getstate__() 26 d['col'] = type(col).__name__ 27 fp.write('#'+json.dumps(d)+NL) 28 29 with Progress(gerund='saving'): 30 for row in vs.iterdispvals(*vs.visibleCols, format=False): 31 d = {col.name:val for col, val in row.items()} 32 fp.write(json.dumps(d)+NL) 33 34 35 class VdsIndexSheet(IndexSheet): 36 def iterload(self): 37 vs = None 38 with self.source.open_text() as fp: 39 line = fp.readline() 40 while line: 41 if line.startswith('#{'): 42 d = json.loads(line[1:]) 43 if 'col' not in d: 44 vs = VdsSheet(d.pop('name'), columns=[], source=self.source, source_fpos=fp.tell()) 45 yield vs 46 line = fp.readline() 47 48 49 class VdsSheet(Sheet): 50 def newRow(self): 51 return {} # rowdef: dict 52 53 def iterload(self): 54 self.colnames = {} 55 self.columns = [] 56 57 with self.source.open_text() as fp: 58 fp.seek(self.source_fpos) 59 60 # consume all metadata, create columns 61 line = fp.readline() 62 while line and line.startswith('#{'): 63 d = json.loads(line[1:]) 64 if 'col' not in d: 65 raise Exception(d) 66 classname = d.pop('col') 67 if classname == 'Column': 68 classname = 'ItemColumn' 69 d['expr'] = d['name'] 70 71 c = globals()[classname](d.pop('name')) 72 self.colnames[c.name] = c 73 self.addColumn(c) 74 for k, v in d.items(): 75 setattr(c, k, v) 76 77 line = fp.readline() 78 79 while line and not line.startswith('#{'): 80 d = json.loads(line) 81 yield d 82 line = fp.readline() 83 [end of visidata/loaders/vds.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/visidata/loaders/vds.py b/visidata/loaders/vds.py --- a/visidata/loaders/vds.py +++ b/visidata/loaders/vds.py @@ -29,7 +29,7 @@ with Progress(gerund='saving'): for row in vs.iterdispvals(*vs.visibleCols, format=False): d = {col.name:val for col, val in row.items()} - fp.write(json.dumps(d)+NL) + fp.write(json.dumps(d, default=str)+NL) class VdsIndexSheet(IndexSheet):
{"golden_diff": "diff --git a/visidata/loaders/vds.py b/visidata/loaders/vds.py\n--- a/visidata/loaders/vds.py\n+++ b/visidata/loaders/vds.py\n@@ -29,7 +29,7 @@\n with Progress(gerund='saving'):\n for row in vs.iterdispvals(*vs.visibleCols, format=False):\n d = {col.name:val for col, val in row.items()}\n- fp.write(json.dumps(d)+NL)\n+ fp.write(json.dumps(d, default=str)+NL)\n \n \n class VdsIndexSheet(IndexSheet):\n", "issue": "vds: cannot export sheets containing a date column\n**Small description**\r\n\r\nIt's not possible to save a sheet to a .vds file if said sheet contains a date column.\r\nThis results in the error below.\r\n\r\n**Expected result**\r\n\r\nIt should just work(tm).\r\n\r\n**Actual result with screenshot**\r\n\r\n```stacktrace\r\nTraceback (most recent call last):\r\n File \"/nix/store/srkr2wnwq95ylmgiadh28p3jiaadl5yw-visidata-2.4/lib/python3.8/site-packages/visidata/threads.py\", line 215, in _toplevelTryFunc\r\n t.status = func(*args, **kwargs)\r\n File \"/nix/store/srkr2wnwq95ylmgiadh28p3jiaadl5yw-visidata-2.4/lib/python3.8/site-packages/visidata/loaders/vds.py\", line 32, in save_vds\r\n fp.write(json.dumps(d)+NL)\r\n File \"/nix/store/4s0h5aawbap3xhldxhcijvl26751qrjr-python3-3.8.9/lib/python3.8/json/__init__.py\", line 231, in dumps\r\n return _default_encoder.encode(obj)\r\n File \"/nix/store/4s0h5aawbap3xhldxhcijvl26751qrjr-python3-3.8.9/lib/python3.8/json/encoder.py\", line 199, in encode\r\n chunks = self.iterencode(o, _one_shot=True)\r\n File \"/nix/store/4s0h5aawbap3xhldxhcijvl26751qrjr-python3-3.8.9/lib/python3.8/json/encoder.py\", line 257, in iterencode\r\n return _iterencode(o, 0)\r\n File \"/nix/store/4s0h5aawbap3xhldxhcijvl26751qrjr-python3-3.8.9/lib/python3.8/json/encoder.py\", line 179, in default\r\n raise TypeError(f'Object of type {o.__class__.__name__} '\r\nTypeError: Object of type date is not JSON serializable\r\n```\r\n\r\n**Steps to reproduce with sample data and a .vd**\r\n\r\n\u276f cat testsheet.vd\r\n```log\r\nsheet\tcol\trow\tlongname\tinput\tkeystrokes\tcomment\r\nvisidata_menu\t\t\tsheets-stack\t\tS\topen Sheets Stack: join or jump between the active sheets on the current stack\r\nsheets\t\t\u30adsheets\tadd-row\t\ta\tappend a blank row\r\nsheets\tname\t\u30ad\tedit-cell\ttestsheet\te\tedit contents of current cell\r\nsheets\t\t\u30adtestsheet\topen-row\t\t^J\topen sheet referenced in current row\r\ntestsheet\t0\t\trename-col\ttestcol\t^\tedit name of current column\r\ntestsheet\ttestcol\t\ttype-date\t\t@\tset type of current column to date\r\ntestsheet\t\t\tadd-row\t\ta\tappend a blank row\r\ntestsheet\ttestcol\t0\tedit-cell\t2021-06-14\te\tedit contents of current cell\r\ntestsheet\t\t\tsave-all\ttest.vds\tg^S\tsave all sheets to given file or directory)\r\n```\r\n\r\n\r\n**Additional context**\r\n\r\nProblem is present on v2.4 and on the develop branch (commit 3350d9fd8c9e64ebf409deae4b31085d12efeb7f)\n", "before_files": [{"content": "'Custom VisiData save format'\n\nimport json\nfrom visidata import *\n\nNL='\\n'\n\[email protected]\ndef open_vds(vd, p):\n return VdsIndexSheet(p.name, source=p)\n\n\[email protected]\ndef save_vds(vd, p, *sheets):\n 'Save in custom VisiData format, preserving columns and their attributes.'\n\n with p.open_text(mode='w') as fp:\n for vs in sheets:\n # class and attrs for vs\n d = { 'name': vs.name, }\n fp.write('#'+json.dumps(d)+NL)\n\n # class and attrs for each column in vs\n for col in vs.visibleCols:\n d = col.__getstate__()\n d['col'] = type(col).__name__\n fp.write('#'+json.dumps(d)+NL)\n\n with Progress(gerund='saving'):\n for row in vs.iterdispvals(*vs.visibleCols, format=False):\n d = {col.name:val for col, val in row.items()}\n fp.write(json.dumps(d)+NL)\n\n\nclass VdsIndexSheet(IndexSheet):\n def iterload(self):\n vs = None\n with self.source.open_text() as fp:\n line = fp.readline()\n while line:\n if line.startswith('#{'):\n d = json.loads(line[1:])\n if 'col' not in d:\n vs = VdsSheet(d.pop('name'), columns=[], source=self.source, source_fpos=fp.tell())\n yield vs\n line = fp.readline()\n\n\nclass VdsSheet(Sheet):\n def newRow(self):\n return {} # rowdef: dict\n\n def iterload(self):\n self.colnames = {}\n self.columns = []\n\n with self.source.open_text() as fp:\n fp.seek(self.source_fpos)\n\n # consume all metadata, create columns\n line = fp.readline()\n while line and line.startswith('#{'):\n d = json.loads(line[1:])\n if 'col' not in d:\n raise Exception(d)\n classname = d.pop('col')\n if classname == 'Column':\n classname = 'ItemColumn'\n d['expr'] = d['name']\n\n c = globals()[classname](d.pop('name'))\n self.colnames[c.name] = c\n self.addColumn(c)\n for k, v in d.items():\n setattr(c, k, v)\n\n line = fp.readline()\n\n while line and not line.startswith('#{'):\n d = json.loads(line)\n yield d\n line = fp.readline()\n", "path": "visidata/loaders/vds.py"}]}
2,029
130
gh_patches_debug_33630
rasdani/github-patches
git_diff
wagtail__wagtail-1150
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Possibly incorrect hook names? Going through the docs, I see two rather unusual hook names: `construct_wagtail_edit_bird` and `construct_whitelister_element_rules`. The first seems like a placeholder name that accidentally made it out of the alpha stage. Based on the docs, it seems like it should be called `construct_wagtail_userbar`. The second seems like a straight up typo. I've never heard the word "whitelister" before. I'm pretty sure this hook should be called `construct_whitelisted_element_rules`. Changing the names of hooks is obviously a major undertaking, since some code bases will have already implemented them. But adding the new names and deprecating the old ones for a few releases should be entirely possible. I'd be happy to do this in a pull request, since it's only a dozen or lines of code to change, but I don't really know how wagtail handles deprecating old APIs. Possibly incorrect hook names? Going through the docs, I see two rather unusual hook names: `construct_wagtail_edit_bird` and `construct_whitelister_element_rules`. The first seems like a placeholder name that accidentally made it out of the alpha stage. Based on the docs, it seems like it should be called `construct_wagtail_userbar`. The second seems like a straight up typo. I've never heard the word "whitelister" before. I'm pretty sure this hook should be called `construct_whitelisted_element_rules`. Changing the names of hooks is obviously a major undertaking, since some code bases will have already implemented them. But adding the new names and deprecating the old ones for a few releases should be entirely possible. I'd be happy to do this in a pull request, since it's only a dozen or lines of code to change, but I don't really know how wagtail handles deprecating old APIs. </issue> <code> [start of wagtail/wagtailadmin/views/userbar.py] 1 from django.shortcuts import render 2 from django.contrib.auth.decorators import permission_required 3 4 from wagtail.wagtailadmin.userbar import EditPageItem, AddPageItem, ApproveModerationEditPageItem, RejectModerationEditPageItem 5 from wagtail.wagtailcore import hooks 6 from wagtail.wagtailcore.models import Page, PageRevision 7 8 9 @permission_required('wagtailadmin.access_admin', raise_exception=True) 10 def for_frontend(request, page_id): 11 items = [ 12 EditPageItem(Page.objects.get(id=page_id)), 13 AddPageItem(Page.objects.get(id=page_id)), 14 ] 15 16 for fn in hooks.get_hooks('construct_wagtail_edit_bird'): 17 fn(request, items) 18 19 # Render the items 20 rendered_items = [item.render(request) for item in items] 21 22 # Remove any unrendered items 23 rendered_items = [item for item in rendered_items if item] 24 25 # Render the edit bird 26 return render(request, 'wagtailadmin/userbar/base.html', { 27 'items': rendered_items, 28 }) 29 30 31 @permission_required('wagtailadmin.access_admin', raise_exception=True) 32 def for_moderation(request, revision_id): 33 items = [ 34 EditPageItem(PageRevision.objects.get(id=revision_id).page), 35 AddPageItem(PageRevision.objects.get(id=revision_id).page), 36 ApproveModerationEditPageItem(PageRevision.objects.get(id=revision_id)), 37 RejectModerationEditPageItem(PageRevision.objects.get(id=revision_id)), 38 ] 39 40 for fn in hooks.get_hooks('construct_wagtail_edit_bird'): 41 fn(request, items) 42 43 # Render the items 44 rendered_items = [item.render(request) for item in items] 45 46 # Remove any unrendered items 47 rendered_items = [item for item in rendered_items if item] 48 49 # Render the edit bird 50 return render(request, 'wagtailadmin/userbar/base.html', { 51 'items': rendered_items, 52 }) 53 [end of wagtail/wagtailadmin/views/userbar.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/wagtail/wagtailadmin/views/userbar.py b/wagtail/wagtailadmin/views/userbar.py --- a/wagtail/wagtailadmin/views/userbar.py +++ b/wagtail/wagtailadmin/views/userbar.py @@ -1,3 +1,5 @@ +import warnings + from django.shortcuts import render from django.contrib.auth.decorators import permission_required @@ -5,6 +7,8 @@ from wagtail.wagtailcore import hooks from wagtail.wagtailcore.models import Page, PageRevision +from wagtail.utils.deprecation import RemovedInWagtail11Warning + @permission_required('wagtailadmin.access_admin', raise_exception=True) def for_frontend(request, page_id): @@ -13,7 +17,10 @@ AddPageItem(Page.objects.get(id=page_id)), ] - for fn in hooks.get_hooks('construct_wagtail_edit_bird'): + # TODO: Remove in 1.1 release + run_deprecated_edit_bird_hook(request, items) + + for fn in hooks.get_hooks('construct_wagtail_userbar'): fn(request, items) # Render the items @@ -37,7 +44,10 @@ RejectModerationEditPageItem(PageRevision.objects.get(id=revision_id)), ] - for fn in hooks.get_hooks('construct_wagtail_edit_bird'): + # TODO: Remove in 1.1 release + run_deprecated_edit_bird_hook(request, items) + + for fn in hooks.get_hooks('construct_wagtail_userbar'): fn(request, items) # Render the items @@ -50,3 +60,13 @@ return render(request, 'wagtailadmin/userbar/base.html', { 'items': rendered_items, }) + + +def run_deprecated_edit_bird_hook(request, items): + for fn in hooks.get_hooks('construct_wagtail_edit_bird'): + fn(request, items) + + warnings.warn( + "The 'construct_wagtail_edit_bird' hook has been renamed to 'construct_wagtail_userbar'." + "Please update function '%s' in '%s'." % (fn.__name__, fn.__module__), RemovedInWagtail11Warning + )
{"golden_diff": "diff --git a/wagtail/wagtailadmin/views/userbar.py b/wagtail/wagtailadmin/views/userbar.py\n--- a/wagtail/wagtailadmin/views/userbar.py\n+++ b/wagtail/wagtailadmin/views/userbar.py\n@@ -1,3 +1,5 @@\n+import warnings\n+\n from django.shortcuts import render\n from django.contrib.auth.decorators import permission_required\n \n@@ -5,6 +7,8 @@\n from wagtail.wagtailcore import hooks\n from wagtail.wagtailcore.models import Page, PageRevision\n \n+from wagtail.utils.deprecation import RemovedInWagtail11Warning\n+\n \n @permission_required('wagtailadmin.access_admin', raise_exception=True)\n def for_frontend(request, page_id):\n@@ -13,7 +17,10 @@\n AddPageItem(Page.objects.get(id=page_id)),\n ]\n \n- for fn in hooks.get_hooks('construct_wagtail_edit_bird'):\n+ # TODO: Remove in 1.1 release\n+ run_deprecated_edit_bird_hook(request, items)\n+\n+ for fn in hooks.get_hooks('construct_wagtail_userbar'):\n fn(request, items)\n \n # Render the items\n@@ -37,7 +44,10 @@\n RejectModerationEditPageItem(PageRevision.objects.get(id=revision_id)),\n ]\n \n- for fn in hooks.get_hooks('construct_wagtail_edit_bird'):\n+ # TODO: Remove in 1.1 release\n+ run_deprecated_edit_bird_hook(request, items)\n+\n+ for fn in hooks.get_hooks('construct_wagtail_userbar'):\n fn(request, items)\n \n # Render the items\n@@ -50,3 +60,13 @@\n return render(request, 'wagtailadmin/userbar/base.html', {\n 'items': rendered_items,\n })\n+\n+\n+def run_deprecated_edit_bird_hook(request, items):\n+ for fn in hooks.get_hooks('construct_wagtail_edit_bird'):\n+ fn(request, items)\n+\n+ warnings.warn(\n+ \"The 'construct_wagtail_edit_bird' hook has been renamed to 'construct_wagtail_userbar'.\"\n+ \"Please update function '%s' in '%s'.\" % (fn.__name__, fn.__module__), RemovedInWagtail11Warning\n+ )\n", "issue": "Possibly incorrect hook names?\nGoing through the docs, I see two rather unusual hook names: `construct_wagtail_edit_bird` and `construct_whitelister_element_rules`. \n\nThe first seems like a placeholder name that accidentally made it out of the alpha stage. Based on the docs, it seems like it should be called `construct_wagtail_userbar`.\n\nThe second seems like a straight up typo. I've never heard the word \"whitelister\" before. I'm pretty sure this hook should be called `construct_whitelisted_element_rules`.\n\nChanging the names of hooks is obviously a major undertaking, since some code bases will have already implemented them. But adding the new names and deprecating the old ones for a few releases should be entirely possible. I'd be happy to do this in a pull request, since it's only a dozen or lines of code to change, but I don't really know how wagtail handles deprecating old APIs.\n\nPossibly incorrect hook names?\nGoing through the docs, I see two rather unusual hook names: `construct_wagtail_edit_bird` and `construct_whitelister_element_rules`. \n\nThe first seems like a placeholder name that accidentally made it out of the alpha stage. Based on the docs, it seems like it should be called `construct_wagtail_userbar`.\n\nThe second seems like a straight up typo. I've never heard the word \"whitelister\" before. I'm pretty sure this hook should be called `construct_whitelisted_element_rules`.\n\nChanging the names of hooks is obviously a major undertaking, since some code bases will have already implemented them. But adding the new names and deprecating the old ones for a few releases should be entirely possible. I'd be happy to do this in a pull request, since it's only a dozen or lines of code to change, but I don't really know how wagtail handles deprecating old APIs.\n\n", "before_files": [{"content": "from django.shortcuts import render\nfrom django.contrib.auth.decorators import permission_required\n\nfrom wagtail.wagtailadmin.userbar import EditPageItem, AddPageItem, ApproveModerationEditPageItem, RejectModerationEditPageItem\nfrom wagtail.wagtailcore import hooks\nfrom wagtail.wagtailcore.models import Page, PageRevision\n\n\n@permission_required('wagtailadmin.access_admin', raise_exception=True)\ndef for_frontend(request, page_id):\n items = [\n EditPageItem(Page.objects.get(id=page_id)),\n AddPageItem(Page.objects.get(id=page_id)),\n ]\n\n for fn in hooks.get_hooks('construct_wagtail_edit_bird'):\n fn(request, items)\n\n # Render the items\n rendered_items = [item.render(request) for item in items]\n\n # Remove any unrendered items\n rendered_items = [item for item in rendered_items if item]\n\n # Render the edit bird\n return render(request, 'wagtailadmin/userbar/base.html', {\n 'items': rendered_items,\n })\n\n\n@permission_required('wagtailadmin.access_admin', raise_exception=True)\ndef for_moderation(request, revision_id):\n items = [\n EditPageItem(PageRevision.objects.get(id=revision_id).page),\n AddPageItem(PageRevision.objects.get(id=revision_id).page),\n ApproveModerationEditPageItem(PageRevision.objects.get(id=revision_id)),\n RejectModerationEditPageItem(PageRevision.objects.get(id=revision_id)),\n ]\n\n for fn in hooks.get_hooks('construct_wagtail_edit_bird'):\n fn(request, items)\n\n # Render the items\n rendered_items = [item.render(request) for item in items]\n\n # Remove any unrendered items\n rendered_items = [item for item in rendered_items if item]\n\n # Render the edit bird\n return render(request, 'wagtailadmin/userbar/base.html', {\n 'items': rendered_items,\n })\n", "path": "wagtail/wagtailadmin/views/userbar.py"}]}
1,463
515
gh_patches_debug_39227
rasdani/github-patches
git_diff
scikit-image__scikit-image-3930
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Use astropy instead of pyfits to read FITS images pyfits is not currently installable on 3.5, and it looks like AstroPy is more actively maintained. </issue> <code> [start of skimage/io/_plugins/fits_plugin.py] 1 __all__ = ['imread', 'imread_collection'] 2 3 import skimage.io as io 4 5 try: 6 from astropy.io import fits as pyfits 7 except ImportError: 8 try: 9 import pyfits 10 except ImportError: 11 raise ImportError( 12 "PyFITS could not be found. Please refer to\n" 13 "http://www.stsci.edu/resources/software_hardware/pyfits\n" 14 "for further instructions.") 15 16 17 def imread(fname, dtype=None): 18 """Load an image from a FITS file. 19 20 Parameters 21 ---------- 22 fname : string 23 Image file name, e.g. ``test.fits``. 24 dtype : dtype, optional 25 For FITS, this argument is ignored because Stefan is planning on 26 removing the dtype argument from imread anyway. 27 28 Returns 29 ------- 30 img_array : ndarray 31 Unlike plugins such as PIL, where different color bands/channels are 32 stored in the third dimension, FITS images are greyscale-only and can 33 be N-dimensional, so an array of the native FITS dimensionality is 34 returned, without color channels. 35 36 Currently if no image is found in the file, None will be returned 37 38 Notes 39 ----- 40 41 Currently FITS ``imread()`` always returns the first image extension when 42 given a Multi-Extension FITS file; use ``imread_collection()`` (which does 43 lazy loading) to get all the extensions at once. 44 45 """ 46 47 hdulist = pyfits.open(fname) 48 49 # Iterate over FITS image extensions, ignoring any other extension types 50 # such as binary tables, and get the first image data array: 51 img_array = None 52 for hdu in hdulist: 53 if isinstance(hdu, pyfits.ImageHDU) or \ 54 isinstance(hdu, pyfits.PrimaryHDU): 55 if hdu.data is not None: 56 img_array = hdu.data 57 break 58 hdulist.close() 59 60 return img_array 61 62 63 def imread_collection(load_pattern, conserve_memory=True): 64 """Load a collection of images from one or more FITS files 65 66 Parameters 67 ---------- 68 load_pattern : str or list 69 List of extensions to load. Filename globbing is currently 70 unsupported. 71 converve_memory : bool 72 If True, never keep more than one in memory at a specific 73 time. Otherwise, images will be cached once they are loaded. 74 75 Returns 76 ------- 77 78 ic : ImageCollection 79 Collection of images. 80 81 """ 82 83 intype = type(load_pattern) 84 if intype is not list and intype is not str: 85 raise TypeError("Input must be a filename or list of filenames") 86 87 # Ensure we have a list, otherwise we'll end up iterating over the string: 88 if intype is not list: 89 load_pattern = [load_pattern] 90 91 # Generate a list of filename/extension pairs by opening the list of 92 # files and finding the image extensions in each one: 93 ext_list = [] 94 for filename in load_pattern: 95 hdulist = pyfits.open(filename) 96 for n, hdu in zip(range(len(hdulist)), hdulist): 97 if isinstance(hdu, pyfits.ImageHDU) or \ 98 isinstance(hdu, pyfits.PrimaryHDU): 99 # Ignore (primary) header units with no data (use '.size' 100 # rather than '.data' to avoid actually loading the image): 101 try: 102 data_size = hdu.size() 103 except TypeError: # (size changed to int in PyFITS 3.1) 104 data_size = hdu.size 105 if data_size > 0: 106 ext_list.append((filename, n)) 107 hdulist.close() 108 109 return io.ImageCollection(ext_list, load_func=FITSFactory, 110 conserve_memory=conserve_memory) 111 112 113 def FITSFactory(image_ext): 114 """Load an image extension from a FITS file and return a NumPy array 115 116 Parameters 117 ---------- 118 119 image_ext : tuple 120 FITS extension to load, in the format ``(filename, ext_num)``. 121 The FITS ``(extname, extver)`` format is unsupported, since this 122 function is not called directly by the user and 123 ``imread_collection()`` does the work of figuring out which 124 extensions need loading. 125 126 """ 127 128 # Expect a length-2 tuple with a filename as the first element: 129 if not isinstance(image_ext, tuple): 130 raise TypeError("Expected a tuple") 131 132 if len(image_ext) != 2: 133 raise ValueError("Expected a tuple of length 2") 134 135 filename = image_ext[0] 136 extnum = image_ext[1] 137 138 if type(filename) is not str or type(extnum) is not int: 139 raise ValueError("Expected a (filename, extension) tuple") 140 141 hdulist = pyfits.open(filename) 142 143 data = hdulist[extnum].data 144 145 hdulist.close() 146 147 if data is None: 148 raise RuntimeError( 149 "Extension %d of %s has no data" % (extnum, filename)) 150 151 return data 152 [end of skimage/io/_plugins/fits_plugin.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/skimage/io/_plugins/fits_plugin.py b/skimage/io/_plugins/fits_plugin.py --- a/skimage/io/_plugins/fits_plugin.py +++ b/skimage/io/_plugins/fits_plugin.py @@ -3,15 +3,12 @@ import skimage.io as io try: - from astropy.io import fits as pyfits + from astropy.io import fits except ImportError: - try: - import pyfits - except ImportError: - raise ImportError( - "PyFITS could not be found. Please refer to\n" - "http://www.stsci.edu/resources/software_hardware/pyfits\n" - "for further instructions.") + raise ImportError( + "Astropy could not be found. It is needed to read FITS files.\n" + "Please refer to http://www.astropy.org for installation\n" + "instructions.") def imread(fname, dtype=None): @@ -44,14 +41,14 @@ """ - hdulist = pyfits.open(fname) + hdulist = fits.open(fname) # Iterate over FITS image extensions, ignoring any other extension types # such as binary tables, and get the first image data array: img_array = None for hdu in hdulist: - if isinstance(hdu, pyfits.ImageHDU) or \ - isinstance(hdu, pyfits.PrimaryHDU): + if isinstance(hdu, fits.ImageHDU) or \ + isinstance(hdu, fits.PrimaryHDU): if hdu.data is not None: img_array = hdu.data break @@ -92,16 +89,16 @@ # files and finding the image extensions in each one: ext_list = [] for filename in load_pattern: - hdulist = pyfits.open(filename) + hdulist = fits.open(filename) for n, hdu in zip(range(len(hdulist)), hdulist): - if isinstance(hdu, pyfits.ImageHDU) or \ - isinstance(hdu, pyfits.PrimaryHDU): + if isinstance(hdu, fits.ImageHDU) or \ + isinstance(hdu, fits.PrimaryHDU): # Ignore (primary) header units with no data (use '.size' # rather than '.data' to avoid actually loading the image): try: + data_size = hdu.size # size is int in Astropy 3.1.2 + except TypeError: data_size = hdu.size() - except TypeError: # (size changed to int in PyFITS 3.1) - data_size = hdu.size if data_size > 0: ext_list.append((filename, n)) hdulist.close() @@ -138,7 +135,7 @@ if type(filename) is not str or type(extnum) is not int: raise ValueError("Expected a (filename, extension) tuple") - hdulist = pyfits.open(filename) + hdulist = fits.open(filename) data = hdulist[extnum].data
{"golden_diff": "diff --git a/skimage/io/_plugins/fits_plugin.py b/skimage/io/_plugins/fits_plugin.py\n--- a/skimage/io/_plugins/fits_plugin.py\n+++ b/skimage/io/_plugins/fits_plugin.py\n@@ -3,15 +3,12 @@\n import skimage.io as io\n \n try:\n- from astropy.io import fits as pyfits\n+ from astropy.io import fits\n except ImportError:\n- try:\n- import pyfits\n- except ImportError:\n- raise ImportError(\n- \"PyFITS could not be found. Please refer to\\n\"\n- \"http://www.stsci.edu/resources/software_hardware/pyfits\\n\"\n- \"for further instructions.\")\n+ raise ImportError(\n+ \"Astropy could not be found. It is needed to read FITS files.\\n\"\n+ \"Please refer to http://www.astropy.org for installation\\n\"\n+ \"instructions.\")\n \n \n def imread(fname, dtype=None):\n@@ -44,14 +41,14 @@\n \n \"\"\"\n \n- hdulist = pyfits.open(fname)\n+ hdulist = fits.open(fname)\n \n # Iterate over FITS image extensions, ignoring any other extension types\n # such as binary tables, and get the first image data array:\n img_array = None\n for hdu in hdulist:\n- if isinstance(hdu, pyfits.ImageHDU) or \\\n- isinstance(hdu, pyfits.PrimaryHDU):\n+ if isinstance(hdu, fits.ImageHDU) or \\\n+ isinstance(hdu, fits.PrimaryHDU):\n if hdu.data is not None:\n img_array = hdu.data\n break\n@@ -92,16 +89,16 @@\n # files and finding the image extensions in each one:\n ext_list = []\n for filename in load_pattern:\n- hdulist = pyfits.open(filename)\n+ hdulist = fits.open(filename)\n for n, hdu in zip(range(len(hdulist)), hdulist):\n- if isinstance(hdu, pyfits.ImageHDU) or \\\n- isinstance(hdu, pyfits.PrimaryHDU):\n+ if isinstance(hdu, fits.ImageHDU) or \\\n+ isinstance(hdu, fits.PrimaryHDU):\n # Ignore (primary) header units with no data (use '.size'\n # rather than '.data' to avoid actually loading the image):\n try:\n+ data_size = hdu.size # size is int in Astropy 3.1.2\n+ except TypeError:\n data_size = hdu.size()\n- except TypeError: # (size changed to int in PyFITS 3.1)\n- data_size = hdu.size\n if data_size > 0:\n ext_list.append((filename, n))\n hdulist.close()\n@@ -138,7 +135,7 @@\n if type(filename) is not str or type(extnum) is not int:\n raise ValueError(\"Expected a (filename, extension) tuple\")\n \n- hdulist = pyfits.open(filename)\n+ hdulist = fits.open(filename)\n \n data = hdulist[extnum].data\n", "issue": "Use astropy instead of pyfits to read FITS images\npyfits is not currently installable on 3.5, and it looks like AstroPy is more actively maintained.\n", "before_files": [{"content": "__all__ = ['imread', 'imread_collection']\n\nimport skimage.io as io\n\ntry:\n from astropy.io import fits as pyfits\nexcept ImportError:\n try:\n import pyfits\n except ImportError:\n raise ImportError(\n \"PyFITS could not be found. Please refer to\\n\"\n \"http://www.stsci.edu/resources/software_hardware/pyfits\\n\"\n \"for further instructions.\")\n\n\ndef imread(fname, dtype=None):\n \"\"\"Load an image from a FITS file.\n\n Parameters\n ----------\n fname : string\n Image file name, e.g. ``test.fits``.\n dtype : dtype, optional\n For FITS, this argument is ignored because Stefan is planning on\n removing the dtype argument from imread anyway.\n\n Returns\n -------\n img_array : ndarray\n Unlike plugins such as PIL, where different color bands/channels are\n stored in the third dimension, FITS images are greyscale-only and can\n be N-dimensional, so an array of the native FITS dimensionality is\n returned, without color channels.\n\n Currently if no image is found in the file, None will be returned\n\n Notes\n -----\n\n Currently FITS ``imread()`` always returns the first image extension when\n given a Multi-Extension FITS file; use ``imread_collection()`` (which does\n lazy loading) to get all the extensions at once.\n\n \"\"\"\n\n hdulist = pyfits.open(fname)\n\n # Iterate over FITS image extensions, ignoring any other extension types\n # such as binary tables, and get the first image data array:\n img_array = None\n for hdu in hdulist:\n if isinstance(hdu, pyfits.ImageHDU) or \\\n isinstance(hdu, pyfits.PrimaryHDU):\n if hdu.data is not None:\n img_array = hdu.data\n break\n hdulist.close()\n\n return img_array\n\n\ndef imread_collection(load_pattern, conserve_memory=True):\n \"\"\"Load a collection of images from one or more FITS files\n\n Parameters\n ----------\n load_pattern : str or list\n List of extensions to load. Filename globbing is currently\n unsupported.\n converve_memory : bool\n If True, never keep more than one in memory at a specific\n time. Otherwise, images will be cached once they are loaded.\n\n Returns\n -------\n\n ic : ImageCollection\n Collection of images.\n\n \"\"\"\n\n intype = type(load_pattern)\n if intype is not list and intype is not str:\n raise TypeError(\"Input must be a filename or list of filenames\")\n\n # Ensure we have a list, otherwise we'll end up iterating over the string:\n if intype is not list:\n load_pattern = [load_pattern]\n\n # Generate a list of filename/extension pairs by opening the list of\n # files and finding the image extensions in each one:\n ext_list = []\n for filename in load_pattern:\n hdulist = pyfits.open(filename)\n for n, hdu in zip(range(len(hdulist)), hdulist):\n if isinstance(hdu, pyfits.ImageHDU) or \\\n isinstance(hdu, pyfits.PrimaryHDU):\n # Ignore (primary) header units with no data (use '.size'\n # rather than '.data' to avoid actually loading the image):\n try:\n data_size = hdu.size()\n except TypeError: # (size changed to int in PyFITS 3.1)\n data_size = hdu.size\n if data_size > 0:\n ext_list.append((filename, n))\n hdulist.close()\n\n return io.ImageCollection(ext_list, load_func=FITSFactory,\n conserve_memory=conserve_memory)\n\n\ndef FITSFactory(image_ext):\n \"\"\"Load an image extension from a FITS file and return a NumPy array\n\n Parameters\n ----------\n\n image_ext : tuple\n FITS extension to load, in the format ``(filename, ext_num)``.\n The FITS ``(extname, extver)`` format is unsupported, since this\n function is not called directly by the user and\n ``imread_collection()`` does the work of figuring out which\n extensions need loading.\n\n \"\"\"\n\n # Expect a length-2 tuple with a filename as the first element:\n if not isinstance(image_ext, tuple):\n raise TypeError(\"Expected a tuple\")\n\n if len(image_ext) != 2:\n raise ValueError(\"Expected a tuple of length 2\")\n\n filename = image_ext[0]\n extnum = image_ext[1]\n\n if type(filename) is not str or type(extnum) is not int:\n raise ValueError(\"Expected a (filename, extension) tuple\")\n\n hdulist = pyfits.open(filename)\n\n data = hdulist[extnum].data\n\n hdulist.close()\n\n if data is None:\n raise RuntimeError(\n \"Extension %d of %s has no data\" % (extnum, filename))\n\n return data\n", "path": "skimage/io/_plugins/fits_plugin.py"}]}
2,020
689
gh_patches_debug_6496
rasdani/github-patches
git_diff
interlegis__sapl-3150
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Períodos de parlamentares sumiram ## Comportamento Esperado Devem aparecer todos os períodos dos parlamentares, correspondentes às legislaturas. ## Comportamento Atual Aparecem apenas da 6ª legislatura à atual. Passos para Reproduzir (para bugs) https://sapl.agudo.rs.leg.br/parlamentar/ ## Contexto Pesquisa de parlamentares por legislatura. ## Imagens do Ocorrido ![Captura de tela de 2020-04-22 08-08-12](https://user-images.githubusercontent.com/19169882/79975386-4579c300-8471-11ea-95b9-96459516774f.png) ## Seu Ambiente Versão usada (Release): 3.1.160-RC11 Nome e versão do navegador: Versão 80.0.3987.163 (Versão oficial) Nome e versão do Sistema Operacional (desktop ou mobile): Ubuntu 16.04 64 bits </issue> <code> [start of sapl/api/pagination.py] 1 from django.core.paginator import EmptyPage 2 from rest_framework import pagination 3 from rest_framework.response import Response 4 5 6 class StandardPagination(pagination.PageNumberPagination): 7 page_size = 10 8 page_size_query_param = 'page_size' 9 max_page_size = 50 10 11 def get_paginated_response(self, data): 12 try: 13 previous_page_number = self.page.previous_page_number() 14 except EmptyPage: 15 previous_page_number = None 16 17 try: 18 next_page_number = self.page.next_page_number() 19 except EmptyPage: 20 next_page_number = None 21 22 return Response({ 23 'pagination': { 24 'previous_page': previous_page_number, 25 'next_page': next_page_number, 26 'start_index': self.page.start_index(), 27 'end_index': self.page.end_index(), 28 'total_entries': self.page.paginator.count, 29 'total_pages': self.page.paginator.num_pages, 30 'page': self.page.number, 31 }, 32 'results': data, 33 34 }) 35 [end of sapl/api/pagination.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/sapl/api/pagination.py b/sapl/api/pagination.py --- a/sapl/api/pagination.py +++ b/sapl/api/pagination.py @@ -8,6 +8,11 @@ page_size_query_param = 'page_size' max_page_size = 50 + def paginate_queryset(self, queryset, request, view=None): + if request.query_params.get('get_all', False) == 'true': + return None + return super().paginate_queryset(queryset, request, view=view) + def get_paginated_response(self, data): try: previous_page_number = self.page.previous_page_number()
{"golden_diff": "diff --git a/sapl/api/pagination.py b/sapl/api/pagination.py\n--- a/sapl/api/pagination.py\n+++ b/sapl/api/pagination.py\n@@ -8,6 +8,11 @@\n page_size_query_param = 'page_size'\n max_page_size = 50\n \n+ def paginate_queryset(self, queryset, request, view=None):\n+ if request.query_params.get('get_all', False) == 'true':\n+ return None\n+ return super().paginate_queryset(queryset, request, view=view)\n+\n def get_paginated_response(self, data):\n try:\n previous_page_number = self.page.previous_page_number()\n", "issue": "Per\u00edodos de parlamentares sumiram\n## Comportamento Esperado\r\nDevem aparecer todos os per\u00edodos dos parlamentares, correspondentes \u00e0s legislaturas.\r\n\r\n## Comportamento Atual\r\nAparecem apenas da 6\u00aa legislatura \u00e0 atual.\r\n\r\nPassos para Reproduzir (para bugs)\r\nhttps://sapl.agudo.rs.leg.br/parlamentar/\r\n\r\n## Contexto\r\nPesquisa de parlamentares por legislatura.\r\n\r\n## Imagens do Ocorrido\r\n![Captura de tela de 2020-04-22 08-08-12](https://user-images.githubusercontent.com/19169882/79975386-4579c300-8471-11ea-95b9-96459516774f.png)\r\n\r\n## Seu Ambiente\r\nVers\u00e3o usada (Release): 3.1.160-RC11\r\nNome e vers\u00e3o do navegador: Vers\u00e3o 80.0.3987.163 (Vers\u00e3o oficial)\r\nNome e vers\u00e3o do Sistema Operacional (desktop ou mobile): Ubuntu 16.04 64 bits\r\n\r\n\n", "before_files": [{"content": "from django.core.paginator import EmptyPage\nfrom rest_framework import pagination\nfrom rest_framework.response import Response\n\n\nclass StandardPagination(pagination.PageNumberPagination):\n page_size = 10\n page_size_query_param = 'page_size'\n max_page_size = 50\n\n def get_paginated_response(self, data):\n try:\n previous_page_number = self.page.previous_page_number()\n except EmptyPage:\n previous_page_number = None\n\n try:\n next_page_number = self.page.next_page_number()\n except EmptyPage:\n next_page_number = None\n\n return Response({\n 'pagination': {\n 'previous_page': previous_page_number,\n 'next_page': next_page_number,\n 'start_index': self.page.start_index(),\n 'end_index': self.page.end_index(),\n 'total_entries': self.page.paginator.count,\n 'total_pages': self.page.paginator.num_pages,\n 'page': self.page.number,\n },\n 'results': data,\n\n })\n", "path": "sapl/api/pagination.py"}]}
1,085
143
gh_patches_debug_20590
rasdani/github-patches
git_diff
e-valuation__EvaP-795
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Remove passwords from error messages EvaP sends error messages to the admins when a server error occurs. Post data sent to the page where the error occurred will be included in the message, but passwords must not be included in these messages. This currently happens e.g. when a user is logging in who has two accounts like described in #791. Remove passwords from error messages EvaP sends error messages to the admins when a server error occurs. Post data sent to the page where the error occurred will be included in the message, but passwords must not be included in these messages. This currently happens e.g. when a user is logging in who has two accounts like described in #791. </issue> <code> [start of evap/evaluation/views.py] 1 import logging 2 3 from django.conf import settings 4 from django.contrib import messages 5 from django.contrib.auth import login as auth_login 6 from django.contrib.auth.decorators import login_required 7 from django.core.mail import EmailMessage 8 from django.http import HttpResponse 9 from django.shortcuts import redirect, render 10 from django.utils.translation import ugettext as _ 11 from django.core.urlresolvers import resolve, Resolver404 12 from django.views.decorators.http import require_POST 13 14 from evap.evaluation.forms import NewKeyForm, LoginUsernameForm 15 from evap.evaluation.models import UserProfile, FaqSection, EmailTemplate, Semester 16 17 logger = logging.getLogger(__name__) 18 19 20 def index(request): 21 """Main entry page into EvaP providing all the login options available. The username/password 22 login is thought to be used for internal users, e.g. by connecting to a LDAP directory. 23 The login key mechanism is meant to be used to include external participants, e.g. visiting 24 students or visiting contributors. 25 """ 26 27 # parse the form data into the respective form 28 submit_type = request.POST.get("submit_type", "no_submit") 29 new_key_form = NewKeyForm(request.POST if submit_type == "new_key" else None) 30 login_username_form = LoginUsernameForm(request, request.POST if submit_type == "login_username" else None) 31 32 # process form data 33 if request.method == 'POST': 34 if new_key_form.is_valid(): 35 # user wants a new login key 36 profile = new_key_form.get_user() 37 profile.generate_login_key() 38 profile.save() 39 40 EmailTemplate.send_login_url_to_user(new_key_form.get_user()) 41 42 messages.success(request, _("We sent you an email with a one-time login URL. Please check your inbox.")) 43 return redirect('evaluation:index') 44 elif login_username_form.is_valid(): 45 # user would like to login with username and password and passed password test 46 auth_login(request, login_username_form.get_user()) 47 48 # clean up our test cookie 49 if request.session.test_cookie_worked(): 50 request.session.delete_test_cookie() 51 52 # if not logged in by now, render form 53 if not request.user.is_authenticated(): 54 # set test cookie to verify whether they work in the next step 55 request.session.set_test_cookie() 56 57 template_data = dict(new_key_form=new_key_form, login_username_form=login_username_form) 58 return render(request, "index.html", template_data) 59 else: 60 user, __ = UserProfile.objects.get_or_create(username=request.user.username) 61 62 # check for redirect variable 63 redirect_to = request.GET.get("next", None) 64 if redirect_to is not None: 65 if redirect_to.startswith("/staff/"): 66 if request.user.is_staff: 67 return redirect(redirect_to) 68 elif redirect_to.startswith("/grades/"): 69 if request.user.is_grade_publisher: 70 return redirect(redirect_to) 71 elif redirect_to.startswith("/contributor/"): 72 if user.is_contributor: 73 return redirect(redirect_to) 74 elif redirect_to.startswith("/student/"): 75 if user.is_participant: 76 return redirect(redirect_to) 77 else: 78 try: 79 resolve(redirect_to) 80 except Resolver404: 81 pass 82 else: 83 return redirect(redirect_to) 84 85 # redirect user to appropriate start page 86 if request.user.is_staff: 87 return redirect('staff:index') 88 elif request.user.is_grade_publisher: 89 return redirect('grades:semester_view', Semester.active_semester().id) 90 elif user.is_student: 91 return redirect('student:index') 92 elif user.is_contributor_or_delegate: 93 return redirect('contributor:index') 94 elif user.is_participant: 95 return redirect('student:index') 96 else: 97 return redirect('results:index') 98 99 100 def faq(request): 101 return render(request, "faq.html", dict(sections=FaqSection.objects.all())) 102 103 104 def legal_notice(request): 105 return render(request, "legal_notice.html", dict()) 106 107 108 @require_POST 109 @login_required 110 def feedback_send(request): 111 sender_email = request.user.email 112 message = request.POST.get("message") 113 subject = "Feedback from {}".format(sender_email) 114 115 if message: 116 mail = EmailMessage( 117 subject=subject, 118 body=message, 119 to=[settings.FEEDBACK_EMAIL]) 120 121 try: 122 mail.send() 123 logger.info('Sent feedback email: \n{}\n'.format(mail.message())) 124 except Exception: 125 logger.exception('An exception occurred when sending the following feedback email:\n{}\n'.format(mail.message())) 126 127 return HttpResponse() 128 [end of evap/evaluation/views.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/evap/evaluation/views.py b/evap/evaluation/views.py --- a/evap/evaluation/views.py +++ b/evap/evaluation/views.py @@ -10,6 +10,7 @@ from django.utils.translation import ugettext as _ from django.core.urlresolvers import resolve, Resolver404 from django.views.decorators.http import require_POST +from django.views.decorators.debug import sensitive_post_parameters from evap.evaluation.forms import NewKeyForm, LoginUsernameForm from evap.evaluation.models import UserProfile, FaqSection, EmailTemplate, Semester @@ -17,6 +18,7 @@ logger = logging.getLogger(__name__) +@sensitive_post_parameters("password") def index(request): """Main entry page into EvaP providing all the login options available. The username/password login is thought to be used for internal users, e.g. by connecting to a LDAP directory.
{"golden_diff": "diff --git a/evap/evaluation/views.py b/evap/evaluation/views.py\n--- a/evap/evaluation/views.py\n+++ b/evap/evaluation/views.py\n@@ -10,6 +10,7 @@\n from django.utils.translation import ugettext as _\n from django.core.urlresolvers import resolve, Resolver404\n from django.views.decorators.http import require_POST\n+from django.views.decorators.debug import sensitive_post_parameters\n \n from evap.evaluation.forms import NewKeyForm, LoginUsernameForm\n from evap.evaluation.models import UserProfile, FaqSection, EmailTemplate, Semester\n@@ -17,6 +18,7 @@\n logger = logging.getLogger(__name__)\n \n \n+@sensitive_post_parameters(\"password\")\n def index(request):\n \"\"\"Main entry page into EvaP providing all the login options available. The username/password\n login is thought to be used for internal users, e.g. by connecting to a LDAP directory.\n", "issue": "Remove passwords from error messages\nEvaP sends error messages to the admins when a server error occurs. Post data sent to the page where the error occurred will be included in the message, but passwords must not be included in these messages.\nThis currently happens e.g. when a user is logging in who has two accounts like described in #791.\n\nRemove passwords from error messages\nEvaP sends error messages to the admins when a server error occurs. Post data sent to the page where the error occurred will be included in the message, but passwords must not be included in these messages.\nThis currently happens e.g. when a user is logging in who has two accounts like described in #791.\n\n", "before_files": [{"content": "import logging\n\nfrom django.conf import settings\nfrom django.contrib import messages\nfrom django.contrib.auth import login as auth_login\nfrom django.contrib.auth.decorators import login_required\nfrom django.core.mail import EmailMessage\nfrom django.http import HttpResponse\nfrom django.shortcuts import redirect, render\nfrom django.utils.translation import ugettext as _\nfrom django.core.urlresolvers import resolve, Resolver404\nfrom django.views.decorators.http import require_POST\n\nfrom evap.evaluation.forms import NewKeyForm, LoginUsernameForm\nfrom evap.evaluation.models import UserProfile, FaqSection, EmailTemplate, Semester\n\nlogger = logging.getLogger(__name__)\n\n\ndef index(request):\n \"\"\"Main entry page into EvaP providing all the login options available. The username/password\n login is thought to be used for internal users, e.g. by connecting to a LDAP directory.\n The login key mechanism is meant to be used to include external participants, e.g. visiting\n students or visiting contributors.\n \"\"\"\n\n # parse the form data into the respective form\n submit_type = request.POST.get(\"submit_type\", \"no_submit\")\n new_key_form = NewKeyForm(request.POST if submit_type == \"new_key\" else None)\n login_username_form = LoginUsernameForm(request, request.POST if submit_type == \"login_username\" else None)\n\n # process form data\n if request.method == 'POST':\n if new_key_form.is_valid():\n # user wants a new login key\n profile = new_key_form.get_user()\n profile.generate_login_key()\n profile.save()\n\n EmailTemplate.send_login_url_to_user(new_key_form.get_user())\n\n messages.success(request, _(\"We sent you an email with a one-time login URL. Please check your inbox.\"))\n return redirect('evaluation:index')\n elif login_username_form.is_valid():\n # user would like to login with username and password and passed password test\n auth_login(request, login_username_form.get_user())\n\n # clean up our test cookie\n if request.session.test_cookie_worked():\n request.session.delete_test_cookie()\n\n # if not logged in by now, render form\n if not request.user.is_authenticated():\n # set test cookie to verify whether they work in the next step\n request.session.set_test_cookie()\n\n template_data = dict(new_key_form=new_key_form, login_username_form=login_username_form)\n return render(request, \"index.html\", template_data)\n else:\n user, __ = UserProfile.objects.get_or_create(username=request.user.username)\n\n # check for redirect variable\n redirect_to = request.GET.get(\"next\", None)\n if redirect_to is not None:\n if redirect_to.startswith(\"/staff/\"):\n if request.user.is_staff:\n return redirect(redirect_to)\n elif redirect_to.startswith(\"/grades/\"):\n if request.user.is_grade_publisher:\n return redirect(redirect_to)\n elif redirect_to.startswith(\"/contributor/\"):\n if user.is_contributor:\n return redirect(redirect_to)\n elif redirect_to.startswith(\"/student/\"):\n if user.is_participant:\n return redirect(redirect_to)\n else:\n try:\n resolve(redirect_to)\n except Resolver404:\n pass\n else:\n return redirect(redirect_to)\n\n # redirect user to appropriate start page\n if request.user.is_staff:\n return redirect('staff:index')\n elif request.user.is_grade_publisher:\n return redirect('grades:semester_view', Semester.active_semester().id)\n elif user.is_student:\n return redirect('student:index')\n elif user.is_contributor_or_delegate:\n return redirect('contributor:index')\n elif user.is_participant:\n return redirect('student:index')\n else:\n return redirect('results:index')\n\n\ndef faq(request):\n return render(request, \"faq.html\", dict(sections=FaqSection.objects.all()))\n\n\ndef legal_notice(request):\n return render(request, \"legal_notice.html\", dict())\n\n\n@require_POST\n@login_required\ndef feedback_send(request):\n sender_email = request.user.email\n message = request.POST.get(\"message\")\n subject = \"Feedback from {}\".format(sender_email)\n\n if message:\n mail = EmailMessage(\n subject=subject,\n body=message,\n to=[settings.FEEDBACK_EMAIL])\n\n try:\n mail.send()\n logger.info('Sent feedback email: \\n{}\\n'.format(mail.message()))\n except Exception:\n logger.exception('An exception occurred when sending the following feedback email:\\n{}\\n'.format(mail.message()))\n\n return HttpResponse()\n", "path": "evap/evaluation/views.py"}]}
1,915
202
gh_patches_debug_6190
rasdani/github-patches
git_diff
microsoft__torchgeo-626
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Update README Before blog: update verbiage and add pics from blog/paper. After blog: add link to blog. </issue> <code> [start of torchgeo/__init__.py] 1 # Copyright (c) Microsoft Corporation. All rights reserved. 2 # Licensed under the MIT License. 3 4 """TorchGeo: datasets, transforms, and models for geospatial data. 5 6 This library is part of the `PyTorch <http://pytorch.org/>`_ project. PyTorch is an open 7 source machine learning framework. 8 9 The :mod:`torchgeo` package consists of popular datasets, model architectures, and 10 common image transformations for geospatial data. 11 """ 12 13 __author__ = "Adam J. Stewart" 14 __version__ = "0.3.0.dev0" 15 [end of torchgeo/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/torchgeo/__init__.py b/torchgeo/__init__.py --- a/torchgeo/__init__.py +++ b/torchgeo/__init__.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. -"""TorchGeo: datasets, transforms, and models for geospatial data. +"""TorchGeo: datasets, samplers, transforms, and pre-trained models for geospatial data. This library is part of the `PyTorch <http://pytorch.org/>`_ project. PyTorch is an open source machine learning framework.
{"golden_diff": "diff --git a/torchgeo/__init__.py b/torchgeo/__init__.py\n--- a/torchgeo/__init__.py\n+++ b/torchgeo/__init__.py\n@@ -1,7 +1,7 @@\n # Copyright (c) Microsoft Corporation. All rights reserved.\n # Licensed under the MIT License.\n \n-\"\"\"TorchGeo: datasets, transforms, and models for geospatial data.\n+\"\"\"TorchGeo: datasets, samplers, transforms, and pre-trained models for geospatial data.\n \n This library is part of the `PyTorch <http://pytorch.org/>`_ project. PyTorch is an open\n source machine learning framework.\n", "issue": "Update README\nBefore blog: update verbiage and add pics from blog/paper. \n\nAfter blog: add link to blog.\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\n\n\"\"\"TorchGeo: datasets, transforms, and models for geospatial data.\n\nThis library is part of the `PyTorch <http://pytorch.org/>`_ project. PyTorch is an open\nsource machine learning framework.\n\nThe :mod:`torchgeo` package consists of popular datasets, model architectures, and\ncommon image transformations for geospatial data.\n\"\"\"\n\n__author__ = \"Adam J. Stewart\"\n__version__ = \"0.3.0.dev0\"\n", "path": "torchgeo/__init__.py"}]}
710
146
gh_patches_debug_34052
rasdani/github-patches
git_diff
ckan__ckan-5750
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Purging deleted packages does not delete associated filestore/datastore resources **CKAN version** 2.9 **Describe the bug** After deleting and purging dataset packages, the associated datastore/filestore resources are orphaned and not deleted. **Steps to reproduce** 1. Delete datasets 2. Go to sysadmin/trash 3. Select "Purge All" 4. Check datastore and filestore. Associated resources are still there. **Expected behavior** Dataset resources in the filestore and the datastore are deleted when the associated package resources are purged. **Additional details** This is related to #4705 and a lot of work has been done on PRs #4867 and #4905, but neither have been merged. </issue> <code> [start of ckanext/datastore/cli.py] 1 # encoding: utf-8 2 3 import logging 4 import os 5 6 import click 7 8 from ckan.model import parse_db_config 9 from ckan.common import config 10 11 import ckanext.datastore as datastore_module 12 from ckanext.datastore.backend.postgres import identifier 13 from ckanext.datastore.blueprint import DUMP_FORMATS, dump_to 14 15 log = logging.getLogger(__name__) 16 17 18 @click.group() 19 def datastore(): 20 u'''Perform commands to set up the datastore. 21 ''' 22 23 24 @datastore.command( 25 u'set-permissions', 26 short_help=u'Generate SQL for permission configuration.' 27 ) 28 def set_permissions(): 29 u'''Emit an SQL script that will set the permissions for the datastore 30 users as configured in your configuration file.''' 31 32 write_url = _parse_db_config(u'ckan.datastore.write_url') 33 read_url = _parse_db_config(u'ckan.datastore.read_url') 34 db_url = _parse_db_config(u'sqlalchemy.url') 35 36 # Basic validation that read and write URLs reference the same database. 37 # This obviously doesn't check they're the same database (the hosts/ports 38 # could be different), but it's better than nothing, I guess. 39 40 if write_url[u'db_name'] != read_url[u'db_name']: 41 click.secho( 42 u'The datastore write_url and read_url must refer to the same ' 43 u'database!', 44 fg=u'red', 45 bold=True 46 ) 47 raise click.Abort() 48 49 sql = permissions_sql( 50 maindb=db_url[u'db_name'], 51 datastoredb=write_url[u'db_name'], 52 mainuser=db_url[u'db_user'], 53 writeuser=write_url[u'db_user'], 54 readuser=read_url[u'db_user'] 55 ) 56 57 click.echo(sql) 58 59 60 def permissions_sql(maindb, datastoredb, mainuser, writeuser, readuser): 61 template_filename = os.path.join( 62 os.path.dirname(datastore_module.__file__), u'set_permissions.sql' 63 ) 64 with open(template_filename) as fp: 65 template = fp.read() 66 return template.format( 67 maindb=identifier(maindb), 68 datastoredb=identifier(datastoredb), 69 mainuser=identifier(mainuser), 70 writeuser=identifier(writeuser), 71 readuser=identifier(readuser) 72 ) 73 74 75 @datastore.command() 76 @click.argument(u'resource-id', nargs=1) 77 @click.argument( 78 u'output-file', 79 type=click.File(u'wb'), 80 default=click.get_binary_stream(u'stdout') 81 ) 82 @click.option(u'--format', default=u'csv', type=click.Choice(DUMP_FORMATS)) 83 @click.option(u'--offset', type=click.IntRange(0, None), default=0) 84 @click.option(u'--limit', type=click.IntRange(0)) 85 @click.option(u'--bom', is_flag=True) # FIXME: options based on format 86 @click.pass_context 87 def dump(ctx, resource_id, output_file, format, offset, limit, bom): 88 u'''Dump a datastore resource. 89 ''' 90 flask_app = ctx.meta['flask_app'] 91 with flask_app.test_request_context(): 92 dump_to( 93 resource_id, 94 output_file, 95 fmt=format, 96 offset=offset, 97 limit=limit, 98 options={u'bom': bom}, 99 sort=u'_id', 100 search_params={} 101 ) 102 103 104 def _parse_db_config(config_key=u'sqlalchemy.url'): 105 db_config = parse_db_config(config_key) 106 if not db_config: 107 click.secho( 108 u'Could not extract db details from url: %r' % config[config_key], 109 fg=u'red', 110 bold=True 111 ) 112 raise click.Abort() 113 return db_config 114 [end of ckanext/datastore/cli.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ckanext/datastore/cli.py b/ckanext/datastore/cli.py --- a/ckanext/datastore/cli.py +++ b/ckanext/datastore/cli.py @@ -7,6 +7,7 @@ from ckan.model import parse_db_config from ckan.common import config +import ckan.logic as logic import ckanext.datastore as datastore_module from ckanext.datastore.backend.postgres import identifier @@ -111,3 +112,69 @@ ) raise click.Abort() return db_config + + [email protected]( + u'purge', + short_help=u'purge orphaned resources from the datastore.' +) +def purge(): + u'''Purge orphaned resources from the datastore using the datastore_delete + action, which drops tables when called without filters.''' + + site_user = logic.get_action(u'get_site_user')({u'ignore_auth': True}, {}) + context = {u'user': site_user[u'name']} + + result = logic.get_action(u'datastore_search')( + context, + {u'resource_id': u'_table_metadata'} + ) + + resource_id_list = [] + for record in result[u'records']: + try: + # ignore 'alias' records (views) as they are automatically + # deleted when the parent resource table is dropped + if record[u'alias_of']: + continue + + # we need to do this to trigger resource_show auth function + site_user = logic.get_action(u'get_site_user')( + {u'ignore_auth': True}, {}) + context = {u'user': site_user[u'name']} + + logic.get_action(u'resource_show')( + context, + {u'id': record[u'name']} + ) + except logic.NotFound: + resource_id_list.append(record[u'name']) + click.echo(u"Resource '%s' orphaned - queued for drop" % + record[u'name']) + except KeyError: + continue + + orphaned_table_count = len(resource_id_list) + click.echo(u'%d orphaned tables found.' % orphaned_table_count) + + if not orphaned_table_count: + return + + click.confirm(u'Proceed with purge?', abort=True) + + # Drop the orphaned datastore tables. When datastore_delete is called + # without filters, it does a drop table cascade + drop_count = 0 + for resource_id in resource_id_list: + logic.get_action(u'datastore_delete')( + context, + {u'resource_id': resource_id, u'force': True} + ) + click.echo(u"Table '%s' dropped)" % resource_id) + drop_count += 1 + + click.echo(u'Dropped %s tables' % drop_count) + + +def get_commands(): + return (set_permissions, dump, purge)
{"golden_diff": "diff --git a/ckanext/datastore/cli.py b/ckanext/datastore/cli.py\n--- a/ckanext/datastore/cli.py\n+++ b/ckanext/datastore/cli.py\n@@ -7,6 +7,7 @@\n \n from ckan.model import parse_db_config\n from ckan.common import config\n+import ckan.logic as logic\n \n import ckanext.datastore as datastore_module\n from ckanext.datastore.backend.postgres import identifier\n@@ -111,3 +112,69 @@\n )\n raise click.Abort()\n return db_config\n+\n+\[email protected](\n+ u'purge',\n+ short_help=u'purge orphaned resources from the datastore.'\n+)\n+def purge():\n+ u'''Purge orphaned resources from the datastore using the datastore_delete\n+ action, which drops tables when called without filters.'''\n+\n+ site_user = logic.get_action(u'get_site_user')({u'ignore_auth': True}, {})\n+ context = {u'user': site_user[u'name']}\n+\n+ result = logic.get_action(u'datastore_search')(\n+ context,\n+ {u'resource_id': u'_table_metadata'}\n+ )\n+\n+ resource_id_list = []\n+ for record in result[u'records']:\n+ try:\n+ # ignore 'alias' records (views) as they are automatically\n+ # deleted when the parent resource table is dropped\n+ if record[u'alias_of']:\n+ continue\n+\n+ # we need to do this to trigger resource_show auth function\n+ site_user = logic.get_action(u'get_site_user')(\n+ {u'ignore_auth': True}, {})\n+ context = {u'user': site_user[u'name']}\n+\n+ logic.get_action(u'resource_show')(\n+ context,\n+ {u'id': record[u'name']}\n+ )\n+ except logic.NotFound:\n+ resource_id_list.append(record[u'name'])\n+ click.echo(u\"Resource '%s' orphaned - queued for drop\" %\n+ record[u'name'])\n+ except KeyError:\n+ continue\n+\n+ orphaned_table_count = len(resource_id_list)\n+ click.echo(u'%d orphaned tables found.' % orphaned_table_count)\n+\n+ if not orphaned_table_count:\n+ return\n+\n+ click.confirm(u'Proceed with purge?', abort=True)\n+\n+ # Drop the orphaned datastore tables. When datastore_delete is called\n+ # without filters, it does a drop table cascade\n+ drop_count = 0\n+ for resource_id in resource_id_list:\n+ logic.get_action(u'datastore_delete')(\n+ context,\n+ {u'resource_id': resource_id, u'force': True}\n+ )\n+ click.echo(u\"Table '%s' dropped)\" % resource_id)\n+ drop_count += 1\n+\n+ click.echo(u'Dropped %s tables' % drop_count)\n+\n+\n+def get_commands():\n+ return (set_permissions, dump, purge)\n", "issue": "Purging deleted packages does not delete associated filestore/datastore resources\n**CKAN version**\r\n2.9\r\n\r\n**Describe the bug**\r\nAfter deleting and purging dataset packages, the associated datastore/filestore resources are orphaned and not deleted.\r\n\r\n**Steps to reproduce**\r\n\r\n1. Delete datasets\r\n2. Go to sysadmin/trash\r\n3. Select \"Purge All\"\r\n4. Check datastore and filestore. Associated resources are still there.\r\n\r\n**Expected behavior**\r\nDataset resources in the filestore and the datastore are deleted when the associated package resources are purged.\r\n\r\n**Additional details**\r\nThis is related to #4705 and a lot of work has been done on PRs #4867 and #4905, but neither have been merged.\r\n\n", "before_files": [{"content": "# encoding: utf-8\n\nimport logging\nimport os\n\nimport click\n\nfrom ckan.model import parse_db_config\nfrom ckan.common import config\n\nimport ckanext.datastore as datastore_module\nfrom ckanext.datastore.backend.postgres import identifier\nfrom ckanext.datastore.blueprint import DUMP_FORMATS, dump_to\n\nlog = logging.getLogger(__name__)\n\n\[email protected]()\ndef datastore():\n u'''Perform commands to set up the datastore.\n '''\n\n\[email protected](\n u'set-permissions',\n short_help=u'Generate SQL for permission configuration.'\n)\ndef set_permissions():\n u'''Emit an SQL script that will set the permissions for the datastore\n users as configured in your configuration file.'''\n\n write_url = _parse_db_config(u'ckan.datastore.write_url')\n read_url = _parse_db_config(u'ckan.datastore.read_url')\n db_url = _parse_db_config(u'sqlalchemy.url')\n\n # Basic validation that read and write URLs reference the same database.\n # This obviously doesn't check they're the same database (the hosts/ports\n # could be different), but it's better than nothing, I guess.\n\n if write_url[u'db_name'] != read_url[u'db_name']:\n click.secho(\n u'The datastore write_url and read_url must refer to the same '\n u'database!',\n fg=u'red',\n bold=True\n )\n raise click.Abort()\n\n sql = permissions_sql(\n maindb=db_url[u'db_name'],\n datastoredb=write_url[u'db_name'],\n mainuser=db_url[u'db_user'],\n writeuser=write_url[u'db_user'],\n readuser=read_url[u'db_user']\n )\n\n click.echo(sql)\n\n\ndef permissions_sql(maindb, datastoredb, mainuser, writeuser, readuser):\n template_filename = os.path.join(\n os.path.dirname(datastore_module.__file__), u'set_permissions.sql'\n )\n with open(template_filename) as fp:\n template = fp.read()\n return template.format(\n maindb=identifier(maindb),\n datastoredb=identifier(datastoredb),\n mainuser=identifier(mainuser),\n writeuser=identifier(writeuser),\n readuser=identifier(readuser)\n )\n\n\[email protected]()\[email protected](u'resource-id', nargs=1)\[email protected](\n u'output-file',\n type=click.File(u'wb'),\n default=click.get_binary_stream(u'stdout')\n)\[email protected](u'--format', default=u'csv', type=click.Choice(DUMP_FORMATS))\[email protected](u'--offset', type=click.IntRange(0, None), default=0)\[email protected](u'--limit', type=click.IntRange(0))\[email protected](u'--bom', is_flag=True) # FIXME: options based on format\[email protected]_context\ndef dump(ctx, resource_id, output_file, format, offset, limit, bom):\n u'''Dump a datastore resource.\n '''\n flask_app = ctx.meta['flask_app']\n with flask_app.test_request_context():\n dump_to(\n resource_id,\n output_file,\n fmt=format,\n offset=offset,\n limit=limit,\n options={u'bom': bom},\n sort=u'_id',\n search_params={}\n )\n\n\ndef _parse_db_config(config_key=u'sqlalchemy.url'):\n db_config = parse_db_config(config_key)\n if not db_config:\n click.secho(\n u'Could not extract db details from url: %r' % config[config_key],\n fg=u'red',\n bold=True\n )\n raise click.Abort()\n return db_config\n", "path": "ckanext/datastore/cli.py"}]}
1,744
665
gh_patches_debug_19386
rasdani/github-patches
git_diff
OCHA-DAP__hdx-ckan-1747
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Ebola Page: make millions and % smaller About half the current height looks right. ![image](https://cloud.githubusercontent.com/assets/1654485/5007241/32010f00-6a4f-11e4-906d-4be38940993f.png) </issue> <code> [start of ckanext-hdx_crisis/ckanext/hdx_crisis/controllers/crisis_controller.py] 1 ''' 2 Created on Nov 3, 2014 3 4 @author: alexandru-m-g 5 ''' 6 7 import logging 8 import datetime as dt 9 import decimal 10 11 import pylons.config as config 12 13 import ckan.lib.base as base 14 import ckan.logic as logic 15 import ckan.model as model 16 import ckan.common as common 17 import ckan.lib.helpers as h 18 19 render = base.render 20 get_action = logic.get_action 21 c = common.c 22 request = common.request 23 _ = common._ 24 25 Decimal = decimal.Decimal 26 27 log = logging.getLogger(__name__) 28 29 30 class CrisisController(base.BaseController): 31 32 def show(self): 33 34 context = {'model': model, 'session': model.Session, 35 'user': c.user or c.author, 'for_view': True, 36 'auth_user_obj': c.userobj} 37 38 datastore_resource_id = self._get_datastore_resource_id( 39 context, config.get('hdx.crisis.ebola_dataset', None), config.get('hdx.crisis.ebola_resource_title', None)) 40 if datastore_resource_id: 41 c.top_line_items = self._get_top_line_items( 42 context, datastore_resource_id) 43 44 limit = 25 45 c.q = u'ebola' 46 47 page = int(request.params.get('page', 1)) 48 data_dict = {'sort': u'metadata_modified desc', 49 'fq': '+dataset_type:dataset', 50 'rows': limit, 51 'q': c.q, 52 'start': (page - 1) * limit 53 } 54 query = get_action("package_search")(context, data_dict) 55 56 def pager_url(q=None, page=None): 57 return h.url_for('show_crisis', page=page) 58 59 c.page = h.Page( 60 collection=query['results'], 61 page=page, 62 url=pager_url, 63 item_count=query['count'], 64 items_per_page=limit 65 ) 66 c.items = query['results'] 67 c.item_count = query['count'] 68 69 c.other_links = {} 70 c.other_links['show_more'] = h.url_for( 71 "search", **{'q': u'ebola', 'sort': u'metadata_modified desc', 72 'ext_indicator': '0'}) 73 74 return render('crisis/crisis.html') 75 76 def _get_decimal_value(self, value): 77 decimal_value = Decimal(str(value)).quantize( 78 Decimal('.1'), rounding=decimal.ROUND_HALF_UP) 79 return decimal_value 80 81 def _format_results(self, result): 82 for r in result['records']: 83 d = dt.datetime.strptime(r[u'latest_date'], '%Y-%m-%dT%H:%M:%S') 84 r[u'latest_date'] = dt.datetime.strftime(d, '%b %d, %Y') 85 86 modified_value = r[u'value'] 87 if r[u'units'] == 'ratio': 88 modified_value *= 100.0 89 elif r[u'units'] == 'million': 90 modified_value /= 1000000.0 91 92 int_value = int(modified_value) 93 if int_value == modified_value: 94 r[u'formatted_value'] = '{:,}'.format(int_value) 95 else: 96 if r[u'units'] == 'ratio': 97 r[u'formatted_value'] = '{:,.1f}%'.format( 98 self._get_decimal_value(modified_value)) 99 elif r[u'units'] == 'million': 100 r[u'formatted_value'] = '{:,.1f} '.format( 101 self._get_decimal_value(modified_value)) 102 r[u'formatted_value'] += ' ' + _('million') 103 104 def _get_top_line_items(self, context, datastore_resource_id): 105 modified_context = dict(context) 106 modified_context['ignore_auth'] = True 107 result = get_action('datastore_search')( 108 modified_context, {'resource_id': datastore_resource_id}) 109 if 'records' in result: 110 self._format_results(result) 111 return result['records'] 112 return [] 113 114 def _get_datastore_resource_id(self, context, dataset_id, resource_name): 115 try: 116 modified_context = dict(context) 117 modified_context['ignore_auth'] = True 118 dataset = get_action('package_show')( 119 modified_context, {'id': dataset_id}) 120 121 if 'resources' in dataset: 122 for r in dataset['resources']: 123 if 'datastore_active' in r and r['datastore_active'] \ 124 and r['name'] == resource_name: 125 return r['id'] 126 return None 127 except: 128 log.warning('No dataset with id ' + dataset_id) 129 return None 130 [end of ckanext-hdx_crisis/ckanext/hdx_crisis/controllers/crisis_controller.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ckanext-hdx_crisis/ckanext/hdx_crisis/controllers/crisis_controller.py b/ckanext-hdx_crisis/ckanext/hdx_crisis/controllers/crisis_controller.py --- a/ckanext-hdx_crisis/ckanext/hdx_crisis/controllers/crisis_controller.py +++ b/ckanext-hdx_crisis/ckanext/hdx_crisis/controllers/crisis_controller.py @@ -94,12 +94,12 @@ r[u'formatted_value'] = '{:,}'.format(int_value) else: if r[u'units'] == 'ratio': - r[u'formatted_value'] = '{:,.1f}%'.format( + r[u'formatted_value'] = '{:,.1f}'.format( self._get_decimal_value(modified_value)) elif r[u'units'] == 'million': - r[u'formatted_value'] = '{:,.1f} '.format( + r[u'formatted_value'] = '{:,.1f}'.format( self._get_decimal_value(modified_value)) - r[u'formatted_value'] += ' ' + _('million') + #r[u'formatted_value'] += ' ' + _('million') def _get_top_line_items(self, context, datastore_resource_id): modified_context = dict(context)
{"golden_diff": "diff --git a/ckanext-hdx_crisis/ckanext/hdx_crisis/controllers/crisis_controller.py b/ckanext-hdx_crisis/ckanext/hdx_crisis/controllers/crisis_controller.py\n--- a/ckanext-hdx_crisis/ckanext/hdx_crisis/controllers/crisis_controller.py\n+++ b/ckanext-hdx_crisis/ckanext/hdx_crisis/controllers/crisis_controller.py\n@@ -94,12 +94,12 @@\n r[u'formatted_value'] = '{:,}'.format(int_value)\n else:\n if r[u'units'] == 'ratio':\n- r[u'formatted_value'] = '{:,.1f}%'.format(\n+ r[u'formatted_value'] = '{:,.1f}'.format(\n self._get_decimal_value(modified_value))\n elif r[u'units'] == 'million':\n- r[u'formatted_value'] = '{:,.1f} '.format(\n+ r[u'formatted_value'] = '{:,.1f}'.format(\n self._get_decimal_value(modified_value))\n- r[u'formatted_value'] += ' ' + _('million')\n+ #r[u'formatted_value'] += ' ' + _('million')\n \n def _get_top_line_items(self, context, datastore_resource_id):\n modified_context = dict(context)\n", "issue": "Ebola Page: make millions and % smaller\nAbout half the current height looks right.\n\n![image](https://cloud.githubusercontent.com/assets/1654485/5007241/32010f00-6a4f-11e4-906d-4be38940993f.png)\n\n", "before_files": [{"content": "'''\nCreated on Nov 3, 2014\n\n@author: alexandru-m-g\n'''\n\nimport logging\nimport datetime as dt\nimport decimal\n\nimport pylons.config as config\n\nimport ckan.lib.base as base\nimport ckan.logic as logic\nimport ckan.model as model\nimport ckan.common as common\nimport ckan.lib.helpers as h\n\nrender = base.render\nget_action = logic.get_action\nc = common.c\nrequest = common.request\n_ = common._\n\nDecimal = decimal.Decimal\n\nlog = logging.getLogger(__name__)\n\n\nclass CrisisController(base.BaseController):\n\n def show(self):\n\n context = {'model': model, 'session': model.Session,\n 'user': c.user or c.author, 'for_view': True,\n 'auth_user_obj': c.userobj}\n\n datastore_resource_id = self._get_datastore_resource_id(\n context, config.get('hdx.crisis.ebola_dataset', None), config.get('hdx.crisis.ebola_resource_title', None))\n if datastore_resource_id:\n c.top_line_items = self._get_top_line_items(\n context, datastore_resource_id)\n\n limit = 25\n c.q = u'ebola'\n\n page = int(request.params.get('page', 1))\n data_dict = {'sort': u'metadata_modified desc',\n 'fq': '+dataset_type:dataset',\n 'rows': limit,\n 'q': c.q,\n 'start': (page - 1) * limit\n }\n query = get_action(\"package_search\")(context, data_dict)\n\n def pager_url(q=None, page=None):\n return h.url_for('show_crisis', page=page)\n\n c.page = h.Page(\n collection=query['results'],\n page=page,\n url=pager_url,\n item_count=query['count'],\n items_per_page=limit\n )\n c.items = query['results']\n c.item_count = query['count']\n\n c.other_links = {}\n c.other_links['show_more'] = h.url_for(\n \"search\", **{'q': u'ebola', 'sort': u'metadata_modified desc',\n 'ext_indicator': '0'})\n\n return render('crisis/crisis.html')\n\n def _get_decimal_value(self, value):\n decimal_value = Decimal(str(value)).quantize(\n Decimal('.1'), rounding=decimal.ROUND_HALF_UP)\n return decimal_value\n\n def _format_results(self, result):\n for r in result['records']:\n d = dt.datetime.strptime(r[u'latest_date'], '%Y-%m-%dT%H:%M:%S')\n r[u'latest_date'] = dt.datetime.strftime(d, '%b %d, %Y')\n\n modified_value = r[u'value']\n if r[u'units'] == 'ratio':\n modified_value *= 100.0\n elif r[u'units'] == 'million':\n modified_value /= 1000000.0\n\n int_value = int(modified_value)\n if int_value == modified_value:\n r[u'formatted_value'] = '{:,}'.format(int_value)\n else:\n if r[u'units'] == 'ratio':\n r[u'formatted_value'] = '{:,.1f}%'.format(\n self._get_decimal_value(modified_value))\n elif r[u'units'] == 'million':\n r[u'formatted_value'] = '{:,.1f} '.format(\n self._get_decimal_value(modified_value))\n r[u'formatted_value'] += ' ' + _('million')\n\n def _get_top_line_items(self, context, datastore_resource_id):\n modified_context = dict(context)\n modified_context['ignore_auth'] = True\n result = get_action('datastore_search')(\n modified_context, {'resource_id': datastore_resource_id})\n if 'records' in result:\n self._format_results(result)\n return result['records']\n return []\n\n def _get_datastore_resource_id(self, context, dataset_id, resource_name):\n try:\n modified_context = dict(context)\n modified_context['ignore_auth'] = True\n dataset = get_action('package_show')(\n modified_context, {'id': dataset_id})\n\n if 'resources' in dataset:\n for r in dataset['resources']:\n if 'datastore_active' in r and r['datastore_active'] \\\n and r['name'] == resource_name:\n return r['id']\n return None\n except:\n log.warning('No dataset with id ' + dataset_id)\n return None\n", "path": "ckanext-hdx_crisis/ckanext/hdx_crisis/controllers/crisis_controller.py"}]}
1,929
307
gh_patches_debug_18214
rasdani/github-patches
git_diff
python-poetry__poetry-1954
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> command to check lock file <!-- Checked checkbox should look like this: [x] --> - [X] I have searched the [issues](https://github.com/sdispater/poetry/issues) of this repo and believe that this is not a duplicate. ## Issue Would it be possible to add a `poetry check` command (oh, I see you'e used that already, maybe the command could be extended or have a `poetry lock --check`?) that will return zero if the current poetry.lock is both correct w.r.t. the pyproject.toml (ie: packages match) *and* that it matches the output of `poetry lock`. </issue> <code> [start of poetry/console/commands/lock.py] 1 from cleo.helpers import option 2 3 from .installer_command import InstallerCommand 4 5 6 class LockCommand(InstallerCommand): 7 8 name = "lock" 9 description = "Locks the project dependencies." 10 11 options = [ 12 option( 13 "no-update", None, "Do not update locked versions, only refresh lock file." 14 ), 15 ] 16 17 help = """ 18 The <info>lock</info> command reads the <comment>pyproject.toml</> file from the 19 current directory, processes it, and locks the dependencies in the <comment>poetry.lock</> 20 file. 21 22 <info>poetry lock</info> 23 """ 24 25 loggers = ["poetry.repositories.pypi_repository"] 26 27 def handle(self) -> int: 28 self._installer.use_executor( 29 self.poetry.config.get("experimental.new-installer", False) 30 ) 31 32 self._installer.lock(update=not self.option("no-update")) 33 34 return self._installer.run() 35 [end of poetry/console/commands/lock.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/poetry/console/commands/lock.py b/poetry/console/commands/lock.py --- a/poetry/console/commands/lock.py +++ b/poetry/console/commands/lock.py @@ -12,6 +12,12 @@ option( "no-update", None, "Do not update locked versions, only refresh lock file." ), + option( + "check", + None, + "Check that the <comment>poetry.lock</> file corresponds to the current version " + "of <comment>pyproject.toml</>.", + ), ] help = """ @@ -29,6 +35,13 @@ self.poetry.config.get("experimental.new-installer", False) ) + if self.option("check"): + return ( + 0 + if self.poetry.locker.is_locked() and self.poetry.locker.is_fresh() + else 1 + ) + self._installer.lock(update=not self.option("no-update")) return self._installer.run()
{"golden_diff": "diff --git a/poetry/console/commands/lock.py b/poetry/console/commands/lock.py\n--- a/poetry/console/commands/lock.py\n+++ b/poetry/console/commands/lock.py\n@@ -12,6 +12,12 @@\n option(\n \"no-update\", None, \"Do not update locked versions, only refresh lock file.\"\n ),\n+ option(\n+ \"check\",\n+ None,\n+ \"Check that the <comment>poetry.lock</> file corresponds to the current version \"\n+ \"of <comment>pyproject.toml</>.\",\n+ ),\n ]\n \n help = \"\"\"\n@@ -29,6 +35,13 @@\n self.poetry.config.get(\"experimental.new-installer\", False)\n )\n \n+ if self.option(\"check\"):\n+ return (\n+ 0\n+ if self.poetry.locker.is_locked() and self.poetry.locker.is_fresh()\n+ else 1\n+ )\n+\n self._installer.lock(update=not self.option(\"no-update\"))\n \n return self._installer.run()\n", "issue": "command to check lock file\n<!-- Checked checkbox should look like this: [x] -->\r\n- [X] I have searched the [issues](https://github.com/sdispater/poetry/issues) of this repo and believe that this is not a duplicate.\r\n\r\n## Issue\r\n\r\nWould it be possible to add a `poetry check` command (oh, I see you'e used that already, maybe the command could be extended or have a `poetry lock --check`?) that will return zero if the current poetry.lock is both correct w.r.t. the pyproject.toml (ie: packages match) *and* that it matches the output of `poetry lock`.\r\n\n", "before_files": [{"content": "from cleo.helpers import option\n\nfrom .installer_command import InstallerCommand\n\n\nclass LockCommand(InstallerCommand):\n\n name = \"lock\"\n description = \"Locks the project dependencies.\"\n\n options = [\n option(\n \"no-update\", None, \"Do not update locked versions, only refresh lock file.\"\n ),\n ]\n\n help = \"\"\"\nThe <info>lock</info> command reads the <comment>pyproject.toml</> file from the\ncurrent directory, processes it, and locks the dependencies in the <comment>poetry.lock</>\nfile.\n\n<info>poetry lock</info>\n\"\"\"\n\n loggers = [\"poetry.repositories.pypi_repository\"]\n\n def handle(self) -> int:\n self._installer.use_executor(\n self.poetry.config.get(\"experimental.new-installer\", False)\n )\n\n self._installer.lock(update=not self.option(\"no-update\"))\n\n return self._installer.run()\n", "path": "poetry/console/commands/lock.py"}]}
944
244
gh_patches_debug_39342
rasdani/github-patches
git_diff
conan-io__conan-5564
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> VS generator not exporting for <Lib> tag in vcxproj Need to add Conan-handled lib to be referenced in .vcxproj in <Lib> tags. Conan VS-generator is exporting only in <Link> tag. </issue> <code> [start of conans/client/generators/visualstudio.py] 1 import os 2 import re 3 4 from conans.model import Generator 5 from conans.paths import BUILD_INFO_VISUAL_STUDIO 6 from conans.client.tools.files import VALID_LIB_EXTENSIONS 7 8 9 class VisualStudioGenerator(Generator): 10 11 template = '''<?xml version="1.0" encoding="utf-8"?> 12 <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> 13 <ImportGroup Label="PropertySheets" /> 14 <PropertyGroup Label="UserMacros" /> 15 <PropertyGroup Label="Conan-RootDirs">{item_properties} 16 </PropertyGroup> 17 {properties} 18 <ItemGroup /> 19 </Project>''' 20 21 properties_template = ''' <PropertyGroup Label="ConanVariables"{condition}> 22 <ConanBinaryDirectories>{bin_dirs};%(ConanBinaryDirectories)</ConanBinaryDirectories> 23 <ConanResourceDirectories>{res_dirs};%(ConanResourceDirectories)$(</ConanResourceDirectories> 24 </PropertyGroup> 25 <PropertyGroup{condition}> 26 <LocalDebuggerEnvironment>PATH=%PATH%;{bin_dirs}</LocalDebuggerEnvironment> 27 <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor> 28 </PropertyGroup> 29 <ItemDefinitionGroup{condition}> 30 <ClCompile> 31 <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> 32 <PreprocessorDefinitions>{definitions}%(PreprocessorDefinitions)</PreprocessorDefinitions> 33 <AdditionalOptions>{compiler_flags} %(AdditionalOptions)</AdditionalOptions> 34 </ClCompile> 35 <Link> 36 <AdditionalLibraryDirectories>{lib_dirs}%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories> 37 <AdditionalDependencies>{libs}%(AdditionalDependencies)</AdditionalDependencies> 38 <AdditionalOptions>{linker_flags} %(AdditionalOptions)</AdditionalOptions> 39 </Link> 40 <Midl> 41 <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> 42 </Midl> 43 <ResourceCompile> 44 <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> 45 <PreprocessorDefinitions>{definitions}%(PreprocessorDefinitions)</PreprocessorDefinitions> 46 <AdditionalOptions>{compiler_flags} %(AdditionalOptions)</AdditionalOptions> 47 </ResourceCompile> 48 </ItemDefinitionGroup>''' 49 50 item_template = ''' 51 <Conan-{name}-Root>{root_dir}</Conan-{name}-Root>''' 52 53 def _format_items(self): 54 sections = [] 55 for dep_name, cpp_info in self._deps_build_info.dependencies: 56 fields = { 57 'root_dir': cpp_info.rootpath, 58 'name': dep_name.replace(".", "-") 59 } 60 section = self.item_template.format(**fields) 61 sections.append(section) 62 return "".join(sections) 63 64 @property 65 def filename(self): 66 return BUILD_INFO_VISUAL_STUDIO 67 68 def _format_properties(self, build_info, condition): 69 def has_valid_ext(lib): 70 ext = os.path.splitext(lib)[1] 71 return ext in VALID_LIB_EXTENSIONS 72 73 fields = { 74 'condition': condition, 75 'bin_dirs': "".join("%s;" % p for p in build_info.bin_paths), 76 'res_dirs': "".join("%s;" % p for p in build_info.res_paths), 77 'include_dirs': "".join("%s;" % p for p in build_info.include_paths), 78 'lib_dirs': "".join("%s;" % p for p in build_info.lib_paths), 79 'libs': "".join(['%s.lib;' % lib if not has_valid_ext(lib) 80 else '%s;' % lib for lib in build_info.libs]), 81 'definitions': "".join("%s;" % d for d in build_info.defines), 82 'compiler_flags': " ".join(build_info.cxxflags + build_info.cflags), 83 'linker_flags': " ".join(build_info.sharedlinkflags), 84 'exe_flags': " ".join(build_info.exelinkflags) 85 } 86 formatted_template = self.properties_template.format(**fields) 87 return formatted_template 88 89 @property 90 def content(self): 91 per_item_props = self._format_items() 92 93 properties = [self._format_properties(self._deps_build_info, condition='')] 94 for config, cpp_info in self._deps_build_info.configs.items(): 95 condition = " Condition=\"'$(Configuration)' == '%s'\"" % config 96 properties.append(self._format_properties(cpp_info, condition=condition)) 97 98 fields = { 99 'item_properties': per_item_props, 100 'properties': '\n'.join(properties) 101 } 102 formatted_template = self.template.format(**fields) 103 104 userprofile = os.getenv("USERPROFILE") 105 if userprofile: 106 userprofile = userprofile.replace("\\", "\\\\") 107 formatted_template = re.sub(userprofile, "$(USERPROFILE)", formatted_template, 108 flags=re.I) 109 return formatted_template 110 [end of conans/client/generators/visualstudio.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/conans/client/generators/visualstudio.py b/conans/client/generators/visualstudio.py --- a/conans/client/generators/visualstudio.py +++ b/conans/client/generators/visualstudio.py @@ -19,8 +19,12 @@ </Project>''' properties_template = ''' <PropertyGroup Label="ConanVariables"{condition}> - <ConanBinaryDirectories>{bin_dirs};%(ConanBinaryDirectories)</ConanBinaryDirectories> - <ConanResourceDirectories>{res_dirs};%(ConanResourceDirectories)$(</ConanResourceDirectories> + <ConanCompilerFlags>{compiler_flags}</ConanCompilerFlags> + <ConanPreprocessorDefinitions>{definitions}</ConanPreprocessorDefinitions> + <ConanIncludeDirectories>{include_dirs}</ConanIncludeDirectories> + <ConanResourceDirectories>{res_dirs}</ConanResourceDirectories> + <ConanLibraryDirectories>{lib_dirs}</ConanLibraryDirectories> + <ConanBinaryDirectories>{bin_dirs}</ConanBinaryDirectories> </PropertyGroup> <PropertyGroup{condition}> <LocalDebuggerEnvironment>PATH=%PATH%;{bin_dirs}</LocalDebuggerEnvironment> @@ -28,22 +32,26 @@ </PropertyGroup> <ItemDefinitionGroup{condition}> <ClCompile> - <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> - <PreprocessorDefinitions>{definitions}%(PreprocessorDefinitions)</PreprocessorDefinitions> - <AdditionalOptions>{compiler_flags} %(AdditionalOptions)</AdditionalOptions> + <AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> + <PreprocessorDefinitions>$(ConanPreprocessorDefinitions)%(PreprocessorDefinitions)</PreprocessorDefinitions> + <AdditionalOptions>$(ConanCompilerFlags) %(AdditionalOptions)</AdditionalOptions> </ClCompile> <Link> - <AdditionalLibraryDirectories>{lib_dirs}%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories> + <AdditionalLibraryDirectories>$(ConanLibraryDirectories)%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories> <AdditionalDependencies>{libs}%(AdditionalDependencies)</AdditionalDependencies> <AdditionalOptions>{linker_flags} %(AdditionalOptions)</AdditionalOptions> </Link> + <Lib> + <AdditionalLibraryDirectories>$(ConanLibraryDirectories)%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories> + <AdditionalDependencies>{libs}%(AdditionalDependencies)</AdditionalDependencies> + </Lib> <Midl> - <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> + <AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> </Midl> <ResourceCompile> - <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> - <PreprocessorDefinitions>{definitions}%(PreprocessorDefinitions)</PreprocessorDefinitions> - <AdditionalOptions>{compiler_flags} %(AdditionalOptions)</AdditionalOptions> + <AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> + <PreprocessorDefinitions>$(ConanPreprocessorDefinitions)%(PreprocessorDefinitions)</PreprocessorDefinitions> + <AdditionalOptions>$(ConanCompilerFlags) %(AdditionalOptions)</AdditionalOptions> </ResourceCompile> </ItemDefinitionGroup>'''
{"golden_diff": "diff --git a/conans/client/generators/visualstudio.py b/conans/client/generators/visualstudio.py\n--- a/conans/client/generators/visualstudio.py\n+++ b/conans/client/generators/visualstudio.py\n@@ -19,8 +19,12 @@\n </Project>'''\n \n properties_template = ''' <PropertyGroup Label=\"ConanVariables\"{condition}>\n- <ConanBinaryDirectories>{bin_dirs};%(ConanBinaryDirectories)</ConanBinaryDirectories>\n- <ConanResourceDirectories>{res_dirs};%(ConanResourceDirectories)$(</ConanResourceDirectories>\n+ <ConanCompilerFlags>{compiler_flags}</ConanCompilerFlags>\n+ <ConanPreprocessorDefinitions>{definitions}</ConanPreprocessorDefinitions>\n+ <ConanIncludeDirectories>{include_dirs}</ConanIncludeDirectories>\n+ <ConanResourceDirectories>{res_dirs}</ConanResourceDirectories>\n+ <ConanLibraryDirectories>{lib_dirs}</ConanLibraryDirectories>\n+ <ConanBinaryDirectories>{bin_dirs}</ConanBinaryDirectories>\n </PropertyGroup>\n <PropertyGroup{condition}>\n <LocalDebuggerEnvironment>PATH=%PATH%;{bin_dirs}</LocalDebuggerEnvironment>\n@@ -28,22 +32,26 @@\n </PropertyGroup>\n <ItemDefinitionGroup{condition}>\n <ClCompile>\n- <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n- <PreprocessorDefinitions>{definitions}%(PreprocessorDefinitions)</PreprocessorDefinitions>\n- <AdditionalOptions>{compiler_flags} %(AdditionalOptions)</AdditionalOptions>\n+ <AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n+ <PreprocessorDefinitions>$(ConanPreprocessorDefinitions)%(PreprocessorDefinitions)</PreprocessorDefinitions>\n+ <AdditionalOptions>$(ConanCompilerFlags) %(AdditionalOptions)</AdditionalOptions>\n </ClCompile>\n <Link>\n- <AdditionalLibraryDirectories>{lib_dirs}%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>\n+ <AdditionalLibraryDirectories>$(ConanLibraryDirectories)%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>\n <AdditionalDependencies>{libs}%(AdditionalDependencies)</AdditionalDependencies>\n <AdditionalOptions>{linker_flags} %(AdditionalOptions)</AdditionalOptions>\n </Link>\n+ <Lib>\n+ <AdditionalLibraryDirectories>$(ConanLibraryDirectories)%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>\n+ <AdditionalDependencies>{libs}%(AdditionalDependencies)</AdditionalDependencies>\n+ </Lib>\n <Midl>\n- <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n+ <AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n </Midl>\n <ResourceCompile>\n- <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n- <PreprocessorDefinitions>{definitions}%(PreprocessorDefinitions)</PreprocessorDefinitions>\n- <AdditionalOptions>{compiler_flags} %(AdditionalOptions)</AdditionalOptions>\n+ <AdditionalIncludeDirectories>$(ConanIncludeDirectories)%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n+ <PreprocessorDefinitions>$(ConanPreprocessorDefinitions)%(PreprocessorDefinitions)</PreprocessorDefinitions>\n+ <AdditionalOptions>$(ConanCompilerFlags) %(AdditionalOptions)</AdditionalOptions>\n </ResourceCompile>\n </ItemDefinitionGroup>'''\n", "issue": "VS generator not exporting for <Lib> tag in vcxproj\nNeed to add Conan-handled lib to be referenced in .vcxproj in <Lib> tags. Conan VS-generator is exporting only in <Link> tag. \r\n\n", "before_files": [{"content": "import os\nimport re\n\nfrom conans.model import Generator\nfrom conans.paths import BUILD_INFO_VISUAL_STUDIO\nfrom conans.client.tools.files import VALID_LIB_EXTENSIONS\n\n\nclass VisualStudioGenerator(Generator):\n\n template = '''<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<Project ToolsVersion=\"4.0\" xmlns=\"http://schemas.microsoft.com/developer/msbuild/2003\">\n <ImportGroup Label=\"PropertySheets\" />\n <PropertyGroup Label=\"UserMacros\" />\n <PropertyGroup Label=\"Conan-RootDirs\">{item_properties}\n </PropertyGroup>\n {properties}\n <ItemGroup />\n</Project>'''\n\n properties_template = ''' <PropertyGroup Label=\"ConanVariables\"{condition}>\n <ConanBinaryDirectories>{bin_dirs};%(ConanBinaryDirectories)</ConanBinaryDirectories>\n <ConanResourceDirectories>{res_dirs};%(ConanResourceDirectories)$(</ConanResourceDirectories>\n </PropertyGroup>\n <PropertyGroup{condition}>\n <LocalDebuggerEnvironment>PATH=%PATH%;{bin_dirs}</LocalDebuggerEnvironment>\n <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>\n </PropertyGroup>\n <ItemDefinitionGroup{condition}>\n <ClCompile>\n <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n <PreprocessorDefinitions>{definitions}%(PreprocessorDefinitions)</PreprocessorDefinitions>\n <AdditionalOptions>{compiler_flags} %(AdditionalOptions)</AdditionalOptions>\n </ClCompile>\n <Link>\n <AdditionalLibraryDirectories>{lib_dirs}%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>\n <AdditionalDependencies>{libs}%(AdditionalDependencies)</AdditionalDependencies>\n <AdditionalOptions>{linker_flags} %(AdditionalOptions)</AdditionalOptions>\n </Link>\n <Midl>\n <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n </Midl>\n <ResourceCompile>\n <AdditionalIncludeDirectories>{include_dirs}%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n <PreprocessorDefinitions>{definitions}%(PreprocessorDefinitions)</PreprocessorDefinitions>\n <AdditionalOptions>{compiler_flags} %(AdditionalOptions)</AdditionalOptions>\n </ResourceCompile>\n </ItemDefinitionGroup>'''\n\n item_template = '''\n <Conan-{name}-Root>{root_dir}</Conan-{name}-Root>'''\n\n def _format_items(self):\n sections = []\n for dep_name, cpp_info in self._deps_build_info.dependencies:\n fields = {\n 'root_dir': cpp_info.rootpath,\n 'name': dep_name.replace(\".\", \"-\")\n }\n section = self.item_template.format(**fields)\n sections.append(section)\n return \"\".join(sections)\n\n @property\n def filename(self):\n return BUILD_INFO_VISUAL_STUDIO\n\n def _format_properties(self, build_info, condition):\n def has_valid_ext(lib):\n ext = os.path.splitext(lib)[1]\n return ext in VALID_LIB_EXTENSIONS\n\n fields = {\n 'condition': condition,\n 'bin_dirs': \"\".join(\"%s;\" % p for p in build_info.bin_paths),\n 'res_dirs': \"\".join(\"%s;\" % p for p in build_info.res_paths),\n 'include_dirs': \"\".join(\"%s;\" % p for p in build_info.include_paths),\n 'lib_dirs': \"\".join(\"%s;\" % p for p in build_info.lib_paths),\n 'libs': \"\".join(['%s.lib;' % lib if not has_valid_ext(lib)\n else '%s;' % lib for lib in build_info.libs]),\n 'definitions': \"\".join(\"%s;\" % d for d in build_info.defines),\n 'compiler_flags': \" \".join(build_info.cxxflags + build_info.cflags),\n 'linker_flags': \" \".join(build_info.sharedlinkflags),\n 'exe_flags': \" \".join(build_info.exelinkflags)\n }\n formatted_template = self.properties_template.format(**fields)\n return formatted_template\n\n @property\n def content(self):\n per_item_props = self._format_items()\n\n properties = [self._format_properties(self._deps_build_info, condition='')]\n for config, cpp_info in self._deps_build_info.configs.items():\n condition = \" Condition=\\\"'$(Configuration)' == '%s'\\\"\" % config\n properties.append(self._format_properties(cpp_info, condition=condition))\n\n fields = {\n 'item_properties': per_item_props,\n 'properties': '\\n'.join(properties)\n }\n formatted_template = self.template.format(**fields)\n\n userprofile = os.getenv(\"USERPROFILE\")\n if userprofile:\n userprofile = userprofile.replace(\"\\\\\", \"\\\\\\\\\")\n formatted_template = re.sub(userprofile, \"$(USERPROFILE)\", formatted_template,\n flags=re.I)\n return formatted_template\n", "path": "conans/client/generators/visualstudio.py"}]}
1,840
726
gh_patches_debug_9185
rasdani/github-patches
git_diff
fidals__shopelectro-729
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> 502 error on category pages We can reproduce it with code below. Seems it's connected with https://github.com/fidals/shopelectro/issues/648, but it's not checked yet ``` import requests BASE = 'https://www.shopelectro.ru' broken_urls = [] for c in CategoryPage.objects.all(): response = requests.get(BASE + c.url) if response.status_code != 200: broken_urls += (response.status_code, response.url) print('\n'.join((str(url) for url in broken_urls))) --- # output: 502 https://www.shopelectro.ru/catalog/categories/batarei-saliutov-moshchnye/ 502 https://www.shopelectro.ru/catalog/categories/khlopushki-pnevmaticheskie/ 502 https://www.shopelectro.ru/catalog/categories/izmeritelnye-pribory-211/ 502 https://www.shopelectro.ru/catalog/categories/dlia-tiporazmera-aaaaa/ 502 https://www.shopelectro.ru/catalog/categories/tiporazmer-aa/ 502 https://www.shopelectro.ru/catalog/categories/tiporazmer-6f22/ 502 https://www.shopelectro.ru/catalog/categories/invertory/ 502 https://www.shopelectro.ru/catalog/categories/palchikovye-aa-r6-lr6-fr6-316/ 502 https://www.shopelectro.ru/catalog/categories/mizinchikovye-aaa-r03-lr03-fr03-286/ ``` </issue> <code> [start of shopelectro/celery.py] 1 from __future__ import absolute_import, unicode_literals 2 from datetime import timedelta 3 import os 4 5 from celery import Celery 6 from kombu import Exchange, Queue 7 8 # set the default Django settings module for the 'celery' program. 9 os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'shopelectro.settings.local') 10 11 app = Celery('shopelectro') 12 13 # Exchanges 14 default_exchange = Exchange('default', type='direct') 15 utils_exchange = Exchange('utils', type='direct') 16 17 # http://docs.celeryproject.org/en/latest/userguide/tasks.html 18 task_queues = ( 19 Queue( 20 name='default', 21 exchange=default_exchange, 22 routing_key='default', 23 ), 24 Queue( 25 name='mail', 26 exchange=utils_exchange, 27 routing_key='utils.mail', 28 ), 29 Queue( 30 name='command', 31 exchange=utils_exchange, 32 routing_key='utils.command', 33 ) 34 ) 35 36 # http://docs.celeryproject.org/en/latest/userguide/periodic-tasks.html 37 beat_schedule = { 38 'update-catalog': { 39 'task': 'shopelectro.tasks.update_catalog', 40 'schedule': timedelta(hours=2).total_seconds(), 41 }, 42 'check-purchase': { 43 'task': 'shopelectro.tasks.check_purchase', 44 'schedule': timedelta(days=1).total_seconds(), 45 }, 46 } 47 48 # http://docs.celeryproject.org/en/master/userguide/routing.html 49 task_routes = { 50 'shopelectro.tasks.update_catalog': { 51 'queue': 'command', 52 'routing_key': 'utils.command', 53 'priority': 30, 54 }, 55 'shopelectro.tasks.check_purchase': { 56 'queue': 'command', 57 'routing_key': 'utils.command', 58 'priority': 20, 59 }, 60 'ecommerce.tasks.send_mail': { 61 'queue': 'mail', 62 'routing_key': 'utils.mail', 63 'priority': 50, 64 }, 65 } 66 67 # Using a string here means the worker don't have to serialize 68 # the configuration object to child processes. 69 # - namespace='CELERY' means all celery-related configuration keys 70 # should have a `CELERY_` prefix. 71 app.config_from_object('django.conf:settings', namespace='CELERY') 72 73 # http://docs.celeryproject.org/en/latest/userguide/configuration.html 74 75 BROCKER_URL = ( 76 f'amqp://{os.environ["RABBITMQ_DEFAULT_USER"]}:{os.environ["RABBITMQ_DEFAULT_PASS"]}' 77 f'@{os.environ["RABBITMQ_URL"]}:{os.environ["RABBITMQ_PORT"]}/' 78 ) 79 app.conf.update( 80 broker_url=BROCKER_URL, 81 broker_heartbeat=30, 82 task_acks_late=True, 83 task_default_queue='default', 84 task_default_exchange='default', 85 task_default_routing_key='default', 86 task_ignore_result=True, 87 worker_pool_restarts=1000, 88 task_routes=task_routes, 89 task_queues=task_queues, 90 beat_schedule=beat_schedule, 91 ) 92 93 # Load task modules from all registered Django app configs. 94 app.autodiscover_tasks() 95 [end of shopelectro/celery.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/shopelectro/celery.py b/shopelectro/celery.py --- a/shopelectro/celery.py +++ b/shopelectro/celery.py @@ -83,11 +83,11 @@ task_default_queue='default', task_default_exchange='default', task_default_routing_key='default', - task_ignore_result=True, - worker_pool_restarts=1000, + worker_pool_restarts=True, task_routes=task_routes, task_queues=task_queues, beat_schedule=beat_schedule, + worker_max_memory_per_child=200000, # after 250MB will restart ) # Load task modules from all registered Django app configs.
{"golden_diff": "diff --git a/shopelectro/celery.py b/shopelectro/celery.py\n--- a/shopelectro/celery.py\n+++ b/shopelectro/celery.py\n@@ -83,11 +83,11 @@\n task_default_queue='default',\n task_default_exchange='default',\n task_default_routing_key='default',\n- task_ignore_result=True,\n- worker_pool_restarts=1000,\n+ worker_pool_restarts=True,\n task_routes=task_routes,\n task_queues=task_queues,\n beat_schedule=beat_schedule,\n+ worker_max_memory_per_child=200000, # after 250MB will restart\n )\n \n # Load task modules from all registered Django app configs.\n", "issue": "502 error on category pages\nWe can reproduce it with code below.\r\n\r\nSeems it's connected with https://github.com/fidals/shopelectro/issues/648, but it's not checked yet\r\n\r\n```\r\nimport requests\r\n\r\nBASE = 'https://www.shopelectro.ru'\r\n\r\nbroken_urls = []\r\nfor c in CategoryPage.objects.all():\r\n\tresponse = requests.get(BASE + c.url)\r\n\tif response.status_code != 200:\r\n\t\tbroken_urls += (response.status_code, response.url)\r\n\r\nprint('\\n'.join((str(url) for url in broken_urls)))\r\n\r\n---\r\n\r\n# output:\r\n502\r\nhttps://www.shopelectro.ru/catalog/categories/batarei-saliutov-moshchnye/\r\n502\r\nhttps://www.shopelectro.ru/catalog/categories/khlopushki-pnevmaticheskie/\r\n502\r\nhttps://www.shopelectro.ru/catalog/categories/izmeritelnye-pribory-211/\r\n502\r\nhttps://www.shopelectro.ru/catalog/categories/dlia-tiporazmera-aaaaa/\r\n502\r\nhttps://www.shopelectro.ru/catalog/categories/tiporazmer-aa/\r\n502\r\nhttps://www.shopelectro.ru/catalog/categories/tiporazmer-6f22/\r\n502\r\nhttps://www.shopelectro.ru/catalog/categories/invertory/\r\n502\r\nhttps://www.shopelectro.ru/catalog/categories/palchikovye-aa-r6-lr6-fr6-316/\r\n502\r\nhttps://www.shopelectro.ru/catalog/categories/mizinchikovye-aaa-r03-lr03-fr03-286/\r\n```\n", "before_files": [{"content": "from __future__ import absolute_import, unicode_literals\nfrom datetime import timedelta\nimport os\n\nfrom celery import Celery\nfrom kombu import Exchange, Queue\n\n# set the default Django settings module for the 'celery' program.\nos.environ.setdefault('DJANGO_SETTINGS_MODULE', 'shopelectro.settings.local')\n\napp = Celery('shopelectro')\n\n# Exchanges\ndefault_exchange = Exchange('default', type='direct')\nutils_exchange = Exchange('utils', type='direct')\n\n# http://docs.celeryproject.org/en/latest/userguide/tasks.html\ntask_queues = (\n Queue(\n name='default',\n exchange=default_exchange,\n routing_key='default',\n ),\n Queue(\n name='mail',\n exchange=utils_exchange,\n routing_key='utils.mail',\n ),\n Queue(\n name='command',\n exchange=utils_exchange,\n routing_key='utils.command',\n )\n)\n\n# http://docs.celeryproject.org/en/latest/userguide/periodic-tasks.html\nbeat_schedule = {\n 'update-catalog': {\n 'task': 'shopelectro.tasks.update_catalog',\n 'schedule': timedelta(hours=2).total_seconds(),\n },\n 'check-purchase': {\n 'task': 'shopelectro.tasks.check_purchase',\n 'schedule': timedelta(days=1).total_seconds(),\n },\n}\n\n# http://docs.celeryproject.org/en/master/userguide/routing.html\ntask_routes = {\n 'shopelectro.tasks.update_catalog': {\n 'queue': 'command',\n 'routing_key': 'utils.command',\n 'priority': 30,\n },\n 'shopelectro.tasks.check_purchase': {\n 'queue': 'command',\n 'routing_key': 'utils.command',\n 'priority': 20,\n },\n 'ecommerce.tasks.send_mail': {\n 'queue': 'mail',\n 'routing_key': 'utils.mail',\n 'priority': 50,\n },\n}\n\n# Using a string here means the worker don't have to serialize\n# the configuration object to child processes.\n# - namespace='CELERY' means all celery-related configuration keys\n# should have a `CELERY_` prefix.\napp.config_from_object('django.conf:settings', namespace='CELERY')\n\n# http://docs.celeryproject.org/en/latest/userguide/configuration.html\n\nBROCKER_URL = (\n f'amqp://{os.environ[\"RABBITMQ_DEFAULT_USER\"]}:{os.environ[\"RABBITMQ_DEFAULT_PASS\"]}'\n f'@{os.environ[\"RABBITMQ_URL\"]}:{os.environ[\"RABBITMQ_PORT\"]}/'\n)\napp.conf.update(\n broker_url=BROCKER_URL,\n broker_heartbeat=30,\n task_acks_late=True,\n task_default_queue='default',\n task_default_exchange='default',\n task_default_routing_key='default',\n task_ignore_result=True,\n worker_pool_restarts=1000,\n task_routes=task_routes,\n task_queues=task_queues,\n beat_schedule=beat_schedule,\n)\n\n# Load task modules from all registered Django app configs.\napp.autodiscover_tasks()\n", "path": "shopelectro/celery.py"}]}
1,764
168
gh_patches_debug_13030
rasdani/github-patches
git_diff
microsoft__AzureTRE-395
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Application Insights configuration for processor function broken The `createNewWorkspace` function monitoring blade indicates that Application Insights integration is not enabled. ![image](https://user-images.githubusercontent.com/166592/123793166-b7edfa80-d8e1-11eb-8906-b4175f4348e0.png) At a first glance, the config seems to be there in [processor_function/function.tf](https://github.com/microsoft/AzureTRE/blob/develop/templates/core/terraform/processor_function/function.tf#L24) ```plaintext APP_INSIGHTS_INSTRUMENTATION_KEY = var.app_insights_instrumentation_key ``` however, the setting key name is incorrect. Manually enabling the integration in the portal adds additional config keys: `APPLICATIONINSIGHTS_CONNECTION_STRING` `APPINSIGHTS_INSTRUMENTATIONKEY` Need to add above keys with values to function app config. [Reference](https://docs.microsoft.com/en-us/azure/azure-monitor/app/resource-manager-function-app) </issue> <code> [start of processor_function/shared/logging.py] 1 import logging 2 import os 3 4 from opencensus.ext.azure.log_exporter import AzureLogHandler 5 from opencensus.trace import config_integration 6 from opencensus.trace.samplers import AlwaysOnSampler 7 from opencensus.trace.tracer import Tracer 8 9 10 def initialize_logging(logging_level: int, correlation_id: str) -> logging.LoggerAdapter: 11 """ 12 Adds the Application Insights handler for the root logger and sets the given logging level. 13 Creates and returns a logger adapter that integrates the correlation ID, if given, to the log messages. 14 15 :param logging_level: The logging level to set e.g., logging.WARNING. 16 :param correlation_id: Optional. The correlation ID that is passed on to the operation_Id in App Insights. 17 :returns: A newly created logger adapter. 18 """ 19 logger = logging.getLogger() 20 logger.addHandler(logging.StreamHandler()) # For logging into console 21 app_insights_instrumentation_key = os.getenv("APP_INSIGHTS_INSTRUMENTATION_KEY") 22 23 try: 24 logger.addHandler(AzureLogHandler(connection_string=f"InstrumentationKey={app_insights_instrumentation_key}")) 25 except ValueError as e: 26 logger.error(f"Failed to set Application Insights logger handler: {e}") 27 28 config_integration.trace_integrations(['logging']) 29 logging.basicConfig(level=logging_level, format='%(asctime)s traceId=%(traceId)s spanId=%(spanId)s %(message)s') 30 Tracer(sampler=AlwaysOnSampler()) 31 logger.setLevel(logging_level) 32 33 extra = None 34 35 if correlation_id: 36 extra = {'traceId': correlation_id} 37 38 adapter = logging.LoggerAdapter(logger, extra) 39 adapter.debug(f"Logger adapter initialized with extra: {extra}") 40 41 return adapter 42 [end of processor_function/shared/logging.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/processor_function/shared/logging.py b/processor_function/shared/logging.py --- a/processor_function/shared/logging.py +++ b/processor_function/shared/logging.py @@ -18,10 +18,10 @@ """ logger = logging.getLogger() logger.addHandler(logging.StreamHandler()) # For logging into console - app_insights_instrumentation_key = os.getenv("APP_INSIGHTS_INSTRUMENTATION_KEY") + app_insights_connection_string = os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING") try: - logger.addHandler(AzureLogHandler(connection_string=f"InstrumentationKey={app_insights_instrumentation_key}")) + logger.addHandler(AzureLogHandler(connection_string=app_insights_connection_string)) except ValueError as e: logger.error(f"Failed to set Application Insights logger handler: {e}")
{"golden_diff": "diff --git a/processor_function/shared/logging.py b/processor_function/shared/logging.py\n--- a/processor_function/shared/logging.py\n+++ b/processor_function/shared/logging.py\n@@ -18,10 +18,10 @@\n \"\"\"\n logger = logging.getLogger()\n logger.addHandler(logging.StreamHandler()) # For logging into console\n- app_insights_instrumentation_key = os.getenv(\"APP_INSIGHTS_INSTRUMENTATION_KEY\")\n+ app_insights_connection_string = os.getenv(\"APPLICATIONINSIGHTS_CONNECTION_STRING\")\n \n try:\n- logger.addHandler(AzureLogHandler(connection_string=f\"InstrumentationKey={app_insights_instrumentation_key}\"))\n+ logger.addHandler(AzureLogHandler(connection_string=app_insights_connection_string))\n except ValueError as e:\n logger.error(f\"Failed to set Application Insights logger handler: {e}\")\n", "issue": "Application Insights configuration for processor function broken\nThe `createNewWorkspace` function monitoring blade indicates that Application Insights integration is not enabled.\r\n![image](https://user-images.githubusercontent.com/166592/123793166-b7edfa80-d8e1-11eb-8906-b4175f4348e0.png)\r\n\r\nAt a first glance, the config seems to be there in [processor_function/function.tf](https://github.com/microsoft/AzureTRE/blob/develop/templates/core/terraform/processor_function/function.tf#L24)\r\n```plaintext\r\n APP_INSIGHTS_INSTRUMENTATION_KEY = var.app_insights_instrumentation_key\r\n```\r\nhowever, the setting key name is incorrect.\r\n\r\nManually enabling the integration in the portal adds additional config keys:\r\n\r\n`APPLICATIONINSIGHTS_CONNECTION_STRING`\r\n`APPINSIGHTS_INSTRUMENTATIONKEY`\r\n\r\nNeed to add above keys with values to function app config.\r\n\r\n[Reference](https://docs.microsoft.com/en-us/azure/azure-monitor/app/resource-manager-function-app)\n", "before_files": [{"content": "import logging\nimport os\n\nfrom opencensus.ext.azure.log_exporter import AzureLogHandler\nfrom opencensus.trace import config_integration\nfrom opencensus.trace.samplers import AlwaysOnSampler\nfrom opencensus.trace.tracer import Tracer\n\n\ndef initialize_logging(logging_level: int, correlation_id: str) -> logging.LoggerAdapter:\n \"\"\"\n Adds the Application Insights handler for the root logger and sets the given logging level.\n Creates and returns a logger adapter that integrates the correlation ID, if given, to the log messages.\n\n :param logging_level: The logging level to set e.g., logging.WARNING.\n :param correlation_id: Optional. The correlation ID that is passed on to the operation_Id in App Insights.\n :returns: A newly created logger adapter.\n \"\"\"\n logger = logging.getLogger()\n logger.addHandler(logging.StreamHandler()) # For logging into console\n app_insights_instrumentation_key = os.getenv(\"APP_INSIGHTS_INSTRUMENTATION_KEY\")\n\n try:\n logger.addHandler(AzureLogHandler(connection_string=f\"InstrumentationKey={app_insights_instrumentation_key}\"))\n except ValueError as e:\n logger.error(f\"Failed to set Application Insights logger handler: {e}\")\n\n config_integration.trace_integrations(['logging'])\n logging.basicConfig(level=logging_level, format='%(asctime)s traceId=%(traceId)s spanId=%(spanId)s %(message)s')\n Tracer(sampler=AlwaysOnSampler())\n logger.setLevel(logging_level)\n\n extra = None\n\n if correlation_id:\n extra = {'traceId': correlation_id}\n\n adapter = logging.LoggerAdapter(logger, extra)\n adapter.debug(f\"Logger adapter initialized with extra: {extra}\")\n\n return adapter\n", "path": "processor_function/shared/logging.py"}]}
1,209
182
gh_patches_debug_22772
rasdani/github-patches
git_diff
nilearn__nilearn-559
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Doc does not build Am I the only one? ``` Exception occurred: File "/home/aa013911/work/nilearn/doc/sphinxext/sphinxgallery/docs_resolv.py", line 348, in embed_code_links gallery_dir = os.path.join(app.builder.srcdir, gallery_conf['gallery_dir']) KeyError: 'gallery_dir' The full traceback has been saved in /tmp/sphinx-err-sePU7r.log, if you want to report the issue to the developers. Please also report this if it was a user error, so that a better error message can be provided next time. ``` </issue> <code> [start of doc/sphinxext/sphinxgallery/__init__.py] 1 """Sphinx Gallery 2 """ 3 import os 4 __version__ = '0.0.8' 5 6 def path_static(): 7 """Returns path to packaged static files""" 8 return os.path.abspath(os.path.dirname(__file__))+'/_static' 9 [end of doc/sphinxext/sphinxgallery/__init__.py] [start of doc/sphinxext/sphinxgallery/gen_gallery.py] 1 # -*- coding: utf-8 -*- 2 # Author: Óscar Nájera 3 # License: 3-clause BSD 4 5 import os 6 from sphinxgallery.gen_rst import generate_dir_rst 7 from sphinxgallery.docs_resolv import embed_code_links 8 9 10 def generate_gallery_rst(app): 11 """Starts the gallery configuration and recursively scans the examples 12 directory in order to populate the examples gallery 13 """ 14 try: 15 plot_gallery = eval(app.builder.config.plot_gallery) 16 except TypeError: 17 plot_gallery = bool(app.builder.config.plot_gallery) 18 19 if not plot_gallery: 20 return 21 22 gallery_conf.update(app.config.sphinxgallery_conf) 23 24 # this assures I can call the config in other places 25 app.config.sphinxgallery_conf = gallery_conf 26 27 examples_dir = os.path.join(app.builder.srcdir, gallery_conf['examples_dir']) 28 gallery_dir = os.path.join(app.builder.srcdir, gallery_conf['gallery_dir']) 29 mod_examples_dir = os.path.join(app.builder.srcdir, gallery_conf['mod_example_dir']) 30 31 for workdir in [examples_dir, gallery_dir, mod_examples_dir]: 32 if not os.path.exists(workdir): 33 os.makedirs(workdir) 34 35 # we create an index.rst with all examples 36 fhindex = open(os.path.join(gallery_dir, 'index.rst'), 'w') 37 fhindex.write(""" 38 39 .. _examples-index: 40 41 Gallery of Examples 42 =================== 43 44 """) 45 # Here we don't use an os.walk, but we recurse only twice: flat is 46 # better than nested. 47 seen_backrefs = set() 48 generate_dir_rst('.', fhindex, examples_dir, gallery_dir, gallery_conf, plot_gallery, seen_backrefs) 49 for directory in sorted(os.listdir(examples_dir)): 50 if os.path.isdir(os.path.join(examples_dir, directory)): 51 generate_dir_rst(directory, fhindex, examples_dir, gallery_dir, gallery_conf, plot_gallery, seen_backrefs) 52 fhindex.flush() 53 54 55 gallery_conf = { 56 'examples_dir' : '../examples', 57 'gallery_dir' : 'auto_examples', 58 'mod_example_dir': 'modules/generated', 59 'doc_module' : (), 60 'reference_url' : {}, 61 } 62 63 def setup(app): 64 app.add_config_value('plot_gallery', True, 'html') 65 app.add_config_value('sphinxgallery_conf', gallery_conf, 'html') 66 app.add_stylesheet('gallery.css') 67 68 app.connect('builder-inited', generate_gallery_rst) 69 70 app.connect('build-finished', embed_code_links) 71 72 # Sphinx hack: sphinx copies generated images to the build directory 73 # each time the docs are made. If the desired image name already 74 # exists, it appends a digit to prevent overwrites. The problem is, 75 # the directory is never cleared. This means that each time you build 76 # the docs, the number of images in the directory grows. 77 # 78 # This question has been asked on the sphinx development list, but there 79 # was no response: http://osdir.com/ml/sphinx-dev/2011-02/msg00123.html 80 # 81 # The following is a hack that prevents this behavior by clearing the 82 # image build directory each time the docs are built. If sphinx 83 # changes their layout between versions, this will not work (though 84 # it should probably not cause a crash). Tested successfully 85 # on Sphinx 1.0.7 86 build_image_dir = '_build/html/_images' 87 if os.path.exists(build_image_dir): 88 filelist = os.listdir(build_image_dir) 89 for filename in filelist: 90 if filename.endswith('png'): 91 os.remove(os.path.join(build_image_dir, filename)) 92 93 def setup_module(): 94 # HACK: Stop nosetests running setup() above 95 pass 96 [end of doc/sphinxext/sphinxgallery/gen_gallery.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/doc/sphinxext/sphinxgallery/__init__.py b/doc/sphinxext/sphinxgallery/__init__.py --- a/doc/sphinxext/sphinxgallery/__init__.py +++ b/doc/sphinxext/sphinxgallery/__init__.py @@ -1,7 +1,7 @@ """Sphinx Gallery """ import os -__version__ = '0.0.8' +__version__ = '0.0.9-dev' def path_static(): """Returns path to packaged static files""" diff --git a/doc/sphinxext/sphinxgallery/gen_gallery.py b/doc/sphinxext/sphinxgallery/gen_gallery.py --- a/doc/sphinxext/sphinxgallery/gen_gallery.py +++ b/doc/sphinxext/sphinxgallery/gen_gallery.py @@ -16,14 +16,14 @@ except TypeError: plot_gallery = bool(app.builder.config.plot_gallery) - if not plot_gallery: - return - gallery_conf.update(app.config.sphinxgallery_conf) # this assures I can call the config in other places app.config.sphinxgallery_conf = gallery_conf + if not plot_gallery: + return + examples_dir = os.path.join(app.builder.srcdir, gallery_conf['examples_dir']) gallery_dir = os.path.join(app.builder.srcdir, gallery_conf['gallery_dir']) mod_examples_dir = os.path.join(app.builder.srcdir, gallery_conf['mod_example_dir'])
{"golden_diff": "diff --git a/doc/sphinxext/sphinxgallery/__init__.py b/doc/sphinxext/sphinxgallery/__init__.py\n--- a/doc/sphinxext/sphinxgallery/__init__.py\n+++ b/doc/sphinxext/sphinxgallery/__init__.py\n@@ -1,7 +1,7 @@\n \"\"\"Sphinx Gallery\n \"\"\"\n import os\n-__version__ = '0.0.8'\n+__version__ = '0.0.9-dev'\n \n def path_static():\n \"\"\"Returns path to packaged static files\"\"\"\ndiff --git a/doc/sphinxext/sphinxgallery/gen_gallery.py b/doc/sphinxext/sphinxgallery/gen_gallery.py\n--- a/doc/sphinxext/sphinxgallery/gen_gallery.py\n+++ b/doc/sphinxext/sphinxgallery/gen_gallery.py\n@@ -16,14 +16,14 @@\n except TypeError:\n plot_gallery = bool(app.builder.config.plot_gallery)\n \n- if not plot_gallery:\n- return\n-\n gallery_conf.update(app.config.sphinxgallery_conf)\n \n # this assures I can call the config in other places\n app.config.sphinxgallery_conf = gallery_conf\n \n+ if not plot_gallery:\n+ return\n+\n examples_dir = os.path.join(app.builder.srcdir, gallery_conf['examples_dir'])\n gallery_dir = os.path.join(app.builder.srcdir, gallery_conf['gallery_dir'])\n mod_examples_dir = os.path.join(app.builder.srcdir, gallery_conf['mod_example_dir'])\n", "issue": "Doc does not build\nAm I the only one?\n\n```\nException occurred:\n File \"/home/aa013911/work/nilearn/doc/sphinxext/sphinxgallery/docs_resolv.py\", line 348, in embed_code_links\n gallery_dir = os.path.join(app.builder.srcdir, gallery_conf['gallery_dir'])\nKeyError: 'gallery_dir'\nThe full traceback has been saved in /tmp/sphinx-err-sePU7r.log, if you want to report the issue to the developers.\nPlease also report this if it was a user error, so that a better error message can be provided next time.\n```\n\n", "before_files": [{"content": "\"\"\"Sphinx Gallery\n\"\"\"\nimport os\n__version__ = '0.0.8'\n\ndef path_static():\n \"\"\"Returns path to packaged static files\"\"\"\n return os.path.abspath(os.path.dirname(__file__))+'/_static'\n", "path": "doc/sphinxext/sphinxgallery/__init__.py"}, {"content": "# -*- coding: utf-8 -*-\n# Author: \u00d3scar N\u00e1jera\n# License: 3-clause BSD\n\nimport os\nfrom sphinxgallery.gen_rst import generate_dir_rst\nfrom sphinxgallery.docs_resolv import embed_code_links\n\n\ndef generate_gallery_rst(app):\n \"\"\"Starts the gallery configuration and recursively scans the examples\n directory in order to populate the examples gallery\n \"\"\"\n try:\n plot_gallery = eval(app.builder.config.plot_gallery)\n except TypeError:\n plot_gallery = bool(app.builder.config.plot_gallery)\n\n if not plot_gallery:\n return\n\n gallery_conf.update(app.config.sphinxgallery_conf)\n\n # this assures I can call the config in other places\n app.config.sphinxgallery_conf = gallery_conf\n\n examples_dir = os.path.join(app.builder.srcdir, gallery_conf['examples_dir'])\n gallery_dir = os.path.join(app.builder.srcdir, gallery_conf['gallery_dir'])\n mod_examples_dir = os.path.join(app.builder.srcdir, gallery_conf['mod_example_dir'])\n\n for workdir in [examples_dir, gallery_dir, mod_examples_dir]:\n if not os.path.exists(workdir):\n os.makedirs(workdir)\n\n # we create an index.rst with all examples\n fhindex = open(os.path.join(gallery_dir, 'index.rst'), 'w')\n fhindex.write(\"\"\"\n\n.. _examples-index:\n\nGallery of Examples\n===================\n\n\"\"\")\n # Here we don't use an os.walk, but we recurse only twice: flat is\n # better than nested.\n seen_backrefs = set()\n generate_dir_rst('.', fhindex, examples_dir, gallery_dir, gallery_conf, plot_gallery, seen_backrefs)\n for directory in sorted(os.listdir(examples_dir)):\n if os.path.isdir(os.path.join(examples_dir, directory)):\n generate_dir_rst(directory, fhindex, examples_dir, gallery_dir, gallery_conf, plot_gallery, seen_backrefs)\n fhindex.flush()\n\n\ngallery_conf = {\n 'examples_dir' : '../examples',\n 'gallery_dir' : 'auto_examples',\n 'mod_example_dir': 'modules/generated',\n 'doc_module' : (),\n 'reference_url' : {},\n}\n\ndef setup(app):\n app.add_config_value('plot_gallery', True, 'html')\n app.add_config_value('sphinxgallery_conf', gallery_conf, 'html')\n app.add_stylesheet('gallery.css')\n\n app.connect('builder-inited', generate_gallery_rst)\n\n app.connect('build-finished', embed_code_links)\n\n # Sphinx hack: sphinx copies generated images to the build directory\n # each time the docs are made. If the desired image name already\n # exists, it appends a digit to prevent overwrites. The problem is,\n # the directory is never cleared. This means that each time you build\n # the docs, the number of images in the directory grows.\n #\n # This question has been asked on the sphinx development list, but there\n # was no response: http://osdir.com/ml/sphinx-dev/2011-02/msg00123.html\n #\n # The following is a hack that prevents this behavior by clearing the\n # image build directory each time the docs are built. If sphinx\n # changes their layout between versions, this will not work (though\n # it should probably not cause a crash). Tested successfully\n # on Sphinx 1.0.7\n build_image_dir = '_build/html/_images'\n if os.path.exists(build_image_dir):\n filelist = os.listdir(build_image_dir)\n for filename in filelist:\n if filename.endswith('png'):\n os.remove(os.path.join(build_image_dir, filename))\n\ndef setup_module():\n # HACK: Stop nosetests running setup() above\n pass\n", "path": "doc/sphinxext/sphinxgallery/gen_gallery.py"}]}
1,793
313
gh_patches_debug_22829
rasdani/github-patches
git_diff
pyodide__pyodide-891
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> JsProxy does not handle Error correctly Example: ```python from js import Error e = Error.new("Hi") raise e ``` Raises: `TypeError: exceptions must derive from BaseException`. This came up in https://github.com/iodide-project/pyodide/pull/880#pullrequestreview-555341317. I will try to fix this. </issue> <code> [start of src/pyodide-py/pyodide/__init__.py] 1 from ._base import open_url, eval_code, find_imports, as_nested_list 2 from .console import get_completions 3 4 __version__ = "0.15.0" 5 6 __all__ = ["open_url", "eval_code", "find_imports", "as_nested_list", "get_completions"] 7 [end of src/pyodide-py/pyodide/__init__.py] [start of src/pyodide-py/pyodide/_base.py] 1 """ 2 A library of helper utilities for connecting Python to the browser environment. 3 """ 4 5 import ast 6 from io import StringIO 7 from textwrap import dedent 8 from typing import Dict, List, Any 9 10 11 def open_url(url: str) -> StringIO: 12 """ 13 Fetches a given URL 14 15 Parameters 16 ---------- 17 url 18 URL to fetch 19 20 Returns 21 ------- 22 a io.StringIO object with the contents of the URL. 23 """ 24 from js import XMLHttpRequest 25 26 req = XMLHttpRequest.new() 27 req.open("GET", url, False) 28 req.send(None) 29 return StringIO(req.response) 30 31 32 def eval_code(code: str, ns: Dict[str, Any]) -> None: 33 """Runs a code string 34 35 The last part of the provided code may be an expression. 36 37 Parameters 38 ---------- 39 code 40 the Python code to run. 41 ns 42 `locals()` or `globals()` context where to execute code. 43 44 Returns 45 ------- 46 None 47 """ 48 # handle mis-indented input from multi-line strings 49 code = dedent(code) 50 51 mod = ast.parse(code) 52 if len(mod.body) == 0: 53 return None 54 55 expr: Any 56 if isinstance(mod.body[-1], ast.Expr): 57 expr = ast.Expression(mod.body[-1].value) 58 del mod.body[-1] 59 else: 60 expr = None 61 62 if len(mod.body): 63 exec(compile(mod, "<exec>", mode="exec"), ns, ns) 64 if expr is not None: 65 return eval(compile(expr, "<eval>", mode="eval"), ns, ns) 66 else: 67 return None 68 69 70 def find_imports(code: str) -> List[str]: 71 """ 72 Finds the imports in a string of code 73 74 Parameters 75 ---------- 76 code 77 the Python code to run. 78 79 Returns 80 ------- 81 A list of module names that are imported in the code. 82 83 Examples 84 -------- 85 >>> from pyodide import find_imports 86 >>> code = "import numpy as np; import scipy.stats" 87 >>> find_imports(code) 88 ['numpy', 'scipy'] 89 """ 90 # handle mis-indented input from multi-line strings 91 code = dedent(code) 92 93 mod = ast.parse(code) 94 imports = set() 95 for node in ast.walk(mod): 96 if isinstance(node, ast.Import): 97 for name in node.names: 98 node_name = name.name 99 imports.add(node_name.split(".")[0]) 100 elif isinstance(node, ast.ImportFrom): 101 module_name = node.module 102 if module_name is None: 103 continue 104 imports.add(module_name.split(".")[0]) 105 return list(sorted(imports)) 106 107 108 def as_nested_list(obj) -> List: 109 """Convert a nested JS array to nested Python list. 110 111 Assumes a Javascript object is made of (possibly nested) arrays and 112 converts them to nested Python lists. 113 114 Parameters 115 ---------- 116 obj 117 a Javscript object made of nested arrays. 118 119 Returns 120 ------- 121 Python list, or a nested Python list 122 """ 123 try: 124 it = iter(obj) 125 return [as_nested_list(x) for x in it] 126 except TypeError: 127 return obj 128 [end of src/pyodide-py/pyodide/_base.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/pyodide-py/pyodide/__init__.py b/src/pyodide-py/pyodide/__init__.py --- a/src/pyodide-py/pyodide/__init__.py +++ b/src/pyodide-py/pyodide/__init__.py @@ -1,6 +1,13 @@ -from ._base import open_url, eval_code, find_imports, as_nested_list +from ._base import open_url, eval_code, find_imports, as_nested_list, JsException from .console import get_completions __version__ = "0.15.0" -__all__ = ["open_url", "eval_code", "find_imports", "as_nested_list", "get_completions"] +__all__ = [ + "open_url", + "eval_code", + "find_imports", + "as_nested_list", + "get_completions", + "JsException", +] diff --git a/src/pyodide-py/pyodide/_base.py b/src/pyodide-py/pyodide/_base.py --- a/src/pyodide-py/pyodide/_base.py +++ b/src/pyodide-py/pyodide/_base.py @@ -1,6 +1,8 @@ """ A library of helper utilities for connecting Python to the browser environment. """ +# Added by C: +# JsException (from jsproxy.c) import ast from io import StringIO @@ -8,6 +10,16 @@ from typing import Dict, List, Any +class JsException(Exception): + """ + A wrapper around a Javascript Error to allow the Error to be thrown in Python. + """ + + # This gets overwritten in jsproxy.c, it is just here for autodoc and humans + # reading this file. + pass + + def open_url(url: str) -> StringIO: """ Fetches a given URL
{"golden_diff": "diff --git a/src/pyodide-py/pyodide/__init__.py b/src/pyodide-py/pyodide/__init__.py\n--- a/src/pyodide-py/pyodide/__init__.py\n+++ b/src/pyodide-py/pyodide/__init__.py\n@@ -1,6 +1,13 @@\n-from ._base import open_url, eval_code, find_imports, as_nested_list\n+from ._base import open_url, eval_code, find_imports, as_nested_list, JsException\n from .console import get_completions\n \n __version__ = \"0.15.0\"\n \n-__all__ = [\"open_url\", \"eval_code\", \"find_imports\", \"as_nested_list\", \"get_completions\"]\n+__all__ = [\n+ \"open_url\",\n+ \"eval_code\",\n+ \"find_imports\",\n+ \"as_nested_list\",\n+ \"get_completions\",\n+ \"JsException\",\n+]\ndiff --git a/src/pyodide-py/pyodide/_base.py b/src/pyodide-py/pyodide/_base.py\n--- a/src/pyodide-py/pyodide/_base.py\n+++ b/src/pyodide-py/pyodide/_base.py\n@@ -1,6 +1,8 @@\n \"\"\"\n A library of helper utilities for connecting Python to the browser environment.\n \"\"\"\n+# Added by C:\n+# JsException (from jsproxy.c)\n \n import ast\n from io import StringIO\n@@ -8,6 +10,16 @@\n from typing import Dict, List, Any\n \n \n+class JsException(Exception):\n+ \"\"\"\n+ A wrapper around a Javascript Error to allow the Error to be thrown in Python.\n+ \"\"\"\n+\n+ # This gets overwritten in jsproxy.c, it is just here for autodoc and humans\n+ # reading this file.\n+ pass\n+\n+\n def open_url(url: str) -> StringIO:\n \"\"\"\n Fetches a given URL\n", "issue": "JsProxy does not handle Error correctly\nExample:\r\n```python\r\nfrom js import Error\r\ne = Error.new(\"Hi\")\r\nraise e\r\n```\r\nRaises: `TypeError: exceptions must derive from BaseException`.\r\nThis came up in https://github.com/iodide-project/pyodide/pull/880#pullrequestreview-555341317. I will try to fix this.\n", "before_files": [{"content": "from ._base import open_url, eval_code, find_imports, as_nested_list\nfrom .console import get_completions\n\n__version__ = \"0.15.0\"\n\n__all__ = [\"open_url\", \"eval_code\", \"find_imports\", \"as_nested_list\", \"get_completions\"]\n", "path": "src/pyodide-py/pyodide/__init__.py"}, {"content": "\"\"\"\nA library of helper utilities for connecting Python to the browser environment.\n\"\"\"\n\nimport ast\nfrom io import StringIO\nfrom textwrap import dedent\nfrom typing import Dict, List, Any\n\n\ndef open_url(url: str) -> StringIO:\n \"\"\"\n Fetches a given URL\n\n Parameters\n ----------\n url\n URL to fetch\n\n Returns\n -------\n a io.StringIO object with the contents of the URL.\n \"\"\"\n from js import XMLHttpRequest\n\n req = XMLHttpRequest.new()\n req.open(\"GET\", url, False)\n req.send(None)\n return StringIO(req.response)\n\n\ndef eval_code(code: str, ns: Dict[str, Any]) -> None:\n \"\"\"Runs a code string\n\n The last part of the provided code may be an expression.\n\n Parameters\n ----------\n code\n the Python code to run.\n ns\n `locals()` or `globals()` context where to execute code.\n\n Returns\n -------\n None\n \"\"\"\n # handle mis-indented input from multi-line strings\n code = dedent(code)\n\n mod = ast.parse(code)\n if len(mod.body) == 0:\n return None\n\n expr: Any\n if isinstance(mod.body[-1], ast.Expr):\n expr = ast.Expression(mod.body[-1].value)\n del mod.body[-1]\n else:\n expr = None\n\n if len(mod.body):\n exec(compile(mod, \"<exec>\", mode=\"exec\"), ns, ns)\n if expr is not None:\n return eval(compile(expr, \"<eval>\", mode=\"eval\"), ns, ns)\n else:\n return None\n\n\ndef find_imports(code: str) -> List[str]:\n \"\"\"\n Finds the imports in a string of code\n\n Parameters\n ----------\n code\n the Python code to run.\n\n Returns\n -------\n A list of module names that are imported in the code.\n\n Examples\n --------\n >>> from pyodide import find_imports\n >>> code = \"import numpy as np; import scipy.stats\"\n >>> find_imports(code)\n ['numpy', 'scipy']\n \"\"\"\n # handle mis-indented input from multi-line strings\n code = dedent(code)\n\n mod = ast.parse(code)\n imports = set()\n for node in ast.walk(mod):\n if isinstance(node, ast.Import):\n for name in node.names:\n node_name = name.name\n imports.add(node_name.split(\".\")[0])\n elif isinstance(node, ast.ImportFrom):\n module_name = node.module\n if module_name is None:\n continue\n imports.add(module_name.split(\".\")[0])\n return list(sorted(imports))\n\n\ndef as_nested_list(obj) -> List:\n \"\"\"Convert a nested JS array to nested Python list.\n\n Assumes a Javascript object is made of (possibly nested) arrays and\n converts them to nested Python lists.\n\n Parameters\n ----------\n obj\n a Javscript object made of nested arrays.\n\n Returns\n -------\n Python list, or a nested Python list\n \"\"\"\n try:\n it = iter(obj)\n return [as_nested_list(x) for x in it]\n except TypeError:\n return obj\n", "path": "src/pyodide-py/pyodide/_base.py"}]}
1,715
430
gh_patches_debug_20992
rasdani/github-patches
git_diff
fossasia__open-event-server-4997
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Minor bug in export orders as CSV task **Describe the bug** Minor bug in exporting orders as CSV. Trying to decode string which throws a type error: ```python if order.user and order.user.first_name else '', str(order.user.last_name, 'utf-8') ``` **To Reproduce** Steps to reproduce the behavior: 1. Start export orders as csv task 2. Make sure user is linked for that particular order 3. See the error **Expected behavior** It should work properly **Stacktrace** ```sh File "/home/mayank/OpenSource/open-event-server/app/api/helpers/tasks.py", line 238, in export_order_csv_task content = export_orders_csv(orders) File "/home/mayank/OpenSource/open-event-server/app/api/helpers/csv_jobs_util.py", line 13, in export_orders_csv if order.user and order.user.first_name else '', TypeError: decoding str is not supported ``` **Additional details (please complete the following information):** - OS: Ubuntu 18.04 - Python Version 3.6 - `HEAD` Commit hash [e.g. `4629c62`] </issue> <code> [start of app/api/helpers/csv_jobs_util.py] 1 def export_orders_csv(orders): 2 headers = ['Order#', 'Order Date', 'Status', 'Payment Type', 'Total Amount', 'Quantity', 3 'Discount Code', 'First Name', 'Last Name', 'Email'] 4 5 rows = [headers] 6 for order in orders: 7 if order.status != "deleted": 8 column = [str(order.get_invoice_number()), str(order.created_at) if order.created_at else '', 9 str(order.status) if order.status else '', str(order.paid_via) if order.paid_via else '', 10 str(order.amount) if order.amount else '', str(order.get_tickets_count()), 11 str(order.discount_code.code) if order.discount_code else '', 12 str(order.user.first_name, 'utf-8') 13 if order.user and order.user.first_name else '', 14 str(order.user.last_name, 'utf-8') 15 if order.user and order.user.last_name else '', 16 str(order.user._email, 'utf-8') if order.user and order.user._email else ''] 17 rows.append(column) 18 19 return rows 20 [end of app/api/helpers/csv_jobs_util.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/app/api/helpers/csv_jobs_util.py b/app/api/helpers/csv_jobs_util.py --- a/app/api/helpers/csv_jobs_util.py +++ b/app/api/helpers/csv_jobs_util.py @@ -9,11 +9,11 @@ str(order.status) if order.status else '', str(order.paid_via) if order.paid_via else '', str(order.amount) if order.amount else '', str(order.get_tickets_count()), str(order.discount_code.code) if order.discount_code else '', - str(order.user.first_name, 'utf-8') + str(order.user.first_name) if order.user and order.user.first_name else '', - str(order.user.last_name, 'utf-8') + str(order.user.last_name) if order.user and order.user.last_name else '', - str(order.user._email, 'utf-8') if order.user and order.user._email else ''] + str(order.user.email) if order.user and order.user.email else ''] rows.append(column) return rows
{"golden_diff": "diff --git a/app/api/helpers/csv_jobs_util.py b/app/api/helpers/csv_jobs_util.py\n--- a/app/api/helpers/csv_jobs_util.py\n+++ b/app/api/helpers/csv_jobs_util.py\n@@ -9,11 +9,11 @@\n str(order.status) if order.status else '', str(order.paid_via) if order.paid_via else '',\n str(order.amount) if order.amount else '', str(order.get_tickets_count()),\n str(order.discount_code.code) if order.discount_code else '',\n- str(order.user.first_name, 'utf-8')\n+ str(order.user.first_name)\n if order.user and order.user.first_name else '',\n- str(order.user.last_name, 'utf-8')\n+ str(order.user.last_name)\n if order.user and order.user.last_name else '',\n- str(order.user._email, 'utf-8') if order.user and order.user._email else '']\n+ str(order.user.email) if order.user and order.user.email else '']\n rows.append(column)\n \n return rows\n", "issue": "Minor bug in export orders as CSV task\n**Describe the bug**\r\nMinor bug in exporting orders as CSV. Trying to decode string which throws a type error:\r\n```python\r\n if order.user and order.user.first_name else '',\r\n str(order.user.last_name, 'utf-8')\r\n```\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Start export orders as csv task \r\n2. Make sure user is linked for that particular order\r\n3. See the error\r\n\r\n**Expected behavior**\r\nIt should work properly\r\n\r\n**Stacktrace**\r\n```sh\r\n File \"/home/mayank/OpenSource/open-event-server/app/api/helpers/tasks.py\", line 238, in export_order_csv_task\r\n content = export_orders_csv(orders)\r\n File \"/home/mayank/OpenSource/open-event-server/app/api/helpers/csv_jobs_util.py\", line 13, in export_orders_csv\r\n if order.user and order.user.first_name else '',\r\nTypeError: decoding str is not supported\r\n```\r\n**Additional details (please complete the following information):**\r\n - OS: Ubuntu 18.04\r\n - Python Version 3.6\r\n - `HEAD` Commit hash [e.g. `4629c62`]\n", "before_files": [{"content": "def export_orders_csv(orders):\n headers = ['Order#', 'Order Date', 'Status', 'Payment Type', 'Total Amount', 'Quantity',\n 'Discount Code', 'First Name', 'Last Name', 'Email']\n\n rows = [headers]\n for order in orders:\n if order.status != \"deleted\":\n column = [str(order.get_invoice_number()), str(order.created_at) if order.created_at else '',\n str(order.status) if order.status else '', str(order.paid_via) if order.paid_via else '',\n str(order.amount) if order.amount else '', str(order.get_tickets_count()),\n str(order.discount_code.code) if order.discount_code else '',\n str(order.user.first_name, 'utf-8')\n if order.user and order.user.first_name else '',\n str(order.user.last_name, 'utf-8')\n if order.user and order.user.last_name else '',\n str(order.user._email, 'utf-8') if order.user and order.user._email else '']\n rows.append(column)\n\n return rows\n", "path": "app/api/helpers/csv_jobs_util.py"}]}
1,046
220
gh_patches_debug_56926
rasdani/github-patches
git_diff
kivy__python-for-android-615
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> OpenSSL 1.0.2e outdated (replaced by 1.0.2f) A new version has been published today. The recipe no longer can download version 1.0.2e. Does `disable-solver.patch` still apply? </issue> <code> [start of pythonforandroid/recipes/openssl/__init__.py] 1 from functools import partial 2 3 from pythonforandroid.toolchain import Recipe, shprint, current_directory 4 import sh 5 6 7 class OpenSSLRecipe(Recipe): 8 version = '1.0.2e' 9 url = 'https://www.openssl.org/source/openssl-{version}.tar.gz' 10 11 def should_build(self, arch): 12 return not self.has_libs(arch, 'libssl.so', 'libcrypto.so') 13 14 def check_symbol(self, env, sofile, symbol): 15 nm = env.get('NM', 'nm') 16 syms = sh.sh('-c', "{} -gp {} | cut -d' ' -f3".format( 17 nm, sofile), _env=env).splitlines() 18 if symbol in syms: 19 return True 20 print('{} missing symbol {}; rebuilding'.format(sofile, symbol)) 21 return False 22 23 def get_recipe_env(self, arch=None): 24 env = super(OpenSSLRecipe, self).get_recipe_env(arch) 25 env['CFLAGS'] += ' ' + env['LDFLAGS'] 26 env['CC'] += ' ' + env['LDFLAGS'] 27 return env 28 29 def select_build_arch(self, arch): 30 aname = arch.arch 31 if 'arm64' in aname: 32 return 'linux-aarch64' 33 if 'v7a' in aname: 34 return 'android-armv7' 35 if 'arm' in aname: 36 return 'android' 37 return 'linux-armv4' 38 39 def build_arch(self, arch): 40 env = self.get_recipe_env(arch) 41 with current_directory(self.get_build_dir(arch.arch)): 42 # sh fails with code 255 trying to execute ./Configure 43 # so instead we manually run perl passing in Configure 44 perl = sh.Command('perl') 45 buildarch = self.select_build_arch(arch) 46 shprint(perl, 'Configure', 'shared', 'no-dso', 'no-krb5', buildarch, _env=env) 47 self.apply_patch('disable-sover.patch', arch.arch) 48 49 check_crypto = partial(self.check_symbol, env, 'libcrypto.so') 50 # check_ssl = partial(self.check_symbol, env, 'libssl.so') 51 while True: 52 shprint(sh.make, 'build_libs', _env=env) 53 if all(map(check_crypto, ('SSLeay', 'MD5_Transform', 'MD4_Init'))): 54 break 55 shprint(sh.make, 'clean', _env=env) 56 57 self.install_libs(arch, 'libssl.so', 'libcrypto.so') 58 59 recipe = OpenSSLRecipe() 60 [end of pythonforandroid/recipes/openssl/__init__.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pythonforandroid/recipes/openssl/__init__.py b/pythonforandroid/recipes/openssl/__init__.py --- a/pythonforandroid/recipes/openssl/__init__.py +++ b/pythonforandroid/recipes/openssl/__init__.py @@ -5,7 +5,7 @@ class OpenSSLRecipe(Recipe): - version = '1.0.2e' + version = '1.0.2f' url = 'https://www.openssl.org/source/openssl-{version}.tar.gz' def should_build(self, arch):
{"golden_diff": "diff --git a/pythonforandroid/recipes/openssl/__init__.py b/pythonforandroid/recipes/openssl/__init__.py\n--- a/pythonforandroid/recipes/openssl/__init__.py\n+++ b/pythonforandroid/recipes/openssl/__init__.py\n@@ -5,7 +5,7 @@\n \n \n class OpenSSLRecipe(Recipe):\n- version = '1.0.2e'\n+ version = '1.0.2f'\n url = 'https://www.openssl.org/source/openssl-{version}.tar.gz'\n \n def should_build(self, arch):\n", "issue": "OpenSSL 1.0.2e outdated (replaced by 1.0.2f)\nA new version has been published today. The recipe no longer can download version 1.0.2e.\n\nDoes `disable-solver.patch` still apply?\n\n", "before_files": [{"content": "from functools import partial\n\nfrom pythonforandroid.toolchain import Recipe, shprint, current_directory\nimport sh\n\n\nclass OpenSSLRecipe(Recipe):\n version = '1.0.2e'\n url = 'https://www.openssl.org/source/openssl-{version}.tar.gz'\n\n def should_build(self, arch):\n return not self.has_libs(arch, 'libssl.so', 'libcrypto.so')\n\n def check_symbol(self, env, sofile, symbol):\n nm = env.get('NM', 'nm')\n syms = sh.sh('-c', \"{} -gp {} | cut -d' ' -f3\".format(\n nm, sofile), _env=env).splitlines()\n if symbol in syms:\n return True\n print('{} missing symbol {}; rebuilding'.format(sofile, symbol))\n return False\n\n def get_recipe_env(self, arch=None):\n env = super(OpenSSLRecipe, self).get_recipe_env(arch)\n env['CFLAGS'] += ' ' + env['LDFLAGS']\n env['CC'] += ' ' + env['LDFLAGS']\n return env\n\n def select_build_arch(self, arch):\n aname = arch.arch\n if 'arm64' in aname:\n return 'linux-aarch64'\n if 'v7a' in aname:\n return 'android-armv7'\n if 'arm' in aname:\n return 'android'\n return 'linux-armv4'\n\n def build_arch(self, arch):\n env = self.get_recipe_env(arch)\n with current_directory(self.get_build_dir(arch.arch)):\n # sh fails with code 255 trying to execute ./Configure\n # so instead we manually run perl passing in Configure\n perl = sh.Command('perl')\n buildarch = self.select_build_arch(arch)\n shprint(perl, 'Configure', 'shared', 'no-dso', 'no-krb5', buildarch, _env=env)\n self.apply_patch('disable-sover.patch', arch.arch)\n\n check_crypto = partial(self.check_symbol, env, 'libcrypto.so')\n # check_ssl = partial(self.check_symbol, env, 'libssl.so')\n while True:\n shprint(sh.make, 'build_libs', _env=env)\n if all(map(check_crypto, ('SSLeay', 'MD5_Transform', 'MD4_Init'))):\n break\n shprint(sh.make, 'clean', _env=env)\n\n self.install_libs(arch, 'libssl.so', 'libcrypto.so')\n\nrecipe = OpenSSLRecipe()\n", "path": "pythonforandroid/recipes/openssl/__init__.py"}]}
1,284
126
gh_patches_debug_520
rasdani/github-patches
git_diff
googleapis__google-api-python-client-303
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Suggest use of httplib2 v0.9.2 in readme if using python > 3 Python3.x implementations of the client, passes bytestring to httplib2 header encoding method. Httplib2 < 0.9.2 does not support this. Documentation should make a note that implementations in python3 should explicitly install httplib2 0.9.2. PyPi index targets 0.8.0, which is no bueno for those unaware of this issue. </issue> <code> [start of setup.py] 1 # Copyright 2014 Google Inc. All Rights Reserved. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 """Setup script for Google API Python client. 16 17 Also installs included versions of third party libraries, if those libraries 18 are not already installed. 19 """ 20 from __future__ import print_function 21 22 import sys 23 24 if sys.version_info < (2, 6): 25 print('google-api-python-client requires python version >= 2.6.', 26 file=sys.stderr) 27 sys.exit(1) 28 if (3, 1) <= sys.version_info < (3, 3): 29 print('google-api-python-client requires python3 version >= 3.3.', 30 file=sys.stderr) 31 sys.exit(1) 32 33 from setuptools import setup 34 import pkg_resources 35 36 def _DetectBadness(): 37 import os 38 if 'SKIP_GOOGLEAPICLIENT_COMPAT_CHECK' in os.environ: 39 return 40 o2c_pkg = None 41 try: 42 o2c_pkg = pkg_resources.get_distribution('oauth2client') 43 except pkg_resources.DistributionNotFound: 44 pass 45 oauth2client = None 46 try: 47 import oauth2client 48 except ImportError: 49 pass 50 if o2c_pkg is None and oauth2client is not None: 51 raise RuntimeError( 52 'Previous version of google-api-python-client detected; due to a ' 53 'packaging issue, we cannot perform an in-place upgrade. Please remove ' 54 'the old version and re-install this package.' 55 ) 56 57 _DetectBadness() 58 59 packages = [ 60 'apiclient', 61 'googleapiclient', 62 'googleapiclient/discovery_cache', 63 ] 64 65 install_requires = [ 66 'httplib2>=0.8,<1dev', 67 'oauth2client>=1.5.0,<5.0.0dev', 68 'six>=1.6.1,<2dev', 69 'uritemplate>=3.0.0,<4dev', 70 ] 71 72 if sys.version_info < (2, 7): 73 install_requires.append('argparse') 74 75 long_desc = """The Google API Client for Python is a client library for 76 accessing the Plus, Moderator, and many other Google APIs.""" 77 78 import googleapiclient 79 version = googleapiclient.__version__ 80 81 setup( 82 name="google-api-python-client", 83 version=version, 84 description="Google API Client Library for Python", 85 long_description=long_desc, 86 author="Google Inc.", 87 url="http://github.com/google/google-api-python-client/", 88 install_requires=install_requires, 89 packages=packages, 90 package_data={}, 91 license="Apache 2.0", 92 keywords="google api client", 93 classifiers=[ 94 'Programming Language :: Python :: 2', 95 'Programming Language :: Python :: 2.6', 96 'Programming Language :: Python :: 2.7', 97 'Programming Language :: Python :: 3', 98 'Programming Language :: Python :: 3.3', 99 'Programming Language :: Python :: 3.4', 100 'Development Status :: 5 - Production/Stable', 101 'Intended Audience :: Developers', 102 'License :: OSI Approved :: Apache Software License', 103 'Operating System :: OS Independent', 104 'Topic :: Internet :: WWW/HTTP', 105 ], 106 ) 107 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -63,7 +63,7 @@ ] install_requires = [ - 'httplib2>=0.8,<1dev', + 'httplib2>=0.9.2,<1dev', 'oauth2client>=1.5.0,<5.0.0dev', 'six>=1.6.1,<2dev', 'uritemplate>=3.0.0,<4dev',
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -63,7 +63,7 @@\n ]\n \n install_requires = [\n- 'httplib2>=0.8,<1dev',\n+ 'httplib2>=0.9.2,<1dev',\n 'oauth2client>=1.5.0,<5.0.0dev',\n 'six>=1.6.1,<2dev',\n 'uritemplate>=3.0.0,<4dev',\n", "issue": "Suggest use of httplib2 v0.9.2 in readme if using python > 3\nPython3.x implementations of the client, passes bytestring to httplib2 header encoding method. Httplib2 < 0.9.2 does not support this. \n\nDocumentation should make a note that implementations in python3 should explicitly install httplib2 0.9.2. PyPi index targets 0.8.0, which is no bueno for those unaware of this issue. \n\n", "before_files": [{"content": "# Copyright 2014 Google Inc. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Setup script for Google API Python client.\n\nAlso installs included versions of third party libraries, if those libraries\nare not already installed.\n\"\"\"\nfrom __future__ import print_function\n\nimport sys\n\nif sys.version_info < (2, 6):\n print('google-api-python-client requires python version >= 2.6.',\n file=sys.stderr)\n sys.exit(1)\nif (3, 1) <= sys.version_info < (3, 3):\n print('google-api-python-client requires python3 version >= 3.3.',\n file=sys.stderr)\n sys.exit(1)\n\nfrom setuptools import setup\nimport pkg_resources\n\ndef _DetectBadness():\n import os\n if 'SKIP_GOOGLEAPICLIENT_COMPAT_CHECK' in os.environ:\n return\n o2c_pkg = None\n try:\n o2c_pkg = pkg_resources.get_distribution('oauth2client')\n except pkg_resources.DistributionNotFound:\n pass\n oauth2client = None\n try:\n import oauth2client\n except ImportError:\n pass\n if o2c_pkg is None and oauth2client is not None:\n raise RuntimeError(\n 'Previous version of google-api-python-client detected; due to a '\n 'packaging issue, we cannot perform an in-place upgrade. Please remove '\n 'the old version and re-install this package.'\n )\n\n_DetectBadness()\n\npackages = [\n 'apiclient',\n 'googleapiclient',\n 'googleapiclient/discovery_cache',\n]\n\ninstall_requires = [\n 'httplib2>=0.8,<1dev',\n 'oauth2client>=1.5.0,<5.0.0dev',\n 'six>=1.6.1,<2dev',\n 'uritemplate>=3.0.0,<4dev',\n]\n\nif sys.version_info < (2, 7):\n install_requires.append('argparse')\n\nlong_desc = \"\"\"The Google API Client for Python is a client library for\naccessing the Plus, Moderator, and many other Google APIs.\"\"\"\n\nimport googleapiclient\nversion = googleapiclient.__version__\n\nsetup(\n name=\"google-api-python-client\",\n version=version,\n description=\"Google API Client Library for Python\",\n long_description=long_desc,\n author=\"Google Inc.\",\n url=\"http://github.com/google/google-api-python-client/\",\n install_requires=install_requires,\n packages=packages,\n package_data={},\n license=\"Apache 2.0\",\n keywords=\"google api client\",\n classifiers=[\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: OS Independent',\n 'Topic :: Internet :: WWW/HTTP',\n ],\n)\n", "path": "setup.py"}]}
1,648
116
gh_patches_debug_504
rasdani/github-patches
git_diff
beetbox__beets-3702
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> "the" plugin uses incorrect regex ### Problem Running this command: ```sh ~ ❯ beet move artist:trance -p Moving 40 items. M:\TET - Travailleur En Trance\[2008] Cobra Coded Escalation\01-01 - Cobra Reporting In.mp3 -> M:\‒ Travailleur En Trance, TET\[2008] Cobra Coded Escalation\01-01 - Cobra Reporting In.mp3 ``` Led to this problem: "TET" is recognized by the "the" plugin as something it should move. This is because the regex used in the.py, line 26, is ``` PATTERN_THE = u'^[the]{3}\\s' ``` which matches "TET". It should probably be: ``` PATTERN_THE = u'^the\\s' ``` ### Setup * OS: Windows 10 2004 * Python version: 3.8 * beets version: latest trunk * Turning off plugins made problem go away (yes/no): obviously, if I disable 'the' it no longer does this </issue> <code> [start of beetsplug/the.py] 1 # -*- coding: utf-8 -*- 2 # This file is part of beets. 3 # Copyright 2016, Blemjhoo Tezoulbr <[email protected]>. 4 # 5 # Permission is hereby granted, free of charge, to any person obtaining 6 # a copy of this software and associated documentation files (the 7 # "Software"), to deal in the Software without restriction, including 8 # without limitation the rights to use, copy, modify, merge, publish, 9 # distribute, sublicense, and/or sell copies of the Software, and to 10 # permit persons to whom the Software is furnished to do so, subject to 11 # the following conditions: 12 # 13 # The above copyright notice and this permission notice shall be 14 # included in all copies or substantial portions of the Software. 15 16 """Moves patterns in path formats (suitable for moving articles).""" 17 18 from __future__ import division, absolute_import, print_function 19 20 import re 21 from beets.plugins import BeetsPlugin 22 23 __author__ = '[email protected]' 24 __version__ = '1.1' 25 26 PATTERN_THE = u'^[the]{3}\\s' 27 PATTERN_A = u'^[a][n]?\\s' 28 FORMAT = u'{0}, {1}' 29 30 31 class ThePlugin(BeetsPlugin): 32 33 patterns = [] 34 35 def __init__(self): 36 super(ThePlugin, self).__init__() 37 38 self.template_funcs['the'] = self.the_template_func 39 40 self.config.add({ 41 'the': True, 42 'a': True, 43 'format': u'{0}, {1}', 44 'strip': False, 45 'patterns': [], 46 }) 47 48 self.patterns = self.config['patterns'].as_str_seq() 49 for p in self.patterns: 50 if p: 51 try: 52 re.compile(p) 53 except re.error: 54 self._log.error(u'invalid pattern: {0}', p) 55 else: 56 if not (p.startswith('^') or p.endswith('$')): 57 self._log.warning(u'warning: \"{0}\" will not ' 58 u'match string start/end', p) 59 if self.config['a']: 60 self.patterns = [PATTERN_A] + self.patterns 61 if self.config['the']: 62 self.patterns = [PATTERN_THE] + self.patterns 63 if not self.patterns: 64 self._log.warning(u'no patterns defined!') 65 66 def unthe(self, text, pattern): 67 """Moves pattern in the path format string or strips it 68 69 text -- text to handle 70 pattern -- regexp pattern (case ignore is already on) 71 strip -- if True, pattern will be removed 72 """ 73 if text: 74 r = re.compile(pattern, flags=re.IGNORECASE) 75 try: 76 t = r.findall(text)[0] 77 except IndexError: 78 return text 79 else: 80 r = re.sub(r, '', text).strip() 81 if self.config['strip']: 82 return r 83 else: 84 fmt = self.config['format'].as_str() 85 return fmt.format(r, t.strip()).strip() 86 else: 87 return u'' 88 89 def the_template_func(self, text): 90 if not self.patterns: 91 return text 92 if text: 93 for p in self.patterns: 94 r = self.unthe(text, p) 95 if r != text: 96 self._log.debug(u'\"{0}\" -> \"{1}\"', text, r) 97 break 98 return r 99 else: 100 return u'' 101 [end of beetsplug/the.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/beetsplug/the.py b/beetsplug/the.py --- a/beetsplug/the.py +++ b/beetsplug/the.py @@ -23,7 +23,7 @@ __author__ = '[email protected]' __version__ = '1.1' -PATTERN_THE = u'^[the]{3}\\s' +PATTERN_THE = u'^the\\s' PATTERN_A = u'^[a][n]?\\s' FORMAT = u'{0}, {1}'
{"golden_diff": "diff --git a/beetsplug/the.py b/beetsplug/the.py\n--- a/beetsplug/the.py\n+++ b/beetsplug/the.py\n@@ -23,7 +23,7 @@\n __author__ = '[email protected]'\n __version__ = '1.1'\n \n-PATTERN_THE = u'^[the]{3}\\\\s'\n+PATTERN_THE = u'^the\\\\s'\n PATTERN_A = u'^[a][n]?\\\\s'\n FORMAT = u'{0}, {1}'\n", "issue": "\"the\" plugin uses incorrect regex\n### Problem\r\n\r\nRunning this command:\r\n\r\n```sh\r\n~ \u276f beet move artist:trance -p\r\nMoving 40 items.\r\nM:\\TET - Travailleur En Trance\\[2008] Cobra Coded Escalation\\01-01 - Cobra Reporting In.mp3\r\n -> M:\\\u2012 Travailleur En Trance, TET\\[2008] Cobra Coded Escalation\\01-01 - Cobra Reporting In.mp3\r\n```\r\n\r\nLed to this problem:\r\n\r\n\"TET\" is recognized by the \"the\" plugin as something it should move. This is because the regex used in the.py, line 26, is\r\n```\r\nPATTERN_THE = u'^[the]{3}\\\\s'\r\n```\r\nwhich matches \"TET\". It should probably be:\r\n```\r\nPATTERN_THE = u'^the\\\\s'\r\n```\r\n\r\n### Setup\r\n\r\n* OS: Windows 10 2004\r\n* Python version: 3.8\r\n* beets version: latest trunk\r\n* Turning off plugins made problem go away (yes/no): obviously, if I disable 'the' it no longer does this\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# This file is part of beets.\n# Copyright 2016, Blemjhoo Tezoulbr <[email protected]>.\n#\n# Permission is hereby granted, free of charge, to any person obtaining\n# a copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the Software, and to\n# permit persons to whom the Software is furnished to do so, subject to\n# the following conditions:\n#\n# The above copyright notice and this permission notice shall be\n# included in all copies or substantial portions of the Software.\n\n\"\"\"Moves patterns in path formats (suitable for moving articles).\"\"\"\n\nfrom __future__ import division, absolute_import, print_function\n\nimport re\nfrom beets.plugins import BeetsPlugin\n\n__author__ = '[email protected]'\n__version__ = '1.1'\n\nPATTERN_THE = u'^[the]{3}\\\\s'\nPATTERN_A = u'^[a][n]?\\\\s'\nFORMAT = u'{0}, {1}'\n\n\nclass ThePlugin(BeetsPlugin):\n\n patterns = []\n\n def __init__(self):\n super(ThePlugin, self).__init__()\n\n self.template_funcs['the'] = self.the_template_func\n\n self.config.add({\n 'the': True,\n 'a': True,\n 'format': u'{0}, {1}',\n 'strip': False,\n 'patterns': [],\n })\n\n self.patterns = self.config['patterns'].as_str_seq()\n for p in self.patterns:\n if p:\n try:\n re.compile(p)\n except re.error:\n self._log.error(u'invalid pattern: {0}', p)\n else:\n if not (p.startswith('^') or p.endswith('$')):\n self._log.warning(u'warning: \\\"{0}\\\" will not '\n u'match string start/end', p)\n if self.config['a']:\n self.patterns = [PATTERN_A] + self.patterns\n if self.config['the']:\n self.patterns = [PATTERN_THE] + self.patterns\n if not self.patterns:\n self._log.warning(u'no patterns defined!')\n\n def unthe(self, text, pattern):\n \"\"\"Moves pattern in the path format string or strips it\n\n text -- text to handle\n pattern -- regexp pattern (case ignore is already on)\n strip -- if True, pattern will be removed\n \"\"\"\n if text:\n r = re.compile(pattern, flags=re.IGNORECASE)\n try:\n t = r.findall(text)[0]\n except IndexError:\n return text\n else:\n r = re.sub(r, '', text).strip()\n if self.config['strip']:\n return r\n else:\n fmt = self.config['format'].as_str()\n return fmt.format(r, t.strip()).strip()\n else:\n return u''\n\n def the_template_func(self, text):\n if not self.patterns:\n return text\n if text:\n for p in self.patterns:\n r = self.unthe(text, p)\n if r != text:\n self._log.debug(u'\\\"{0}\\\" -> \\\"{1}\\\"', text, r)\n break\n return r\n else:\n return u''\n", "path": "beetsplug/the.py"}]}
1,731
116
gh_patches_debug_777
rasdani/github-patches
git_diff
getmoto__moto-1840
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Cryptography Package has a Security Vulnerability Discovered using pipenv's security check feature that there's a vulnerability in the cryptography package versions<2.3. > Checking installed package safety... 36351: cryptography >=1.9.0,<2.3 resolved (2.2.2 installed)! python-cryptography versions >=1.9.0 and <2.3 did not enforce a minimum tag length for finalize_with_tag API. If a user did not validate the input length prior to passing it to finalize_with_tag an attacker could craft an invalid payload with a shortened tag (e.g. 1 byte) such that they would have a 1 in 256 chance of passing the MAC check. GCM tag forgeries can cause key leakage. More details here: http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-10903 setup.py should be updated to require cryptography>=2.3.0. </issue> <code> [start of setup.py] 1 #!/usr/bin/env python 2 from __future__ import unicode_literals 3 import setuptools 4 from setuptools import setup, find_packages 5 import sys 6 7 8 install_requires = [ 9 "Jinja2>=2.7.3", 10 "boto>=2.36.0", 11 "boto3>=1.6.16,<1.8", 12 "botocore>=1.9.16,<1.11", 13 "cryptography>=2.0.0", 14 "requests>=2.5", 15 "xmltodict", 16 "six>1.9", 17 "werkzeug", 18 "pyaml", 19 "pytz", 20 "python-dateutil<3.0.0,>=2.1", 21 "python-jose<3.0.0", 22 "mock", 23 "docker>=2.5.1", 24 "jsondiff==1.1.1", 25 "aws-xray-sdk<0.96,>=0.93", 26 "responses>=0.9.0", 27 ] 28 29 extras_require = { 30 'server': ['flask'], 31 } 32 33 # https://hynek.me/articles/conditional-python-dependencies/ 34 if int(setuptools.__version__.split(".", 1)[0]) < 18: 35 if sys.version_info[0:2] < (3, 3): 36 install_requires.append("backports.tempfile") 37 else: 38 extras_require[":python_version<'3.3'"] = ["backports.tempfile"] 39 40 41 setup( 42 name='moto', 43 version='1.3.6', 44 description='A library that allows your python tests to easily' 45 ' mock out the boto library', 46 author='Steve Pulec', 47 author_email='[email protected]', 48 url='https://github.com/spulec/moto', 49 entry_points={ 50 'console_scripts': [ 51 'moto_server = moto.server:main', 52 ], 53 }, 54 packages=find_packages(exclude=("tests", "tests.*")), 55 install_requires=install_requires, 56 extras_require=extras_require, 57 include_package_data=True, 58 license="Apache", 59 test_suite="tests", 60 classifiers=[ 61 "Programming Language :: Python :: 2", 62 "Programming Language :: Python :: 2.7", 63 "Programming Language :: Python :: 3", 64 "Programming Language :: Python :: 3.3", 65 "Programming Language :: Python :: 3.4", 66 "Programming Language :: Python :: 3.5", 67 "Programming Language :: Python :: 3.6", 68 "License :: OSI Approved :: Apache Software License", 69 "Topic :: Software Development :: Testing", 70 ], 71 ) 72 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ "boto>=2.36.0", "boto3>=1.6.16,<1.8", "botocore>=1.9.16,<1.11", - "cryptography>=2.0.0", + "cryptography>=2.3.0", "requests>=2.5", "xmltodict", "six>1.9",
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -10,7 +10,7 @@\n \"boto>=2.36.0\",\n \"boto3>=1.6.16,<1.8\",\n \"botocore>=1.9.16,<1.11\",\n- \"cryptography>=2.0.0\",\n+ \"cryptography>=2.3.0\",\n \"requests>=2.5\",\n \"xmltodict\",\n \"six>1.9\",\n", "issue": "Cryptography Package has a Security Vulnerability\nDiscovered using pipenv's security check feature that there's a vulnerability in the cryptography package versions<2.3.\r\n \r\n> Checking installed package safety...\r\n36351: cryptography >=1.9.0,<2.3 resolved (2.2.2 installed)!\r\npython-cryptography versions >=1.9.0 and <2.3 did not enforce a minimum tag length for finalize_with_tag API. If a user did not validate the input length prior to passing it to finalize_with_tag an attacker could craft an invalid payload with a shortened tag (e.g. 1 byte) such that they would have a 1 in 256 chance of passing the MAC check. GCM tag forgeries can cause key leakage.\r\n \r\nMore details here: http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-10903\r\n\r\nsetup.py should be updated to require cryptography>=2.3.0.\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\nfrom __future__ import unicode_literals\nimport setuptools\nfrom setuptools import setup, find_packages\nimport sys\n\n\ninstall_requires = [\n \"Jinja2>=2.7.3\",\n \"boto>=2.36.0\",\n \"boto3>=1.6.16,<1.8\",\n \"botocore>=1.9.16,<1.11\",\n \"cryptography>=2.0.0\",\n \"requests>=2.5\",\n \"xmltodict\",\n \"six>1.9\",\n \"werkzeug\",\n \"pyaml\",\n \"pytz\",\n \"python-dateutil<3.0.0,>=2.1\",\n \"python-jose<3.0.0\",\n \"mock\",\n \"docker>=2.5.1\",\n \"jsondiff==1.1.1\",\n \"aws-xray-sdk<0.96,>=0.93\",\n \"responses>=0.9.0\",\n]\n\nextras_require = {\n 'server': ['flask'],\n}\n\n# https://hynek.me/articles/conditional-python-dependencies/\nif int(setuptools.__version__.split(\".\", 1)[0]) < 18:\n if sys.version_info[0:2] < (3, 3):\n install_requires.append(\"backports.tempfile\")\nelse:\n extras_require[\":python_version<'3.3'\"] = [\"backports.tempfile\"]\n\n\nsetup(\n name='moto',\n version='1.3.6',\n description='A library that allows your python tests to easily'\n ' mock out the boto library',\n author='Steve Pulec',\n author_email='[email protected]',\n url='https://github.com/spulec/moto',\n entry_points={\n 'console_scripts': [\n 'moto_server = moto.server:main',\n ],\n },\n packages=find_packages(exclude=(\"tests\", \"tests.*\")),\n install_requires=install_requires,\n extras_require=extras_require,\n include_package_data=True,\n license=\"Apache\",\n test_suite=\"tests\",\n classifiers=[\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Topic :: Software Development :: Testing\",\n ],\n)\n", "path": "setup.py"}]}
1,444
126
gh_patches_debug_49499
rasdani/github-patches
git_diff
pex-tool__pex-1896
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Release 2.1.104 On the docket: + [x] Pull in Pip fixes. #1805 + [x] pex fails to overwrite zipapp pex with loose pex #1879 + [x] Make lock update sensitive to artifacts. #1887 + [x] PEXBuilder is not robust to ephemeral .pyc compiles. #1889 </issue> <code> [start of pex/version.py] 1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). 2 # Licensed under the Apache License, Version 2.0 (see LICENSE). 3 4 __version__ = "2.1.103" 5 [end of pex/version.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pex/version.py b/pex/version.py --- a/pex/version.py +++ b/pex/version.py @@ -1,4 +1,4 @@ # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). -__version__ = "2.1.103" +__version__ = "2.1.104"
{"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.103\"\n+__version__ = \"2.1.104\"\n", "issue": "Release 2.1.104\nOn the docket:\r\n+ [x] Pull in Pip fixes. #1805\r\n+ [x] pex fails to overwrite zipapp pex with loose pex #1879\r\n+ [x] Make lock update sensitive to artifacts. #1887 \r\n+ [x] PEXBuilder is not robust to ephemeral .pyc compiles. #1889 \n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.103\"\n", "path": "pex/version.py"}]}
679
99