column              dtype           range (min–max)
problem_id          stringlengths   18–22
source              stringclasses   1 value
task_type           stringclasses   1 value
in_source_id        stringlengths   13–58
prompt              stringlengths   1.71k–9.01k
golden_diff         stringlengths   151–4.94k
verification_info   stringlengths   465–11.3k
num_tokens_prompt   int64           557–2.05k
num_tokens_diff     int64           48–1.02k
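The block above is the Hugging Face dataset-viewer schema summary: one row per column, with its dtype and the min–max length (or value range) observed across the rows. A minimal sketch of loading the dataset and pulling one record, assuming it is hosted on the Hub under the `rasdani/github-patches` identifier that the `source` field repeats below; adjust the name if it lives elsewhere:

```python
# Minimal sketch: assumes the dataset lives on the Hugging Face Hub under
# "rasdani/github-patches" (the value of the `source` column) and has a
# "train" split; adjust both if your copy differs.
from datasets import load_dataset

ds = load_dataset("rasdani/github-patches", split="train")
row = ds[0]
print(row["problem_id"], row["in_source_id"])  # e.g. gh_patches_debug_38915 lisa-lab__pylearn2-1512
print(row["prompt"][:200])        # task statement shown to the model
print(row["golden_diff"][:200])   # reference patch
```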
problem_id:   gh_patches_debug_38915
source:       rasdani/github-patches
task_type:    git_diff
in_source_id: lisa-lab__pylearn2-1512
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> speed up NanGuardMode on GPU and move to Theano. This can be done as in gh-1054. Do the reduction on the GPU, then this will transfer much less data. The CudaNdarray object do not support many reduction, but we can compile a Theano function that take a gpu object, do the reduction and return the result on the CPU to inspect it. </issue> <code> [start of pylearn2/devtools/nan_guard.py] 1 """ 2 Functionality for detecting NaNs in a Theano graph. 3 """ 4 __authors__ = "Ian Goodfellow" 5 __copyright__ = "Copyright 2010-2012, Universite de Montreal" 6 __credits__ = ["Ian Goodfellow"] 7 __license__ = "3-clause BSD" 8 __maintainer__ = "LISA Lab" 9 __email__ = "pylearn-dev@googlegroups" 10 11 import logging 12 from theano.compile import Mode 13 import theano 14 import numpy as np 15 from pylearn2.models.dbm import flatten 16 from pylearn2.utils import contains_nan, contains_inf 17 18 19 logger = logging.getLogger(__name__) 20 21 22 class NanGuardMode(Mode): 23 """ 24 A Theano compilation Mode that makes the compiled function automatically 25 detect NaNs and Infs and detect an error if they occur. 26 27 Parameters 28 ---------- 29 nan_is_error : bool 30 If True, raise an error anytime a NaN is encountered 31 inf_is_error: bool 32 If True, raise an error anytime an Inf is encountered. Note that some 33 pylearn2 modules currently use np.inf as a default value (e.g. 34 mlp.max_pool) and these will cause an error if inf_is_error is True. 35 big_is_error: bool 36 If True, raise an error when a value greater than 1e10 is encountered. 37 """ 38 def __init__(self, nan_is_error, inf_is_error, big_is_error=True): 39 def do_check_on(var, nd, f, is_input): 40 """ 41 Checks `var` for NaNs / Infs. If detected, raises an exception 42 and / or prints information about `nd`, `f`, and `is_input` to 43 help the user determine the cause of the invalid values. 44 45 Parameters 46 ---------- 47 var : numpy.ndarray 48 The value to be checked. 49 nd : theano.gof.Apply 50 The Apply node being executed 51 f : callable 52 The thunk for the apply node 53 is_input : bool 54 If True, `var` is an input to `nd`. 55 If False, it is an output. 
56 """ 57 error = False 58 if nan_is_error: 59 if contains_nan(var): 60 logger.error('NaN detected') 61 error = True 62 if inf_is_error: 63 if contains_inf(var): 64 logger.error('Inf detected') 65 error = True 66 if big_is_error: 67 if np.abs(var).max() > 1e10: 68 logger.error('Big value detected') 69 error = True 70 if error: 71 if is_input: 72 logger.error('In an input') 73 else: 74 logger.error('In an output') 75 logger.error('Inputs: ') 76 for ivar, ival in zip(nd.inputs, f.inputs): 77 logger.error('var') 78 logger.error(ivar) 79 logger.error(theano.printing.min_informative_str(ivar)) 80 logger.error('val') 81 logger.error(ival) 82 logger.error('Node:') 83 logger.error(nd) 84 assert False 85 86 def nan_check(i, node, fn): 87 """ 88 Runs `fn` while checking its inputs and outputs for NaNs / Infs 89 90 Parameters 91 ---------- 92 i : currently ignored (TODO: determine why it is here or remove) 93 node : theano.gof.Apply 94 The Apply node currently being executed 95 fn : callable 96 The thunk to execute for this Apply node 97 """ 98 inputs = fn.inputs 99 # TODO: figure out why individual inputs are themselves lists sometimes 100 for x in flatten(inputs): 101 do_check_on(x, node, fn, True) 102 fn() 103 outputs = fn.outputs 104 for j, x in enumerate(flatten(outputs)): 105 do_check_on(x, node, fn, False) 106 107 wrap_linker = theano.gof.WrapLinkerMany([theano.gof.OpWiseCLinker()], [nan_check]) 108 super(NanGuardMode, self).__init__(wrap_linker, optimizer=theano.config.optimizer) 109 [end of pylearn2/devtools/nan_guard.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pylearn2/devtools/nan_guard.py b/pylearn2/devtools/nan_guard.py --- a/pylearn2/devtools/nan_guard.py +++ b/pylearn2/devtools/nan_guard.py @@ -11,6 +11,8 @@ import logging from theano.compile import Mode import theano +import theano.tensor as T +import theano.sandbox.cuda as cuda import numpy as np from pylearn2.models.dbm import flatten from pylearn2.utils import contains_nan, contains_inf @@ -36,6 +38,23 @@ If True, raise an error when a value greater than 1e10 is encountered. """ def __init__(self, nan_is_error, inf_is_error, big_is_error=True): + if cuda.cuda_available: + self.guard_input = cuda.fvector('nan_guard') + if nan_is_error or inf_is_error: + self.gpumin = theano.function( + [self.guard_input], T.min(self.guard_input), + mode='FAST_RUN' + ) + if inf_is_error: + self.gpumax = theano.function( + [self.guard_input], T.max(self.guard_input), + mode='FAST_RUN' + ) + if big_is_error: + self.gpuabsmax = theano.function( + [self.guard_input], T.max(T.abs_(self.guard_input)), + mode='FAST_RUN' + ) def do_check_on(var, nd, f, is_input): """ Checks `var` for NaNs / Infs. If detected, raises an exception @@ -56,15 +75,31 @@ """ error = False if nan_is_error: - if contains_nan(var): + err = False + if cuda.cuda_available and isinstance(var, cuda.CudaNdarray): + err = np.isnan(self.gpumin(var.reshape(var.size))) + else: + err = contains_nan(var) + if err: logger.error('NaN detected') error = True if inf_is_error: - if contains_inf(var): + err = False + if cuda.cuda_available and isinstance(var, cuda.CudaNdarray): + err = (np.isinf(self.gpumin(var.reshape(var.size))) or \ + np.isinf(self.gpumax(var.reshape(var.size)))) + else: + err = contains_inf(var) + if err: logger.error('Inf detected') error = True if big_is_error: - if np.abs(var).max() > 1e10: + err = False + if cuda.cuda_available and isinstance(var, cuda.CudaNdarray): + err = (self.gpuabsmax(var.reshape(var.size)) > 1e10) + else: + err = (np.abs(var).max() > 1e10) + if err: logger.error('Big value detected') error = True if error:
{"golden_diff": "diff --git a/pylearn2/devtools/nan_guard.py b/pylearn2/devtools/nan_guard.py\n--- a/pylearn2/devtools/nan_guard.py\n+++ b/pylearn2/devtools/nan_guard.py\n@@ -11,6 +11,8 @@\n import logging\n from theano.compile import Mode\n import theano\n+import theano.tensor as T\n+import theano.sandbox.cuda as cuda\n import numpy as np\n from pylearn2.models.dbm import flatten\n from pylearn2.utils import contains_nan, contains_inf\n@@ -36,6 +38,23 @@\n If True, raise an error when a value greater than 1e10 is encountered.\n \"\"\"\n def __init__(self, nan_is_error, inf_is_error, big_is_error=True):\n+ if cuda.cuda_available:\n+ self.guard_input = cuda.fvector('nan_guard')\n+ if nan_is_error or inf_is_error:\n+ self.gpumin = theano.function(\n+ [self.guard_input], T.min(self.guard_input),\n+ mode='FAST_RUN'\n+ )\n+ if inf_is_error:\n+ self.gpumax = theano.function(\n+ [self.guard_input], T.max(self.guard_input),\n+ mode='FAST_RUN'\n+ )\n+ if big_is_error:\n+ self.gpuabsmax = theano.function(\n+ [self.guard_input], T.max(T.abs_(self.guard_input)),\n+ mode='FAST_RUN'\n+ )\n def do_check_on(var, nd, f, is_input):\n \"\"\"\n Checks `var` for NaNs / Infs. If detected, raises an exception\n@@ -56,15 +75,31 @@\n \"\"\"\n error = False\n if nan_is_error:\n- if contains_nan(var):\n+ err = False\n+ if cuda.cuda_available and isinstance(var, cuda.CudaNdarray):\n+ err = np.isnan(self.gpumin(var.reshape(var.size)))\n+ else:\n+ err = contains_nan(var)\n+ if err:\n logger.error('NaN detected')\n error = True\n if inf_is_error:\n- if contains_inf(var):\n+ err = False\n+ if cuda.cuda_available and isinstance(var, cuda.CudaNdarray):\n+ err = (np.isinf(self.gpumin(var.reshape(var.size))) or \\\n+ np.isinf(self.gpumax(var.reshape(var.size))))\n+ else:\n+ err = contains_inf(var)\n+ if err:\n logger.error('Inf detected')\n error = True\n if big_is_error:\n- if np.abs(var).max() > 1e10:\n+ err = False\n+ if cuda.cuda_available and isinstance(var, cuda.CudaNdarray):\n+ err = (self.gpuabsmax(var.reshape(var.size)) > 1e10)\n+ else:\n+ err = (np.abs(var).max() > 1e10)\n+ if err:\n logger.error('Big value detected')\n error = True\n if error:\n", "issue": "speed up NanGuardMode on GPU and move to Theano.\nThis can be done as in gh-1054. Do the reduction on the GPU, then this will transfer much less data.\n\nThe CudaNdarray object do not support many reduction, but we can compile a Theano function that take a gpu object, do the reduction and return the result on the CPU to inspect it.\n\n", "before_files": [{"content": "\"\"\"\nFunctionality for detecting NaNs in a Theano graph.\n\"\"\"\n__authors__ = \"Ian Goodfellow\"\n__copyright__ = \"Copyright 2010-2012, Universite de Montreal\"\n__credits__ = [\"Ian Goodfellow\"]\n__license__ = \"3-clause BSD\"\n__maintainer__ = \"LISA Lab\"\n__email__ = \"pylearn-dev@googlegroups\"\n\nimport logging\nfrom theano.compile import Mode\nimport theano\nimport numpy as np\nfrom pylearn2.models.dbm import flatten\nfrom pylearn2.utils import contains_nan, contains_inf\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass NanGuardMode(Mode):\n \"\"\"\n A Theano compilation Mode that makes the compiled function automatically\n detect NaNs and Infs and detect an error if they occur.\n\n Parameters\n ----------\n nan_is_error : bool\n If True, raise an error anytime a NaN is encountered\n inf_is_error: bool\n If True, raise an error anytime an Inf is encountered. 
Note that some\n pylearn2 modules currently use np.inf as a default value (e.g.\n mlp.max_pool) and these will cause an error if inf_is_error is True.\n big_is_error: bool\n If True, raise an error when a value greater than 1e10 is encountered.\n \"\"\"\n def __init__(self, nan_is_error, inf_is_error, big_is_error=True):\n def do_check_on(var, nd, f, is_input):\n \"\"\"\n Checks `var` for NaNs / Infs. If detected, raises an exception\n and / or prints information about `nd`, `f`, and `is_input` to\n help the user determine the cause of the invalid values.\n\n Parameters\n ----------\n var : numpy.ndarray\n The value to be checked.\n nd : theano.gof.Apply\n The Apply node being executed\n f : callable\n The thunk for the apply node\n is_input : bool\n If True, `var` is an input to `nd`.\n If False, it is an output.\n \"\"\"\n error = False\n if nan_is_error:\n if contains_nan(var):\n logger.error('NaN detected')\n error = True\n if inf_is_error:\n if contains_inf(var):\n logger.error('Inf detected')\n error = True\n if big_is_error:\n if np.abs(var).max() > 1e10:\n logger.error('Big value detected')\n error = True\n if error:\n if is_input:\n logger.error('In an input')\n else:\n logger.error('In an output')\n logger.error('Inputs: ')\n for ivar, ival in zip(nd.inputs, f.inputs):\n logger.error('var')\n logger.error(ivar)\n logger.error(theano.printing.min_informative_str(ivar))\n logger.error('val')\n logger.error(ival)\n logger.error('Node:')\n logger.error(nd)\n assert False\n\n def nan_check(i, node, fn):\n \"\"\"\n Runs `fn` while checking its inputs and outputs for NaNs / Infs\n\n Parameters\n ----------\n i : currently ignored (TODO: determine why it is here or remove)\n node : theano.gof.Apply\n The Apply node currently being executed\n fn : callable\n The thunk to execute for this Apply node\n \"\"\"\n inputs = fn.inputs\n # TODO: figure out why individual inputs are themselves lists sometimes\n for x in flatten(inputs):\n do_check_on(x, node, fn, True)\n fn()\n outputs = fn.outputs\n for j, x in enumerate(flatten(outputs)):\n do_check_on(x, node, fn, False)\n\n wrap_linker = theano.gof.WrapLinkerMany([theano.gof.OpWiseCLinker()], [nan_check])\n super(NanGuardMode, self).__init__(wrap_linker, optimizer=theano.config.optimizer)\n", "path": "pylearn2/devtools/nan_guard.py"}]}
num_tokens_prompt: 1,706
num_tokens_diff:   667
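The long JSON line in the record above is the `verification_info` field: it packs the reference patch, the issue text, and the pre-patch file contents into one string. A hedged sketch of unpacking it, with `row` as loaded in the earlier snippet (the key names are taken from the raw rows):

```python
import json

info = json.loads(row["verification_info"])
print(info["issue"].splitlines()[0])   # first line of the issue statement
for f in info["before_files"]:         # list of {"content": ..., "path": ...}
    print(f["path"], len(f["content"]), "chars")
# In the rows shown here, the inner diff repeats the top-level field:
print(info["golden_diff"] == row["golden_diff"])
```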
problem_id:   gh_patches_debug_5561
source:       rasdani/github-patches
task_type:    git_diff
in_source_id: Showndarya__Hacktoberfest-545
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Make JSON linting more verbose ? Currently we simply check : 1. json is valid 2. json contains the keys we want 3. those keys are not empty the problem is with step 1, it's not very helpful if people have no idea what the JSON spec is. One fix is : 1. add a link to jsonlint.com or such a service in the print of the error so that people can check themselves. 2. add something like http://deron.meranda.us/python/demjson/ with proper package caching on travis side, but it will make for slower builds still probably </issue> <code> [start of .travis.py] 1 import json 2 import os 3 import re 4 import subprocess 5 6 # Get a diff between master and current. 7 try: 8 commit_range = os.environ["TRAVIS_COMMIT_RANGE"] 9 changed_files = subprocess.check_output(["git", "diff", "--name-only", commit_range]) 10 except KeyError: 11 print("πŸ”₯ This should be run on Travis. Otherwise make sure TRAVIS_BRANCH is set.") 12 exit(1) 13 14 # Filter JSON files only. 15 changed_files_json = [] 16 if changed_files: 17 changed_files = changed_files.decode() 18 for changed_file in changed_files.split('\n'): 19 if re.search(r"\.json$", changed_file): 20 changed_files_json.append(changed_file) 21 22 23 # Iterate over list of changed JSON files. 24 for changed_file_json in changed_files_json: 25 print(f"Checking file {changed_file_json}...") 26 there_was_an_error = False 27 28 if not os.path.basename(changed_file_json)[0].isupper(): 29 there_was_an_error = True 30 print("πŸ”₯ File name not capitalized.") 31 32 try: 33 with open(changed_file_json) as data_file: 34 file_content = json.loads(data_file.read()) 35 except json.decoder.JSONDecodeError: 36 there_was_an_error = True 37 print("πŸ”₯ JSON could not be parsed.") 38 39 if 'word' not in file_content: 40 there_was_an_error = True 41 print("πŸ”₯ Key 'word' not found.") 42 43 if not file_content["word"]: 44 there_was_an_error = True 45 print("πŸ”₯ Value for 'word' appears to be empty.") 46 47 if 'definitions' not in file_content: 48 there_was_an_error = True 49 print("πŸ”₯ Key 'definitions' not found.") 50 51 if not file_content["definitions"]: 52 there_was_an_error = True 53 print("πŸ”₯ Value for 'definitions' appears to be empty.") 54 55 if 'parts-of-speech' not in file_content: 56 there_was_an_error = True 57 print("πŸ”₯ Key 'parts-of-speech' not found.") 58 59 if not file_content["parts-of-speech"]: 60 there_was_an_error = True 61 print("πŸ”₯ Value for 'parts-of-speech' appears to be empty.") 62 63 if there_was_an_error: 64 exit(1) 65 [end of .travis.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/.travis.py b/.travis.py --- a/.travis.py +++ b/.travis.py @@ -34,7 +34,7 @@ file_content = json.loads(data_file.read()) except json.decoder.JSONDecodeError: there_was_an_error = True - print("πŸ”₯ JSON could not be parsed.") + print(f"πŸ”₯ JSON could not be parsed. Follow this link to know more : https://jsonlint.com/?json={data_file.read()}") if 'word' not in file_content: there_was_an_error = True
{"golden_diff": "diff --git a/.travis.py b/.travis.py\n--- a/.travis.py\n+++ b/.travis.py\n@@ -34,7 +34,7 @@\n file_content = json.loads(data_file.read())\n except json.decoder.JSONDecodeError:\n there_was_an_error = True\n- print(\"\ud83d\udd25 JSON could not be parsed.\")\n+ print(f\"\ud83d\udd25 JSON could not be parsed. Follow this link to know more : https://jsonlint.com/?json={data_file.read()}\")\n \n if 'word' not in file_content:\n there_was_an_error = True\n", "issue": "Make JSON linting more verbose ?\nCurrently we simply check :\r\n1. json is valid\r\n2. json contains the keys we want\r\n3. those keys are not empty\r\n\r\nthe problem is with step 1, it's not very helpful if people have no idea what the JSON spec is.\r\n\r\nOne fix is :\r\n1. add a link to jsonlint.com or such a service in the print of the error so that people can check themselves.\r\n2. add something like http://deron.meranda.us/python/demjson/ with proper package caching on travis side, but it will make for slower builds still probably\n", "before_files": [{"content": "import json\nimport os\nimport re\nimport subprocess\n\n# Get a diff between master and current.\ntry:\n commit_range = os.environ[\"TRAVIS_COMMIT_RANGE\"]\n changed_files = subprocess.check_output([\"git\", \"diff\", \"--name-only\", commit_range])\nexcept KeyError:\n print(\"\ud83d\udd25 This should be run on Travis. Otherwise make sure TRAVIS_BRANCH is set.\")\n exit(1)\n\n# Filter JSON files only.\nchanged_files_json = []\nif changed_files:\n changed_files = changed_files.decode()\n for changed_file in changed_files.split('\\n'):\n if re.search(r\"\\.json$\", changed_file):\n changed_files_json.append(changed_file)\n\n \n# Iterate over list of changed JSON files.\nfor changed_file_json in changed_files_json:\n print(f\"Checking file {changed_file_json}...\")\n there_was_an_error = False\n\n if not os.path.basename(changed_file_json)[0].isupper():\n there_was_an_error = True\n print(\"\ud83d\udd25 File name not capitalized.\")\n\n try:\n with open(changed_file_json) as data_file:\n file_content = json.loads(data_file.read())\n except json.decoder.JSONDecodeError:\n there_was_an_error = True\n print(\"\ud83d\udd25 JSON could not be parsed.\")\n\n if 'word' not in file_content:\n there_was_an_error = True\n print(\"\ud83d\udd25 Key 'word' not found.\")\n\n if not file_content[\"word\"]:\n there_was_an_error = True\n print(\"\ud83d\udd25 Value for 'word' appears to be empty.\")\n\n if 'definitions' not in file_content:\n there_was_an_error = True\n print(\"\ud83d\udd25 Key 'definitions' not found.\")\n\n if not file_content[\"definitions\"]:\n there_was_an_error = True\n print(\"\ud83d\udd25 Value for 'definitions' appears to be empty.\")\n\n if 'parts-of-speech' not in file_content:\n there_was_an_error = True\n print(\"\ud83d\udd25 Key 'parts-of-speech' not found.\")\n\n if not file_content[\"parts-of-speech\"]:\n there_was_an_error = True\n print(\"\ud83d\udd25 Value for 'parts-of-speech' appears to be empty.\")\n\n if there_was_an_error:\n exit(1)\n", "path": ".travis.py"}]}
num_tokens_prompt: 1,255
num_tokens_diff:   129
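Across records, the `prompt` field follows one fixed template: an `<issue>` block, a `<code>` block listing the pre-patch files between `[start of ...]`/`[end of ...]` markers, and the answer-format instructions ending with an example `<patch>`. A small sketch of splitting a prompt on those tags (the tag names come straight from the prompts above):

```python
import re

def split_prompt(prompt: str) -> tuple[str, str]:
    """Return (issue_text, code_listing) from one prompt string."""
    issue = re.search(r"<issue>\s*(.*?)\s*</issue>", prompt, re.S)
    code = re.search(r"<code>\s*(.*?)\s*</code>", prompt, re.S)
    return (
        issue.group(1) if issue else "",
        code.group(1) if code else "",
    )

issue_text, code_listing = split_prompt(row["prompt"])
```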
problem_id:   gh_patches_debug_38657
source:       rasdani/github-patches
task_type:    git_diff
in_source_id: litestar-org__litestar-1780
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> StaticFilesConfig and virtual directories I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem. This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems. https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32 </issue> <code> [start of litestar/contrib/sqlalchemy/types.py] 1 from __future__ import annotations 2 3 import uuid 4 from base64 import b64decode 5 from typing import TYPE_CHECKING, Any, cast 6 7 from sqlalchemy.dialects.oracle import BLOB as ORA_BLOB 8 from sqlalchemy.dialects.oracle import RAW as ORA_RAW 9 from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB 10 from sqlalchemy.dialects.postgresql import UUID as PG_UUID 11 from sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, TypeDecorator 12 from sqlalchemy.types import JSON as _JSON 13 14 if TYPE_CHECKING: 15 from sqlalchemy.engine import Dialect 16 17 BigIntIdentity = BigInteger().with_variant(Integer, "sqlite") 18 19 20 class GUID(TypeDecorator): 21 """Platform-independent GUID type. 22 23 Uses PostgreSQL's UUID type, Oracle's RAW(16) type, otherwise uses 24 BINARY(16) or CHAR(32), storing as stringified hex values. 25 26 Will accept stringified UUIDs as a hexstring or an actual UUID 27 28 """ 29 30 impl = BINARY(16) 31 cache_ok = True 32 33 @property 34 def python_type(self) -> type[uuid.UUID]: 35 return uuid.UUID 36 37 def __init__(self, *args: Any, binary: bool = True, **kwargs: Any) -> None: 38 self.binary = binary 39 40 def load_dialect_impl(self, dialect: Dialect) -> Any: 41 if dialect.name in {"postgresql", "duckdb"}: 42 return dialect.type_descriptor(PG_UUID()) 43 if dialect.name == "oracle": 44 return dialect.type_descriptor(ORA_RAW(16)) 45 if self.binary: 46 return dialect.type_descriptor(BINARY(16)) 47 return dialect.type_descriptor(CHAR(32)) 48 49 def process_bind_param(self, value: bytes | str | uuid.UUID | None, dialect: Dialect) -> bytes | str | None: 50 if value is None: 51 return value 52 if dialect.name in {"postgresql", "duckdb"}: 53 return str(value) 54 value = self.to_uuid(value) 55 if value is None: 56 return value 57 if dialect.name in {"oracle", "spanner+spanner"}: 58 return value.bytes 59 return value.bytes if self.binary else value.hex 60 61 def process_result_value(self, value: bytes | str | uuid.UUID | None, dialect: Dialect) -> uuid.UUID | None: 62 if value is None: 63 return value 64 if isinstance(value, uuid.UUID): 65 return value 66 if dialect.name == "spanner+spanner": 67 return uuid.UUID(bytes=b64decode(value)) 68 if self.binary: 69 return uuid.UUID(bytes=cast("bytes", value)) 70 return uuid.UUID(hex=cast("str", value)) 71 72 @staticmethod 73 def to_uuid(value: Any) -> uuid.UUID | None: 74 if isinstance(value, uuid.UUID) or value is None: 75 return value 76 try: 77 value = uuid.UUID(hex=value) 78 except (TypeError, ValueError): 79 value = uuid.UUID(bytes=value) 80 return cast("uuid.UUID | None", value) 81 82 83 class JSON(TypeDecorator): 84 """Platform-independent JSON type. 
85 86 Uses JSONB type for postgres, BLOB for Oracle, otherwise uses the generic JSON data type. 87 88 JSON = _JSON().with_variant(PG_JSONB, "postgresql").with_variant(ORA_BLOB, "oracle") 89 90 """ 91 92 impl = _JSON 93 cache_ok = True 94 95 @property 96 def python_type(self) -> type[dict]: 97 return dict 98 99 def __init__(self, *args: Any, **kwargs: Any) -> None: 100 """Initialize JSON type""" 101 102 def load_dialect_impl(self, dialect: Dialect) -> Any: 103 if dialect.name == "postgresql": 104 return dialect.type_descriptor(PG_JSONB()) # type: ignore 105 if dialect.name == "oracle": 106 return dialect.type_descriptor(ORA_BLOB()) 107 return dialect.type_descriptor(_JSON()) 108 [end of litestar/contrib/sqlalchemy/types.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/litestar/contrib/sqlalchemy/types.py b/litestar/contrib/sqlalchemy/types.py --- a/litestar/contrib/sqlalchemy/types.py +++ b/litestar/contrib/sqlalchemy/types.py @@ -4,11 +4,12 @@ from base64 import b64decode from typing import TYPE_CHECKING, Any, cast +from sqlalchemy import text, util from sqlalchemy.dialects.oracle import BLOB as ORA_BLOB from sqlalchemy.dialects.oracle import RAW as ORA_RAW from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB from sqlalchemy.dialects.postgresql import UUID as PG_UUID -from sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, TypeDecorator +from sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, SchemaType, TypeDecorator from sqlalchemy.types import JSON as _JSON if TYPE_CHECKING: @@ -80,7 +81,7 @@ return cast("uuid.UUID | None", value) -class JSON(TypeDecorator): +class JSON(TypeDecorator, SchemaType): # type: ignore """Platform-independent JSON type. Uses JSONB type for postgres, BLOB for Oracle, otherwise uses the generic JSON data type. @@ -98,6 +99,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize JSON type""" + self.name = kwargs.pop("name", None) + self.oracle_strict = kwargs.pop("oracle_strict", True) def load_dialect_impl(self, dialect: Dialect) -> Any: if dialect.name == "postgresql": @@ -105,3 +108,31 @@ if dialect.name == "oracle": return dialect.type_descriptor(ORA_BLOB()) return dialect.type_descriptor(_JSON()) + + def _should_create_constraint(self, compiler: Any, **kw: Any) -> bool: + return bool(compiler.dialect.name == "oracle") + + def _variant_mapping_for_set_table(self, column: Any) -> dict | None: + if column.type._variant_mapping: + variant_mapping = dict(column.type._variant_mapping) + variant_mapping["_default"] = column.type + else: + variant_mapping = None + return variant_mapping + + @util.preload_module("sqlalchemy.sql.schema") + def _set_table(self, column: Any, table: Any) -> None: + schema = util.preloaded.sql_schema + variant_mapping = self._variant_mapping_for_set_table(column) + constraint_options = "(strict)" if self.oracle_strict else "" + sqltext = text(f"{column.name} is json {constraint_options}") + e = schema.CheckConstraint( + sqltext, + name=f"{column.name}_is_json", + _create_rule=util.portable_instancemethod( # type: ignore[no-untyped-call] + self._should_create_constraint, + {"variant_mapping": variant_mapping}, + ), + _type_bound=True, + ) + table.append_constraint(e)
{"golden_diff": "diff --git a/litestar/contrib/sqlalchemy/types.py b/litestar/contrib/sqlalchemy/types.py\n--- a/litestar/contrib/sqlalchemy/types.py\n+++ b/litestar/contrib/sqlalchemy/types.py\n@@ -4,11 +4,12 @@\n from base64 import b64decode\n from typing import TYPE_CHECKING, Any, cast\n \n+from sqlalchemy import text, util\n from sqlalchemy.dialects.oracle import BLOB as ORA_BLOB\n from sqlalchemy.dialects.oracle import RAW as ORA_RAW\n from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB\n from sqlalchemy.dialects.postgresql import UUID as PG_UUID\n-from sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, TypeDecorator\n+from sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, SchemaType, TypeDecorator\n from sqlalchemy.types import JSON as _JSON\n \n if TYPE_CHECKING:\n@@ -80,7 +81,7 @@\n return cast(\"uuid.UUID | None\", value)\n \n \n-class JSON(TypeDecorator):\n+class JSON(TypeDecorator, SchemaType): # type: ignore\n \"\"\"Platform-independent JSON type.\n \n Uses JSONB type for postgres, BLOB for Oracle, otherwise uses the generic JSON data type.\n@@ -98,6 +99,8 @@\n \n def __init__(self, *args: Any, **kwargs: Any) -> None:\n \"\"\"Initialize JSON type\"\"\"\n+ self.name = kwargs.pop(\"name\", None)\n+ self.oracle_strict = kwargs.pop(\"oracle_strict\", True)\n \n def load_dialect_impl(self, dialect: Dialect) -> Any:\n if dialect.name == \"postgresql\":\n@@ -105,3 +108,31 @@\n if dialect.name == \"oracle\":\n return dialect.type_descriptor(ORA_BLOB())\n return dialect.type_descriptor(_JSON())\n+\n+ def _should_create_constraint(self, compiler: Any, **kw: Any) -> bool:\n+ return bool(compiler.dialect.name == \"oracle\")\n+\n+ def _variant_mapping_for_set_table(self, column: Any) -> dict | None:\n+ if column.type._variant_mapping:\n+ variant_mapping = dict(column.type._variant_mapping)\n+ variant_mapping[\"_default\"] = column.type\n+ else:\n+ variant_mapping = None\n+ return variant_mapping\n+\n+ @util.preload_module(\"sqlalchemy.sql.schema\")\n+ def _set_table(self, column: Any, table: Any) -> None:\n+ schema = util.preloaded.sql_schema\n+ variant_mapping = self._variant_mapping_for_set_table(column)\n+ constraint_options = \"(strict)\" if self.oracle_strict else \"\"\n+ sqltext = text(f\"{column.name} is json {constraint_options}\")\n+ e = schema.CheckConstraint(\n+ sqltext,\n+ name=f\"{column.name}_is_json\",\n+ _create_rule=util.portable_instancemethod( # type: ignore[no-untyped-call]\n+ self._should_create_constraint,\n+ {\"variant_mapping\": variant_mapping},\n+ ),\n+ _type_bound=True,\n+ )\n+ table.append_constraint(e)\n", "issue": "StaticFilesConfig and virtual directories\nI'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem. \r\n\r\nThis is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). 
I think this condition should be relaxed to support virtual filesystems.\r\n\r\nhttps://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32\n", "before_files": [{"content": "from __future__ import annotations\n\nimport uuid\nfrom base64 import b64decode\nfrom typing import TYPE_CHECKING, Any, cast\n\nfrom sqlalchemy.dialects.oracle import BLOB as ORA_BLOB\nfrom sqlalchemy.dialects.oracle import RAW as ORA_RAW\nfrom sqlalchemy.dialects.postgresql import JSONB as PG_JSONB\nfrom sqlalchemy.dialects.postgresql import UUID as PG_UUID\nfrom sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, TypeDecorator\nfrom sqlalchemy.types import JSON as _JSON\n\nif TYPE_CHECKING:\n from sqlalchemy.engine import Dialect\n\nBigIntIdentity = BigInteger().with_variant(Integer, \"sqlite\")\n\n\nclass GUID(TypeDecorator):\n \"\"\"Platform-independent GUID type.\n\n Uses PostgreSQL's UUID type, Oracle's RAW(16) type, otherwise uses\n BINARY(16) or CHAR(32), storing as stringified hex values.\n\n Will accept stringified UUIDs as a hexstring or an actual UUID\n\n \"\"\"\n\n impl = BINARY(16)\n cache_ok = True\n\n @property\n def python_type(self) -> type[uuid.UUID]:\n return uuid.UUID\n\n def __init__(self, *args: Any, binary: bool = True, **kwargs: Any) -> None:\n self.binary = binary\n\n def load_dialect_impl(self, dialect: Dialect) -> Any:\n if dialect.name in {\"postgresql\", \"duckdb\"}:\n return dialect.type_descriptor(PG_UUID())\n if dialect.name == \"oracle\":\n return dialect.type_descriptor(ORA_RAW(16))\n if self.binary:\n return dialect.type_descriptor(BINARY(16))\n return dialect.type_descriptor(CHAR(32))\n\n def process_bind_param(self, value: bytes | str | uuid.UUID | None, dialect: Dialect) -> bytes | str | None:\n if value is None:\n return value\n if dialect.name in {\"postgresql\", \"duckdb\"}:\n return str(value)\n value = self.to_uuid(value)\n if value is None:\n return value\n if dialect.name in {\"oracle\", \"spanner+spanner\"}:\n return value.bytes\n return value.bytes if self.binary else value.hex\n\n def process_result_value(self, value: bytes | str | uuid.UUID | None, dialect: Dialect) -> uuid.UUID | None:\n if value is None:\n return value\n if isinstance(value, uuid.UUID):\n return value\n if dialect.name == \"spanner+spanner\":\n return uuid.UUID(bytes=b64decode(value))\n if self.binary:\n return uuid.UUID(bytes=cast(\"bytes\", value))\n return uuid.UUID(hex=cast(\"str\", value))\n\n @staticmethod\n def to_uuid(value: Any) -> uuid.UUID | None:\n if isinstance(value, uuid.UUID) or value is None:\n return value\n try:\n value = uuid.UUID(hex=value)\n except (TypeError, ValueError):\n value = uuid.UUID(bytes=value)\n return cast(\"uuid.UUID | None\", value)\n\n\nclass JSON(TypeDecorator):\n \"\"\"Platform-independent JSON type.\n\n Uses JSONB type for postgres, BLOB for Oracle, otherwise uses the generic JSON data type.\n\n JSON = _JSON().with_variant(PG_JSONB, \"postgresql\").with_variant(ORA_BLOB, \"oracle\")\n\n \"\"\"\n\n impl = _JSON\n cache_ok = True\n\n @property\n def python_type(self) -> type[dict]:\n return dict\n\n def __init__(self, *args: Any, **kwargs: Any) -> None:\n \"\"\"Initialize JSON type\"\"\"\n\n def load_dialect_impl(self, dialect: Dialect) -> Any:\n if dialect.name == \"postgresql\":\n return dialect.type_descriptor(PG_JSONB()) # type: ignore\n if dialect.name == \"oracle\":\n return dialect.type_descriptor(ORA_BLOB())\n return dialect.type_descriptor(_JSON())\n", "path": 
"litestar/contrib/sqlalchemy/types.py"}]}
num_tokens_prompt: 1,776
num_tokens_diff:   682
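Every prompt asks for a patch consumable by `git apply`, so a natural check is to materialise `before_files` into a scratch repository and dry-run a candidate diff against it. This dump ships no official scorer, so treat the following as one plausible reconstruction of that loop, not the dataset's actual verifier:

```python
import json
import pathlib
import subprocess
import tempfile

def patch_applies(verification_info: str, candidate_diff: str) -> bool:
    """Dry-run a candidate diff against one record's pre-patch files."""
    info = json.loads(verification_info)
    with tempfile.TemporaryDirectory() as repo:
        subprocess.run(["git", "init", "-q", repo], check=True)
        for f in info["before_files"]:
            path = pathlib.Path(repo, f["path"])
            path.parent.mkdir(parents=True, exist_ok=True)
            path.write_text(f["content"])
        # `git apply --check -` reads the patch from stdin and reports
        # whether it would apply cleanly, without modifying anything.
        proc = subprocess.run(
            ["git", "apply", "--check", "-"],
            cwd=repo,
            input=candidate_diff.encode(),
            capture_output=True,
        )
    return proc.returncode == 0

print(patch_applies(row["verification_info"], row["golden_diff"]))  # expect True
```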
problem_id:   gh_patches_debug_19016
source:       rasdani/github-patches
task_type:    git_diff
in_source_id: sql-machine-learning__elasticdl-1401
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [PS-1] RPC service `pull_embedding_vector` implementation </issue> <code> [start of elasticdl/python/ps/servicer.py] 1 import threading 2 3 from google.protobuf import empty_pb2 4 5 from elasticdl.proto import elasticdl_pb2, elasticdl_pb2_grpc 6 from elasticdl.python.common.dtypes import dtype_numpy_to_tensor 7 8 9 class PserverServicer(elasticdl_pb2_grpc.PserverServicer): 10 """PS service implementation""" 11 12 def __init__( 13 self, 14 parameters, 15 grads_to_wait, 16 optimizer, 17 lr_staleness_modulation=False, 18 use_async=False, 19 ): 20 self._parameters = parameters 21 self._grads_to_wait = grads_to_wait 22 self._optimizer = optimizer 23 self._lr_staleness_modulation = lr_staleness_modulation 24 self._use_async = use_async 25 self._version = 0 26 self._lock = threading.Lock() 27 28 def pull_variable(self, request, _): 29 """ 30 Response with all non-embedding parameters if initialized. 31 """ 32 res = elasticdl_pb2.PullVariableResponse() 33 if not self._parameters.init_status: 34 res.model_init_status = False 35 return res 36 37 # Only sync-SGD needs lock 38 # TODO: use a read-write lock to support multiple concurrent reads 39 if not self._use_async: 40 self._lock.acquire() 41 res.model.version = self._parameters.version 42 for name, var in self._parameters.non_embedding_params.items(): 43 tensor = res.model.param.add() 44 tensor.name = name 45 tensor.dim.extend(var.shape.as_list()) 46 var_values = var.numpy() 47 tensor.content = var_values.tobytes() 48 tensor.dtype = dtype_numpy_to_tensor(var_values.dtype) 49 if not self._use_async: 50 self._lock.release() 51 res.model_init_status = True 52 return res 53 54 def pull_embedding_vector(self, request, _): 55 # TODO: implement this RPC service 56 return elasticdl_pb2.Tensor() 57 58 def push_model(self, request, _): 59 with self._lock: 60 self._parameters.init_from_model_pb(request) 61 return empty_pb2.Empty() 62 63 def push_gradient(self, request, _): 64 # TODO: implement this RPC service 65 return elasticdl_pb2.PushGradientResponse() 66 [end of elasticdl/python/ps/servicer.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/elasticdl/python/ps/servicer.py b/elasticdl/python/ps/servicer.py --- a/elasticdl/python/ps/servicer.py +++ b/elasticdl/python/ps/servicer.py @@ -4,6 +4,7 @@ from elasticdl.proto import elasticdl_pb2, elasticdl_pb2_grpc from elasticdl.python.common.dtypes import dtype_numpy_to_tensor +from elasticdl.python.common.tensor import Tensor, serialize_tensor class PserverServicer(elasticdl_pb2_grpc.PserverServicer): @@ -52,8 +53,15 @@ return res def pull_embedding_vector(self, request, _): - # TODO: implement this RPC service - return elasticdl_pb2.Tensor() + ret = elasticdl_pb2.Tensor() + if not request.ids: + return ret + embedding_vectors = self._parameters.get_embedding_param( + request.name, request.ids + ) + tensor = Tensor(values=embedding_vectors) + serialize_tensor(tensor, ret) + return ret def push_model(self, request, _): with self._lock:
{"golden_diff": "diff --git a/elasticdl/python/ps/servicer.py b/elasticdl/python/ps/servicer.py\n--- a/elasticdl/python/ps/servicer.py\n+++ b/elasticdl/python/ps/servicer.py\n@@ -4,6 +4,7 @@\n \n from elasticdl.proto import elasticdl_pb2, elasticdl_pb2_grpc\n from elasticdl.python.common.dtypes import dtype_numpy_to_tensor\n+from elasticdl.python.common.tensor import Tensor, serialize_tensor\n \n \n class PserverServicer(elasticdl_pb2_grpc.PserverServicer):\n@@ -52,8 +53,15 @@\n return res\n \n def pull_embedding_vector(self, request, _):\n- # TODO: implement this RPC service\n- return elasticdl_pb2.Tensor()\n+ ret = elasticdl_pb2.Tensor()\n+ if not request.ids:\n+ return ret\n+ embedding_vectors = self._parameters.get_embedding_param(\n+ request.name, request.ids\n+ )\n+ tensor = Tensor(values=embedding_vectors)\n+ serialize_tensor(tensor, ret)\n+ return ret\n \n def push_model(self, request, _):\n with self._lock:\n", "issue": "[PS-1] RPC service `pull_embedding_vector` implementation\n\n", "before_files": [{"content": "import threading\n\nfrom google.protobuf import empty_pb2\n\nfrom elasticdl.proto import elasticdl_pb2, elasticdl_pb2_grpc\nfrom elasticdl.python.common.dtypes import dtype_numpy_to_tensor\n\n\nclass PserverServicer(elasticdl_pb2_grpc.PserverServicer):\n \"\"\"PS service implementation\"\"\"\n\n def __init__(\n self,\n parameters,\n grads_to_wait,\n optimizer,\n lr_staleness_modulation=False,\n use_async=False,\n ):\n self._parameters = parameters\n self._grads_to_wait = grads_to_wait\n self._optimizer = optimizer\n self._lr_staleness_modulation = lr_staleness_modulation\n self._use_async = use_async\n self._version = 0\n self._lock = threading.Lock()\n\n def pull_variable(self, request, _):\n \"\"\"\n Response with all non-embedding parameters if initialized.\n \"\"\"\n res = elasticdl_pb2.PullVariableResponse()\n if not self._parameters.init_status:\n res.model_init_status = False\n return res\n\n # Only sync-SGD needs lock\n # TODO: use a read-write lock to support multiple concurrent reads\n if not self._use_async:\n self._lock.acquire()\n res.model.version = self._parameters.version\n for name, var in self._parameters.non_embedding_params.items():\n tensor = res.model.param.add()\n tensor.name = name\n tensor.dim.extend(var.shape.as_list())\n var_values = var.numpy()\n tensor.content = var_values.tobytes()\n tensor.dtype = dtype_numpy_to_tensor(var_values.dtype)\n if not self._use_async:\n self._lock.release()\n res.model_init_status = True\n return res\n\n def pull_embedding_vector(self, request, _):\n # TODO: implement this RPC service\n return elasticdl_pb2.Tensor()\n\n def push_model(self, request, _):\n with self._lock:\n self._parameters.init_from_model_pb(request)\n return empty_pb2.Empty()\n\n def push_gradient(self, request, _):\n # TODO: implement this RPC service\n return elasticdl_pb2.PushGradientResponse()\n", "path": "elasticdl/python/ps/servicer.py"}]}
num_tokens_prompt: 1,145
num_tokens_diff:   256
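The `num_tokens_prompt` and `num_tokens_diff` columns are precomputed counts, and nothing in this dump says which tokenizer produced them. Purely as an illustration, with the tokenizer choice an explicit assumption, a recount would look like:

```python
import tiktoken  # assumption: a GPT-style BPE; the dataset's actual tokenizer is unspecified

enc = tiktoken.get_encoding("cl100k_base")  # assumed encoding; may not reproduce the stored counts
print(len(enc.encode(row["prompt"])), "vs stored", row["num_tokens_prompt"])
```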
problem_id:   gh_patches_debug_16671
source:       rasdani/github-patches
task_type:    git_diff
in_source_id: CiviWiki__OpenCiviWiki-1116
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [BUG]: FrontEnd routing issues ### Description The header in the application routes the application incorrectly and needs to be fixed. If it routes the anchor tag incorrectly, we'll be having a nested routes tree of /page1/page2/... ### What should have happened? Its end output must be like this ```html <a class="dark-white-text menu-text" href="/howitworks">How CiviWiki Works</a> ``` instead of ```html <a class="dark-white-text menu-text" href="howitworks">How CiviWiki Works</a> ``` ### What browser(s) are you seeing the problem on? All </issue> <code> [start of project/frontend_views/urls.py] 1 from django.urls import path 2 from frontend_views import views 3 4 urlpatterns = [ 5 path("about/", views.about_view, name="about"), 6 path("support_us/", views.support_us_view, name="support us"), 7 path("howitworks/", views.how_it_works_view, name="how it works"), 8 path("profile/<str:username>/", views.user_profile, name="profile"), 9 path("thread/<int:thread_id>/", views.issue_thread, name="issue thread"), 10 path("profile/", views.user_profile, name="default_profile"), 11 path("", views.base_view, name="base"), 12 path("thread/<int:thread_id>/csv/", views.civi2csv, name="civi2csv"), 13 ] 14 [end of project/frontend_views/urls.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/project/frontend_views/urls.py b/project/frontend_views/urls.py --- a/project/frontend_views/urls.py +++ b/project/frontend_views/urls.py @@ -3,10 +3,10 @@ urlpatterns = [ path("about/", views.about_view, name="about"), - path("support_us/", views.support_us_view, name="support us"), - path("howitworks/", views.how_it_works_view, name="how it works"), + path("support_us/", views.support_us_view, name="support_us"), + path("howitworks/", views.how_it_works_view, name="how_it_works"), path("profile/<str:username>/", views.user_profile, name="profile"), - path("thread/<int:thread_id>/", views.issue_thread, name="issue thread"), + path("thread/<int:thread_id>/", views.issue_thread, name="issue_thread"), path("profile/", views.user_profile, name="default_profile"), path("", views.base_view, name="base"), path("thread/<int:thread_id>/csv/", views.civi2csv, name="civi2csv"),
{"golden_diff": "diff --git a/project/frontend_views/urls.py b/project/frontend_views/urls.py\n--- a/project/frontend_views/urls.py\n+++ b/project/frontend_views/urls.py\n@@ -3,10 +3,10 @@\n \n urlpatterns = [\n path(\"about/\", views.about_view, name=\"about\"),\n- path(\"support_us/\", views.support_us_view, name=\"support us\"),\n- path(\"howitworks/\", views.how_it_works_view, name=\"how it works\"),\n+ path(\"support_us/\", views.support_us_view, name=\"support_us\"),\n+ path(\"howitworks/\", views.how_it_works_view, name=\"how_it_works\"),\n path(\"profile/<str:username>/\", views.user_profile, name=\"profile\"),\n- path(\"thread/<int:thread_id>/\", views.issue_thread, name=\"issue thread\"),\n+ path(\"thread/<int:thread_id>/\", views.issue_thread, name=\"issue_thread\"),\n path(\"profile/\", views.user_profile, name=\"default_profile\"),\n path(\"\", views.base_view, name=\"base\"),\n path(\"thread/<int:thread_id>/csv/\", views.civi2csv, name=\"civi2csv\"),\n", "issue": "[BUG]: FrontEnd routing issues\n### Description\r\n\r\nThe header in the application routes the application incorrectly and needs to be fixed. If it routes the anchor tag incorrectly, we'll be having a nested routes tree of /page1/page2/...\r\n\r\n\r\n\r\n### What should have happened?\r\nIts end output must be like this\r\n```html\r\n<a class=\"dark-white-text menu-text\" href=\"/howitworks\">How CiviWiki Works</a>\r\n```\r\ninstead of \r\n```html\r\n<a class=\"dark-white-text menu-text\" href=\"howitworks\">How CiviWiki Works</a>\r\n```\r\n\r\n### What browser(s) are you seeing the problem on?\r\nAll\r\n\n", "before_files": [{"content": "from django.urls import path\nfrom frontend_views import views\n\nurlpatterns = [\n path(\"about/\", views.about_view, name=\"about\"),\n path(\"support_us/\", views.support_us_view, name=\"support us\"),\n path(\"howitworks/\", views.how_it_works_view, name=\"how it works\"),\n path(\"profile/<str:username>/\", views.user_profile, name=\"profile\"),\n path(\"thread/<int:thread_id>/\", views.issue_thread, name=\"issue thread\"),\n path(\"profile/\", views.user_profile, name=\"default_profile\"),\n path(\"\", views.base_view, name=\"base\"),\n path(\"thread/<int:thread_id>/csv/\", views.civi2csv, name=\"civi2csv\"),\n]\n", "path": "project/frontend_views/urls.py"}]}
num_tokens_prompt: 852
num_tokens_diff:   255
problem_id:   gh_patches_debug_23139
source:       rasdani/github-patches
task_type:    git_diff
in_source_id: Pyomo__pyomo-2740
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Deprecate the old 'pyomo' command syntax In earlier releases, we supported commands like ` pyomo --solver=glpk foo.py bar.dat` And we moved to a nested pyomo command, which uses the following syntax: ` pyomo solve --solver=glpk foo.py bar.dat` However, to enable graceful transition for users, we added a hack to transition to the old syntax if it looks like the user is not specifying a valid subcommand. However, this leads to confusing error messages, and I think it's time to deprecate that hack and start throwing warnings that this hack will be removed in an upcoming release. </issue> <code> [start of pyomo/scripting/pyomo_main.py] 1 # ___________________________________________________________________________ 2 # 3 # Pyomo: Python Optimization Modeling Objects 4 # Copyright (c) 2008-2022 5 # National Technology and Engineering Solutions of Sandia, LLC 6 # Under the terms of Contract DE-NA0003525 with National Technology and 7 # Engineering Solutions of Sandia, LLC, the U.S. Government retains certain 8 # rights in this software. 9 # This software is distributed under the 3-clause BSD License. 10 # ___________________________________________________________________________ 11 12 import sys 13 import copy 14 15 try: 16 import pkg_resources 17 18 pyomo_commands = pkg_resources.iter_entry_points('pyomo.command') 19 except: 20 pyomo_commands = [] 21 # 22 # Load modules associated with Plugins that are defined in 23 # EGG files. 24 # 25 for entrypoint in pyomo_commands: 26 try: 27 plugin_class = entrypoint.load() 28 except Exception: 29 exctype, err, tb = sys.exc_info() # BUG? 30 import traceback 31 32 msg = ( 33 "Error loading pyomo.command entry point %s:\nOriginal %s: %s\n" 34 "Traceback:\n%s" 35 % (entrypoint, exctype.__name__, err, ''.join(traceback.format_tb(tb))) 36 ) 37 # clear local variables to remove circular references 38 exctype = err = tb = None 39 # TODO: Should this just log an error and re-raise the original 40 # exception? 41 raise ImportError(msg) 42 43 44 def main(args=None): 45 # 46 # Load subcommands 47 # 48 from pyomo.scripting import pyomo_parser 49 import pyomo.environ 50 51 # 52 # Parse the arguments 53 # 54 parser = pyomo_parser.get_parser() 55 if args is None: 56 args = copy.copy(sys.argv[1:]) 57 # 58 # This is a hack to convert a command-line to a 'solve' subcommand 59 # 60 if not args: 61 args.append('-h') 62 # FIXME: This should use the logger and not print() 63 if args[0][0] == '-': 64 if args[0] not in ['-h', '--help', '--version']: 65 print("WARNING: converting to the 'pyomo solve' subcommand") 66 args = ['solve'] + args[0:] 67 elif args[0] not in pyomo_parser.subparsers: 68 print("WARNING: converting to the 'pyomo solve' subcommand") 69 args = ['solve'] + args[0:] 70 # 71 # Process arguments 72 # 73 _options, _unparsed = parser.parse_known_args(args) 74 # 75 # Process the results 76 # 77 if _options.func.__code__.co_argcount == 1: 78 # 79 # If the execution function only accepts one argument, then we 80 # create an exception if there are unparsed arguments. 
81 # 82 if len(_unparsed) > 0: 83 # 84 # Re-parse the command-line to create an exception 85 # 86 parser.parse_args(_unparsed) 87 retval = _options.func(_options) 88 else: 89 retval = _options.func(_options, _unparsed) 90 return retval 91 92 93 def main_console_script(): 94 "This is the entry point for the main Pyomo script" 95 # Note that we eat the retval data structure and only return the 96 # process return code 97 ans = main() 98 try: 99 return ans.errorcode 100 except AttributeError: 101 return ans 102 103 104 if __name__ == '__main__': 105 sys.exit(main_console_script()) 106 [end of pyomo/scripting/pyomo_main.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/pyomo/scripting/pyomo_main.py b/pyomo/scripting/pyomo_main.py --- a/pyomo/scripting/pyomo_main.py +++ b/pyomo/scripting/pyomo_main.py @@ -11,6 +11,7 @@ import sys import copy +from pyomo.common.deprecation import deprecation_warning try: import pkg_resources @@ -59,13 +60,16 @@ # if not args: args.append('-h') - # FIXME: This should use the logger and not print() if args[0][0] == '-': if args[0] not in ['-h', '--help', '--version']: - print("WARNING: converting to the 'pyomo solve' subcommand") + deprecation_warning("Running the 'pyomo' script with no subcommand is deprecated. " + "Defaulting to 'pyomo solve'", + version='TBD') args = ['solve'] + args[0:] elif args[0] not in pyomo_parser.subparsers: - print("WARNING: converting to the 'pyomo solve' subcommand") + deprecation_warning("Running the 'pyomo' script with no subcommand is deprecated. " + "Defaulting to 'pyomo solve'", + version='TBD') args = ['solve'] + args[0:] # # Process arguments
{"golden_diff": "diff --git a/pyomo/scripting/pyomo_main.py b/pyomo/scripting/pyomo_main.py\n--- a/pyomo/scripting/pyomo_main.py\n+++ b/pyomo/scripting/pyomo_main.py\n@@ -11,6 +11,7 @@\n \n import sys\n import copy\n+from pyomo.common.deprecation import deprecation_warning\n \n try:\n import pkg_resources\n@@ -59,13 +60,16 @@\n #\n if not args:\n args.append('-h')\n- # FIXME: This should use the logger and not print()\n if args[0][0] == '-':\n if args[0] not in ['-h', '--help', '--version']:\n- print(\"WARNING: converting to the 'pyomo solve' subcommand\")\n+ deprecation_warning(\"Running the 'pyomo' script with no subcommand is deprecated. \"\n+ \"Defaulting to 'pyomo solve'\",\n+ version='TBD')\n args = ['solve'] + args[0:]\n elif args[0] not in pyomo_parser.subparsers:\n- print(\"WARNING: converting to the 'pyomo solve' subcommand\")\n+ deprecation_warning(\"Running the 'pyomo' script with no subcommand is deprecated. \"\n+ \"Defaulting to 'pyomo solve'\",\n+ version='TBD')\n args = ['solve'] + args[0:]\n #\n # Process arguments\n", "issue": "Deprecate the old 'pyomo' command syntax\nIn earlier releases, we supported commands like\r\n` pyomo --solver=glpk foo.py bar.dat`\r\nAnd we moved to a nested pyomo command, which uses the following syntax:\r\n` pyomo solve --solver=glpk foo.py bar.dat`\r\n\r\nHowever, to enable graceful transition for users, we added a hack to transition to the old syntax if it looks like the user is not specifying a valid subcommand. However, this leads to confusing error messages, and I think it's time to deprecate that hack and start throwing warnings that this hack will be removed in an upcoming release.\n", "before_files": [{"content": "# ___________________________________________________________________________\n#\n# Pyomo: Python Optimization Modeling Objects\n# Copyright (c) 2008-2022\n# National Technology and Engineering Solutions of Sandia, LLC\n# Under the terms of Contract DE-NA0003525 with National Technology and\n# Engineering Solutions of Sandia, LLC, the U.S. 
Government retains certain\n# rights in this software.\n# This software is distributed under the 3-clause BSD License.\n# ___________________________________________________________________________\n\nimport sys\nimport copy\n\ntry:\n import pkg_resources\n\n pyomo_commands = pkg_resources.iter_entry_points('pyomo.command')\nexcept:\n pyomo_commands = []\n#\n# Load modules associated with Plugins that are defined in\n# EGG files.\n#\nfor entrypoint in pyomo_commands:\n try:\n plugin_class = entrypoint.load()\n except Exception:\n exctype, err, tb = sys.exc_info() # BUG?\n import traceback\n\n msg = (\n \"Error loading pyomo.command entry point %s:\\nOriginal %s: %s\\n\"\n \"Traceback:\\n%s\"\n % (entrypoint, exctype.__name__, err, ''.join(traceback.format_tb(tb)))\n )\n # clear local variables to remove circular references\n exctype = err = tb = None\n # TODO: Should this just log an error and re-raise the original\n # exception?\n raise ImportError(msg)\n\n\ndef main(args=None):\n #\n # Load subcommands\n #\n from pyomo.scripting import pyomo_parser\n import pyomo.environ\n\n #\n # Parse the arguments\n #\n parser = pyomo_parser.get_parser()\n if args is None:\n args = copy.copy(sys.argv[1:])\n #\n # This is a hack to convert a command-line to a 'solve' subcommand\n #\n if not args:\n args.append('-h')\n # FIXME: This should use the logger and not print()\n if args[0][0] == '-':\n if args[0] not in ['-h', '--help', '--version']:\n print(\"WARNING: converting to the 'pyomo solve' subcommand\")\n args = ['solve'] + args[0:]\n elif args[0] not in pyomo_parser.subparsers:\n print(\"WARNING: converting to the 'pyomo solve' subcommand\")\n args = ['solve'] + args[0:]\n #\n # Process arguments\n #\n _options, _unparsed = parser.parse_known_args(args)\n #\n # Process the results\n #\n if _options.func.__code__.co_argcount == 1:\n #\n # If the execution function only accepts one argument, then we\n # create an exception if there are unparsed arguments.\n #\n if len(_unparsed) > 0:\n #\n # Re-parse the command-line to create an exception\n #\n parser.parse_args(_unparsed)\n retval = _options.func(_options)\n else:\n retval = _options.func(_options, _unparsed)\n return retval\n\n\ndef main_console_script():\n \"This is the entry point for the main Pyomo script\"\n # Note that we eat the retval data structure and only return the\n # process return code\n ans = main()\n try:\n return ans.errorcode\n except AttributeError:\n return ans\n\n\nif __name__ == '__main__':\n sys.exit(main_console_script())\n", "path": "pyomo/scripting/pyomo_main.py"}]}
1,643
307
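For reference, the pyomo diff in the record above replaces bare `print()` warnings with `pyomo.common.deprecation.deprecation_warning(...)` when the script silently rewrites a bare `pyomo ...` invocation into `pyomo solve ...`. A minimal standalone sketch of the same defaulting-with-a-warning pattern, using only the standard library; the subcommand set and the `parse` wiring here are illustrative, not pyomo's actual parser:

```python
import sys
import warnings

KNOWN_SUBCOMMANDS = {"solve", "convert", "help"}  # illustrative set, not pyomo's real registry


def normalize_args(args):
    """Default to the 'solve' subcommand, but warn that the implicit form is deprecated."""
    if not args:
        return ["-h"]
    first = args[0]
    is_bare_flag = first.startswith("-") and first not in ("-h", "--help", "--version")
    is_unknown_word = not first.startswith("-") and first not in KNOWN_SUBCOMMANDS
    if is_bare_flag or is_unknown_word:
        # Same behavior as before, but now the user is told it is going away.
        warnings.warn(
            "Running with no subcommand is deprecated; defaulting to 'solve'",
            DeprecationWarning,
            stacklevel=2,
        )
        return ["solve"] + args
    return args


if __name__ == "__main__":
    print(normalize_args(sys.argv[1:]))
```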
gh_patches_debug_21797
rasdani/github-patches
git_diff
bookwyrm-social__bookwyrm-664
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> 403 attempting to login ![6A0CC90F-F750-4128-83AA-BB02D4E649BD](https://user-images.githubusercontent.com/918105/109430197-f2d43600-79b4-11eb-8855-a58decfda03e.png) It looks like if I open another tab I am logged into my account though. </issue> <code> [start of bookwyrm/views/authentication.py] 1 ''' class views for login/register views ''' 2 from django.contrib.auth import authenticate, login, logout 3 from django.contrib.auth.decorators import login_required 4 from django.core.exceptions import PermissionDenied 5 from django.shortcuts import get_object_or_404, redirect 6 from django.template.response import TemplateResponse 7 from django.utils import timezone 8 from django.utils.decorators import method_decorator 9 from django.views import View 10 11 from bookwyrm import forms, models 12 from bookwyrm.settings import DOMAIN 13 14 15 # pylint: disable= no-self-use 16 class Login(View): 17 ''' authenticate an existing user ''' 18 def get(self, request): 19 ''' login page ''' 20 if request.user.is_authenticated: 21 return redirect('/') 22 # sene user to the login page 23 data = { 24 'title': 'Login', 25 'login_form': forms.LoginForm(), 26 'register_form': forms.RegisterForm(), 27 } 28 return TemplateResponse(request, 'login.html', data) 29 30 def post(self, request): 31 ''' authentication action ''' 32 login_form = forms.LoginForm(request.POST) 33 34 localname = login_form.data['localname'] 35 if '@' in localname: # looks like an email address to me 36 email = localname 37 try: 38 username = models.User.objects.get(email=email) 39 except models.User.DoesNotExist: # maybe it's a full username? 40 username = localname 41 else: 42 username = '%s@%s' % (localname, DOMAIN) 43 password = login_form.data['password'] 44 user = authenticate(request, username=username, password=password) 45 if user is not None: 46 # successful login 47 login(request, user) 48 user.last_active_date = timezone.now() 49 user.save(broadcast=False) 50 return redirect(request.GET.get('next', '/')) 51 52 # login errors 53 login_form.non_field_errors = 'Username or password are incorrect' 54 register_form = forms.RegisterForm() 55 data = { 56 'login_form': login_form, 57 'register_form': register_form 58 } 59 return TemplateResponse(request, 'login.html', data) 60 61 62 class Register(View): 63 ''' register a user ''' 64 def post(self, request): 65 ''' join the server ''' 66 if not models.SiteSettings.get().allow_registration: 67 invite_code = request.POST.get('invite_code') 68 69 if not invite_code: 70 raise PermissionDenied 71 72 invite = get_object_or_404(models.SiteInvite, code=invite_code) 73 if not invite.valid(): 74 raise PermissionDenied 75 else: 76 invite = None 77 78 form = forms.RegisterForm(request.POST) 79 errors = False 80 if not form.is_valid(): 81 errors = True 82 83 localname = form.data['localname'].strip() 84 email = form.data['email'] 85 password = form.data['password'] 86 87 # check localname and email uniqueness 88 if models.User.objects.filter(localname=localname).first(): 89 form.errors['localname'] = [ 90 'User with this username already exists'] 91 errors = True 92 93 if errors: 94 data = { 95 'login_form': forms.LoginForm(), 96 'register_form': form, 97 'invite': invite, 98 'valid': invite.valid() if invite else True, 99 } 100 if invite: 101 return TemplateResponse(request, 'invite.html', data) 102 return TemplateResponse(request, 'login.html', data) 103 104 username = '%s@%s' % 
(localname, DOMAIN) 105 user = models.User.objects.create_user( 106 username, email, password, localname=localname, local=True) 107 if invite: 108 invite.times_used += 1 109 invite.save() 110 111 login(request, user) 112 return redirect('/') 113 114 115 @method_decorator(login_required, name='dispatch') 116 class Logout(View): 117 ''' log out ''' 118 def get(self, request): 119 ''' done with this place! outa here! ''' 120 logout(request) 121 return redirect('/') 122 [end of bookwyrm/views/authentication.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/bookwyrm/views/authentication.py b/bookwyrm/views/authentication.py --- a/bookwyrm/views/authentication.py +++ b/bookwyrm/views/authentication.py @@ -6,6 +6,7 @@ from django.template.response import TemplateResponse from django.utils import timezone from django.utils.decorators import method_decorator +from django.views.decorators.csrf import csrf_exempt from django.views import View from bookwyrm import forms, models @@ -13,6 +14,7 @@ # pylint: disable= no-self-use +@method_decorator(csrf_exempt, name='dispatch') class Login(View): ''' authenticate an existing user ''' def get(self, request): @@ -29,6 +31,8 @@ def post(self, request): ''' authentication action ''' + if request.user.is_authenticated: + return redirect('/') login_form = forms.LoginForm(request.POST) localname = login_form.data['localname']
{"golden_diff": "diff --git a/bookwyrm/views/authentication.py b/bookwyrm/views/authentication.py\n--- a/bookwyrm/views/authentication.py\n+++ b/bookwyrm/views/authentication.py\n@@ -6,6 +6,7 @@\n from django.template.response import TemplateResponse\n from django.utils import timezone\n from django.utils.decorators import method_decorator\n+from django.views.decorators.csrf import csrf_exempt\n from django.views import View\n \n from bookwyrm import forms, models\n@@ -13,6 +14,7 @@\n \n \n # pylint: disable= no-self-use\n+@method_decorator(csrf_exempt, name='dispatch')\n class Login(View):\n ''' authenticate an existing user '''\n def get(self, request):\n@@ -29,6 +31,8 @@\n \n def post(self, request):\n ''' authentication action '''\n+ if request.user.is_authenticated:\n+ return redirect('/')\n login_form = forms.LoginForm(request.POST)\n \n localname = login_form.data['localname']\n", "issue": "403 attempting to login\n![6A0CC90F-F750-4128-83AA-BB02D4E649BD](https://user-images.githubusercontent.com/918105/109430197-f2d43600-79b4-11eb-8855-a58decfda03e.png)\r\n\r\nIt looks like if I open another tab I am logged into my account though. \n", "before_files": [{"content": "''' class views for login/register views '''\nfrom django.contrib.auth import authenticate, login, logout\nfrom django.contrib.auth.decorators import login_required\nfrom django.core.exceptions import PermissionDenied\nfrom django.shortcuts import get_object_or_404, redirect\nfrom django.template.response import TemplateResponse\nfrom django.utils import timezone\nfrom django.utils.decorators import method_decorator\nfrom django.views import View\n\nfrom bookwyrm import forms, models\nfrom bookwyrm.settings import DOMAIN\n\n\n# pylint: disable= no-self-use\nclass Login(View):\n ''' authenticate an existing user '''\n def get(self, request):\n ''' login page '''\n if request.user.is_authenticated:\n return redirect('/')\n # sene user to the login page\n data = {\n 'title': 'Login',\n 'login_form': forms.LoginForm(),\n 'register_form': forms.RegisterForm(),\n }\n return TemplateResponse(request, 'login.html', data)\n\n def post(self, request):\n ''' authentication action '''\n login_form = forms.LoginForm(request.POST)\n\n localname = login_form.data['localname']\n if '@' in localname: # looks like an email address to me\n email = localname\n try:\n username = models.User.objects.get(email=email)\n except models.User.DoesNotExist: # maybe it's a full username?\n username = localname\n else:\n username = '%s@%s' % (localname, DOMAIN)\n password = login_form.data['password']\n user = authenticate(request, username=username, password=password)\n if user is not None:\n # successful login\n login(request, user)\n user.last_active_date = timezone.now()\n user.save(broadcast=False)\n return redirect(request.GET.get('next', '/'))\n\n # login errors\n login_form.non_field_errors = 'Username or password are incorrect'\n register_form = forms.RegisterForm()\n data = {\n 'login_form': login_form,\n 'register_form': register_form\n }\n return TemplateResponse(request, 'login.html', data)\n\n\nclass Register(View):\n ''' register a user '''\n def post(self, request):\n ''' join the server '''\n if not models.SiteSettings.get().allow_registration:\n invite_code = request.POST.get('invite_code')\n\n if not invite_code:\n raise PermissionDenied\n\n invite = get_object_or_404(models.SiteInvite, code=invite_code)\n if not invite.valid():\n raise PermissionDenied\n else:\n invite = None\n\n form = forms.RegisterForm(request.POST)\n errors = 
False\n if not form.is_valid():\n errors = True\n\n localname = form.data['localname'].strip()\n email = form.data['email']\n password = form.data['password']\n\n # check localname and email uniqueness\n if models.User.objects.filter(localname=localname).first():\n form.errors['localname'] = [\n 'User with this username already exists']\n errors = True\n\n if errors:\n data = {\n 'login_form': forms.LoginForm(),\n 'register_form': form,\n 'invite': invite,\n 'valid': invite.valid() if invite else True,\n }\n if invite:\n return TemplateResponse(request, 'invite.html', data)\n return TemplateResponse(request, 'login.html', data)\n\n username = '%s@%s' % (localname, DOMAIN)\n user = models.User.objects.create_user(\n username, email, password, localname=localname, local=True)\n if invite:\n invite.times_used += 1\n invite.save()\n\n login(request, user)\n return redirect('/')\n\n\n@method_decorator(login_required, name='dispatch')\nclass Logout(View):\n ''' log out '''\n def get(self, request):\n ''' done with this place! outa here! '''\n logout(request)\n return redirect('/')\n", "path": "bookwyrm/views/authentication.py"}]}
1,732
206
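The bookwyrm diff above resolves the 403 by exempting the Login view's `dispatch` from CSRF validation and short-circuiting POSTs from sessions that are already authenticated (the failure mode in the report: a stale login form submitted from a second, logged-in tab). A minimal sketch of that decorator pattern on a generic Django class-based view; the view bodies are placeholders, not bookwyrm's real form handling:

```python
from django.http import HttpResponse
from django.shortcuts import redirect
from django.utils.decorators import method_decorator
from django.views import View
from django.views.decorators.csrf import csrf_exempt


@method_decorator(csrf_exempt, name="dispatch")  # skip CSRF checks for every HTTP method on this view
class Login(View):
    def get(self, request):
        return HttpResponse("login form goes here")  # placeholder body

    def post(self, request):
        if request.user.is_authenticated:
            return redirect("/")  # a stale-token POST from a logged-in tab just goes home
        # ... authenticate the submitted credentials here ...
        return redirect("/")
```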
gh_patches_debug_18116
rasdani/github-patches
git_diff
beeware__toga-2139
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Android Read-only Multi-line text input displays suggestions (Spell Checking) ### Describe the bug When using the "MultilineTextInput" with the "readonly" parameter set to "True" on Android, it will suggest spelling improvements based on dictionary candidates. This behavior results in underlined text on android devices when using a language other than the keyboard language or with unknown words in general. ### Steps to reproduce 1. Use MultilineTextInput in readonly with a set text 2. Compile for android and set the KeyBoard language to a different one than the text. 3. The suggestions should appear, underlining the Text. ### Expected behavior Suggestions do not show up in MultilineTextInput if set to readonly. ### Screenshots _No response_ ### Environment - Operating System: Fedora 38 Workstation / Lineage OS 20.0 - Python version: 3.11.5 - Software versions: - Briefcase: 0.3.15 - Toga: 0.3.1 [tool.briefcase.app.trinker.android] requires = [ #"toga-android~=0.3.1" 'git+https://github.com/beeware/toga.git#subdirectory=core', 'git+https://github.com/beeware/toga.git#subdirectory=android', ] ### Logs ``` ``` ### Additional context I did some research on the topic and while I'm not really an Android dev, I think I found the problem. In "android/src/toga_android/widgets/textinput.py" we need to set the flag [TYPE_TEXT_FLAG_NO_SUGGESTIONS](https://developer.android.com/reference/android/text/InputType#TYPE_TEXT_FLAG_NO_SUGGESTIONS) if "readonly" is true. As said before, I'm not really an Android dev and am not sure how this would be implemented. </issue> <code> [start of android/src/toga_android/widgets/textinput.py] 1 from java import dynamic_proxy 2 from travertino.size import at_least 3 4 from android.text import InputType, TextWatcher 5 from android.view import Gravity, View 6 from android.widget import EditText 7 from toga_android.keys import toga_key 8 9 from .label import TextViewWidget 10 11 12 class TogaTextWatcher(dynamic_proxy(TextWatcher)): 13 def __init__(self, impl): 14 super().__init__() 15 self.impl = impl 16 17 def beforeTextChanged(self, _charSequence, _start, _count, _after): 18 pass 19 20 def afterTextChanged(self, _editable): 21 self.impl._on_change() 22 23 def onTextChanged(self, _charSequence, _start, _before, _count): 24 pass 25 26 27 class TogaKeyListener(dynamic_proxy(View.OnKeyListener)): 28 def __init__(self, impl): 29 super().__init__() 30 self.impl = impl 31 32 def onKey(self, _view, _key, _event): 33 event_info = toga_key(_event) 34 if event_info is None: 35 pass # pragma: nocover 36 else: 37 key_pressed = event_info["key"].value 38 if (key_pressed == "<enter>" or key_pressed == "numpad:enter") and ( 39 int(_event.getAction()) == 1 40 ): 41 self.impl._on_confirm() 42 return False 43 44 45 class TogaFocusListener(dynamic_proxy(View.OnFocusChangeListener)): 46 def __init__(self, impl): 47 super().__init__() 48 self.impl = impl 49 50 def onFocusChange(self, view, has_focus): 51 if has_focus: 52 self.impl._on_gain_focus() 53 else: 54 self.impl._on_lose_focus() 55 56 57 class TextInput(TextViewWidget): 58 def create(self, input_type=InputType.TYPE_CLASS_TEXT): 59 self.native = EditText(self._native_activity) 60 self.native.setInputType(input_type) 61 self.cache_textview_defaults() 62 63 self.native.addTextChangedListener(TogaTextWatcher(self)) 64 self.native.setOnKeyListener(TogaKeyListener(self)) 65 self.native.setOnFocusChangeListener(TogaFocusListener(self)) 
66 67 def get_value(self): 68 return str(self.native.getText()) 69 70 def set_value(self, value): 71 self.native.setText(value) 72 73 def get_readonly(self): 74 return not self.native.isFocusable() 75 76 def set_readonly(self, readonly): 77 if readonly: 78 # Implicitly calls setFocusableInTouchMode(False) 79 self.native.setFocusable(False) 80 else: 81 # Implicitly calls setFocusable(True) 82 self.native.setFocusableInTouchMode(True) 83 84 def get_placeholder(self): 85 return str(self.native.getHint()) 86 87 def set_placeholder(self, value): 88 self.native.setHint(value) 89 90 def set_alignment(self, value): 91 self.set_textview_alignment(value, Gravity.CENTER_VERTICAL) 92 93 def set_error(self, error_message): 94 self.native.setError(error_message) 95 96 def clear_error(self): 97 self.native.setError(None) 98 99 def is_valid(self): 100 return self.native.getError() is None 101 102 def _on_change(self): 103 self.interface.on_change(None) 104 self.interface._validate() 105 106 def _on_confirm(self): 107 self.interface.on_confirm(None) 108 109 def _on_gain_focus(self): 110 self.interface.on_gain_focus(None) 111 112 def _on_lose_focus(self): 113 self.interface.on_lose_focus(None) 114 115 def rehint(self): 116 self.interface.intrinsic.width = at_least(self.interface._MIN_WIDTH) 117 self.native.measure(View.MeasureSpec.UNSPECIFIED, View.MeasureSpec.UNSPECIFIED) 118 self.interface.intrinsic.height = self.native.getMeasuredHeight() 119 [end of android/src/toga_android/widgets/textinput.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/android/src/toga_android/widgets/textinput.py b/android/src/toga_android/widgets/textinput.py --- a/android/src/toga_android/widgets/textinput.py +++ b/android/src/toga_android/widgets/textinput.py @@ -77,9 +77,19 @@ if readonly: # Implicitly calls setFocusableInTouchMode(False) self.native.setFocusable(False) + # Add TYPE_TEXT_FLAG_NO_SUGGESTIONS to the input type to disable suggestions + input_type = ( + self.native.getInputType() | InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS + ) + self.native.setInputType(input_type) else: # Implicitly calls setFocusable(True) self.native.setFocusableInTouchMode(True) + # Remove TYPE_TEXT_FLAG_NO_SUGGESTIONS to enable suggestions + input_type = ( + self.native.getInputType() & ~InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS + ) + self.native.setInputType(input_type) def get_placeholder(self): return str(self.native.getHint())
{"golden_diff": "diff --git a/android/src/toga_android/widgets/textinput.py b/android/src/toga_android/widgets/textinput.py\n--- a/android/src/toga_android/widgets/textinput.py\n+++ b/android/src/toga_android/widgets/textinput.py\n@@ -77,9 +77,19 @@\n if readonly:\n # Implicitly calls setFocusableInTouchMode(False)\n self.native.setFocusable(False)\n+ # Add TYPE_TEXT_FLAG_NO_SUGGESTIONS to the input type to disable suggestions\n+ input_type = (\n+ self.native.getInputType() | InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS\n+ )\n+ self.native.setInputType(input_type)\n else:\n # Implicitly calls setFocusable(True)\n self.native.setFocusableInTouchMode(True)\n+ # Remove TYPE_TEXT_FLAG_NO_SUGGESTIONS to enable suggestions\n+ input_type = (\n+ self.native.getInputType() & ~InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS\n+ )\n+ self.native.setInputType(input_type)\n \n def get_placeholder(self):\n return str(self.native.getHint())\n", "issue": "Android Read-only Multi-line text input displays suggestions (Spell Checking)\n### Describe the bug\n\nWhen using the \"MultilineTextInput\" with the \"readonly\" parameter set to \"True\" on Android, it will suggest spelling improvements based on dictionary candidates. This behavior results in underlined text on android devices when using a language other than the keyboard language or with unknown words in general.\n\n### Steps to reproduce\n\n1. Use MultilineTextInput in readonly with a set text\r\n2. Compile for android and set the KeyBoard language to a different one than the text.\r\n3. The suggestions should appear, underlining the Text.\n\n### Expected behavior\n\nSuggestions do not show up in MultilineTextInput if set to readonly.\n\n### Screenshots\n\n_No response_\n\n### Environment\n\n- Operating System: Fedora 38 Workstation / Lineage OS 20.0\r\n- Python version: 3.11.5\r\n- Software versions:\r\n - Briefcase: 0.3.15\r\n - Toga: 0.3.1 \r\n\r\n[tool.briefcase.app.trinker.android]\r\nrequires = [\r\n #\"toga-android~=0.3.1\"\r\n 'git+https://github.com/beeware/toga.git#subdirectory=core',\r\n 'git+https://github.com/beeware/toga.git#subdirectory=android',\r\n]\r\n\r\n\n\n### Logs\n\n```\r\n\r\n```\r\n\n\n### Additional context\n\nI did some research on the topic and while I'm not really an Android dev, I think I found the problem. 
In \"android/src/toga_android/widgets/textinput.py\" we need to set the flag [TYPE_TEXT_FLAG_NO_SUGGESTIONS](https://developer.android.com/reference/android/text/InputType#TYPE_TEXT_FLAG_NO_SUGGESTIONS) if \"readonly\" is true.\r\n\r\nAs said before, I'm not really an Android dev and am not sure how this would be implemented.\r\n\n", "before_files": [{"content": "from java import dynamic_proxy\nfrom travertino.size import at_least\n\nfrom android.text import InputType, TextWatcher\nfrom android.view import Gravity, View\nfrom android.widget import EditText\nfrom toga_android.keys import toga_key\n\nfrom .label import TextViewWidget\n\n\nclass TogaTextWatcher(dynamic_proxy(TextWatcher)):\n def __init__(self, impl):\n super().__init__()\n self.impl = impl\n\n def beforeTextChanged(self, _charSequence, _start, _count, _after):\n pass\n\n def afterTextChanged(self, _editable):\n self.impl._on_change()\n\n def onTextChanged(self, _charSequence, _start, _before, _count):\n pass\n\n\nclass TogaKeyListener(dynamic_proxy(View.OnKeyListener)):\n def __init__(self, impl):\n super().__init__()\n self.impl = impl\n\n def onKey(self, _view, _key, _event):\n event_info = toga_key(_event)\n if event_info is None:\n pass # pragma: nocover\n else:\n key_pressed = event_info[\"key\"].value\n if (key_pressed == \"<enter>\" or key_pressed == \"numpad:enter\") and (\n int(_event.getAction()) == 1\n ):\n self.impl._on_confirm()\n return False\n\n\nclass TogaFocusListener(dynamic_proxy(View.OnFocusChangeListener)):\n def __init__(self, impl):\n super().__init__()\n self.impl = impl\n\n def onFocusChange(self, view, has_focus):\n if has_focus:\n self.impl._on_gain_focus()\n else:\n self.impl._on_lose_focus()\n\n\nclass TextInput(TextViewWidget):\n def create(self, input_type=InputType.TYPE_CLASS_TEXT):\n self.native = EditText(self._native_activity)\n self.native.setInputType(input_type)\n self.cache_textview_defaults()\n\n self.native.addTextChangedListener(TogaTextWatcher(self))\n self.native.setOnKeyListener(TogaKeyListener(self))\n self.native.setOnFocusChangeListener(TogaFocusListener(self))\n\n def get_value(self):\n return str(self.native.getText())\n\n def set_value(self, value):\n self.native.setText(value)\n\n def get_readonly(self):\n return not self.native.isFocusable()\n\n def set_readonly(self, readonly):\n if readonly:\n # Implicitly calls setFocusableInTouchMode(False)\n self.native.setFocusable(False)\n else:\n # Implicitly calls setFocusable(True)\n self.native.setFocusableInTouchMode(True)\n\n def get_placeholder(self):\n return str(self.native.getHint())\n\n def set_placeholder(self, value):\n self.native.setHint(value)\n\n def set_alignment(self, value):\n self.set_textview_alignment(value, Gravity.CENTER_VERTICAL)\n\n def set_error(self, error_message):\n self.native.setError(error_message)\n\n def clear_error(self):\n self.native.setError(None)\n\n def is_valid(self):\n return self.native.getError() is None\n\n def _on_change(self):\n self.interface.on_change(None)\n self.interface._validate()\n\n def _on_confirm(self):\n self.interface.on_confirm(None)\n\n def _on_gain_focus(self):\n self.interface.on_gain_focus(None)\n\n def _on_lose_focus(self):\n self.interface.on_lose_focus(None)\n\n def rehint(self):\n self.interface.intrinsic.width = at_least(self.interface._MIN_WIDTH)\n self.native.measure(View.MeasureSpec.UNSPECIFIED, View.MeasureSpec.UNSPECIFIED)\n self.interface.intrinsic.height = self.native.getMeasuredHeight()\n", "path": "android/src/toga_android/widgets/textinput.py"}]}
1,951
235
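The toga patch above works by OR-ing `TYPE_TEXT_FLAG_NO_SUGGESTIONS` into the EditText's input type when the widget goes read-only, and masking it back out when it becomes editable again. A tiny pure-Python sketch of that set/clear bit-flag pattern; the numeric values below are Android's documented `InputType` constants, everything else is illustrative:

```python
TYPE_CLASS_TEXT = 0x00000001                # android.text.InputType.TYPE_CLASS_TEXT
TYPE_TEXT_FLAG_NO_SUGGESTIONS = 0x00080000  # android.text.InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS


def apply_readonly(input_type: int, readonly: bool) -> int:
    """Return the new input type with the no-suggestions flag set or cleared."""
    if readonly:
        return input_type | TYPE_TEXT_FLAG_NO_SUGGESTIONS   # set the bit
    return input_type & ~TYPE_TEXT_FLAG_NO_SUGGESTIONS      # clear the bit


flags = apply_readonly(TYPE_CLASS_TEXT, True)
assert flags & TYPE_TEXT_FLAG_NO_SUGGESTIONS       # suggestions disabled
flags = apply_readonly(flags, False)
assert not flags & TYPE_TEXT_FLAG_NO_SUGGESTIONS   # suggestions enabled again
```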
gh_patches_debug_14420
rasdani/github-patches
git_diff
pyinstaller__pyinstaller-6774
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> XDG_DATA_DIRS environment variable being overriden when running packaged app I have a simple app that runs a local Flask web server and opens a web browser to show it on start up using the following command: webbrowser.open('http://localhost:%d' % port, new=2, autoraise=True) When I run it in development mode, this works fine. However, when it's packaged using pyinstaller and run as an executable, I get this: gvfs-open: http://localhost:5000: error opening location: No application is registered as handling this file The order of the browser's web browser will try is the same in both instances: 'xdg-open', 'gvfs-open', 'gnome-open', 'x-www-browser', 'firefox' I tried using specific commands using `webbrowser.get(NAME).open` but none worked (except Firefox). It seems like the XDG_DATA_DIRS environment variable is being over ridden: `print('XDG_DATA_DIRS: ' + os.environ.get('XDG_DATA_DIRS', 'not found'))` yields `/usr/share/ubuntu:/usr/share/gnome:/usr/local/share:/usr/share:/var/lib/snapd/desktop:/var/lib/snapd/desktop` in development mode and `SOURCE_DIR/dist/linux/app/share` in the packaged executable. I'm using Python 3.5.2 and pyinstaller 3.3.1 on Ubuntu 16.04 with the command pyinstaller --add-data="static:static" app.py Stack overflow link: https://stackoverflow.com/questions/51657864/pyinstaller-webbrowser-open-doesnt-work-in-packaged-app </issue> <code> [start of PyInstaller/hooks/rthooks/pyi_rth_glib.py] 1 #----------------------------------------------------------------------------- 2 # Copyright (c) 2015-2022, PyInstaller Development Team. 3 # 4 # Licensed under the Apache License, Version 2.0 (the "License"); 5 # you may not use this file except in compliance with the License. 6 # 7 # The full license is in the file COPYING.txt, distributed with this software. 8 # 9 # SPDX-License-Identifier: Apache-2.0 10 #----------------------------------------------------------------------------- 11 12 import os 13 import sys 14 15 os.environ['XDG_DATA_DIRS'] = os.path.join(sys._MEIPASS, 'share') 16 [end of PyInstaller/hooks/rthooks/pyi_rth_glib.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/PyInstaller/hooks/rthooks/pyi_rth_glib.py b/PyInstaller/hooks/rthooks/pyi_rth_glib.py --- a/PyInstaller/hooks/rthooks/pyi_rth_glib.py +++ b/PyInstaller/hooks/rthooks/pyi_rth_glib.py @@ -12,4 +12,20 @@ import os import sys -os.environ['XDG_DATA_DIRS'] = os.path.join(sys._MEIPASS, 'share') +# Prepend the frozen application's data dir to XDG_DATA_DIRS. We need to avoid overwriting the existing paths in order +# to allow the frozen application to run system-installed applications (for example, launch a web browser via the +# webbrowser module on Linux). Should the user desire complete isolation of the frozen application from the system, +# they need to clean up XDG_DATA_DIRS at the start of their program (i.e., remove all entries but first). +pyi_data_dir = os.path.join(sys._MEIPASS, 'share') + +xdg_data_dirs = os.environ.get('XDG_DATA_DIRS', None) +if xdg_data_dirs: + if pyi_data_dir not in xdg_data_dirs: + xdg_data_dirs = pyi_data_dir + os.pathsep + xdg_data_dirs +else: + xdg_data_dirs = pyi_data_dir +os.environ['XDG_DATA_DIRS'] = xdg_data_dirs + +# Cleanup aux variables +del xdg_data_dirs +del pyi_data_dir
{"golden_diff": "diff --git a/PyInstaller/hooks/rthooks/pyi_rth_glib.py b/PyInstaller/hooks/rthooks/pyi_rth_glib.py\n--- a/PyInstaller/hooks/rthooks/pyi_rth_glib.py\n+++ b/PyInstaller/hooks/rthooks/pyi_rth_glib.py\n@@ -12,4 +12,20 @@\n import os\n import sys\n \n-os.environ['XDG_DATA_DIRS'] = os.path.join(sys._MEIPASS, 'share')\n+# Prepend the frozen application's data dir to XDG_DATA_DIRS. We need to avoid overwriting the existing paths in order\n+# to allow the frozen application to run system-installed applications (for example, launch a web browser via the\n+# webbrowser module on Linux). Should the user desire complete isolation of the frozen application from the system,\n+# they need to clean up XDG_DATA_DIRS at the start of their program (i.e., remove all entries but first).\n+pyi_data_dir = os.path.join(sys._MEIPASS, 'share')\n+\n+xdg_data_dirs = os.environ.get('XDG_DATA_DIRS', None)\n+if xdg_data_dirs:\n+ if pyi_data_dir not in xdg_data_dirs:\n+ xdg_data_dirs = pyi_data_dir + os.pathsep + xdg_data_dirs\n+else:\n+ xdg_data_dirs = pyi_data_dir\n+os.environ['XDG_DATA_DIRS'] = xdg_data_dirs\n+\n+# Cleanup aux variables\n+del xdg_data_dirs\n+del pyi_data_dir\n", "issue": "XDG_DATA_DIRS environment variable being overriden when running packaged app\nI have a simple app that runs a local Flask web server and opens a web browser to show it on start up using the following command: \r\n\r\n webbrowser.open('http://localhost:%d' % port, new=2, autoraise=True)\r\n\r\nWhen I run it in development mode, this works fine. However, when it's packaged using pyinstaller and run as an executable, I get this:\r\n\r\n gvfs-open: http://localhost:5000: error opening location: No application is registered as handling this file\r\n\r\nThe order of the browser's web browser will try is the same in both instances: \r\n\r\n 'xdg-open', 'gvfs-open', 'gnome-open', 'x-www-browser', 'firefox'\r\n\r\nI tried using specific commands using `webbrowser.get(NAME).open` but none worked (except Firefox). \r\n\r\nIt seems like the XDG_DATA_DIRS environment variable is being over ridden: \r\n\r\n`print('XDG_DATA_DIRS: ' + os.environ.get('XDG_DATA_DIRS', 'not found'))` \r\n\r\nyields \r\n\r\n`/usr/share/ubuntu:/usr/share/gnome:/usr/local/share:/usr/share:/var/lib/snapd/desktop:/var/lib/snapd/desktop` \r\n\r\nin development mode and \r\n\r\n`SOURCE_DIR/dist/linux/app/share`\r\n\r\nin the packaged executable. \r\n\r\nI'm using Python 3.5.2 and pyinstaller 3.3.1 on Ubuntu 16.04 with the command \r\n\r\n pyinstaller --add-data=\"static:static\" app.py\r\n\r\nStack overflow link: https://stackoverflow.com/questions/51657864/pyinstaller-webbrowser-open-doesnt-work-in-packaged-app\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2015-2022, PyInstaller Development Team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#\n# SPDX-License-Identifier: Apache-2.0\n#-----------------------------------------------------------------------------\n\nimport os\nimport sys\n\nos.environ['XDG_DATA_DIRS'] = os.path.join(sys._MEIPASS, 'share')\n", "path": "PyInstaller/hooks/rthooks/pyi_rth_glib.py"}]}
1,054
334
gh_patches_debug_28621
rasdani/github-patches
git_diff
conan-io__conan-center-index-20413
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> cocoyaxi: make deprecated Specify library name and version: **cocoyaxi/*** --- - [x] I've read the [contributing guidelines](https://github.com/conan-io/conan-center-index/blob/master/CONTRIBUTING.md). - [ ] I've used a [recent](https://github.com/conan-io/conan/releases/latest) Conan client version close to the [currently deployed](https://github.com/conan-io/conan-center-index/blob/master/.c3i/config_v1.yml#L6). - [ ] I've tried at least one configuration locally with the [conan-center hook](https://github.com/conan-io/hooks.git) activated. </issue> <code> [start of recipes/cocoyaxi/all/conanfile.py] 1 from conan import ConanFile 2 from conan.errors import ConanInvalidConfiguration 3 from conan.tools.build import check_min_cppstd 4 from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout 5 from conan.tools.files import copy, get 6 from conan.tools.microsoft import is_msvc, is_msvc_static_runtime 7 import os 8 9 required_conan_version = ">=1.50.0" 10 11 12 class CocoyaxiConan(ConanFile): 13 name = "cocoyaxi" 14 url = "https://github.com/conan-io/conan-center-index" 15 homepage = "https://github.com/idealvin/cocoyaxi" 16 license = "MIT" 17 description = "A go-style coroutine library in C++11 and more." 18 topics = ("cocoyaxi", "coroutine", "c++11") 19 20 settings = "os", "arch", "compiler", "build_type" 21 options = { 22 "shared": [True, False], 23 "fPIC": [True, False], 24 "with_libcurl": [True, False], 25 "with_openssl": [True, False], 26 } 27 default_options = { 28 "shared": False, 29 "fPIC": True, 30 "with_libcurl": False, 31 "with_openssl": False, 32 } 33 34 def config_options(self): 35 if self.settings.os == "Windows": 36 del self.options.fPIC 37 38 def configure(self): 39 if self.options.shared: 40 del self.options.fPIC 41 42 def requirements(self): 43 if self.options.with_libcurl: 44 self.requires("libcurl/7.80.0") 45 if self.options.with_libcurl or self.options.with_openssl: 46 self.requires("openssl/1.1.1q") 47 48 def validate(self): 49 if self.info.settings.compiler.cppstd: 50 check_min_cppstd(self, 11) 51 if self.info.options.with_libcurl: 52 if not self.info.options.with_openssl: 53 raise ConanInvalidConfiguration(f"{self.name} requires with_openssl=True when using with_libcurl=True") 54 if self.dependencies["libcurl"].options.with_ssl != "openssl": 55 raise ConanInvalidConfiguration(f"{self.name} requires libcurl:with_ssl='openssl' to be enabled") 56 if not self.dependencies["libcurl"].options.with_zlib: 57 raise ConanInvalidConfiguration(f"{self.name} requires libcurl:with_zlib=True to be enabled") 58 59 def layout(self): 60 cmake_layout(self, src_folder="src") 61 62 def source(self): 63 get(self, **self.conan_data["sources"][self.version], 64 destination=self.source_folder, strip_root=True) 65 66 def generate(self): 67 tc = CMakeToolchain(self) 68 if is_msvc(self): 69 tc.variables["STATIC_VS_CRT"] = is_msvc_static_runtime(self) 70 tc.variables["WITH_LIBCURL"] = self.options.with_libcurl 71 tc.variables["WITH_OPENSSL"] = self.options.with_openssl 72 tc.generate() 73 cd = CMakeDeps(self) 74 cd.generate() 75 76 def build(self): 77 cmake = CMake(self) 78 cmake.configure() 79 cmake.build() 80 81 def package(self): 82 copy(self, "LICENSE.md", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses")) 83 cmake = CMake(self) 84 cmake.install() 85 86 def package_info(self): 87 self.cpp_info.set_property("cmake_file_name", "cocoyaxi") 88 
self.cpp_info.set_property("cmake_target_name", "cocoyaxi::co") 89 # TODO: back to global scope in conan v2 once legacy generators removed 90 self.cpp_info.components["co"].libs = ["co"] 91 92 # TODO: to remove in conan v2 once legacy generators removed 93 self.cpp_info.components["co"].set_property("cmake_target_name", "cocoyaxi::co") 94 if self.options.with_libcurl: 95 self.cpp_info.components["co"].requires.append("libcurl::libcurl") 96 if self.options.with_libcurl or self.options.with_openssl: 97 self.cpp_info.components["co"].requires.append("openssl::openssl") 98 [end of recipes/cocoyaxi/all/conanfile.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/recipes/cocoyaxi/all/conanfile.py b/recipes/cocoyaxi/all/conanfile.py --- a/recipes/cocoyaxi/all/conanfile.py +++ b/recipes/cocoyaxi/all/conanfile.py @@ -30,6 +30,7 @@ "with_libcurl": False, "with_openssl": False, } + deprecated = "coost" def config_options(self): if self.settings.os == "Windows": @@ -37,13 +38,13 @@ def configure(self): if self.options.shared: - del self.options.fPIC + self.options.rm_safe("fPIC") def requirements(self): if self.options.with_libcurl: - self.requires("libcurl/7.80.0") + self.requires("libcurl/8.2.1") if self.options.with_libcurl or self.options.with_openssl: - self.requires("openssl/1.1.1q") + self.requires("openssl/[>=1.1 <4]") def validate(self): if self.info.settings.compiler.cppstd: @@ -95,3 +96,7 @@ self.cpp_info.components["co"].requires.append("libcurl::libcurl") if self.options.with_libcurl or self.options.with_openssl: self.cpp_info.components["co"].requires.append("openssl::openssl") + if self.settings.os in ["Linux", "FreeBSD"]: + self.cpp_info.components["co"].system_libs.extend(["pthread", "dl", "m"]) + elif self.settings.os == "Windows": + self.cpp_info.components["co"].system_libs.append("ws2_32")
{"golden_diff": "diff --git a/recipes/cocoyaxi/all/conanfile.py b/recipes/cocoyaxi/all/conanfile.py\n--- a/recipes/cocoyaxi/all/conanfile.py\n+++ b/recipes/cocoyaxi/all/conanfile.py\n@@ -30,6 +30,7 @@\n \"with_libcurl\": False,\n \"with_openssl\": False,\n }\n+ deprecated = \"coost\"\n \n def config_options(self):\n if self.settings.os == \"Windows\":\n@@ -37,13 +38,13 @@\n \n def configure(self):\n if self.options.shared:\n- del self.options.fPIC\n+ self.options.rm_safe(\"fPIC\")\n \n def requirements(self):\n if self.options.with_libcurl:\n- self.requires(\"libcurl/7.80.0\")\n+ self.requires(\"libcurl/8.2.1\")\n if self.options.with_libcurl or self.options.with_openssl:\n- self.requires(\"openssl/1.1.1q\")\n+ self.requires(\"openssl/[>=1.1 <4]\")\n \n def validate(self):\n if self.info.settings.compiler.cppstd:\n@@ -95,3 +96,7 @@\n self.cpp_info.components[\"co\"].requires.append(\"libcurl::libcurl\")\n if self.options.with_libcurl or self.options.with_openssl:\n self.cpp_info.components[\"co\"].requires.append(\"openssl::openssl\")\n+ if self.settings.os in [\"Linux\", \"FreeBSD\"]:\n+ self.cpp_info.components[\"co\"].system_libs.extend([\"pthread\", \"dl\", \"m\"])\n+ elif self.settings.os == \"Windows\":\n+ self.cpp_info.components[\"co\"].system_libs.append(\"ws2_32\")\n", "issue": "cocoyaxi: make deprecated\nSpecify library name and version: **cocoyaxi/***\r\n\r\n---\r\n\r\n- [x] I've read the [contributing guidelines](https://github.com/conan-io/conan-center-index/blob/master/CONTRIBUTING.md).\r\n- [ ] I've used a [recent](https://github.com/conan-io/conan/releases/latest) Conan client version close to the [currently deployed](https://github.com/conan-io/conan-center-index/blob/master/.c3i/config_v1.yml#L6).\r\n- [ ] I've tried at least one configuration locally with the [conan-center hook](https://github.com/conan-io/hooks.git) activated.\r\n\n", "before_files": [{"content": "from conan import ConanFile\nfrom conan.errors import ConanInvalidConfiguration\nfrom conan.tools.build import check_min_cppstd\nfrom conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout\nfrom conan.tools.files import copy, get\nfrom conan.tools.microsoft import is_msvc, is_msvc_static_runtime\nimport os\n\nrequired_conan_version = \">=1.50.0\"\n\n\nclass CocoyaxiConan(ConanFile):\n name = \"cocoyaxi\"\n url = \"https://github.com/conan-io/conan-center-index\"\n homepage = \"https://github.com/idealvin/cocoyaxi\"\n license = \"MIT\"\n description = \"A go-style coroutine library in C++11 and more.\"\n topics = (\"cocoyaxi\", \"coroutine\", \"c++11\")\n\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n options = {\n \"shared\": [True, False],\n \"fPIC\": [True, False],\n \"with_libcurl\": [True, False],\n \"with_openssl\": [True, False],\n }\n default_options = {\n \"shared\": False,\n \"fPIC\": True,\n \"with_libcurl\": False,\n \"with_openssl\": False,\n }\n\n def config_options(self):\n if self.settings.os == \"Windows\":\n del self.options.fPIC\n\n def configure(self):\n if self.options.shared:\n del self.options.fPIC\n\n def requirements(self):\n if self.options.with_libcurl:\n self.requires(\"libcurl/7.80.0\")\n if self.options.with_libcurl or self.options.with_openssl:\n self.requires(\"openssl/1.1.1q\")\n\n def validate(self):\n if self.info.settings.compiler.cppstd:\n check_min_cppstd(self, 11)\n if self.info.options.with_libcurl:\n if not self.info.options.with_openssl:\n raise ConanInvalidConfiguration(f\"{self.name} requires with_openssl=True when using 
with_libcurl=True\")\n if self.dependencies[\"libcurl\"].options.with_ssl != \"openssl\":\n raise ConanInvalidConfiguration(f\"{self.name} requires libcurl:with_ssl='openssl' to be enabled\")\n if not self.dependencies[\"libcurl\"].options.with_zlib:\n raise ConanInvalidConfiguration(f\"{self.name} requires libcurl:with_zlib=True to be enabled\")\n\n def layout(self):\n cmake_layout(self, src_folder=\"src\")\n\n def source(self):\n get(self, **self.conan_data[\"sources\"][self.version],\n destination=self.source_folder, strip_root=True)\n\n def generate(self):\n tc = CMakeToolchain(self)\n if is_msvc(self):\n tc.variables[\"STATIC_VS_CRT\"] = is_msvc_static_runtime(self)\n tc.variables[\"WITH_LIBCURL\"] = self.options.with_libcurl\n tc.variables[\"WITH_OPENSSL\"] = self.options.with_openssl\n tc.generate()\n cd = CMakeDeps(self)\n cd.generate()\n\n def build(self):\n cmake = CMake(self)\n cmake.configure()\n cmake.build()\n\n def package(self):\n copy(self, \"LICENSE.md\", src=self.source_folder, dst=os.path.join(self.package_folder, \"licenses\"))\n cmake = CMake(self)\n cmake.install()\n\n def package_info(self):\n self.cpp_info.set_property(\"cmake_file_name\", \"cocoyaxi\")\n self.cpp_info.set_property(\"cmake_target_name\", \"cocoyaxi::co\")\n # TODO: back to global scope in conan v2 once legacy generators removed\n self.cpp_info.components[\"co\"].libs = [\"co\"]\n\n # TODO: to remove in conan v2 once legacy generators removed\n self.cpp_info.components[\"co\"].set_property(\"cmake_target_name\", \"cocoyaxi::co\")\n if self.options.with_libcurl:\n self.cpp_info.components[\"co\"].requires.append(\"libcurl::libcurl\")\n if self.options.with_libcurl or self.options.with_openssl:\n self.cpp_info.components[\"co\"].requires.append(\"openssl::openssl\")\n", "path": "recipes/cocoyaxi/all/conanfile.py"}]}
1,787
386
gh_patches_debug_11424
rasdani/github-patches
git_diff
cobbler__cobbler-3552
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> SafeConfigParser was removed in 3.12 ### Describe the bug From https://docs.python.org/3.12/whatsnew/3.12.html: Several names deprecated in the configparser way back in 3.2 have been removed per gh-89336: - configparser.ParsingError no longer has a filename attribute or argument. Use the source attribute and argument instead. - configparser no longer has a SafeConfigParser class. Use the shorter ConfigParser name instead. - configparser.ConfigParser no longer has a readfp method. Use read_file() instead. <!--- HINT: You can paste gist.github.com links for long logs or larger files --> ### Steps to reproduce 1. systemctl start cobblerd ### Expected behavior <!--- A clear and concise description of what you expected to happen. --> ### Cobbler version <!--- Paste output from `cobbler version` --> ````paste below Cobbler 3.4.0 ```` ### Operating system Fedora 39 ### Cobbler log <!--- Paste (partial) output from `/var/log/cobbler/cobbler.log` --> ````paste below cobblerd[1614]: INFO | Exception raised when loading module authorization.configfile cobblerd[1614]: INFO | Exception occurred: <class 'ImportError'> cobblerd[1614]: INFO | Exception value: cannot import name 'SafeConfigParser' from 'configparser' (/usr/lib64/python3.12/configparser.py) ```` ### Screenshots <!--- If applicable, add screenshots to help explain your problem. --> ### Additional information <!--- Add any other context about the problem here. --> </issue> <code> [start of cobbler/modules/authorization/configfile.py] 1 """ 2 Authorization module that allow users listed in 3 /etc/cobbler/users.conf to be permitted to access resources. 4 For instance, when using authz_ldap, you want to use authn_configfile, 5 not authz_allowall, which will most likely NOT do what you want. 6 """ 7 # SPDX-License-Identifier: GPL-2.0-or-later 8 # SPDX-FileCopyrightText: Copyright 2007-2009, Red Hat, Inc and Others 9 # SPDX-FileCopyrightText: Michael DeHaan <michael.dehaan AT gmail> 10 11 12 import os 13 from configparser import SafeConfigParser 14 from typing import TYPE_CHECKING, Any, Dict 15 16 if TYPE_CHECKING: 17 from cobbler.api import CobblerAPI 18 19 20 CONFIG_FILE = "/etc/cobbler/users.conf" 21 22 23 def register() -> str: 24 """ 25 The mandatory Cobbler module registration hook. 26 27 :return: Always "authz". 28 """ 29 return "authz" 30 31 32 def __parse_config() -> Dict[str, Dict[Any, Any]]: 33 """ 34 Parse the the users.conf file. 35 36 :return: The data of the config file. 37 """ 38 if not os.path.exists(CONFIG_FILE): 39 return {} 40 config = SafeConfigParser() 41 config.read(CONFIG_FILE) 42 alldata: Dict[str, Dict[str, Any]] = {} 43 groups = config.sections() 44 for group in groups: 45 alldata[str(group)] = {} 46 options = config.options(group) 47 for option in options: 48 alldata[group][option] = 1 49 return alldata 50 51 52 def authorize( 53 api_handle: "CobblerAPI", 54 user: str, 55 resource: str, 56 arg1: Any = None, 57 arg2: Any = None, 58 ) -> int: 59 """ 60 Validate a user against a resource. All users in the file are permitted by this module. 61 62 :param api_handle: This parameter is not used currently. 63 :param user: The user to authorize. 64 :param resource: This parameter is not used currently. 65 :param arg1: This parameter is not used currently. 66 :param arg2: This parameter is not used currently. 67 :return: "0" if no authorized, "1" if authorized. 
68 """ 69 # FIXME: this must be modified to use the new ACL engine 70 71 data = __parse_config() 72 for _, group_data in data.items(): 73 if user.lower() in group_data: 74 return 1 75 return 0 76 [end of cobbler/modules/authorization/configfile.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/cobbler/modules/authorization/configfile.py b/cobbler/modules/authorization/configfile.py --- a/cobbler/modules/authorization/configfile.py +++ b/cobbler/modules/authorization/configfile.py @@ -10,7 +10,7 @@ import os -from configparser import SafeConfigParser +from configparser import ConfigParser from typing import TYPE_CHECKING, Any, Dict if TYPE_CHECKING: @@ -37,7 +37,7 @@ """ if not os.path.exists(CONFIG_FILE): return {} - config = SafeConfigParser() + config = ConfigParser() config.read(CONFIG_FILE) alldata: Dict[str, Dict[str, Any]] = {} groups = config.sections()
{"golden_diff": "diff --git a/cobbler/modules/authorization/configfile.py b/cobbler/modules/authorization/configfile.py\n--- a/cobbler/modules/authorization/configfile.py\n+++ b/cobbler/modules/authorization/configfile.py\n@@ -10,7 +10,7 @@\n \n \n import os\n-from configparser import SafeConfigParser\n+from configparser import ConfigParser\n from typing import TYPE_CHECKING, Any, Dict\n \n if TYPE_CHECKING:\n@@ -37,7 +37,7 @@\n \"\"\"\n if not os.path.exists(CONFIG_FILE):\n return {}\n- config = SafeConfigParser()\n+ config = ConfigParser()\n config.read(CONFIG_FILE)\n alldata: Dict[str, Dict[str, Any]] = {}\n groups = config.sections()\n", "issue": "SafeConfigParser was removed in 3.12\n### Describe the bug\r\n\r\nFrom https://docs.python.org/3.12/whatsnew/3.12.html:\r\n\r\nSeveral names deprecated in the configparser way back in 3.2 have been removed per gh-89336:\r\n- configparser.ParsingError no longer has a filename attribute or argument. Use the source attribute and argument instead.\r\n- configparser no longer has a SafeConfigParser class. Use the shorter ConfigParser name instead.\r\n- configparser.ConfigParser no longer has a readfp method. Use read_file() instead.\r\n\r\n\r\n<!--- HINT: You can paste gist.github.com links for long logs or larger files -->\r\n\r\n### Steps to reproduce\r\n\r\n1. systemctl start cobblerd\r\n\r\n### Expected behavior\r\n\r\n<!--- A clear and concise description of what you expected to happen. -->\r\n\r\n### Cobbler version\r\n\r\n<!--- Paste output from `cobbler version` -->\r\n````paste below\r\nCobbler 3.4.0\r\n````\r\n\r\n### Operating system\r\n\r\nFedora 39\r\n\r\n### Cobbler log\r\n\r\n<!--- Paste (partial) output from `/var/log/cobbler/cobbler.log` -->\r\n````paste below\r\ncobblerd[1614]: INFO | Exception raised when loading module authorization.configfile\r\ncobblerd[1614]: INFO | Exception occurred: <class 'ImportError'>\r\ncobblerd[1614]: INFO | Exception value: cannot import name 'SafeConfigParser' from 'configparser' (/usr/lib64/python3.12/configparser.py)\r\n````\r\n\r\n### Screenshots\r\n\r\n<!--- If applicable, add screenshots to help explain your problem. -->\r\n\r\n### Additional information\r\n\r\n<!--- Add any other context about the problem here. 
-->\r\n\n", "before_files": [{"content": "\"\"\"\nAuthorization module that allow users listed in\n/etc/cobbler/users.conf to be permitted to access resources.\nFor instance, when using authz_ldap, you want to use authn_configfile,\nnot authz_allowall, which will most likely NOT do what you want.\n\"\"\"\n# SPDX-License-Identifier: GPL-2.0-or-later\n# SPDX-FileCopyrightText: Copyright 2007-2009, Red Hat, Inc and Others\n# SPDX-FileCopyrightText: Michael DeHaan <michael.dehaan AT gmail>\n\n\nimport os\nfrom configparser import SafeConfigParser\nfrom typing import TYPE_CHECKING, Any, Dict\n\nif TYPE_CHECKING:\n from cobbler.api import CobblerAPI\n\n\nCONFIG_FILE = \"/etc/cobbler/users.conf\"\n\n\ndef register() -> str:\n \"\"\"\n The mandatory Cobbler module registration hook.\n\n :return: Always \"authz\".\n \"\"\"\n return \"authz\"\n\n\ndef __parse_config() -> Dict[str, Dict[Any, Any]]:\n \"\"\"\n Parse the the users.conf file.\n\n :return: The data of the config file.\n \"\"\"\n if not os.path.exists(CONFIG_FILE):\n return {}\n config = SafeConfigParser()\n config.read(CONFIG_FILE)\n alldata: Dict[str, Dict[str, Any]] = {}\n groups = config.sections()\n for group in groups:\n alldata[str(group)] = {}\n options = config.options(group)\n for option in options:\n alldata[group][option] = 1\n return alldata\n\n\ndef authorize(\n api_handle: \"CobblerAPI\",\n user: str,\n resource: str,\n arg1: Any = None,\n arg2: Any = None,\n) -> int:\n \"\"\"\n Validate a user against a resource. All users in the file are permitted by this module.\n\n :param api_handle: This parameter is not used currently.\n :param user: The user to authorize.\n :param resource: This parameter is not used currently.\n :param arg1: This parameter is not used currently.\n :param arg2: This parameter is not used currently.\n :return: \"0\" if no authorized, \"1\" if authorized.\n \"\"\"\n # FIXME: this must be modified to use the new ACL engine\n\n data = __parse_config()\n for _, group_data in data.items():\n if user.lower() in group_data:\n return 1\n return 0\n", "path": "cobbler/modules/authorization/configfile.py"}]}
1,596
162
gh_patches_debug_19861
rasdani/github-patches
git_diff
doccano__doccano-1958
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [Bug report] Static files are not copied on pip installation How to reproduce the behaviour --------- Seems like (some?) static files are not copied on pip installation. For instance `http://site.com/favicon.ico` is available on Docker Compose installation. But it is 404'd on pip installation. Your Environment --------- <!-- Include details of your environment.--> * Operating System: CentOS 8.3 * Python Version Used: 3.8.8 * When you install doccano: v1.2.1 * How did you install doccano (Heroku button etc): pip </issue> <code> [start of backend/config/urls.py] 1 """app URL Configuration 2 3 The `urlpatterns` list routes URLs to views. For more information please see: 4 https://docs.djangoproject.com/en/2.0/topics/http/urls/ 5 Examples: 6 Function views 7 1. Add an import: from my_app import views 8 2. Add a URL to urlpatterns: path('', views.home, name='home') 9 Class-based views 10 1. Add an import: from other_app.views import Home 11 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') 12 Including another URLconf 13 1. Import the include() function: from django.urls import include, path 14 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) 15 """ 16 import os 17 import re 18 19 from django.conf import settings 20 from django.contrib import admin 21 from django.contrib.auth.views import TemplateView 22 from django.urls import include, path, re_path 23 from django.views.static import serve 24 from drf_yasg import openapi 25 from drf_yasg.views import get_schema_view 26 27 schema_view = get_schema_view( 28 openapi.Info( 29 title="doccano API", 30 default_version="v1", 31 description="doccano API description", 32 license=openapi.License(name="MIT License"), 33 ), 34 public=True, 35 ) 36 37 urlpatterns = [] 38 if settings.DEBUG or os.environ.get("STANDALONE", False): 39 # For showing images and audios in the case of pip and Docker. 40 urlpatterns.append( 41 re_path( 42 r"^%s(?P<path>.*)$" % re.escape(settings.MEDIA_URL.lstrip("/")), 43 serve, 44 {"document_root": settings.MEDIA_ROOT}, 45 ) 46 ) 47 48 urlpatterns += [ 49 path("admin/", admin.site.urls), 50 path("api-auth/", include("rest_framework.urls")), 51 path("v1/health/", include("health_check.urls")), 52 path("v1/", include("api.urls")), 53 path("v1/", include("roles.urls")), 54 path("v1/", include("users.urls")), 55 path("v1/", include("data_import.urls")), 56 path("v1/", include("data_export.urls")), 57 path("v1/", include("projects.urls")), 58 path("v1/projects/<int:project_id>/metrics/", include("metrics.urls")), 59 path("v1/projects/<int:project_id>/", include("auto_labeling.urls")), 60 path("v1/projects/<int:project_id>/", include("examples.urls")), 61 path("v1/projects/<int:project_id>/", include("labels.urls")), 62 path("v1/projects/<int:project_id>/", include("label_types.urls")), 63 path("swagger/", schema_view.with_ui("swagger", cache_timeout=0), name="schema-swagger-ui"), 64 re_path("", TemplateView.as_view(template_name="index.html")), 65 ] 66 [end of backend/config/urls.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/backend/config/urls.py b/backend/config/urls.py --- a/backend/config/urls.py +++ b/backend/config/urls.py @@ -15,6 +15,7 @@ """ import os import re +from pathlib import Path from django.conf import settings from django.contrib import admin @@ -36,6 +37,7 @@ urlpatterns = [] if settings.DEBUG or os.environ.get("STANDALONE", False): + static_dir = Path(__file__).resolve().parent.parent / "client" / "dist" # For showing images and audios in the case of pip and Docker. urlpatterns.append( re_path( @@ -44,6 +46,8 @@ {"document_root": settings.MEDIA_ROOT}, ) ) + # For showing favicon on the case of pip and Docker. + urlpatterns.append(path("favicon.ico", serve, {"document_root": static_dir, "path": "favicon.ico"})) urlpatterns += [ path("admin/", admin.site.urls),
{"golden_diff": "diff --git a/backend/config/urls.py b/backend/config/urls.py\n--- a/backend/config/urls.py\n+++ b/backend/config/urls.py\n@@ -15,6 +15,7 @@\n \"\"\"\n import os\n import re\n+from pathlib import Path\n \n from django.conf import settings\n from django.contrib import admin\n@@ -36,6 +37,7 @@\n \n urlpatterns = []\n if settings.DEBUG or os.environ.get(\"STANDALONE\", False):\n+ static_dir = Path(__file__).resolve().parent.parent / \"client\" / \"dist\"\n # For showing images and audios in the case of pip and Docker.\n urlpatterns.append(\n re_path(\n@@ -44,6 +46,8 @@\n {\"document_root\": settings.MEDIA_ROOT},\n )\n )\n+ # For showing favicon on the case of pip and Docker.\n+ urlpatterns.append(path(\"favicon.ico\", serve, {\"document_root\": static_dir, \"path\": \"favicon.ico\"}))\n \n urlpatterns += [\n path(\"admin/\", admin.site.urls),\n", "issue": "[Bug report] Static files are not copied on pip installation\nHow to reproduce the behaviour\r\n---------\r\nSeems like (some?) static files are not copied on pip installation.\r\nFor instance `http://site.com/favicon.ico` is available on Docker Compose installation. But it is 404'd on pip installation.\r\n\r\nYour Environment\r\n---------\r\n<!-- Include details of your environment.-->\r\n* Operating System: CentOS 8.3\r\n* Python Version Used: 3.8.8\r\n* When you install doccano: v1.2.1\r\n* How did you install doccano (Heroku button etc): pip\r\n\n", "before_files": [{"content": "\"\"\"app URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/2.0/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns: path('', views.home, name='home')\nClass-based views\n 1. Add an import: from other_app.views import Home\n 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')\nIncluding another URLconf\n 1. Import the include() function: from django.urls import include, path\n 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls'))\n\"\"\"\nimport os\nimport re\n\nfrom django.conf import settings\nfrom django.contrib import admin\nfrom django.contrib.auth.views import TemplateView\nfrom django.urls import include, path, re_path\nfrom django.views.static import serve\nfrom drf_yasg import openapi\nfrom drf_yasg.views import get_schema_view\n\nschema_view = get_schema_view(\n openapi.Info(\n title=\"doccano API\",\n default_version=\"v1\",\n description=\"doccano API description\",\n license=openapi.License(name=\"MIT License\"),\n ),\n public=True,\n)\n\nurlpatterns = []\nif settings.DEBUG or os.environ.get(\"STANDALONE\", False):\n # For showing images and audios in the case of pip and Docker.\n urlpatterns.append(\n re_path(\n r\"^%s(?P<path>.*)$\" % re.escape(settings.MEDIA_URL.lstrip(\"/\")),\n serve,\n {\"document_root\": settings.MEDIA_ROOT},\n )\n )\n\nurlpatterns += [\n path(\"admin/\", admin.site.urls),\n path(\"api-auth/\", include(\"rest_framework.urls\")),\n path(\"v1/health/\", include(\"health_check.urls\")),\n path(\"v1/\", include(\"api.urls\")),\n path(\"v1/\", include(\"roles.urls\")),\n path(\"v1/\", include(\"users.urls\")),\n path(\"v1/\", include(\"data_import.urls\")),\n path(\"v1/\", include(\"data_export.urls\")),\n path(\"v1/\", include(\"projects.urls\")),\n path(\"v1/projects/<int:project_id>/metrics/\", include(\"metrics.urls\")),\n path(\"v1/projects/<int:project_id>/\", include(\"auto_labeling.urls\")),\n path(\"v1/projects/<int:project_id>/\", include(\"examples.urls\")),\n path(\"v1/projects/<int:project_id>/\", include(\"labels.urls\")),\n path(\"v1/projects/<int:project_id>/\", include(\"label_types.urls\")),\n path(\"swagger/\", schema_view.with_ui(\"swagger\", cache_timeout=0), name=\"schema-swagger-ui\"),\n re_path(\"\", TemplateView.as_view(template_name=\"index.html\")),\n]\n", "path": "backend/config/urls.py"}]}
1,398
224
gh_patches_debug_3129
rasdani/github-patches
git_diff
mathesar-foundation__mathesar-3523
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Demo is broken for 0.1.6 ## Description <!-- A clear and concise description of what the bug is. --> The demo can't be deployed with the current `master` branch (0.1.6) The reasons are: - We didn't add the fix to the demo settings in #3499 (just to prod), and - We have an extra dependency in the demo setup which we need to build in but the current `Dockerfile` doesn't support that. ## Expected behavior <!-- A clear and concise description of what you expected to happen. --> We should be able to demo version 0.1.6 on our demo site. </issue> <code> [start of demo/settings.py] 1 from config.settings.common_settings import * # noqa 2 from decouple import config as decouple_config 3 4 INSTALLED_APPS += [ # noqa 5 "demo", 6 "health_check", 7 ] 8 9 MIDDLEWARE += [ # noqa 10 "demo.middleware.LiveDemoModeMiddleware", 11 ] 12 13 MATHESAR_MODE = 'PRODUCTION' 14 MATHESAR_LIVE_DEMO = True 15 MATHESAR_LIVE_DEMO_USERNAME = decouple_config('MATHESAR_LIVE_DEMO_USERNAME', default=None) 16 MATHESAR_LIVE_DEMO_PASSWORD = decouple_config('MATHESAR_LIVE_DEMO_PASSWORD', default=None) 17 18 MATHESAR_DEMO_TEMPLATE = 'mathesar_demo_template' 19 MATHESAR_DEMO_ARXIV_LOG_PATH = decouple_config( 20 'MATHESAR_DEMO_ARXIV_LOG_PATH', 21 default='/var/lib/mathesar/demo/arxiv_db_schema_log' 22 ) 23 BASE_TEMPLATE_ADDITIONAL_SCRIPT_TEMPLATES += ['demo/analytics.html'] # noqa 24 ROOT_URLCONF = "demo.urls" 25 [end of demo/settings.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/demo/settings.py b/demo/settings.py --- a/demo/settings.py +++ b/demo/settings.py @@ -10,6 +10,9 @@ "demo.middleware.LiveDemoModeMiddleware", ] + +SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') + MATHESAR_MODE = 'PRODUCTION' MATHESAR_LIVE_DEMO = True MATHESAR_LIVE_DEMO_USERNAME = decouple_config('MATHESAR_LIVE_DEMO_USERNAME', default=None)
{"golden_diff": "diff --git a/demo/settings.py b/demo/settings.py\n--- a/demo/settings.py\n+++ b/demo/settings.py\n@@ -10,6 +10,9 @@\n \"demo.middleware.LiveDemoModeMiddleware\",\n ]\n \n+\n+SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')\n+\n MATHESAR_MODE = 'PRODUCTION'\n MATHESAR_LIVE_DEMO = True\n MATHESAR_LIVE_DEMO_USERNAME = decouple_config('MATHESAR_LIVE_DEMO_USERNAME', default=None)\n", "issue": "Demo is broken for 0.1.6\n## Description\r\n<!-- A clear and concise description of what the bug is. -->\r\n\r\nThe demo can't be deployed with the current `master` branch (0.1.6)\r\n\r\nThe reasons are:\r\n\r\n- We didn't add the fix to the demo settings in #3499 (just to prod), and\r\n- We have an extra dependency in the demo setup which we need to build in but the current `Dockerfile` doesn't support that.\r\n\r\n## Expected behavior\r\n<!-- A clear and concise description of what you expected to happen. -->\r\n\r\nWe should be able to demo version 0.1.6 on our demo site.\n", "before_files": [{"content": "from config.settings.common_settings import * # noqa\nfrom decouple import config as decouple_config\n\nINSTALLED_APPS += [ # noqa\n \"demo\",\n \"health_check\",\n]\n\nMIDDLEWARE += [ # noqa\n \"demo.middleware.LiveDemoModeMiddleware\",\n]\n\nMATHESAR_MODE = 'PRODUCTION'\nMATHESAR_LIVE_DEMO = True\nMATHESAR_LIVE_DEMO_USERNAME = decouple_config('MATHESAR_LIVE_DEMO_USERNAME', default=None)\nMATHESAR_LIVE_DEMO_PASSWORD = decouple_config('MATHESAR_LIVE_DEMO_PASSWORD', default=None)\n\nMATHESAR_DEMO_TEMPLATE = 'mathesar_demo_template'\nMATHESAR_DEMO_ARXIV_LOG_PATH = decouple_config(\n 'MATHESAR_DEMO_ARXIV_LOG_PATH',\n default='/var/lib/mathesar/demo/arxiv_db_schema_log'\n)\nBASE_TEMPLATE_ADDITIONAL_SCRIPT_TEMPLATES += ['demo/analytics.html'] # noqa\nROOT_URLCONF = \"demo.urls\"\n", "path": "demo/settings.py"}]}
936
115
gh_patches_debug_43254
rasdani/github-patches
git_diff
streamlink__streamlink-4759
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> plugins.atresplayer: Live streams is not working. ### Checklist - [X] This is a plugin issue and not a different kind of issue - [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink) - [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22) - [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master) ### Streamlink version Latest stable release ### Description As for today, Atresplayer live streams is not working. ### Debug log ```text [cli][debug] OS: Windows 10 [cli][debug] Python: 3.10.6 [cli][debug] Streamlink: 4.3.0 [cli][debug] Dependencies: [cli][debug] isodate: 0.6.1 [cli][debug] lxml: 4.9.1 [cli][debug] pycountry: 22.3.5 [cli][debug] pycryptodome: 3.15.0 [cli][debug] PySocks: 1.7.1 [cli][debug] requests: 2.28.1 [cli][debug] websocket-client: 1.3.3 [cli][debug] Arguments: [cli][debug] url=https://www.atresplayer.com/directos/antena3/ [cli][debug] stream=['best'] [cli][debug] --loglevel=debug [cli][debug] --hls-live-edge=1 [cli][debug] --ffmpeg-ffmpeg=C:\Program Files\Streamlink\ffmpeg\ffmpeg.exe [cli][info] Found matching plugin atresplayer for URL https://www.atresplayer.com/directos/antena3/ error: Unable to validate response text: ValidationError(NoneOrAllSchema): ValidationError(type): Type of <generator object search_dict at 0x000002C64BA79930> should be list, but is generator ``` </issue> <code> [start of src/streamlink/plugins/atresplayer.py] 1 """ 2 $description Spanish live TV channels from Atresmedia Television, including Antena 3 and laSexta. 
3 $url atresplayer.com 4 $type live 5 $region Spain 6 """ 7 8 import logging 9 import re 10 11 from streamlink.plugin import Plugin, pluginmatcher 12 from streamlink.plugin.api import validate 13 from streamlink.stream.dash import DASHStream 14 from streamlink.stream.hls import HLSStream 15 from streamlink.utils.data import search_dict 16 from streamlink.utils.url import update_scheme 17 18 log = logging.getLogger(__name__) 19 20 21 @pluginmatcher(re.compile( 22 r"https?://(?:www\.)?atresplayer\.com/" 23 )) 24 class AtresPlayer(Plugin): 25 def _get_streams(self): 26 self.url = update_scheme("https://", self.url) 27 28 api_url = self.session.http.get(self.url, schema=validate.Schema( 29 re.compile(r"""window.__PRELOADED_STATE__\s*=\s*({.*?});""", re.DOTALL), 30 validate.none_or_all( 31 validate.get(1), 32 validate.parse_json(), 33 validate.transform(search_dict, key="href"), 34 [validate.url()], 35 validate.get(0), 36 ), 37 )) 38 if not api_url: 39 return 40 log.debug(f"API URL: {api_url}") 41 42 player_api_url = self.session.http.get(api_url, schema=validate.Schema( 43 validate.parse_json(), 44 validate.transform(search_dict, key="urlVideo"), 45 )) 46 47 stream_schema = validate.Schema( 48 validate.parse_json(), 49 { 50 "sources": [ 51 validate.all( 52 { 53 "src": validate.url(), 54 validate.optional("type"): str, 55 }, 56 ), 57 ], 58 }, 59 validate.get("sources"), 60 ) 61 62 for api_url in player_api_url: 63 log.debug(f"Player API URL: {api_url}") 64 for source in self.session.http.get(api_url, schema=stream_schema): 65 log.debug(f"Stream source: {source['src']} ({source.get('type', 'n/a')})") 66 67 if "type" not in source or source["type"] == "application/vnd.apple.mpegurl": 68 streams = HLSStream.parse_variant_playlist(self.session, source["src"]) 69 if not streams: 70 yield "live", HLSStream(self.session, source["src"]) 71 else: 72 yield from streams.items() 73 elif source["type"] == "application/dash+xml": 74 yield from DASHStream.parse_manifest(self.session, source["src"]).items() 75 76 77 __plugin__ = AtresPlayer 78 [end of src/streamlink/plugins/atresplayer.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/streamlink/plugins/atresplayer.py b/src/streamlink/plugins/atresplayer.py --- a/src/streamlink/plugins/atresplayer.py +++ b/src/streamlink/plugins/atresplayer.py @@ -7,12 +7,12 @@ import logging import re +from urllib.parse import urlparse from streamlink.plugin import Plugin, pluginmatcher from streamlink.plugin.api import validate from streamlink.stream.dash import DASHStream from streamlink.stream.hls import HLSStream -from streamlink.utils.data import search_dict from streamlink.utils.url import update_scheme log = logging.getLogger(__name__) @@ -24,15 +24,15 @@ class AtresPlayer(Plugin): def _get_streams(self): self.url = update_scheme("https://", self.url) + path = urlparse(self.url).path api_url = self.session.http.get(self.url, schema=validate.Schema( re.compile(r"""window.__PRELOADED_STATE__\s*=\s*({.*?});""", re.DOTALL), validate.none_or_all( validate.get(1), validate.parse_json(), - validate.transform(search_dict, key="href"), - [validate.url()], - validate.get(0), + {"links": {path: {"href": validate.url()}}}, + validate.get(("links", path, "href")), ), )) if not api_url: @@ -41,37 +41,46 @@ player_api_url = self.session.http.get(api_url, schema=validate.Schema( validate.parse_json(), - validate.transform(search_dict, key="urlVideo"), + {"urlVideo": validate.url()}, + validate.get("urlVideo"), )) - stream_schema = validate.Schema( + log.debug(f"Player API URL: {player_api_url}") + sources = self.session.http.get(player_api_url, acceptable_status=(200, 403), schema=validate.Schema( validate.parse_json(), - { - "sources": [ - validate.all( - { - "src": validate.url(), - validate.optional("type"): str, - }, - ), - ], - }, - validate.get("sources"), - ) + validate.any( + { + "error": str, + "error_description": str, + }, + { + "sources": [ + validate.all( + { + "src": validate.url(), + validate.optional("type"): str, + }, + validate.union_get("type", "src"), + ), + ], + }, + ), + )) + if "error" in sources: + log.error(f"Player API error: {sources['error']} - {sources['error_description']}") + return - for api_url in player_api_url: - log.debug(f"Player API URL: {api_url}") - for source in self.session.http.get(api_url, schema=stream_schema): - log.debug(f"Stream source: {source['src']} ({source.get('type', 'n/a')})") + for streamtype, streamsrc in sources.get("sources"): + log.debug(f"Stream source: {streamsrc} ({streamtype or 'n/a'})") - if "type" not in source or source["type"] == "application/vnd.apple.mpegurl": - streams = HLSStream.parse_variant_playlist(self.session, source["src"]) - if not streams: - yield "live", HLSStream(self.session, source["src"]) - else: - yield from streams.items() - elif source["type"] == "application/dash+xml": - yield from DASHStream.parse_manifest(self.session, source["src"]).items() + if streamtype == "application/vnd.apple.mpegurl": + streams = HLSStream.parse_variant_playlist(self.session, streamsrc) + if not streams: + yield "live", HLSStream(self.session, streamsrc) + else: + yield from streams.items() + elif streamtype == "application/dash+xml": + yield from DASHStream.parse_manifest(self.session, streamsrc).items() __plugin__ = AtresPlayer
{"golden_diff": "diff --git a/src/streamlink/plugins/atresplayer.py b/src/streamlink/plugins/atresplayer.py\n--- a/src/streamlink/plugins/atresplayer.py\n+++ b/src/streamlink/plugins/atresplayer.py\n@@ -7,12 +7,12 @@\n \n import logging\n import re\n+from urllib.parse import urlparse\n \n from streamlink.plugin import Plugin, pluginmatcher\n from streamlink.plugin.api import validate\n from streamlink.stream.dash import DASHStream\n from streamlink.stream.hls import HLSStream\n-from streamlink.utils.data import search_dict\n from streamlink.utils.url import update_scheme\n \n log = logging.getLogger(__name__)\n@@ -24,15 +24,15 @@\n class AtresPlayer(Plugin):\n def _get_streams(self):\n self.url = update_scheme(\"https://\", self.url)\n+ path = urlparse(self.url).path\n \n api_url = self.session.http.get(self.url, schema=validate.Schema(\n re.compile(r\"\"\"window.__PRELOADED_STATE__\\s*=\\s*({.*?});\"\"\", re.DOTALL),\n validate.none_or_all(\n validate.get(1),\n validate.parse_json(),\n- validate.transform(search_dict, key=\"href\"),\n- [validate.url()],\n- validate.get(0),\n+ {\"links\": {path: {\"href\": validate.url()}}},\n+ validate.get((\"links\", path, \"href\")),\n ),\n ))\n if not api_url:\n@@ -41,37 +41,46 @@\n \n player_api_url = self.session.http.get(api_url, schema=validate.Schema(\n validate.parse_json(),\n- validate.transform(search_dict, key=\"urlVideo\"),\n+ {\"urlVideo\": validate.url()},\n+ validate.get(\"urlVideo\"),\n ))\n \n- stream_schema = validate.Schema(\n+ log.debug(f\"Player API URL: {player_api_url}\")\n+ sources = self.session.http.get(player_api_url, acceptable_status=(200, 403), schema=validate.Schema(\n validate.parse_json(),\n- {\n- \"sources\": [\n- validate.all(\n- {\n- \"src\": validate.url(),\n- validate.optional(\"type\"): str,\n- },\n- ),\n- ],\n- },\n- validate.get(\"sources\"),\n- )\n+ validate.any(\n+ {\n+ \"error\": str,\n+ \"error_description\": str,\n+ },\n+ {\n+ \"sources\": [\n+ validate.all(\n+ {\n+ \"src\": validate.url(),\n+ validate.optional(\"type\"): str,\n+ },\n+ validate.union_get(\"type\", \"src\"),\n+ ),\n+ ],\n+ },\n+ ),\n+ ))\n+ if \"error\" in sources:\n+ log.error(f\"Player API error: {sources['error']} - {sources['error_description']}\")\n+ return\n \n- for api_url in player_api_url:\n- log.debug(f\"Player API URL: {api_url}\")\n- for source in self.session.http.get(api_url, schema=stream_schema):\n- log.debug(f\"Stream source: {source['src']} ({source.get('type', 'n/a')})\")\n+ for streamtype, streamsrc in sources.get(\"sources\"):\n+ log.debug(f\"Stream source: {streamsrc} ({streamtype or 'n/a'})\")\n \n- if \"type\" not in source or source[\"type\"] == \"application/vnd.apple.mpegurl\":\n- streams = HLSStream.parse_variant_playlist(self.session, source[\"src\"])\n- if not streams:\n- yield \"live\", HLSStream(self.session, source[\"src\"])\n- else:\n- yield from streams.items()\n- elif source[\"type\"] == \"application/dash+xml\":\n- yield from DASHStream.parse_manifest(self.session, source[\"src\"]).items()\n+ if streamtype == \"application/vnd.apple.mpegurl\":\n+ streams = HLSStream.parse_variant_playlist(self.session, streamsrc)\n+ if not streams:\n+ yield \"live\", HLSStream(self.session, streamsrc)\n+ else:\n+ yield from streams.items()\n+ elif streamtype == \"application/dash+xml\":\n+ yield from DASHStream.parse_manifest(self.session, streamsrc).items()\n \n \n __plugin__ = AtresPlayer\n", "issue": "plugins.atresplayer: Live streams is not working.\n### Checklist\r\n\r\n- [X] This is a plugin issue and not a different 
kind of issue\r\n- [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink)\r\n- [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22)\r\n- [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master)\r\n\r\n### Streamlink version\r\n\r\nLatest stable release\r\n\r\n### Description\r\n\r\nAs for today, Atresplayer live streams is not working.\r\n\r\n### Debug log\r\n\r\n```text\r\n[cli][debug] OS: Windows 10\r\n[cli][debug] Python: 3.10.6\r\n[cli][debug] Streamlink: 4.3.0\r\n[cli][debug] Dependencies:\r\n[cli][debug] isodate: 0.6.1\r\n[cli][debug] lxml: 4.9.1\r\n[cli][debug] pycountry: 22.3.5\r\n[cli][debug] pycryptodome: 3.15.0\r\n[cli][debug] PySocks: 1.7.1\r\n[cli][debug] requests: 2.28.1\r\n[cli][debug] websocket-client: 1.3.3\r\n[cli][debug] Arguments:\r\n[cli][debug] url=https://www.atresplayer.com/directos/antena3/\r\n[cli][debug] stream=['best']\r\n[cli][debug] --loglevel=debug\r\n[cli][debug] --hls-live-edge=1\r\n[cli][debug] --ffmpeg-ffmpeg=C:\\Program Files\\Streamlink\\ffmpeg\\ffmpeg.exe\r\n[cli][info] Found matching plugin atresplayer for URL https://www.atresplayer.com/directos/antena3/\r\nerror: Unable to validate response text: ValidationError(NoneOrAllSchema):\r\n ValidationError(type):\r\n Type of <generator object search_dict at 0x000002C64BA79930> should be list, but is generator\r\n```\r\n\n", "before_files": [{"content": "\"\"\"\n$description Spanish live TV channels from Atresmedia Television, including Antena 3 and laSexta.\n$url atresplayer.com\n$type live\n$region Spain\n\"\"\"\n\nimport logging\nimport re\n\nfrom streamlink.plugin import Plugin, pluginmatcher\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream.dash import DASHStream\nfrom streamlink.stream.hls import HLSStream\nfrom streamlink.utils.data import search_dict\nfrom streamlink.utils.url import update_scheme\n\nlog = logging.getLogger(__name__)\n\n\n@pluginmatcher(re.compile(\n r\"https?://(?:www\\.)?atresplayer\\.com/\"\n))\nclass AtresPlayer(Plugin):\n def _get_streams(self):\n self.url = update_scheme(\"https://\", self.url)\n\n api_url = self.session.http.get(self.url, schema=validate.Schema(\n re.compile(r\"\"\"window.__PRELOADED_STATE__\\s*=\\s*({.*?});\"\"\", re.DOTALL),\n validate.none_or_all(\n validate.get(1),\n validate.parse_json(),\n validate.transform(search_dict, key=\"href\"),\n [validate.url()],\n validate.get(0),\n ),\n ))\n if not api_url:\n return\n log.debug(f\"API URL: {api_url}\")\n\n player_api_url = self.session.http.get(api_url, schema=validate.Schema(\n validate.parse_json(),\n validate.transform(search_dict, key=\"urlVideo\"),\n ))\n\n stream_schema = validate.Schema(\n validate.parse_json(),\n {\n \"sources\": [\n validate.all(\n {\n \"src\": validate.url(),\n validate.optional(\"type\"): str,\n },\n ),\n ],\n },\n validate.get(\"sources\"),\n )\n\n for api_url in player_api_url:\n log.debug(f\"Player API URL: {api_url}\")\n for source in self.session.http.get(api_url, schema=stream_schema):\n log.debug(f\"Stream source: {source['src']} ({source.get('type', 'n/a')})\")\n\n if \"type\" not in source or source[\"type\"] == \"application/vnd.apple.mpegurl\":\n streams = HLSStream.parse_variant_playlist(self.session, source[\"src\"])\n if not streams:\n yield \"live\", HLSStream(self.session, source[\"src\"])\n else:\n 
yield from streams.items()\n elif source[\"type\"] == \"application/dash+xml\":\n yield from DASHStream.parse_manifest(self.session, source[\"src\"]).items()\n\n\n__plugin__ = AtresPlayer\n", "path": "src/streamlink/plugins/atresplayer.py"}]}
1,743
921
gh_patches_debug_81
rasdani/github-patches
git_diff
ocadotechnology__aimmo-60
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Fix warning about deprecation of TEMPLATE_DEBUG When starting aimmo locally the following message is displayed: > WARNINGS: > ?: (1_8.W001) The standalone TEMPLATE_\* settings were deprecated in Django 1.8 and the TEMPLATES dictionary takes precedence. You must put the values of the following settings into your default TEMPLATES dict: TEMPLATE_DEBUG. The value in question is in `aimmo/example_project/example_project/settings.py`: `TEMPLATE_DEBUG = DEBUG` The TEMPLATES dictionary _maybe_ the one here `aimmo/players/autoconfig.py` (?): ``` 'TEMPLATES': [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ] } } ], ``` </issue> <code> [start of example_project/example_project/settings.py] 1 # -*- coding: utf-8 -*- 2 # Code for Life 3 # 4 # Copyright (C) 2015, Ocado Innovation Limited 5 # 6 # This program is free software: you can redistribute it and/or modify 7 # it under the terms of the GNU Affero General Public License as 8 # published by the Free Software Foundation, either version 3 of the 9 # License, or (at your option) any later version. 10 # 11 # This program is distributed in the hope that it will be useful, 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 # GNU Affero General Public License for more details. 15 # 16 # You should have received a copy of the GNU Affero General Public License 17 # along with this program. If not, see <http://www.gnu.org/licenses/>. 18 # 19 # ADDITIONAL TERMS – Section 7 GNU General Public Licence 20 # 21 # This licence does not grant any right, title or interest in any “Ocado” logos, 22 # trade names or the trademark “Ocado” or any other trademarks or domain names 23 # owned by Ocado Innovation Limited or the Ocado group of companies or any other 24 # distinctive brand features of “Ocado” as may be secured from time to time. You 25 # must not distribute any modification of this program using the trademark 26 # “Ocado” or claim any affiliation or association with Ocado or its employees. 27 # 28 # You are not authorised to use the name Ocado (or any of its trade names) or 29 # the names of any author or contributor in advertising or for publicity purposes 30 # pertaining to the distribution of this program, without the prior written 31 # authorisation of Ocado. 32 # 33 # Any propagation, distribution or conveyance of this program must include this 34 # copyright notice and these terms. You must not misrepresent the origins of this 35 # program; modified versions of the program must be marked as such and not 36 # identified as the original program. 37 '''Django settings for example_project project.''' 38 import os 39 40 DEBUG = True 41 TEMPLATE_DEBUG = DEBUG 42 43 DATABASES = { 44 'default': { 45 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 46 'NAME': os.path.join(os.path.abspath(os.path.dirname(__file__)),'db.sqlite3'),# Or path to database file if using sqlite3.
47 } 48 } 49 50 USE_I18N = True 51 USE_L10N = True 52 53 TIME_ZONE = 'Europe/London' 54 LANGUAGE_CODE = 'en-gb' 55 STATIC_ROOT = os.path.join(os.path.dirname(__file__), 'static') 56 STATIC_URL = '/static/' 57 SECRET_KEY = 'not-a-secret' 58 59 ROOT_URLCONF = 'django_autoconfig.autourlconf' 60 61 WSGI_APPLICATION = 'example_project.wsgi.application' 62 63 INSTALLED_APPS = ( 64 'django.contrib.admin', 65 'players', 66 ) 67 68 LOGGING = { 69 'version': 1, 70 'disable_existing_loggers': False, 71 'filters': { 72 'require_debug_false': { 73 '()': 'django.utils.log.RequireDebugFalse' 74 } 75 }, 76 'handlers': { 77 'console': { 78 'level': 'DEBUG', 79 'class': 'logging.StreamHandler' 80 }, 81 }, 82 'loggers': { 83 'views': { 84 'handlers': ['console'], 85 'level': 'DEBUG' 86 }, 87 } 88 } 89 90 try: 91 from example_project.local_settings import * # pylint: disable=E0611 92 except ImportError: 93 pass 94 95 from django_autoconfig import autoconfig 96 autoconfig.configure_settings(globals()) 97 [end of example_project/example_project/settings.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/example_project/example_project/settings.py b/example_project/example_project/settings.py --- a/example_project/example_project/settings.py +++ b/example_project/example_project/settings.py @@ -38,7 +38,6 @@ import os DEBUG = True -TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': {
{"golden_diff": "diff --git a/example_project/example_project/settings.py b/example_project/example_project/settings.py\n--- a/example_project/example_project/settings.py\n+++ b/example_project/example_project/settings.py\n@@ -38,7 +38,6 @@\n import os\n \n DEBUG = True\n-TEMPLATE_DEBUG = DEBUG\n \n DATABASES = {\n 'default': {\n", "issue": "Fix warning about deprecation of TEMPLATE_DEBUG\nWhen starting aimmo locally the following message is displayed:\n\n> WARNINGS:\n> ?: (1_8.W001) The standalone TEMPLATE_\\* settings were deprecated in Django 1.8 and the TEMPLATES dictionary takes precedence. You must put the values of the following settings into your default TEMPLATES dict: TEMPLATE_DEBUG.\n\nThe value in question is in `aimmo/example_project/example_project/settings.py`:\n`TEMPLATE_DEBUG = DEBUG`\n\nThe TEMPLATES dictionary _maybe_ the one here `aimmo/players/autoconfig.py` (?):\n\n```\n'TEMPLATES': [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n ]\n }\n }\n ],\n```\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# Code for Life\n#\n# Copyright (C) 2015, Ocado Innovation Limited\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n#\n# ADDITIONAL TERMS \u2013 Section 7 GNU General Public Licence\n#\n# This licence does not grant any right, title or interest in any \u201cOcado\u201d logos,\n# trade names or the trademark \u201cOcado\u201d or any other trademarks or domain names\n# owned by Ocado Innovation Limited or the Ocado group of companies or any other\n# distinctive brand features of \u201cOcado\u201d as may be secured from time to time. You\n# must not distribute any modification of this program using the trademark\n# \u201cOcado\u201d or claim any affiliation or association with Ocado or its employees.\n#\n# You are not authorised to use the name Ocado (or any of its trade names) or\n# the names of any author or contributor in advertising or for publicity purposes\n# pertaining to the distribution of this program, without the prior written\n# authorisation of Ocado.\n#\n# Any propagation, distribution or conveyance of this program must include this\n# copyright notice and these terms. 
You must not misrepresent the origins of this\n# program; modified versions of the program must be marked as such and not\n# identified as the original program.\n'''Django settings for example_project project.'''\nimport os\n\nDEBUG = True\nTEMPLATE_DEBUG = DEBUG\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.\n 'NAME': os.path.join(os.path.abspath(os.path.dirname(__file__)),'db.sqlite3'),# Or path to database file if using sqlite3.\n }\n}\n\nUSE_I18N = True\nUSE_L10N = True\n\nTIME_ZONE = 'Europe/London'\nLANGUAGE_CODE = 'en-gb'\nSTATIC_ROOT = os.path.join(os.path.dirname(__file__), 'static')\nSTATIC_URL = '/static/'\nSECRET_KEY = 'not-a-secret'\n\nROOT_URLCONF = 'django_autoconfig.autourlconf'\n\nWSGI_APPLICATION = 'example_project.wsgi.application'\n\nINSTALLED_APPS = (\n 'django.contrib.admin',\n 'players',\n)\n\nLOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'filters': {\n 'require_debug_false': {\n '()': 'django.utils.log.RequireDebugFalse'\n }\n },\n 'handlers': {\n 'console': {\n 'level': 'DEBUG',\n 'class': 'logging.StreamHandler'\n },\n },\n 'loggers': {\n 'views': {\n 'handlers': ['console'],\n 'level': 'DEBUG'\n },\n }\n}\n\ntry:\n from example_project.local_settings import * # pylint: disable=E0611\nexcept ImportError:\n pass\n\nfrom django_autoconfig import autoconfig\nautoconfig.configure_settings(globals())\n", "path": "example_project/example_project/settings.py"}]}
1,702
71
gh_patches_debug_38408
rasdani/github-patches
git_diff
great-expectations__great_expectations-5077
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Use cleaner solution for non-truncating division in python 2 Prefer `from __future__ import division` to `1.*x/y` </issue> <code> [start of great_expectations/rule_based_profiler/data_assistant/data_assistant_dispatcher.py] 1 import logging 2 from typing import Dict, Optional, Type 3 4 from great_expectations.rule_based_profiler.data_assistant import DataAssistant 5 from great_expectations.rule_based_profiler.data_assistant.data_assistant_runner import ( 6 DataAssistantRunner, 7 ) 8 9 logger = logging.getLogger(__name__) 10 11 12 class DataAssistantDispatcher: 13 """ 14 DataAssistantDispatcher intercepts requests for "DataAssistant" classes by their registered names and manages their 15 associated "DataAssistantRunner" objects, which process invocations of calls to "DataAssistant" "run()" methods. 16 """ 17 18 _registered_data_assistants: Dict[str, Type[DataAssistant]] = {} 19 20 def __init__(self, data_context: "BaseDataContext") -> None: # noqa: F821 21 """ 22 Args: 23 data_context: BaseDataContext associated with DataAssistantDispatcher 24 """ 25 self._data_context = data_context 26 27 self._data_assistant_runner_cache = {} 28 29 def __getattr__(self, name: str) -> DataAssistantRunner: 30 # Both, registered data_assistant_type and alias name are supported for invocation. 31 32 # _registered_data_assistants has both aliases and full names 33 data_assistant_cls: Optional[ 34 Type[DataAssistant] 35 ] = DataAssistantDispatcher.get_data_assistant_impl(name=name) 36 37 # If "DataAssistant" is not registered, then raise "AttributeError", which is appropriate for "__getattr__()". 38 if data_assistant_cls is None: 39 raise AttributeError( 40 f'"{type(self).__name__}" object has no attribute "{name}".' 41 ) 42 43 data_assistant_name: str = data_assistant_cls.data_assistant_type 44 data_assistant_runner: Optional[ 45 DataAssistantRunner 46 ] = self._data_assistant_runner_cache.get(data_assistant_name) 47 if data_assistant_runner is None: 48 data_assistant_runner = DataAssistantRunner( 49 data_assistant_cls=data_assistant_cls, 50 data_context=self._data_context, 51 ) 52 self._data_assistant_runner_cache[ 53 data_assistant_name 54 ] = data_assistant_runner 55 56 return data_assistant_runner 57 58 @classmethod 59 def register_data_assistant( 60 cls, 61 data_assistant: Type[DataAssistant], # noqa: F821 62 ) -> None: 63 """ 64 This method executes "run()" of effective "RuleBasedProfiler" and fills "DataAssistantResult" object with outputs. 65 66 Args: 67 data_assistant: "DataAssistant" class to be registered 68 """ 69 data_assistant_type = data_assistant.data_assistant_type 70 cls._register(data_assistant_type, data_assistant) 71 72 alias: Optional[str] = data_assistant.__alias__ 73 if alias is not None: 74 cls._register(alias, data_assistant) 75 76 @classmethod 77 def _register(cls, name: str, data_assistant: Type[DataAssistant]) -> None: 78 registered_data_assistants = cls._registered_data_assistants 79 80 if name in registered_data_assistants: 81 raise ValueError(f'Existing declarations of DataAssistant "{name}" found.') 82 83 logger.debug( 84 f'Registering the declaration of DataAssistant "{name}" took place.' 
85 ) 86 registered_data_assistants[name] = data_assistant 87 88 @classmethod 89 def get_data_assistant_impl( 90 cls, 91 name: Optional[str], 92 ) -> Optional[Type[DataAssistant]]: # noqa: F821 93 """ 94 This method obtains (previously registered) "DataAssistant" class from DataAssistant Registry. 95 96 Note that it will clean the input string before checking against registered assistants. 97 98 Args: 99 data_assistant_type: String representing "snake case" version of "DataAssistant" class type 100 101 Returns: 102 Class inheriting "DataAssistant" if found; otherwise, None 103 """ 104 if name is None: 105 return None 106 name = name.lower() 107 return cls._registered_data_assistants.get(name) 108 [end of great_expectations/rule_based_profiler/data_assistant/data_assistant_dispatcher.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/great_expectations/rule_based_profiler/data_assistant/data_assistant_dispatcher.py b/great_expectations/rule_based_profiler/data_assistant/data_assistant_dispatcher.py --- a/great_expectations/rule_based_profiler/data_assistant/data_assistant_dispatcher.py +++ b/great_expectations/rule_based_profiler/data_assistant/data_assistant_dispatcher.py @@ -1,5 +1,5 @@ import logging -from typing import Dict, Optional, Type +from typing import Dict, Optional, Set, Type from great_expectations.rule_based_profiler.data_assistant import DataAssistant from great_expectations.rule_based_profiler.data_assistant.data_assistant_runner import ( @@ -61,7 +61,7 @@ data_assistant: Type[DataAssistant], # noqa: F821 ) -> None: """ - This method executes "run()" of effective "RuleBasedProfiler" and fills "DataAssistantResult" object with outputs. + This method registers "DataAssistant" subclass for future instantiation and execution of its "run()" method. Args: data_assistant: "DataAssistant" class to be registered @@ -89,14 +89,14 @@ def get_data_assistant_impl( cls, name: Optional[str], - ) -> Optional[Type[DataAssistant]]: # noqa: F821 + ) -> Optional[Type[DataAssistant]]: """ This method obtains (previously registered) "DataAssistant" class from DataAssistant Registry. Note that it will clean the input string before checking against registered assistants. Args: - data_assistant_type: String representing "snake case" version of "DataAssistant" class type + name: String representing "snake case" version of "DataAssistant" class type Returns: Class inheriting "DataAssistant" if found; otherwise, None @@ -105,3 +105,23 @@ return None name = name.lower() return cls._registered_data_assistants.get(name) + + def __dir__(self): + """ + This custom magic method is used to enable tab completion on "DataAssistantDispatcher" objects. + """ + data_assistant_dispatcher_attrs: Set[str] = set(super().__dir__()) + data_assistant_registered_names: Set[ + str + ] = get_registered_data_assistant_names() + combined_dir_attrs: Set[str] = ( + data_assistant_dispatcher_attrs | data_assistant_registered_names + ) + return list(combined_dir_attrs) + + +def get_registered_data_assistant_names() -> Set[str]: + """ + This method returns names (registered data_assistant_type and alias name) of registered "DataAssistant" classes. + """ + return set(DataAssistantDispatcher._registered_data_assistants.keys())
{"golden_diff": "diff --git a/great_expectations/rule_based_profiler/data_assistant/data_assistant_dispatcher.py b/great_expectations/rule_based_profiler/data_assistant/data_assistant_dispatcher.py\n--- a/great_expectations/rule_based_profiler/data_assistant/data_assistant_dispatcher.py\n+++ b/great_expectations/rule_based_profiler/data_assistant/data_assistant_dispatcher.py\n@@ -1,5 +1,5 @@\n import logging\n-from typing import Dict, Optional, Type\n+from typing import Dict, Optional, Set, Type\n \n from great_expectations.rule_based_profiler.data_assistant import DataAssistant\n from great_expectations.rule_based_profiler.data_assistant.data_assistant_runner import (\n@@ -61,7 +61,7 @@\n data_assistant: Type[DataAssistant], # noqa: F821\n ) -> None:\n \"\"\"\n- This method executes \"run()\" of effective \"RuleBasedProfiler\" and fills \"DataAssistantResult\" object with outputs.\n+ This method registers \"DataAssistant\" subclass for future instantiation and execution of its \"run()\" method.\n \n Args:\n data_assistant: \"DataAssistant\" class to be registered\n@@ -89,14 +89,14 @@\n def get_data_assistant_impl(\n cls,\n name: Optional[str],\n- ) -> Optional[Type[DataAssistant]]: # noqa: F821\n+ ) -> Optional[Type[DataAssistant]]:\n \"\"\"\n This method obtains (previously registered) \"DataAssistant\" class from DataAssistant Registry.\n \n Note that it will clean the input string before checking against registered assistants.\n \n Args:\n- data_assistant_type: String representing \"snake case\" version of \"DataAssistant\" class type\n+ name: String representing \"snake case\" version of \"DataAssistant\" class type\n \n Returns:\n Class inheriting \"DataAssistant\" if found; otherwise, None\n@@ -105,3 +105,23 @@\n return None\n name = name.lower()\n return cls._registered_data_assistants.get(name)\n+\n+ def __dir__(self):\n+ \"\"\"\n+ This custom magic method is used to enable tab completion on \"DataAssistantDispatcher\" objects.\n+ \"\"\"\n+ data_assistant_dispatcher_attrs: Set[str] = set(super().__dir__())\n+ data_assistant_registered_names: Set[\n+ str\n+ ] = get_registered_data_assistant_names()\n+ combined_dir_attrs: Set[str] = (\n+ data_assistant_dispatcher_attrs | data_assistant_registered_names\n+ )\n+ return list(combined_dir_attrs)\n+\n+\n+def get_registered_data_assistant_names() -> Set[str]:\n+ \"\"\"\n+ This method returns names (registered data_assistant_type and alias name) of registered \"DataAssistant\" classes.\n+ \"\"\"\n+ return set(DataAssistantDispatcher._registered_data_assistants.keys())\n", "issue": "Use cleaner solution for non-truncating division in python 2\nPrefer `from __future__ import division` to `1.*x/y`\n", "before_files": [{"content": "import logging\nfrom typing import Dict, Optional, Type\n\nfrom great_expectations.rule_based_profiler.data_assistant import DataAssistant\nfrom great_expectations.rule_based_profiler.data_assistant.data_assistant_runner import (\n DataAssistantRunner,\n)\n\nlogger = logging.getLogger(__name__)\n\n\nclass DataAssistantDispatcher:\n \"\"\"\n DataAssistantDispatcher intercepts requests for \"DataAssistant\" classes by their registered names and manages their\n associated \"DataAssistantRunner\" objects, which process invocations of calls to \"DataAssistant\" \"run()\" methods.\n \"\"\"\n\n _registered_data_assistants: Dict[str, Type[DataAssistant]] = {}\n\n def __init__(self, data_context: \"BaseDataContext\") -> None: # noqa: F821\n \"\"\"\n Args:\n data_context: BaseDataContext associated with 
DataAssistantDispatcher\n \"\"\"\n self._data_context = data_context\n\n self._data_assistant_runner_cache = {}\n\n def __getattr__(self, name: str) -> DataAssistantRunner:\n # Both, registered data_assistant_type and alias name are supported for invocation.\n\n # _registered_data_assistants has both aliases and full names\n data_assistant_cls: Optional[\n Type[DataAssistant]\n ] = DataAssistantDispatcher.get_data_assistant_impl(name=name)\n\n # If \"DataAssistant\" is not registered, then raise \"AttributeError\", which is appropriate for \"__getattr__()\".\n if data_assistant_cls is None:\n raise AttributeError(\n f'\"{type(self).__name__}\" object has no attribute \"{name}\".'\n )\n\n data_assistant_name: str = data_assistant_cls.data_assistant_type\n data_assistant_runner: Optional[\n DataAssistantRunner\n ] = self._data_assistant_runner_cache.get(data_assistant_name)\n if data_assistant_runner is None:\n data_assistant_runner = DataAssistantRunner(\n data_assistant_cls=data_assistant_cls,\n data_context=self._data_context,\n )\n self._data_assistant_runner_cache[\n data_assistant_name\n ] = data_assistant_runner\n\n return data_assistant_runner\n\n @classmethod\n def register_data_assistant(\n cls,\n data_assistant: Type[DataAssistant], # noqa: F821\n ) -> None:\n \"\"\"\n This method executes \"run()\" of effective \"RuleBasedProfiler\" and fills \"DataAssistantResult\" object with outputs.\n\n Args:\n data_assistant: \"DataAssistant\" class to be registered\n \"\"\"\n data_assistant_type = data_assistant.data_assistant_type\n cls._register(data_assistant_type, data_assistant)\n\n alias: Optional[str] = data_assistant.__alias__\n if alias is not None:\n cls._register(alias, data_assistant)\n\n @classmethod\n def _register(cls, name: str, data_assistant: Type[DataAssistant]) -> None:\n registered_data_assistants = cls._registered_data_assistants\n\n if name in registered_data_assistants:\n raise ValueError(f'Existing declarations of DataAssistant \"{name}\" found.')\n\n logger.debug(\n f'Registering the declaration of DataAssistant \"{name}\" took place.'\n )\n registered_data_assistants[name] = data_assistant\n\n @classmethod\n def get_data_assistant_impl(\n cls,\n name: Optional[str],\n ) -> Optional[Type[DataAssistant]]: # noqa: F821\n \"\"\"\n This method obtains (previously registered) \"DataAssistant\" class from DataAssistant Registry.\n\n Note that it will clean the input string before checking against registered assistants.\n\n Args:\n data_assistant_type: String representing \"snake case\" version of \"DataAssistant\" class type\n\n Returns:\n Class inheriting \"DataAssistant\" if found; otherwise, None\n \"\"\"\n if name is None:\n return None\n name = name.lower()\n return cls._registered_data_assistants.get(name)\n", "path": "great_expectations/rule_based_profiler/data_assistant/data_assistant_dispatcher.py"}]}
1,662
626
gh_patches_debug_13415
rasdani/github-patches
git_diff
AnalogJ__lexicon-106
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> function update_record() in namesilo.py has wrong query parameter origin: data['rdata'] = content modified: data['rrvalue'] = content </issue> <code> [start of lexicon/providers/namesilo.py] 1 from __future__ import print_function 2 from __future__ import absolute_import 3 from .base import Provider as BaseProvider 4 import requests 5 from xml.etree import ElementTree 6 7 def ProviderParser(subparser): 8 subparser.add_argument("--auth-token", help="specify key used authenticate") 9 10 11 class Provider(BaseProvider): 12 13 def __init__(self, options, provider_options={}): 14 super(Provider, self).__init__(options) 15 self.domain_id = None 16 self.api_endpoint = provider_options.get('api_endpoint') or 'https://www.namesilo.com/api' 17 18 def authenticate(self): 19 20 payload = self._get('/getDomainInfo', {'domain': self.options['domain']}) 21 self.domain_id = self.options['domain'] 22 23 24 # Create record. If record already exists with the same content, do nothing' 25 def create_record(self, type, name, content): 26 record = { 27 'domain': self.domain_id, 28 'rrhost': self._relative_name(name), 29 'rrtype': type, 30 'rrvalue': content 31 } 32 if self.options.get('ttl'): 33 record['rrttl'] = self.options.get('ttl') 34 payload = self._get('/dnsAddRecord', record) 35 print('create_record: {0}'.format(True)) 36 return True 37 38 # List all records. Return an empty list if no records found 39 # type, name and content are used to filter records. 40 # If possible filter during the query, otherwise filter after response is received. 41 def list_records(self, type=None, name=None, content=None): 42 query = {'domain': self.domain_id} 43 44 payload = self._get('/dnsListRecords', query) 45 records = [] 46 for record in payload.find('reply').findall('resource_record'): 47 processed_record = { 48 'type': record.find('type').text, 49 'name': record.find('host').text, 50 'ttl': record.find('ttl').text, 51 'content': record.find('value').text, 52 'id': record.find('record_id').text 53 } 54 records.append(processed_record) 55 56 if type: 57 records = [record for record in records if record['type'] == type] 58 if name: 59 records = [record for record in records if record['name'] == self._full_name(name)] 60 if content: 61 records = [record for record in records if record['content'] == content] 62 63 print('list_records: {0}'.format(records)) 64 return records 65 66 # Create or update a record. 67 def update_record(self, identifier, type=None, name=None, content=None): 68 69 data = { 70 'domain': self.domain_id, 71 'rrid': identifier 72 } 73 # if type: 74 # data['rtype'] = type 75 if name: 76 data['rrhost'] = self._relative_name(name) 77 if content: 78 data['rdata'] = content 79 if self.options.get('ttl'): 80 data['rrttl'] = self.options.get('ttl') 81 82 payload = self._get('/dnsUpdateRecord', data) 83 84 print('update_record: {0}'.format(True)) 85 return True 86 87 # Delete an existing record. 88 # If record does not exist, do nothing. 
89 def delete_record(self, identifier=None, type=None, name=None, content=None): 90 data = { 91 'domain': self.domain_id 92 } 93 if not identifier: 94 records = self.list_records(type, name, content) 95 print(records) 96 if len(records) == 1: 97 data['rrid'] = records[0]['id'] 98 else: 99 raise Exception('Record identifier could not be found.') 100 else: 101 data['rrid'] = identifier 102 payload = self._get('/dnsDeleteRecord', data) 103 104 print('delete_record: {0}'.format(True)) 105 return True 106 107 108 # Helpers 109 def _request(self, action='GET', url='/', data=None, query_params=None): 110 if data is None: 111 data = {} 112 if query_params is None: 113 query_params = {} 114 query_params['version'] = 1 115 query_params['type'] = 'xml' 116 query_params['key'] = self.options['auth_token'] 117 r = requests.request(action, self.api_endpoint + url, params=query_params) 118 #data=json.dumps(data)) 119 r.raise_for_status() # if the request fails for any reason, throw an error. 120 # TODO: check if the response is an error using 121 tree = ElementTree.ElementTree(ElementTree.fromstring(r.content)) 122 root = tree.getroot() 123 if root.find('reply').find('code').text != '300': 124 raise Exception('An error occurred: {0}, {1}'.format(root.find('reply').find('detail').text, root.find('reply').find('code').text)) 125 126 127 return root [end of lexicon/providers/namesilo.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/lexicon/providers/namesilo.py b/lexicon/providers/namesilo.py --- a/lexicon/providers/namesilo.py +++ b/lexicon/providers/namesilo.py @@ -75,7 +75,7 @@ if name: data['rrhost'] = self._relative_name(name) if content: - data['rdata'] = content + data['rrvalue'] = content if self.options.get('ttl'): data['rrttl'] = self.options.get('ttl') @@ -124,4 +124,4 @@ raise Exception('An error occurred: {0}, {1}'.format(root.find('reply').find('detail').text, root.find('reply').find('code').text)) - return root \ No newline at end of file + return root
{"golden_diff": "diff --git a/lexicon/providers/namesilo.py b/lexicon/providers/namesilo.py\n--- a/lexicon/providers/namesilo.py\n+++ b/lexicon/providers/namesilo.py\n@@ -75,7 +75,7 @@\n if name:\n data['rrhost'] = self._relative_name(name)\n if content:\n- data['rdata'] = content\n+ data['rrvalue'] = content\n if self.options.get('ttl'):\n data['rrttl'] = self.options.get('ttl')\n \n@@ -124,4 +124,4 @@\n raise Exception('An error occurred: {0}, {1}'.format(root.find('reply').find('detail').text, root.find('reply').find('code').text))\n \n \n- return root\n\\ No newline at end of file\n+ return root\n", "issue": "function update_record() in namesilo.py has wrong query parameter\norigin: data['rdata'] = content\r\nmodified: data['rrvalue'] = content\n", "before_files": [{"content": "from __future__ import print_function\nfrom __future__ import absolute_import\nfrom .base import Provider as BaseProvider\nimport requests\nfrom xml.etree import ElementTree\n\ndef ProviderParser(subparser):\n subparser.add_argument(\"--auth-token\", help=\"specify key used authenticate\")\n\n\nclass Provider(BaseProvider):\n\n def __init__(self, options, provider_options={}):\n super(Provider, self).__init__(options)\n self.domain_id = None\n self.api_endpoint = provider_options.get('api_endpoint') or 'https://www.namesilo.com/api'\n\n def authenticate(self):\n\n payload = self._get('/getDomainInfo', {'domain': self.options['domain']})\n self.domain_id = self.options['domain']\n\n\n # Create record. If record already exists with the same content, do nothing'\n def create_record(self, type, name, content):\n record = {\n 'domain': self.domain_id,\n 'rrhost': self._relative_name(name),\n 'rrtype': type,\n 'rrvalue': content\n }\n if self.options.get('ttl'):\n record['rrttl'] = self.options.get('ttl')\n payload = self._get('/dnsAddRecord', record)\n print('create_record: {0}'.format(True))\n return True\n\n # List all records. 
Return an empty list if no records found\n # type, name and content are used to filter records.\n # If possible filter during the query, otherwise filter after response is received.\n def list_records(self, type=None, name=None, content=None):\n query = {'domain': self.domain_id}\n\n payload = self._get('/dnsListRecords', query)\n records = []\n for record in payload.find('reply').findall('resource_record'):\n processed_record = {\n 'type': record.find('type').text,\n 'name': record.find('host').text,\n 'ttl': record.find('ttl').text,\n 'content': record.find('value').text,\n 'id': record.find('record_id').text\n }\n records.append(processed_record)\n\n if type:\n records = [record for record in records if record['type'] == type]\n if name:\n records = [record for record in records if record['name'] == self._full_name(name)]\n if content:\n records = [record for record in records if record['content'] == content]\n\n print('list_records: {0}'.format(records))\n return records\n\n # Create or update a record.\n def update_record(self, identifier, type=None, name=None, content=None):\n\n data = {\n 'domain': self.domain_id,\n 'rrid': identifier\n }\n # if type:\n # data['rtype'] = type\n if name:\n data['rrhost'] = self._relative_name(name)\n if content:\n data['rdata'] = content\n if self.options.get('ttl'):\n data['rrttl'] = self.options.get('ttl')\n\n payload = self._get('/dnsUpdateRecord', data)\n\n print('update_record: {0}'.format(True))\n return True\n\n # Delete an existing record.\n # If record does not exist, do nothing.\n def delete_record(self, identifier=None, type=None, name=None, content=None):\n data = {\n 'domain': self.domain_id\n }\n if not identifier:\n records = self.list_records(type, name, content)\n print(records)\n if len(records) == 1:\n data['rrid'] = records[0]['id']\n else:\n raise Exception('Record identifier could not be found.')\n else:\n data['rrid'] = identifier\n payload = self._get('/dnsDeleteRecord', data)\n\n print('delete_record: {0}'.format(True))\n return True\n\n\n # Helpers\n def _request(self, action='GET', url='/', data=None, query_params=None):\n if data is None:\n data = {}\n if query_params is None:\n query_params = {}\n query_params['version'] = 1\n query_params['type'] = 'xml'\n query_params['key'] = self.options['auth_token']\n r = requests.request(action, self.api_endpoint + url, params=query_params)\n #data=json.dumps(data))\n r.raise_for_status() # if the request fails for any reason, throw an error.\n # TODO: check if the response is an error using\n tree = ElementTree.ElementTree(ElementTree.fromstring(r.content))\n root = tree.getroot()\n if root.find('reply').find('code').text != '300':\n raise Exception('An error occurred: {0}, {1}'.format(root.find('reply').find('detail').text, root.find('reply').find('code').text))\n\n\n return root", "path": "lexicon/providers/namesilo.py"}]}
1,897
190
gh_patches_debug_16041
rasdani/github-patches
git_diff
pypa__setuptools-3709
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [BUG] AttributeError: module 'distutils.log' has no attribute 'warning' ### setuptools version setuptools==65.6.2 ### Python version Python 3.8.15 ### OS macOS 12.5.1 ### Additional environment information _No response_ ### Description Installing a package `kappa=0.6.0` doesn't work. I think it could be the issue on setuptools's side. ### Expected behavior Installing the package works. ### How to Reproduce ```console $ python3.8 -m venv .venv $ source .venv/bin/activate $ pip install setuptools==65.6.2 $ pip install wheel $ pip install kappa==0.6.0 ``` ### Output ```console (.venv) dmytro@Dmytros-MacBook-Pro install_kappa % pip install kappa==0.6.0 Collecting kappa==0.6.0 Using cached kappa-0.6.0.tar.gz (29 kB) Preparing metadata (setup.py) ... error error: subprocess-exited-with-error Γ— python setup.py egg_info did not run successfully. β”‚ exit code: 1 ╰─> [20 lines of output] Traceback (most recent call last): File "<string>", line 2, in <module> File "<pip-setuptools-caller>", line 34, in <module> File "/private/var/folders/kv/zxwjm57d3jdgwyylrg2db6r80000gn/T/pip-install-xjv7l4bl/kappa_6f4dfc99aa59445e99fd8697c5e00cd7/setup.py", line 54, in <module> run_setup() File "/private/var/folders/kv/zxwjm57d3jdgwyylrg2db6r80000gn/T/pip-install-xjv7l4bl/kappa_6f4dfc99aa59445e99fd8697c5e00cd7/setup.py", line 18, in run_setup setup( File "/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/__init__.py", line 87, in setup return distutils.core.setup(**attrs) File "/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/_distutils/core.py", line 147, in setup _setup_distribution = dist = klass(attrs) File "/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/dist.py", line 475, in __init__ _Distribution.__init__( File "/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/_distutils/dist.py", line 258, in __init__ getattr(self.metadata, "set_" + key)(val) File "/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/_distutils/dist.py", line 1242, in set_classifiers self.classifiers = _ensure_list(value, 'classifiers') File "/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/_distutils/dist.py", line 48, in _ensure_list log.warning(msg) AttributeError: module 'distutils.log' has no attribute 'warning' [end of output] note: This error originates from a subprocess, and is likely not a problem with pip. error: metadata-generation-failed Γ— Encountered error while generating package metadata. ╰─> See above for output. note: This is an issue with the package mentioned above, not pip. hint: See above for details. ``` </issue> <code> [start of setuptools/logging.py] 1 import sys 2 import logging 3 import distutils.log 4 from . import monkey 5 6 7 def _not_warning(record): 8 return record.levelno < logging.WARNING 9 10 11 def configure(): 12 """ 13 Configure logging to emit warning and above to stderr 14 and everything else to stdout. This behavior is provided 15 for compatibility with distutils.log but may change in 16 the future. 
17 """ 18 err_handler = logging.StreamHandler() 19 err_handler.setLevel(logging.WARNING) 20 out_handler = logging.StreamHandler(sys.stdout) 21 out_handler.addFilter(_not_warning) 22 handlers = err_handler, out_handler 23 logging.basicConfig( 24 format="{message}", style='{', handlers=handlers, level=logging.DEBUG) 25 if hasattr(distutils.log, 'Log'): 26 monkey.patch_func(set_threshold, distutils.log, 'set_threshold') 27 # For some reason `distutils.log` module is getting cached in `distutils.dist` 28 # and then loaded again when patched, 29 # implying: id(distutils.log) != id(distutils.dist.log). 30 # Make sure the same module object is used everywhere: 31 distutils.dist.log = distutils.log 32 33 34 def set_threshold(level): 35 logging.root.setLevel(level*10) 36 return set_threshold.unpatched(level) 37 [end of setuptools/logging.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setuptools/logging.py b/setuptools/logging.py --- a/setuptools/logging.py +++ b/setuptools/logging.py @@ -1,4 +1,5 @@ import sys +import inspect import logging import distutils.log from . import monkey @@ -22,7 +23,7 @@ handlers = err_handler, out_handler logging.basicConfig( format="{message}", style='{', handlers=handlers, level=logging.DEBUG) - if hasattr(distutils.log, 'Log'): + if inspect.ismodule(distutils.dist.log): monkey.patch_func(set_threshold, distutils.log, 'set_threshold') # For some reason `distutils.log` module is getting cached in `distutils.dist` # and then loaded again when patched,
{"golden_diff": "diff --git a/setuptools/logging.py b/setuptools/logging.py\n--- a/setuptools/logging.py\n+++ b/setuptools/logging.py\n@@ -1,4 +1,5 @@\n import sys\n+import inspect\n import logging\n import distutils.log\n from . import monkey\n@@ -22,7 +23,7 @@\n handlers = err_handler, out_handler\n logging.basicConfig(\n format=\"{message}\", style='{', handlers=handlers, level=logging.DEBUG)\n- if hasattr(distutils.log, 'Log'):\n+ if inspect.ismodule(distutils.dist.log):\n monkey.patch_func(set_threshold, distutils.log, 'set_threshold')\n # For some reason `distutils.log` module is getting cached in `distutils.dist`\n # and then loaded again when patched,\n", "issue": "[BUG] AttributeError: module 'distutils.log' has no attribute 'warning'\n### setuptools version\n\nsetuptools==65.6.2\n\n### Python version\n\nPython 3.8.15\n\n### OS\n\nmacOS 12.5.1\n\n### Additional environment information\n\n_No response_\n\n### Description\n\nInstalling a package `kappa=0.6.0` doesn't work. I think it could be the issue on setuptools's side.\n\n### Expected behavior\n\nInstalling the package works.\n\n### How to Reproduce\n\n```console\r\n$ python3.8 -m venv .venv\r\n$ source .venv/bin/activate\r\n$ pip install setuptools==65.6.2\r\n$ pip install wheel\r\n$ pip install kappa==0.6.0\r\n```\n\n### Output\n\n```console\r\n(.venv) dmytro@Dmytros-MacBook-Pro install_kappa % pip install kappa==0.6.0\r\nCollecting kappa==0.6.0\r\n Using cached kappa-0.6.0.tar.gz (29 kB)\r\n Preparing metadata (setup.py) ... error\r\n error: subprocess-exited-with-error\r\n\r\n \u00d7 python setup.py egg_info did not run successfully.\r\n \u2502 exit code: 1\r\n \u2570\u2500> [20 lines of output]\r\n Traceback (most recent call last):\r\n File \"<string>\", line 2, in <module>\r\n File \"<pip-setuptools-caller>\", line 34, in <module>\r\n File \"/private/var/folders/kv/zxwjm57d3jdgwyylrg2db6r80000gn/T/pip-install-xjv7l4bl/kappa_6f4dfc99aa59445e99fd8697c5e00cd7/setup.py\", line 54, in <module>\r\n run_setup()\r\n File \"/private/var/folders/kv/zxwjm57d3jdgwyylrg2db6r80000gn/T/pip-install-xjv7l4bl/kappa_6f4dfc99aa59445e99fd8697c5e00cd7/setup.py\", line 18, in run_setup\r\n setup(\r\n File \"/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/__init__.py\", line 87, in setup\r\n return distutils.core.setup(**attrs)\r\n File \"/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/_distutils/core.py\", line 147, in setup\r\n _setup_distribution = dist = klass(attrs)\r\n File \"/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/dist.py\", line 475, in __init__\r\n _Distribution.__init__(\r\n File \"/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/_distutils/dist.py\", line 258, in __init__\r\n getattr(self.metadata, \"set_\" + key)(val)\r\n File \"/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/_distutils/dist.py\", line 1242, in set_classifiers\r\n self.classifiers = _ensure_list(value, 'classifiers')\r\n File \"/Users/dmytro/Tests/install_kappa/.venv/lib/python3.8/site-packages/setuptools/_distutils/dist.py\", line 48, in _ensure_list\r\n log.warning(msg)\r\n AttributeError: module 'distutils.log' has no attribute 'warning'\r\n [end of output]\r\n\r\n note: This error originates from a subprocess, and is likely not a problem with pip.\r\nerror: metadata-generation-failed\r\n\r\n\u00d7 Encountered error while generating package metadata.\r\n\u2570\u2500> See above for output.\r\n\r\nnote: 
This is an issue with the package mentioned above, not pip.\r\nhint: See above for details.\r\n```\r\n\n", "before_files": [{"content": "import sys\nimport logging\nimport distutils.log\nfrom . import monkey\n\n\ndef _not_warning(record):\n return record.levelno < logging.WARNING\n\n\ndef configure():\n \"\"\"\n Configure logging to emit warning and above to stderr\n and everything else to stdout. This behavior is provided\n for compatibility with distutils.log but may change in\n the future.\n \"\"\"\n err_handler = logging.StreamHandler()\n err_handler.setLevel(logging.WARNING)\n out_handler = logging.StreamHandler(sys.stdout)\n out_handler.addFilter(_not_warning)\n handlers = err_handler, out_handler\n logging.basicConfig(\n format=\"{message}\", style='{', handlers=handlers, level=logging.DEBUG)\n if hasattr(distutils.log, 'Log'):\n monkey.patch_func(set_threshold, distutils.log, 'set_threshold')\n # For some reason `distutils.log` module is getting cached in `distutils.dist`\n # and then loaded again when patched,\n # implying: id(distutils.log) != id(distutils.dist.log).\n # Make sure the same module object is used everywhere:\n distutils.dist.log = distutils.log\n\n\ndef set_threshold(level):\n logging.root.setLevel(level*10)\n return set_threshold.unpatched(level)\n", "path": "setuptools/logging.py"}]}
1,742
166
gh_patches_debug_6045
rasdani/github-patches
git_diff
ManimCommunity__manim-509
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> ValueTracker must support increment also using += Just what the title says. Here's an example ```python class Test(Scene): def construct(self): tracker = ValueTracker(0.0) tracker.set_value(2.0) print(tracker.get_value()) # -> 2.000 tracker.increment_value(3.0) print(tracker.get_value()) # -> 5.000 tracker += 4.0 # NEW proposed feature print(tracker.get_value()) # -> 9.000 ``` </issue> <code> [start of manim/mobject/value_tracker.py] 1 """Mobjects that dynamically show the change of a variable.""" 2 3 __all__ = ["ValueTracker", "ExponentialValueTracker", "ComplexValueTracker"] 4 5 6 import numpy as np 7 8 from ..utils.paths import straight_path 9 from ..mobject.mobject import Mobject 10 11 12 class ValueTracker(Mobject): 13 """A mobject that can be used for tracking (real-valued) parameters. 14 Useful for animating parameter changes. 15 16 Not meant to be displayed. Instead the position encodes some 17 number, often one which another animation or continual_animation 18 uses for its update function, and by treating it as a mobject it can 19 still be animated and manipulated just like anything else. 20 21 Examples 22 -------- 23 .. manim:: ValueTrackerExample 24 25 class ValueTrackerExample(Scene): 26 def construct(self): 27 number_line = NumberLine() 28 pointer = Vector(DOWN) 29 label = MathTex("x").add_updater(lambda m: m.next_to(pointer, UP)) 30 31 pointer_value = ValueTracker(0) 32 pointer.add_updater( 33 lambda m: m.next_to( 34 number_line.n2p(pointer_value.get_value()), 35 UP 36 ) 37 ) 38 self.add(number_line, pointer,label) 39 self.play(pointer_value.set_value, 5) 40 self.wait() 41 self.play(pointer_value.set_value, 3) 42 43 """ 44 45 def __init__(self, value=0, **kwargs): 46 Mobject.__init__(self, **kwargs) 47 self.points = np.zeros((1, 3)) 48 self.set_value(value) 49 50 def get_value(self): 51 return self.points[0, 0] 52 53 def set_value(self, value): 54 self.points[0, 0] = value 55 return self 56 57 def increment_value(self, d_value): 58 self.set_value(self.get_value() + d_value) 59 60 def interpolate(self, mobject1, mobject2, alpha, path_func=straight_path): 61 """ 62 Turns self into an interpolation between mobject1 63 and mobject2. 64 """ 65 self.points = path_func(mobject1.points, mobject2.points, alpha) 66 return self 67 68 69 class ExponentialValueTracker(ValueTracker): 70 """ 71 Operates just like ValueTracker, except it encodes the value as the 72 exponential of a position coordinate, which changes how interpolation 73 behaves 74 """ 75 76 def get_value(self): 77 return np.exp(ValueTracker.get_value(self)) 78 79 def set_value(self, value): 80 return ValueTracker.set_value(self, np.log(value)) 81 82 83 class ComplexValueTracker(ValueTracker): 84 def get_value(self): 85 return complex(*self.points[0, :2]) 86 87 def set_value(self, z): 88 z = complex(z) 89 self.points[0, :2] = (z.real, z.imag) 90 return self 91 [end of manim/mobject/value_tracker.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/manim/mobject/value_tracker.py b/manim/mobject/value_tracker.py --- a/manim/mobject/value_tracker.py +++ b/manim/mobject/value_tracker.py @@ -57,6 +57,10 @@ def increment_value(self, d_value): self.set_value(self.get_value() + d_value) + def __iadd__(self, d_value): + self.increment_value(d_value) + return self + def interpolate(self, mobject1, mobject2, alpha, path_func=straight_path): """ Turns self into an interpolation between mobject1
{"golden_diff": "diff --git a/manim/mobject/value_tracker.py b/manim/mobject/value_tracker.py\n--- a/manim/mobject/value_tracker.py\n+++ b/manim/mobject/value_tracker.py\n@@ -57,6 +57,10 @@\n def increment_value(self, d_value):\n self.set_value(self.get_value() + d_value)\n \n+ def __iadd__(self, d_value):\n+ self.increment_value(d_value)\n+ return self\n+\n def interpolate(self, mobject1, mobject2, alpha, path_func=straight_path):\n \"\"\"\n Turns self into an interpolation between mobject1\n", "issue": "ValueTracker must support increment also using +=\nJust what the title says. Here's an example \r\n```python\r\nclass Test(Scene):\r\n def construct(self):\r\n tracker = ValueTracker(0.0)\r\n tracker.set_value(2.0)\r\n print(tracker.get_value()) # -> 2.000\r\n tracker.increment_value(3.0)\r\n print(tracker.get_value()) # -> 5.000\r\n tracker += 4.0 # NEW proposed feature\r\n print(tracker.get_value()) # -> 9.000\r\n```\r\n\n", "before_files": [{"content": "\"\"\"Mobjects that dynamically show the change of a variable.\"\"\"\n\n__all__ = [\"ValueTracker\", \"ExponentialValueTracker\", \"ComplexValueTracker\"]\n\n\nimport numpy as np\n\nfrom ..utils.paths import straight_path\nfrom ..mobject.mobject import Mobject\n\n\nclass ValueTracker(Mobject):\n \"\"\"A mobject that can be used for tracking (real-valued) parameters.\n Useful for animating parameter changes.\n\n Not meant to be displayed. Instead the position encodes some\n number, often one which another animation or continual_animation\n uses for its update function, and by treating it as a mobject it can\n still be animated and manipulated just like anything else.\n\n Examples\n --------\n .. manim:: ValueTrackerExample\n\n class ValueTrackerExample(Scene):\n def construct(self):\n number_line = NumberLine()\n pointer = Vector(DOWN)\n label = MathTex(\"x\").add_updater(lambda m: m.next_to(pointer, UP))\n\n pointer_value = ValueTracker(0)\n pointer.add_updater(\n lambda m: m.next_to(\n number_line.n2p(pointer_value.get_value()),\n UP\n )\n )\n self.add(number_line, pointer,label)\n self.play(pointer_value.set_value, 5)\n self.wait()\n self.play(pointer_value.set_value, 3)\n\n \"\"\"\n\n def __init__(self, value=0, **kwargs):\n Mobject.__init__(self, **kwargs)\n self.points = np.zeros((1, 3))\n self.set_value(value)\n\n def get_value(self):\n return self.points[0, 0]\n\n def set_value(self, value):\n self.points[0, 0] = value\n return self\n\n def increment_value(self, d_value):\n self.set_value(self.get_value() + d_value)\n\n def interpolate(self, mobject1, mobject2, alpha, path_func=straight_path):\n \"\"\"\n Turns self into an interpolation between mobject1\n and mobject2.\n \"\"\"\n self.points = path_func(mobject1.points, mobject2.points, alpha)\n return self\n\n\nclass ExponentialValueTracker(ValueTracker):\n \"\"\"\n Operates just like ValueTracker, except it encodes the value as the\n exponential of a position coordinate, which changes how interpolation\n behaves\n \"\"\"\n\n def get_value(self):\n return np.exp(ValueTracker.get_value(self))\n\n def set_value(self, value):\n return ValueTracker.set_value(self, np.log(value))\n\n\nclass ComplexValueTracker(ValueTracker):\n def get_value(self):\n return complex(*self.points[0, :2])\n\n def set_value(self, z):\n z = complex(z)\n self.points[0, :2] = (z.real, z.imag)\n return self\n", "path": "manim/mobject/value_tracker.py"}]}
1,447
136
gh_patches_debug_3807
rasdani/github-patches
git_diff
quantumlib__Cirq-3574
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Docs build is failing Since the black formatter merge the RTD builds are failing with some weird pip error: https://readthedocs.org/projects/cirq/builds/ Need to look into it and resolve it if the error is on our end or report it to the RTD team if it's on their end. </issue> <code> [start of setup.py] 1 # Copyright 2018 The Cirq Developers 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # https://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 import io 16 import os 17 from setuptools import find_packages, setup 18 19 # This reads the __version__ variable from cirq/_version.py 20 __version__ = '' 21 exec(open('cirq/_version.py').read()) 22 23 name = 'cirq' 24 25 description = ( 26 'A framework for creating, editing, and invoking ' 27 'Noisy Intermediate Scale Quantum (NISQ) circuits.' 28 ) 29 30 # README file as long_description. 31 long_description = io.open('README.rst', encoding='utf-8').read() 32 33 # If CIRQ_PRE_RELEASE_VERSION is set then we update the version to this value. 34 # It is assumed that it ends with one of `.devN`, `.aN`, `.bN`, `.rcN` and hence 35 # it will be a pre-release version on PyPi. See 36 # https://packaging.python.org/guides/distributing-packages-using-setuptools/#pre-release-versioning 37 # for more details. 38 if 'CIRQ_PRE_RELEASE_VERSION' in os.environ: 39 __version__ = os.environ['CIRQ_PRE_RELEASE_VERSION'] 40 long_description = ( 41 "**This is a development version of Cirq and may be " 42 "unstable.**\n\n**For the latest stable release of Cirq " 43 "see**\n`here <https://pypi.org/project/cirq>`__.\n\n" + long_description 44 ) 45 46 # Read in requirements 47 requirements = open('requirements.txt').readlines() 48 requirements = [r.strip() for r in requirements] 49 contrib_requirements = open('cirq/contrib/contrib-requirements.txt').readlines() 50 contrib_requirements = [r.strip() for r in contrib_requirements] 51 dev_requirements = open('dev_tools/conf/pip-list-dev-tools.txt').readlines() 52 dev_requirements = [r.strip() for r in dev_requirements] 53 54 cirq_packages = ['cirq'] + ['cirq.' 
+ package for package in find_packages(where='cirq')] 55 56 # Sanity check 57 assert __version__, 'Version string cannot be empty' 58 59 setup( 60 name=name, 61 version=__version__, 62 url='http://github.com/quantumlib/cirq', 63 author='The Cirq Developers', 64 author_email='[email protected]', 65 python_requires=('>=3.6.0'), 66 install_requires=requirements, 67 extras_require={ 68 'contrib': contrib_requirements, 69 'dev_env': dev_requirements + contrib_requirements, 70 }, 71 license='Apache 2', 72 description=description, 73 long_description=long_description, 74 packages=cirq_packages, 75 package_data={ 76 'cirq': ['py.typed'], 77 'cirq.google.api.v1': ['*.proto', '*.pyi'], 78 'cirq.google.api.v2': ['*.proto', '*.pyi'], 79 'cirq.protocols.json_test_data': ['*'], 80 }, 81 ) 82 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -62,7 +62,7 @@ url='http://github.com/quantumlib/cirq', author='The Cirq Developers', author_email='[email protected]', - python_requires=('>=3.6.0'), + python_requires=('>=3.7.0'), install_requires=requirements, extras_require={ 'contrib': contrib_requirements,
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -62,7 +62,7 @@\n url='http://github.com/quantumlib/cirq',\n author='The Cirq Developers',\n author_email='[email protected]',\n- python_requires=('>=3.6.0'),\n+ python_requires=('>=3.7.0'),\n install_requires=requirements,\n extras_require={\n 'contrib': contrib_requirements,\n", "issue": "Docs build is failing\nSince the black formatter merge the RTD builds are failing with some weird pip error:\r\n\r\nhttps://readthedocs.org/projects/cirq/builds/\r\n\r\nNeed to look into it and resolve it if the error is on our end or report it to the RTD team if it's on their end.\n", "before_files": [{"content": "# Copyright 2018 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport io\nimport os\nfrom setuptools import find_packages, setup\n\n# This reads the __version__ variable from cirq/_version.py\n__version__ = ''\nexec(open('cirq/_version.py').read())\n\nname = 'cirq'\n\ndescription = (\n 'A framework for creating, editing, and invoking '\n 'Noisy Intermediate Scale Quantum (NISQ) circuits.'\n)\n\n# README file as long_description.\nlong_description = io.open('README.rst', encoding='utf-8').read()\n\n# If CIRQ_PRE_RELEASE_VERSION is set then we update the version to this value.\n# It is assumed that it ends with one of `.devN`, `.aN`, `.bN`, `.rcN` and hence\n# it will be a pre-release version on PyPi. See\n# https://packaging.python.org/guides/distributing-packages-using-setuptools/#pre-release-versioning\n# for more details.\nif 'CIRQ_PRE_RELEASE_VERSION' in os.environ:\n __version__ = os.environ['CIRQ_PRE_RELEASE_VERSION']\n long_description = (\n \"**This is a development version of Cirq and may be \"\n \"unstable.**\\n\\n**For the latest stable release of Cirq \"\n \"see**\\n`here <https://pypi.org/project/cirq>`__.\\n\\n\" + long_description\n )\n\n# Read in requirements\nrequirements = open('requirements.txt').readlines()\nrequirements = [r.strip() for r in requirements]\ncontrib_requirements = open('cirq/contrib/contrib-requirements.txt').readlines()\ncontrib_requirements = [r.strip() for r in contrib_requirements]\ndev_requirements = open('dev_tools/conf/pip-list-dev-tools.txt').readlines()\ndev_requirements = [r.strip() for r in dev_requirements]\n\ncirq_packages = ['cirq'] + ['cirq.' 
+ package for package in find_packages(where='cirq')]\n\n# Sanity check\nassert __version__, 'Version string cannot be empty'\n\nsetup(\n name=name,\n version=__version__,\n url='http://github.com/quantumlib/cirq',\n author='The Cirq Developers',\n author_email='[email protected]',\n python_requires=('>=3.6.0'),\n install_requires=requirements,\n extras_require={\n 'contrib': contrib_requirements,\n 'dev_env': dev_requirements + contrib_requirements,\n },\n license='Apache 2',\n description=description,\n long_description=long_description,\n packages=cirq_packages,\n package_data={\n 'cirq': ['py.typed'],\n 'cirq.google.api.v1': ['*.proto', '*.pyi'],\n 'cirq.google.api.v2': ['*.proto', '*.pyi'],\n 'cirq.protocols.json_test_data': ['*'],\n },\n)\n", "path": "setup.py"}]}
1,481
108
gh_patches_debug_2706
rasdani/github-patches
git_diff
fossasia__open-event-server-4302
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Custom-forms: Change data.type in custom-form **I'm submitting a ...** (check one with "x") - [x] bug report - [ ] feature request - [ ] support request => Please do not submit support requests here, instead ask your query in out Gitter channel at https://gitter.im/fossasia/open-event-orga-server **Current behavior:** The type attribute is `custom_form` which leads to error 409 while making a request after #4300 **Expected behavior:** The type attribute should be `custom-form` @enigmaeth Can you please check? </issue> <code> [start of app/api/custom_forms.py] 1 from flask_rest_jsonapi import ResourceDetail, ResourceList, ResourceRelationship 2 from marshmallow_jsonapi.flask import Schema, Relationship 3 from marshmallow_jsonapi import fields 4 import marshmallow.validate as validate 5 from app.api.helpers.permissions import jwt_required 6 from flask_rest_jsonapi.exceptions import ObjectNotFound 7 8 from app.api.bootstrap import api 9 from app.api.helpers.utilities import dasherize 10 from app.models import db 11 from app.models.custom_form import CustomForms 12 from app.models.event import Event 13 from app.api.helpers.db import safe_query 14 from app.api.helpers.utilities import require_relationship 15 from app.api.helpers.permission_manager import has_access 16 from app.api.helpers.query import event_query 17 18 19 class CustomFormSchema(Schema): 20 """ 21 API Schema for Custom Forms database model 22 """ 23 class Meta: 24 """ 25 Meta class for CustomForm Schema 26 """ 27 type_ = 'custom_form' 28 self_view = 'v1.custom_form_detail' 29 self_view_kwargs = {'id': '<id>'} 30 inflect = dasherize 31 32 id = fields.Integer(dump_only=True) 33 field_identifier = fields.Str(required=True) 34 form = fields.Str(required=True) 35 type = fields.Str(default="text", validate=validate.OneOf( 36 choices=["text", "checkbox", "select", "file", "image"])) 37 is_required = fields.Boolean(default=False) 38 is_included = fields.Boolean(default=False) 39 is_fixed = fields.Boolean(default=False) 40 event = Relationship(attribute='event', 41 self_view='v1.custom_form_event', 42 self_view_kwargs={'id': '<id>'}, 43 related_view='v1.event_detail', 44 related_view_kwargs={'custom_form_id': '<id>'}, 45 schema='EventSchema', 46 type_='event') 47 48 49 class CustomFormListPost(ResourceList): 50 """ 51 Create and List Custom Forms 52 """ 53 54 def before_post(self, args, kwargs, data): 55 """ 56 method to check for required relationship with event 57 :param args: 58 :param kwargs: 59 :param data: 60 :return: 61 """ 62 require_relationship(['event'], data) 63 if not has_access('is_coorganizer', event_id=data['event']): 64 raise ObjectNotFound({'parameter': 'event_id'}, 65 "Event: {} not found".format(data['event_id'])) 66 67 schema = CustomFormSchema 68 methods = ['POST', ] 69 data_layer = {'session': db.session, 70 'model': CustomForms 71 } 72 73 74 class CustomFormList(ResourceList): 75 """ 76 Create and List Custom Forms 77 """ 78 def query(self, view_kwargs): 79 """ 80 query method for different view_kwargs 81 :param view_kwargs: 82 :return: 83 """ 84 query_ = self.session.query(CustomForms) 85 query_ = event_query(self, query_, view_kwargs) 86 return query_ 87 88 view_kwargs = True 89 decorators = (jwt_required, ) 90 methods = ['GET', ] 91 schema = CustomFormSchema 92 data_layer = {'session': db.session, 93 'model': CustomForms, 94 'methods': { 95 'query': query 96 }} 97 98 99 class 
CustomFormDetail(ResourceDetail): 100 """ 101 CustomForm Resource 102 """ 103 104 def before_get_object(self, view_kwargs): 105 """ 106 before get method 107 :param view_kwargs: 108 :return: 109 """ 110 event = None 111 if view_kwargs.get('event_id'): 112 event = safe_query(self, Event, 'id', view_kwargs['event_id'], 'event_id') 113 elif view_kwargs.get('event_identifier'): 114 event = safe_query(self, Event, 'identifier', view_kwargs['event_identifier'], 'event_identifier') 115 116 if event: 117 custom_form = safe_query(self, CustomForms, 'event_id', event.id, 'event_id') 118 view_kwargs['id'] = custom_form.id 119 120 decorators = (api.has_permission('is_coorganizer', fetch='event_id', 121 fetch_as="event_id", model=CustomForms, methods="PATCH,DELETE"), ) 122 schema = CustomFormSchema 123 data_layer = {'session': db.session, 124 'model': CustomForms} 125 126 127 class CustomFormRelationshipRequired(ResourceRelationship): 128 """ 129 CustomForm Relationship (Required) 130 """ 131 decorators = (api.has_permission('is_coorganizer', fetch='event_id', 132 fetch_as="event_id", model=CustomForms, methods="PATCH"),) 133 methods = ['GET', 'PATCH'] 134 schema = CustomFormSchema 135 data_layer = {'session': db.session, 136 'model': CustomForms} 137 [end of app/api/custom_forms.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/app/api/custom_forms.py b/app/api/custom_forms.py --- a/app/api/custom_forms.py +++ b/app/api/custom_forms.py @@ -24,7 +24,7 @@ """ Meta class for CustomForm Schema """ - type_ = 'custom_form' + type_ = 'custom-form' self_view = 'v1.custom_form_detail' self_view_kwargs = {'id': '<id>'} inflect = dasherize
{"golden_diff": "diff --git a/app/api/custom_forms.py b/app/api/custom_forms.py\n--- a/app/api/custom_forms.py\n+++ b/app/api/custom_forms.py\n@@ -24,7 +24,7 @@\n \"\"\"\n Meta class for CustomForm Schema\n \"\"\"\n- type_ = 'custom_form'\n+ type_ = 'custom-form'\n self_view = 'v1.custom_form_detail'\n self_view_kwargs = {'id': '<id>'}\n inflect = dasherize\n", "issue": "Custom-forms: Change data.type in custom-form\n**I'm submitting a ...** (check one with \"x\")\r\n- [x] bug report\r\n- [ ] feature request\r\n- [ ] support request => Please do not submit support requests here, instead ask your query in out Gitter channel at https://gitter.im/fossasia/open-event-orga-server\r\n\r\n**Current behavior:**\r\nThe type attribute is `custom_form` which leads to error 409 while making a request after #4300 \r\n\r\n**Expected behavior:**\r\nThe type attribute should be `custom-form` \r\n\r\n@enigmaeth Can you please check?\n", "before_files": [{"content": "from flask_rest_jsonapi import ResourceDetail, ResourceList, ResourceRelationship\nfrom marshmallow_jsonapi.flask import Schema, Relationship\nfrom marshmallow_jsonapi import fields\nimport marshmallow.validate as validate\nfrom app.api.helpers.permissions import jwt_required\nfrom flask_rest_jsonapi.exceptions import ObjectNotFound\n\nfrom app.api.bootstrap import api\nfrom app.api.helpers.utilities import dasherize\nfrom app.models import db\nfrom app.models.custom_form import CustomForms\nfrom app.models.event import Event\nfrom app.api.helpers.db import safe_query\nfrom app.api.helpers.utilities import require_relationship\nfrom app.api.helpers.permission_manager import has_access\nfrom app.api.helpers.query import event_query\n\n\nclass CustomFormSchema(Schema):\n \"\"\"\n API Schema for Custom Forms database model\n \"\"\"\n class Meta:\n \"\"\"\n Meta class for CustomForm Schema\n \"\"\"\n type_ = 'custom_form'\n self_view = 'v1.custom_form_detail'\n self_view_kwargs = {'id': '<id>'}\n inflect = dasherize\n\n id = fields.Integer(dump_only=True)\n field_identifier = fields.Str(required=True)\n form = fields.Str(required=True)\n type = fields.Str(default=\"text\", validate=validate.OneOf(\n choices=[\"text\", \"checkbox\", \"select\", \"file\", \"image\"]))\n is_required = fields.Boolean(default=False)\n is_included = fields.Boolean(default=False)\n is_fixed = fields.Boolean(default=False)\n event = Relationship(attribute='event',\n self_view='v1.custom_form_event',\n self_view_kwargs={'id': '<id>'},\n related_view='v1.event_detail',\n related_view_kwargs={'custom_form_id': '<id>'},\n schema='EventSchema',\n type_='event')\n\n\nclass CustomFormListPost(ResourceList):\n \"\"\"\n Create and List Custom Forms\n \"\"\"\n\n def before_post(self, args, kwargs, data):\n \"\"\"\n method to check for required relationship with event\n :param args:\n :param kwargs:\n :param data:\n :return:\n \"\"\"\n require_relationship(['event'], data)\n if not has_access('is_coorganizer', event_id=data['event']):\n raise ObjectNotFound({'parameter': 'event_id'},\n \"Event: {} not found\".format(data['event_id']))\n\n schema = CustomFormSchema\n methods = ['POST', ]\n data_layer = {'session': db.session,\n 'model': CustomForms\n }\n\n\nclass CustomFormList(ResourceList):\n \"\"\"\n Create and List Custom Forms\n \"\"\"\n def query(self, view_kwargs):\n \"\"\"\n query method for different view_kwargs\n :param view_kwargs:\n :return:\n \"\"\"\n query_ = self.session.query(CustomForms)\n query_ = event_query(self, query_, view_kwargs)\n return query_\n\n view_kwargs 
= True\n decorators = (jwt_required, )\n methods = ['GET', ]\n schema = CustomFormSchema\n data_layer = {'session': db.session,\n 'model': CustomForms,\n 'methods': {\n 'query': query\n }}\n\n\nclass CustomFormDetail(ResourceDetail):\n \"\"\"\n CustomForm Resource\n \"\"\"\n\n def before_get_object(self, view_kwargs):\n \"\"\"\n before get method\n :param view_kwargs:\n :return:\n \"\"\"\n event = None\n if view_kwargs.get('event_id'):\n event = safe_query(self, Event, 'id', view_kwargs['event_id'], 'event_id')\n elif view_kwargs.get('event_identifier'):\n event = safe_query(self, Event, 'identifier', view_kwargs['event_identifier'], 'event_identifier')\n\n if event:\n custom_form = safe_query(self, CustomForms, 'event_id', event.id, 'event_id')\n view_kwargs['id'] = custom_form.id\n\n decorators = (api.has_permission('is_coorganizer', fetch='event_id',\n fetch_as=\"event_id\", model=CustomForms, methods=\"PATCH,DELETE\"), )\n schema = CustomFormSchema\n data_layer = {'session': db.session,\n 'model': CustomForms}\n\n\nclass CustomFormRelationshipRequired(ResourceRelationship):\n \"\"\"\n CustomForm Relationship (Required)\n \"\"\"\n decorators = (api.has_permission('is_coorganizer', fetch='event_id',\n fetch_as=\"event_id\", model=CustomForms, methods=\"PATCH\"),)\n methods = ['GET', 'PATCH']\n schema = CustomFormSchema\n data_layer = {'session': db.session,\n 'model': CustomForms}\n", "path": "app/api/custom_forms.py"}]}
1,929
106
gh_patches_debug_227
rasdani/github-patches
git_diff
sktime__sktime-3618
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [BUG] ShapeletTransformClassifier numba error when dtype is not float64 **Describe the bug** Seems that when using `ShapeletTransformClassifier` there is some Numba accelerated functions that break if the data in the input data frame are of type `int32`. **To Reproduce** MRE as below: ```python import warnings warnings.simplefilter('ignore', category=FutureWarning) import numpy as np import pandas as pd from sklearn.model_selection import train_test_split from sktime.classification.shapelet_based import ShapeletTransformClassifier from sktime.contrib.vector_classifiers._rotation_forest import RotationForest # make fake data data = pd.DataFrame(np.random.random((5000, 250))).astype(np.float32) # reshape to input into Shapelet Classifier data4train = data.apply(lambda row: pd.Series({ 'time-series': pd.Series(row.values) }), axis=1) # make targets targets = pd.Series(2500 * [1] + 2500 * [0]) # train test split X_train, X_test, y_train, y_test = train_test_split( data4train, targets, test_size=0.7, random_state=42 ) # train clf = ShapeletTransformClassifier( estimator=RotationForest(n_estimators=3), n_shapelet_samples=500, max_shapelets=20, batch_size=100, ) clf.fit(X_train, y_train) ``` **Expected behavior** will not throw an error, and also enforce conversion to float32 or float64 within the classifier? **Additional context** removing conversion to `float32` (hence `dtype == float64`) will make the code running without issues. **Versions** numba 0.55.1 sklearn 0.24.1 sktime 0.11.0 pandas 1.4.2 python 3.8.10 **Stacktrace output** ```bash TypingError: Failed in nopython mode pipeline (step: nopython frontend) Cannot unify array(float64, 1d, C) and array(float32, 1d, C) for 'X_n.2', defined at /path_to_mypython/python/lib/python3.8/site-packages/sktime/utils/numba/general.py (39) File "../python/lib/python3.8/site-packages/sktime/utils/numba/general.py", line 39: def z_normalise_series(X): <source elided> return X_n ``` </issue> <code> [start of sktime/utils/numba/general.py] 1 # -*- coding: utf-8 -*- 2 """General numba utilities.""" 3 4 import numpy as np 5 from numba import njit 6 7 8 @njit(fastmath=True, cache=True) 9 def unique_count(X): 10 """Numba unique count function for a 1D array.""" 11 if len(X) > 0: 12 X = np.sort(X) 13 unique = np.zeros(len(X)) 14 unique[0] = X[0] 15 counts = np.zeros(len(X), dtype=np.int_) 16 counts[0] = 1 17 unique_count = 0 18 19 for i in X[1:]: 20 if i != unique[unique_count]: 21 unique_count += 1 22 unique[unique_count] = i 23 counts[unique_count] = 1 24 else: 25 counts[unique_count] += 1 26 return unique[: unique_count + 1], counts[: unique_count + 1] 27 return None, np.zeros(0, dtype=np.int_) 28 29 30 @njit(fastmath=True, cache=True) 31 def z_normalise_series(X): 32 """Numba z-normalisation function for a single time series.""" 33 std = np.std(X) 34 if std > 0: 35 X_n = (X - np.mean(X)) / std 36 else: 37 X_n = np.zeros(len(X)) 38 39 return X_n 40 [end of sktime/utils/numba/general.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/sktime/utils/numba/general.py b/sktime/utils/numba/general.py --- a/sktime/utils/numba/general.py +++ b/sktime/utils/numba/general.py @@ -34,6 +34,5 @@ if std > 0: X_n = (X - np.mean(X)) / std else: - X_n = np.zeros(len(X)) - + X_n = X - np.mean(X) return X_n
{"golden_diff": "diff --git a/sktime/utils/numba/general.py b/sktime/utils/numba/general.py\n--- a/sktime/utils/numba/general.py\n+++ b/sktime/utils/numba/general.py\n@@ -34,6 +34,5 @@\n if std > 0:\n X_n = (X - np.mean(X)) / std\n else:\n- X_n = np.zeros(len(X))\n-\n+ X_n = X - np.mean(X)\n return X_n\n", "issue": "[BUG] ShapeletTransformClassifier numba error when dtype is not float64\n**Describe the bug**\r\nSeems that when using `ShapeletTransformClassifier` there is some Numba accelerated functions that break if the data in the input data frame are of type `int32`.\r\n\r\n**To Reproduce**\r\nMRE as below:\r\n\r\n```python\r\nimport warnings\r\nwarnings.simplefilter('ignore', category=FutureWarning)\r\n\r\nimport numpy as np\r\nimport pandas as pd\r\n\r\nfrom sklearn.model_selection import train_test_split\r\n\r\nfrom sktime.classification.shapelet_based import ShapeletTransformClassifier\r\nfrom sktime.contrib.vector_classifiers._rotation_forest import RotationForest\r\n\r\n# make fake data\r\ndata = pd.DataFrame(np.random.random((5000, 250))).astype(np.float32)\r\n\r\n# reshape to input into Shapelet Classifier\r\ndata4train = data.apply(lambda row: pd.Series({\r\n 'time-series': pd.Series(row.values)\r\n}), axis=1)\r\n\r\n# make targets\r\ntargets = pd.Series(2500 * [1] + 2500 * [0])\r\n\r\n# train test split\r\nX_train, X_test, y_train, y_test = train_test_split(\r\n data4train, targets, test_size=0.7, random_state=42\r\n)\r\n\r\n# train\r\nclf = ShapeletTransformClassifier(\r\n estimator=RotationForest(n_estimators=3),\r\n n_shapelet_samples=500,\r\n max_shapelets=20,\r\n batch_size=100,\r\n)\r\n\r\nclf.fit(X_train, y_train)\r\n```\r\n\r\n**Expected behavior**\r\nwill not throw an error, and also enforce conversion to float32 or float64 within the classifier?\r\n**Additional context**\r\nremoving conversion to `float32` (hence `dtype == float64`) will make the code running without issues.\r\n\r\n**Versions**\r\nnumba 0.55.1\r\nsklearn 0.24.1\r\nsktime 0.11.0\r\npandas 1.4.2\r\npython 3.8.10\r\n\r\n**Stacktrace output**\r\n```bash\r\nTypingError: Failed in nopython mode pipeline (step: nopython frontend)\r\nCannot unify array(float64, 1d, C) and array(float32, 1d, C) for 'X_n.2', defined at /path_to_mypython/python/lib/python3.8/site-packages/sktime/utils/numba/general.py (39)\r\n\r\nFile \"../python/lib/python3.8/site-packages/sktime/utils/numba/general.py\", line 39:\r\ndef z_normalise_series(X):\r\n <source elided>\r\n\r\n return X_n\r\n```\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"General numba utilities.\"\"\"\n\nimport numpy as np\nfrom numba import njit\n\n\n@njit(fastmath=True, cache=True)\ndef unique_count(X):\n \"\"\"Numba unique count function for a 1D array.\"\"\"\n if len(X) > 0:\n X = np.sort(X)\n unique = np.zeros(len(X))\n unique[0] = X[0]\n counts = np.zeros(len(X), dtype=np.int_)\n counts[0] = 1\n unique_count = 0\n\n for i in X[1:]:\n if i != unique[unique_count]:\n unique_count += 1\n unique[unique_count] = i\n counts[unique_count] = 1\n else:\n counts[unique_count] += 1\n return unique[: unique_count + 1], counts[: unique_count + 1]\n return None, np.zeros(0, dtype=np.int_)\n\n\n@njit(fastmath=True, cache=True)\ndef z_normalise_series(X):\n \"\"\"Numba z-normalisation function for a single time series.\"\"\"\n std = np.std(X)\n if std > 0:\n X_n = (X - np.mean(X)) / std\n else:\n X_n = np.zeros(len(X))\n\n return X_n\n", "path": "sktime/utils/numba/general.py"}]}
1,469
112
gh_patches_debug_20253
rasdani/github-patches
git_diff
mathesar-foundation__mathesar-901
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> `mathesar_temp_schema` should be hidden ## Description <!-- A clear and concise description of what the bug is. --> Currently, the system schema `mathesar_temp_schema` is returned as a standard schema, and ends up displayed as a result in the UI. This is confusing, since that schema is used for system operations, and shouldn't be available to the user. ## Expected behavior <!-- A clear and concise description of what you expected to happen. --> The schema `mathesar_temp_schema` should be hidden. ## To Reproduce <!-- How can we recreate this bug? Please try to provide a Minimal, Complete, and Verifiable (http://stackoverflow.com/help/mcve) example if code-related. --> After starting the service and doing type inference on at least one CSV loading into a table, go to `http://localhost:8000/api/v0/schemas/`. Note that `mathesar_temp_schema` will be one of the schemata in the `mathesar_tables` DB. ## Additional context <!-- Add any other context about the problem or screenshots here. --> We're already hiding some schemata, e.g., `mathesar_types`. The implementer should figure out where the list of such schemata is, and add `mathesar_temp_schema` to that list. </issue> <code> [start of db/tables/operations/infer_types.py] 1 from time import time 2 3 from sqlalchemy import select 4 5 from db import constants 6 from db.columns.base import MathesarColumn 7 from db.columns.operations.infer_types import infer_column_type 8 from db.schemas.operations.create import create_schema 9 from db.tables.operations.create import CreateTableAs 10 from db.tables.operations.select import reflect_table 11 12 13 TEMP_SCHEMA = f"{constants.MATHESAR_PREFIX}temp_schema" 14 TEMP_TABLE = f"{constants.MATHESAR_PREFIX}temp_table_%s" 15 16 17 def update_table_column_types(schema, table_name, engine): 18 table = reflect_table(table_name, schema, engine) 19 # we only want to infer (modify) the type of non-default columns 20 inferable_column_names = ( 21 col.name for col in table.columns 22 if not MathesarColumn.from_column(col).is_default 23 and not col.primary_key 24 and not col.foreign_keys 25 ) 26 for column_name in inferable_column_names: 27 infer_column_type( 28 schema, 29 table_name, 30 column_name, 31 engine, 32 ) 33 34 35 def infer_table_column_types(schema, table_name, engine): 36 table = reflect_table(table_name, schema, engine) 37 38 temp_name = TEMP_TABLE % (int(time())) 39 create_schema(TEMP_SCHEMA, engine) 40 with engine.begin() as conn: 41 while engine.dialect.has_table(conn, temp_name, schema=TEMP_SCHEMA): 42 temp_name = TEMP_TABLE.format(int(time())) 43 44 full_temp_name = f"{TEMP_SCHEMA}.{temp_name}" 45 46 select_table = select(table) 47 with engine.begin() as conn: 48 conn.execute(CreateTableAs(full_temp_name, select_table)) 49 temp_table = reflect_table(temp_name, TEMP_SCHEMA, engine) 50 51 try: 52 update_table_column_types( 53 TEMP_SCHEMA, temp_table.name, engine, 54 ) 55 except Exception as e: 56 # Ensure the temp table is deleted 57 temp_table.drop() 58 raise e 59 else: 60 temp_table = reflect_table(temp_name, TEMP_SCHEMA, engine) 61 types = [c.type.__class__ for c in temp_table.columns] 62 temp_table.drop() 63 return types 64 [end of db/tables/operations/infer_types.py] [start of db/schemas/operations/select.py] 1 import warnings 2 3 from sqlalchemy import MetaData, select, and_, not_, or_, Table 4 5 from db import types 6 7 8 TYPES_SCHEMA = types.base.SCHEMA 9 EXCLUDED_SCHEMATA = 
[TYPES_SCHEMA, "information_schema"] 10 11 12 def reflect_schema(engine, name=None, oid=None): 13 # If we have both arguments, the behavior is undefined. 14 try: 15 assert name is None or oid is None 16 except AssertionError as e: 17 raise e 18 metadata = MetaData() 19 with warnings.catch_warnings(): 20 warnings.filterwarnings("ignore", message="Did not recognize type") 21 pg_namespace = Table("pg_namespace", metadata, autoload_with=engine) 22 sel = ( 23 select(pg_namespace.c.oid, pg_namespace.c.nspname.label("name")) 24 .where(or_(pg_namespace.c.nspname == name, pg_namespace.c.oid == oid)) 25 ) 26 with engine.begin() as conn: 27 schema_info = conn.execute(sel).fetchone() 28 return schema_info 29 30 31 def get_mathesar_schemas_with_oids(engine): 32 metadata = MetaData() 33 with warnings.catch_warnings(): 34 warnings.filterwarnings("ignore", message="Did not recognize type") 35 pg_namespace = Table("pg_namespace", metadata, autoload_with=engine) 36 sel = ( 37 select(pg_namespace.c.nspname.label('schema'), pg_namespace.c.oid) 38 .where( 39 and_( 40 *[pg_namespace.c.nspname != schema for schema in EXCLUDED_SCHEMATA], 41 not_(pg_namespace.c.nspname.like("pg_%")) 42 ) 43 ) 44 ) 45 with engine.begin() as conn: 46 result = conn.execute(sel).fetchall() 47 return result 48 [end of db/schemas/operations/select.py] [start of db/constants.py] 1 MATHESAR_PREFIX = "mathesar_" 2 ID = "id" 3 ID_ORIGINAL = "id_original" 4 [end of db/constants.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/db/constants.py b/db/constants.py --- a/db/constants.py +++ b/db/constants.py @@ -1,3 +1,4 @@ MATHESAR_PREFIX = "mathesar_" ID = "id" ID_ORIGINAL = "id_original" +INFERENCE_SCHEMA = f"{MATHESAR_PREFIX}inference_schema" diff --git a/db/schemas/operations/select.py b/db/schemas/operations/select.py --- a/db/schemas/operations/select.py +++ b/db/schemas/operations/select.py @@ -2,11 +2,12 @@ from sqlalchemy import MetaData, select, and_, not_, or_, Table +from db import constants from db import types - TYPES_SCHEMA = types.base.SCHEMA -EXCLUDED_SCHEMATA = [TYPES_SCHEMA, "information_schema"] +TEMP_INFER_SCHEMA = constants.INFERENCE_SCHEMA +EXCLUDED_SCHEMATA = [TYPES_SCHEMA, TEMP_INFER_SCHEMA, "information_schema"] def reflect_schema(engine, name=None, oid=None): diff --git a/db/tables/operations/infer_types.py b/db/tables/operations/infer_types.py --- a/db/tables/operations/infer_types.py +++ b/db/tables/operations/infer_types.py @@ -10,7 +10,7 @@ from db.tables.operations.select import reflect_table -TEMP_SCHEMA = f"{constants.MATHESAR_PREFIX}temp_schema" +TEMP_SCHEMA = constants.INFERENCE_SCHEMA TEMP_TABLE = f"{constants.MATHESAR_PREFIX}temp_table_%s"
{"golden_diff": "diff --git a/db/constants.py b/db/constants.py\n--- a/db/constants.py\n+++ b/db/constants.py\n@@ -1,3 +1,4 @@\n MATHESAR_PREFIX = \"mathesar_\"\n ID = \"id\"\n ID_ORIGINAL = \"id_original\"\n+INFERENCE_SCHEMA = f\"{MATHESAR_PREFIX}inference_schema\"\ndiff --git a/db/schemas/operations/select.py b/db/schemas/operations/select.py\n--- a/db/schemas/operations/select.py\n+++ b/db/schemas/operations/select.py\n@@ -2,11 +2,12 @@\n \n from sqlalchemy import MetaData, select, and_, not_, or_, Table\n \n+from db import constants\n from db import types\n \n-\n TYPES_SCHEMA = types.base.SCHEMA\n-EXCLUDED_SCHEMATA = [TYPES_SCHEMA, \"information_schema\"]\n+TEMP_INFER_SCHEMA = constants.INFERENCE_SCHEMA\n+EXCLUDED_SCHEMATA = [TYPES_SCHEMA, TEMP_INFER_SCHEMA, \"information_schema\"]\n \n \n def reflect_schema(engine, name=None, oid=None):\ndiff --git a/db/tables/operations/infer_types.py b/db/tables/operations/infer_types.py\n--- a/db/tables/operations/infer_types.py\n+++ b/db/tables/operations/infer_types.py\n@@ -10,7 +10,7 @@\n from db.tables.operations.select import reflect_table\n \n \n-TEMP_SCHEMA = f\"{constants.MATHESAR_PREFIX}temp_schema\"\n+TEMP_SCHEMA = constants.INFERENCE_SCHEMA\n TEMP_TABLE = f\"{constants.MATHESAR_PREFIX}temp_table_%s\"\n", "issue": "`mathesar_temp_schema` should be hidden\n## Description\r\n<!-- A clear and concise description of what the bug is. -->\r\n\r\nCurrently, the system schema `mathesar_temp_schema` is returned as a standard schema, and ends up displayed as a result in the UI. This is confusing, since that schema is used for system operations, and shouldn't be available to the user.\r\n\r\n## Expected behavior\r\n<!-- A clear and concise description of what you expected to happen. -->\r\n\r\nThe schema `mathesar_temp_schema` should be hidden.\r\n\r\n## To Reproduce\r\n<!-- How can we recreate this bug? Please try to provide a Minimal, Complete, and Verifiable (http://stackoverflow.com/help/mcve) example if code-related. -->\r\n\r\nAfter starting the service and doing type inference on at least one CSV loading into a table, go to `http://localhost:8000/api/v0/schemas/`. Note that `mathesar_temp_schema` will be one of the schemata in the `mathesar_tables` DB.\r\n\r\n## Additional context\r\n<!-- Add any other context about the problem or screenshots here. -->\r\n\r\nWe're already hiding some schemata, e.g., `mathesar_types`. 
The implementer should figure out where the list of such schemata is, and add `mathesar_temp_schema` to that list.\n", "before_files": [{"content": "from time import time\n\nfrom sqlalchemy import select\n\nfrom db import constants\nfrom db.columns.base import MathesarColumn\nfrom db.columns.operations.infer_types import infer_column_type\nfrom db.schemas.operations.create import create_schema\nfrom db.tables.operations.create import CreateTableAs\nfrom db.tables.operations.select import reflect_table\n\n\nTEMP_SCHEMA = f\"{constants.MATHESAR_PREFIX}temp_schema\"\nTEMP_TABLE = f\"{constants.MATHESAR_PREFIX}temp_table_%s\"\n\n\ndef update_table_column_types(schema, table_name, engine):\n table = reflect_table(table_name, schema, engine)\n # we only want to infer (modify) the type of non-default columns\n inferable_column_names = (\n col.name for col in table.columns\n if not MathesarColumn.from_column(col).is_default\n and not col.primary_key\n and not col.foreign_keys\n )\n for column_name in inferable_column_names:\n infer_column_type(\n schema,\n table_name,\n column_name,\n engine,\n )\n\n\ndef infer_table_column_types(schema, table_name, engine):\n table = reflect_table(table_name, schema, engine)\n\n temp_name = TEMP_TABLE % (int(time()))\n create_schema(TEMP_SCHEMA, engine)\n with engine.begin() as conn:\n while engine.dialect.has_table(conn, temp_name, schema=TEMP_SCHEMA):\n temp_name = TEMP_TABLE.format(int(time()))\n\n full_temp_name = f\"{TEMP_SCHEMA}.{temp_name}\"\n\n select_table = select(table)\n with engine.begin() as conn:\n conn.execute(CreateTableAs(full_temp_name, select_table))\n temp_table = reflect_table(temp_name, TEMP_SCHEMA, engine)\n\n try:\n update_table_column_types(\n TEMP_SCHEMA, temp_table.name, engine,\n )\n except Exception as e:\n # Ensure the temp table is deleted\n temp_table.drop()\n raise e\n else:\n temp_table = reflect_table(temp_name, TEMP_SCHEMA, engine)\n types = [c.type.__class__ for c in temp_table.columns]\n temp_table.drop()\n return types\n", "path": "db/tables/operations/infer_types.py"}, {"content": "import warnings\n\nfrom sqlalchemy import MetaData, select, and_, not_, or_, Table\n\nfrom db import types\n\n\nTYPES_SCHEMA = types.base.SCHEMA\nEXCLUDED_SCHEMATA = [TYPES_SCHEMA, \"information_schema\"]\n\n\ndef reflect_schema(engine, name=None, oid=None):\n # If we have both arguments, the behavior is undefined.\n try:\n assert name is None or oid is None\n except AssertionError as e:\n raise e\n metadata = MetaData()\n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", message=\"Did not recognize type\")\n pg_namespace = Table(\"pg_namespace\", metadata, autoload_with=engine)\n sel = (\n select(pg_namespace.c.oid, pg_namespace.c.nspname.label(\"name\"))\n .where(or_(pg_namespace.c.nspname == name, pg_namespace.c.oid == oid))\n )\n with engine.begin() as conn:\n schema_info = conn.execute(sel).fetchone()\n return schema_info\n\n\ndef get_mathesar_schemas_with_oids(engine):\n metadata = MetaData()\n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", message=\"Did not recognize type\")\n pg_namespace = Table(\"pg_namespace\", metadata, autoload_with=engine)\n sel = (\n select(pg_namespace.c.nspname.label('schema'), pg_namespace.c.oid)\n .where(\n and_(\n *[pg_namespace.c.nspname != schema for schema in EXCLUDED_SCHEMATA],\n not_(pg_namespace.c.nspname.like(\"pg_%\"))\n )\n )\n )\n with engine.begin() as conn:\n result = conn.execute(sel).fetchall()\n return result\n", "path": 
"db/schemas/operations/select.py"}, {"content": "MATHESAR_PREFIX = \"mathesar_\"\nID = \"id\"\nID_ORIGINAL = \"id_original\"\n", "path": "db/constants.py"}]}
num_tokens_prompt: 1,889
num_tokens_diff: 339

problem_id: gh_patches_debug_50801
source: rasdani/github-patches
task_type: git_diff
in_source_id: googleapis__google-cloud-python-6841
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Container: Regional Cluster support for GKE clusters I'm unable to get or create regional clusters using the container_v1 client APIs. The [documentation](https://googleapis.github.io/google-cloud-python/latest/container/gapic/v1/api.html#google.cloud.container_v1.ClusterManagerClient.create_cluster) suggests that this is possible by using the `parent` parameter to describe the project/region to launch the cluster but I get the following errors: ```bash (env) david@ ~ $ which python ~/dev/env/bin/python (env) david@ ~ $ pip freeze ... google-api-core==1.6.0 google-auth==1.6.1 google-cloud==0.34.0 google-cloud-container==0.1.1 googleapis-common-protos==1.5.5 grpcio==1.16.1 ... (env) david@ ~ $ python --version Python 2.7.10 (env) david@ ~ $ python ./get_cluster.py Traceback (most recent call last): File "./get_cluster.py", line 6, in <module> cluster = client.get_cluster(project_id=credentials.project_id, parent='projects/<project_id>/locations/us-east1', cluster_id='ha-cluster-1') TypeError: get_cluster() got an unexpected keyword argument 'parent' ``` Is it possible that the API documentation has been updated before the feature was merged or is it more likely an environment issue on my end? Any insight into this would be appreciated I have also looked at using the [google-api-python-client](https://github.com/googleapis/google-api-python-client#google-api-client) to launch regional clusters but I would prefer to use this library if the feature is supported. Are there any known workarounds for this? </issue> <code> [start of container/setup.py] 1 # Copyright 2018 Google LLC 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 import io 16 import os 17 18 import setuptools 19 20 21 # Package metadata. 22 23 name = 'google-cloud-container' 24 description = 'Google Container Engine API client library' 25 version = '0.1.1' 26 # Should be one of: 27 # 'Development Status :: 3 - Alpha' 28 # 'Development Status :: 4 - Beta' 29 # 'Development Status :: 5 - Production/Stable' 30 release_status = 'Development Status :: 3 - Alpha' 31 dependencies = [ 32 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev', 33 ] 34 extras = { 35 } 36 37 38 # Setup boilerplate below this line. 39 40 package_root = os.path.abspath(os.path.dirname(__file__)) 41 42 readme_filename = os.path.join(package_root, 'README.rst') 43 with io.open(readme_filename, encoding='utf-8') as readme_file: 44 readme = readme_file.read() 45 46 # Only include packages under the 'google' namespace. Do not include tests, 47 # benchmarks, etc. 48 packages = [ 49 package for package in setuptools.find_packages() 50 if package.startswith('google')] 51 52 # Determine which namespaces are needed. 
53 namespaces = ['google'] 54 if 'google.cloud' in packages: 55 namespaces.append('google.cloud') 56 57 58 setuptools.setup( 59 name=name, 60 version=version, 61 description=description, 62 long_description=readme, 63 author='Google LLC', 64 author_email='[email protected]', 65 license='Apache 2.0', 66 url='https://github.com/GoogleCloudPlatform/google-cloud-python', 67 classifiers=[ 68 release_status, 69 'Intended Audience :: Developers', 70 'License :: OSI Approved :: Apache Software License', 71 'Programming Language :: Python', 72 'Programming Language :: Python :: 2', 73 'Programming Language :: Python :: 2.7', 74 'Programming Language :: Python :: 3', 75 'Programming Language :: Python :: 3.4', 76 'Programming Language :: Python :: 3.5', 77 'Programming Language :: Python :: 3.6', 78 'Operating System :: OS Independent', 79 'Topic :: Internet', 80 ], 81 platforms='Posix; MacOS X; Windows', 82 packages=packages, 83 namespace_packages=namespaces, 84 install_requires=dependencies, 85 extras_require=extras, 86 include_package_data=True, 87 zip_safe=False, 88 ) 89 [end of container/setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/container/setup.py b/container/setup.py --- a/container/setup.py +++ b/container/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-container' description = 'Google Container Engine API client library' -version = '0.1.1' +version = '0.2.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'
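The fix itself is simply cutting the `0.2.0` release, which ships the regenerated GAPIC client. As a hedged usage sketch (placeholder project/location/cluster names, and assuming a release where the client accepts full resource names, as current releases do), a regional cluster is then addressed like this:

```python
# Hypothetical post-upgrade usage: pip install --upgrade google-cloud-container
from google.cloud import container_v1

client = container_v1.ClusterManagerClient()
cluster = client.get_cluster(
    name="projects/my-project/locations/us-east1/clusters/ha-cluster-1"
)
print(cluster.status)
```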
{"golden_diff": "diff --git a/container/setup.py b/container/setup.py\n--- a/container/setup.py\n+++ b/container/setup.py\n@@ -22,7 +22,7 @@\n \n name = 'google-cloud-container'\n description = 'Google Container Engine API client library'\n-version = '0.1.1'\n+version = '0.2.0'\n # Should be one of:\n # 'Development Status :: 3 - Alpha'\n # 'Development Status :: 4 - Beta'\n", "issue": "Container: Regional Cluster support for GKE clusters\n\r\nI'm unable to get or create regional clusters using the container_v1 client APIs. The [documentation](https://googleapis.github.io/google-cloud-python/latest/container/gapic/v1/api.html#google.cloud.container_v1.ClusterManagerClient.create_cluster) suggests that this is possible by using the `parent` parameter to describe the project/region to launch the cluster but I get the following errors:\r\n\r\n```bash\r\n(env) david@ ~ $ which python\r\n~/dev/env/bin/python \r\n\r\n(env) david@ ~ $ pip freeze\r\n...\r\ngoogle-api-core==1.6.0\r\ngoogle-auth==1.6.1\r\ngoogle-cloud==0.34.0\r\ngoogle-cloud-container==0.1.1\r\ngoogleapis-common-protos==1.5.5\r\ngrpcio==1.16.1\r\n...\r\n\r\n(env) david@ ~ $ python --version\r\nPython 2.7.10\r\n\r\n(env) david@ ~ $ python ./get_cluster.py\r\nTraceback (most recent call last):\r\n File \"./get_cluster.py\", line 6, in <module>\r\n cluster = client.get_cluster(project_id=credentials.project_id, parent='projects/<project_id>/locations/us-east1', cluster_id='ha-cluster-1')\r\nTypeError: get_cluster() got an unexpected keyword argument 'parent'\r\n```\r\n \r\nIs it possible that the API documentation has been updated before the feature was merged or is it more likely an environment issue on my end? Any insight into this would be appreciated\r\n\r\nI have also looked at using the [google-api-python-client](https://github.com/googleapis/google-api-python-client#google-api-client) to launch regional clusters but I would prefer to use this library if the feature is supported. Are there any known workarounds for this?\r\n\n", "before_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport io\nimport os\n\nimport setuptools\n\n\n# Package metadata.\n\nname = 'google-cloud-container'\ndescription = 'Google Container Engine API client library'\nversion = '0.1.1'\n# Should be one of:\n# 'Development Status :: 3 - Alpha'\n# 'Development Status :: 4 - Beta'\n# 'Development Status :: 5 - Production/Stable'\nrelease_status = 'Development Status :: 3 - Alpha'\ndependencies = [\n 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev',\n]\nextras = {\n}\n\n\n# Setup boilerplate below this line.\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, 'README.rst')\nwith io.open(readme_filename, encoding='utf-8') as readme_file:\n readme = readme_file.read()\n\n# Only include packages under the 'google' namespace. 
Do not include tests,\n# benchmarks, etc.\npackages = [\n package for package in setuptools.find_packages()\n if package.startswith('google')]\n\n# Determine which namespaces are needed.\nnamespaces = ['google']\nif 'google.cloud' in packages:\n namespaces.append('google.cloud')\n\n\nsetuptools.setup(\n name=name,\n version=version,\n description=description,\n long_description=readme,\n author='Google LLC',\n author_email='[email protected]',\n license='Apache 2.0',\n url='https://github.com/GoogleCloudPlatform/google-cloud-python',\n classifiers=[\n release_status,\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Operating System :: OS Independent',\n 'Topic :: Internet',\n ],\n platforms='Posix; MacOS X; Windows',\n packages=packages,\n namespace_packages=namespaces,\n install_requires=dependencies,\n extras_require=extras,\n include_package_data=True,\n zip_safe=False,\n)\n", "path": "container/setup.py"}]}
num_tokens_prompt: 1,699
num_tokens_diff: 100

problem_id: gh_patches_debug_31286
source: rasdani/github-patches
task_type: git_diff
in_source_id: ocf__ocfweb-57
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Home page should have a link to password reset, check print quota, view print queue </issue> <code> [start of ocfweb/context_processors.py] 1 import re 2 from datetime import date 3 from datetime import datetime 4 5 from ocflib.lab.hours import DayHours 6 7 from ocfweb.component.lab_status import get_lab_status 8 9 10 def ocf_template_processor(request): 11 now = datetime.now() 12 today = date.today() 13 hours = DayHours.from_date(today) 14 15 base_css_classes = [] 16 if request.resolver_match.url_name: 17 page_class = 'page-' + request.resolver_match.url_name 18 base_css_classes.append(page_class) 19 20 for arg in request.resolver_match.args: 21 page_class += '-' + re.sub('[^a-zA-Z_\-]', '-', arg) 22 base_css_classes.append(page_class) 23 24 return { 25 'lab_is_open': hours.is_open(now), 26 'current_lab_hours': hours, 27 'lab_status': get_lab_status(), 28 'base_css_classes': ' '.join(base_css_classes), 29 } 30 [end of ocfweb/context_processors.py] [start of setup.py] 1 from setuptools import find_packages 2 from setuptools import setup 3 4 try: 5 with open('.version') as f: 6 VERSION = f.readline().strip() 7 except IOError: 8 VERSION = 'unknown' 9 10 setup( 11 name='ocfweb', 12 version=VERSION, 13 packages=find_packages(exclude=['debian', 'virtualenv_run']), 14 include_package_data=True, 15 url='https://www.ocf.berkeley.edu/', 16 author='Open Computing Facility', 17 author_email='[email protected]', 18 install_requires=[ 19 'cachetools', 20 'django>=1.8,<1.8.999', 21 'gunicorn', 22 'libsass', 23 'lxml', 24 'mistune', 25 'ocflib', 26 'pygments', 27 'python-dateutil', 28 ], 29 sass_manifests={ 30 'ocfweb': ('static/scss',), 31 }, 32 ) 33 [end of setup.py] [start of ocfweb/main/home.py] 1 from datetime import date 2 from datetime import timedelta 3 4 from django.shortcuts import render_to_response 5 from django.template import RequestContext 6 from django.utils import timezone 7 from ocflib.lab.hours import get_hours 8 from ocflib.lab.staff_hours import get_staff_hours_soonest_first 9 10 from ocfweb.component.blog import get_blog_posts 11 from ocfweb.component.lab_status import get_lab_status 12 13 14 def home(request): 15 hours = [ 16 get_hours(date.today() + timedelta(days=i)) for i in range(5) 17 ] 18 19 blog_posts = [ 20 post for post 21 in get_blog_posts() 22 if timezone.now() - post.published < timedelta(days=365) 23 ][:2] 24 25 return render_to_response( 26 'home.html', 27 { 28 'fulltitle': 'Open Computing Facility at UC Berkeley', 29 'description': ( 30 'The Open Computing Facility is an all-volunteer student ' 31 'organization dedicated to free and open-source computing for all UC ' 32 'Berkeley students.''' 33 ), 34 'staff_hours': get_staff_hours_soonest_first()[:2], 35 'hours': hours, 36 'today': hours[0], 37 'blog_posts': blog_posts, 38 'lab_status': get_lab_status(), 39 }, 40 context_instance=RequestContext(request), 41 ) 42 [end of ocfweb/main/home.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ocfweb/context_processors.py b/ocfweb/context_processors.py --- a/ocfweb/context_processors.py +++ b/ocfweb/context_processors.py @@ -1,12 +1,20 @@ import re from datetime import date from datetime import datetime +from ipaddress import ip_address +from ipaddress import ip_network +from ipware.ip import get_real_ip from ocflib.lab.hours import DayHours from ocfweb.component.lab_status import get_lab_status +def is_ocf_ip(ip): + # TODO: move this to ocflib when it drops Python 3.2 support + return ip_address(ip) in ip_network('169.229.10.0/24') + + def ocf_template_processor(request): now = datetime.now() today = date.today() @@ -21,9 +29,12 @@ page_class += '-' + re.sub('[^a-zA-Z_\-]', '-', arg) base_css_classes.append(page_class) + real_ip = get_real_ip(request) + return { 'lab_is_open': hours.is_open(now), 'current_lab_hours': hours, 'lab_status': get_lab_status(), 'base_css_classes': ' '.join(base_css_classes), + 'is_ocf_ip': is_ocf_ip(real_ip) if real_ip else True, } diff --git a/ocfweb/main/home.py b/ocfweb/main/home.py --- a/ocfweb/main/home.py +++ b/ocfweb/main/home.py @@ -13,7 +13,7 @@ def home(request): hours = [ - get_hours(date.today() + timedelta(days=i)) for i in range(5) + get_hours(date.today() + timedelta(days=i)) for i in range(3) ] blog_posts = [ diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -18,6 +18,7 @@ install_requires=[ 'cachetools', 'django>=1.8,<1.8.999', + 'django-ipware', 'gunicorn', 'libsass', 'lxml',
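The interesting part of this diff is the `is_ocf_ip()` helper, which leans entirely on the standard library, as this small illustration shows (the addresses are examples only):

```python
from ipaddress import ip_address, ip_network

OCF_NETWORK = ip_network("169.229.10.0/24")

print(ip_address("169.229.10.25") in OCF_NETWORK)  # True  -> machine in the lab
print(ip_address("8.8.8.8") in OCF_NETWORK)        # False -> off-network visitor
```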
{"golden_diff": "diff --git a/ocfweb/context_processors.py b/ocfweb/context_processors.py\n--- a/ocfweb/context_processors.py\n+++ b/ocfweb/context_processors.py\n@@ -1,12 +1,20 @@\n import re\n from datetime import date\n from datetime import datetime\n+from ipaddress import ip_address\n+from ipaddress import ip_network\n \n+from ipware.ip import get_real_ip\n from ocflib.lab.hours import DayHours\n \n from ocfweb.component.lab_status import get_lab_status\n \n \n+def is_ocf_ip(ip):\n+ # TODO: move this to ocflib when it drops Python 3.2 support\n+ return ip_address(ip) in ip_network('169.229.10.0/24')\n+\n+\n def ocf_template_processor(request):\n now = datetime.now()\n today = date.today()\n@@ -21,9 +29,12 @@\n page_class += '-' + re.sub('[^a-zA-Z_\\-]', '-', arg)\n base_css_classes.append(page_class)\n \n+ real_ip = get_real_ip(request)\n+\n return {\n 'lab_is_open': hours.is_open(now),\n 'current_lab_hours': hours,\n 'lab_status': get_lab_status(),\n 'base_css_classes': ' '.join(base_css_classes),\n+ 'is_ocf_ip': is_ocf_ip(real_ip) if real_ip else True,\n }\ndiff --git a/ocfweb/main/home.py b/ocfweb/main/home.py\n--- a/ocfweb/main/home.py\n+++ b/ocfweb/main/home.py\n@@ -13,7 +13,7 @@\n \n def home(request):\n hours = [\n- get_hours(date.today() + timedelta(days=i)) for i in range(5)\n+ get_hours(date.today() + timedelta(days=i)) for i in range(3)\n ]\n \n blog_posts = [\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -18,6 +18,7 @@\n install_requires=[\n 'cachetools',\n 'django>=1.8,<1.8.999',\n+ 'django-ipware',\n 'gunicorn',\n 'libsass',\n 'lxml',\n", "issue": "Home page should have a link to password reset, check print quota, view print queue\n\n", "before_files": [{"content": "import re\nfrom datetime import date\nfrom datetime import datetime\n\nfrom ocflib.lab.hours import DayHours\n\nfrom ocfweb.component.lab_status import get_lab_status\n\n\ndef ocf_template_processor(request):\n now = datetime.now()\n today = date.today()\n hours = DayHours.from_date(today)\n\n base_css_classes = []\n if request.resolver_match.url_name:\n page_class = 'page-' + request.resolver_match.url_name\n base_css_classes.append(page_class)\n\n for arg in request.resolver_match.args:\n page_class += '-' + re.sub('[^a-zA-Z_\\-]', '-', arg)\n base_css_classes.append(page_class)\n\n return {\n 'lab_is_open': hours.is_open(now),\n 'current_lab_hours': hours,\n 'lab_status': get_lab_status(),\n 'base_css_classes': ' '.join(base_css_classes),\n }\n", "path": "ocfweb/context_processors.py"}, {"content": "from setuptools import find_packages\nfrom setuptools import setup\n\ntry:\n with open('.version') as f:\n VERSION = f.readline().strip()\nexcept IOError:\n VERSION = 'unknown'\n\nsetup(\n name='ocfweb',\n version=VERSION,\n packages=find_packages(exclude=['debian', 'virtualenv_run']),\n include_package_data=True,\n url='https://www.ocf.berkeley.edu/',\n author='Open Computing Facility',\n author_email='[email protected]',\n install_requires=[\n 'cachetools',\n 'django>=1.8,<1.8.999',\n 'gunicorn',\n 'libsass',\n 'lxml',\n 'mistune',\n 'ocflib',\n 'pygments',\n 'python-dateutil',\n ],\n sass_manifests={\n 'ocfweb': ('static/scss',),\n },\n)\n", "path": "setup.py"}, {"content": "from datetime import date\nfrom datetime import timedelta\n\nfrom django.shortcuts import render_to_response\nfrom django.template import RequestContext\nfrom django.utils import timezone\nfrom ocflib.lab.hours import get_hours\nfrom ocflib.lab.staff_hours import get_staff_hours_soonest_first\n\nfrom 
ocfweb.component.blog import get_blog_posts\nfrom ocfweb.component.lab_status import get_lab_status\n\n\ndef home(request):\n hours = [\n get_hours(date.today() + timedelta(days=i)) for i in range(5)\n ]\n\n blog_posts = [\n post for post\n in get_blog_posts()\n if timezone.now() - post.published < timedelta(days=365)\n ][:2]\n\n return render_to_response(\n 'home.html',\n {\n 'fulltitle': 'Open Computing Facility at UC Berkeley',\n 'description': (\n 'The Open Computing Facility is an all-volunteer student '\n 'organization dedicated to free and open-source computing for all UC '\n 'Berkeley students.'''\n ),\n 'staff_hours': get_staff_hours_soonest_first()[:2],\n 'hours': hours,\n 'today': hours[0],\n 'blog_posts': blog_posts,\n 'lab_status': get_lab_status(),\n },\n context_instance=RequestContext(request),\n )\n", "path": "ocfweb/main/home.py"}]}
num_tokens_prompt: 1,430
num_tokens_diff: 499

problem_id: gh_patches_debug_16219
source: rasdani/github-patches
task_type: git_diff
in_source_id: getsentry__sentry-5339
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Localhost filter should consider affected URL Right now the "localhost" filter only consider's the affected User's IP: https://github.com/getsentry/sentry/blob/master/src/sentry/filters/localhost.py But users are also expecting that this should filter server errors triggered from a server running on localhost (e.g. local development). See also: #4729, #4762 </issue> <code> [start of src/sentry/filters/localhost.py] 1 from __future__ import absolute_import 2 3 from .base import Filter 4 5 LOCAL_IPS = frozenset(['127.0.0.1', '::1']) 6 7 8 class LocalhostFilter(Filter): 9 id = 'localhost' 10 name = 'Filter out errors coming from localhost' 11 description = 'This applies to to both IPv4 (``127.0.0.1``) and IPv6 (``::1``) addresses.' 12 13 def get_ip_address(self, data): 14 try: 15 return data['sentry.interfaces.User']['ip_address'] 16 except KeyError: 17 return '' 18 19 def test(self, data): 20 return self.get_ip_address(data) in LOCAL_IPS 21 [end of src/sentry/filters/localhost.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/sentry/filters/localhost.py b/src/sentry/filters/localhost.py --- a/src/sentry/filters/localhost.py +++ b/src/sentry/filters/localhost.py @@ -1,8 +1,10 @@ from __future__ import absolute_import from .base import Filter +from six.moves.urllib.parse import urlparse LOCAL_IPS = frozenset(['127.0.0.1', '::1']) +LOCAL_DOMAINS = frozenset(['127.0.0.1', 'localhost']) class LocalhostFilter(Filter): @@ -16,5 +18,14 @@ except KeyError: return '' + def get_url(self, data): + try: + return data['sentry.interfaces.Http']['url'] or '' + except KeyError: + return '' + + def get_domain(self, data): + return urlparse(self.get_url(data)).netloc + def test(self, data): - return self.get_ip_address(data) in LOCAL_IPS + return self.get_ip_address(data) in LOCAL_IPS or self.get_domain(data) in LOCAL_DOMAINS
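For reference, the `netloc` attribute that the new `get_domain()` compares against `LOCAL_DOMAINS` behaves like this (`six.moves` keeps the import working on both Python 2 and 3):

```python
from six.moves.urllib.parse import urlparse

print(urlparse("http://127.0.0.1/trigger-error").netloc)  # '127.0.0.1' -> filtered
print(urlparse("").netloc)                                # ''          -> not filtered
```

Note that `netloc` retains an explicit port (e.g. `'localhost:8000'`), which would not match the bare entries in `LOCAL_DOMAINS`.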
{"golden_diff": "diff --git a/src/sentry/filters/localhost.py b/src/sentry/filters/localhost.py\n--- a/src/sentry/filters/localhost.py\n+++ b/src/sentry/filters/localhost.py\n@@ -1,8 +1,10 @@\n from __future__ import absolute_import\n \n from .base import Filter\n+from six.moves.urllib.parse import urlparse\n \n LOCAL_IPS = frozenset(['127.0.0.1', '::1'])\n+LOCAL_DOMAINS = frozenset(['127.0.0.1', 'localhost'])\n \n \n class LocalhostFilter(Filter):\n@@ -16,5 +18,14 @@\n except KeyError:\n return ''\n \n+ def get_url(self, data):\n+ try:\n+ return data['sentry.interfaces.Http']['url'] or ''\n+ except KeyError:\n+ return ''\n+\n+ def get_domain(self, data):\n+ return urlparse(self.get_url(data)).netloc\n+\n def test(self, data):\n- return self.get_ip_address(data) in LOCAL_IPS\n+ return self.get_ip_address(data) in LOCAL_IPS or self.get_domain(data) in LOCAL_DOMAINS\n", "issue": "Localhost filter should consider affected URL\nRight now the \"localhost\" filter only consider's the affected User's IP: https://github.com/getsentry/sentry/blob/master/src/sentry/filters/localhost.py\r\n\r\nBut users are also expecting that this should filter server errors triggered from a server running on localhost (e.g. local development).\r\n\r\nSee also: #4729, #4762\n", "before_files": [{"content": "from __future__ import absolute_import\n\nfrom .base import Filter\n\nLOCAL_IPS = frozenset(['127.0.0.1', '::1'])\n\n\nclass LocalhostFilter(Filter):\n id = 'localhost'\n name = 'Filter out errors coming from localhost'\n description = 'This applies to to both IPv4 (``127.0.0.1``) and IPv6 (``::1``) addresses.'\n\n def get_ip_address(self, data):\n try:\n return data['sentry.interfaces.User']['ip_address']\n except KeyError:\n return ''\n\n def test(self, data):\n return self.get_ip_address(data) in LOCAL_IPS\n", "path": "src/sentry/filters/localhost.py"}]}
num_tokens_prompt: 810
num_tokens_diff: 255

problem_id: gh_patches_debug_1182
source: rasdani/github-patches
task_type: git_diff
in_source_id: cloud-custodian__cloud-custodian-1049
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> efs tag support I am finding that searching for tagging of EFS resources does not consistently report the correct results. It did find an EFS that was incorrectly tagged, but after it was corrected it continues to report the same resource. I use the same filter for other resource types and do not see this behavior. ``` - name: efs-tag-compliance resource: efs description: Notify if an EFS does not comply with tagging best practices. mode: type: periodic schedule: "rate(24 hours)" role: arn:aws:iam::MYACCOUNT:role/cloud-custodian filters: - or: - "tag:CostCenter": absent - "tag:POC": absent - "tag:Service": absent - "tag:Name": absent ... ``` </issue> <code> [start of c7n/resources/efs.py] 1 # Copyright 2016 Capital One Services, LLC 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 from c7n.actions import Action 15 from c7n.manager import resources 16 from c7n.query import QueryResourceManager 17 from c7n.utils import local_session, type_schema, get_retry 18 19 20 @resources.register('efs') 21 class ElasticFileSystem(QueryResourceManager): 22 23 class resource_type(object): 24 service = 'efs' 25 enum_spec = ('describe_file_systems', 'FileSystems', None) 26 id = 'FileSystemId' 27 name = 'Name' 28 date = 'CreationTime' 29 dimension = None 30 31 32 @ElasticFileSystem.action_registry.register('delete') 33 class Delete(Action): 34 35 schema = type_schema('delete') 36 permissions = ('efs:DescribeMountTargets', 37 'efs:DeleteMountTargets', 38 'efs:DeleteFileSystem') 39 40 def process(self, resources): 41 client = local_session(self.manager.session_factory).client('efs') 42 self.unmount_filesystems(resources) 43 retry = get_retry(('FileSystemInUse',), 12) 44 for r in resources: 45 retry(client.delete_file_system, FileSystemId=r['FileSystemId']) 46 47 def unmount_filesystems(self, resources): 48 client = local_session(self.manager.session_factory).client('efs') 49 for r in resources: 50 if not r['NumberOfMountTargets']: 51 continue 52 for t in client.describe_mount_targets( 53 FileSystemId=r['FileSystemId'])['MountTargets']: 54 client.delete_mount_target(MountTargetId=t['MountTargetId']) 55 [end of c7n/resources/efs.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/c7n/resources/efs.py b/c7n/resources/efs.py --- a/c7n/resources/efs.py +++ b/c7n/resources/efs.py @@ -27,6 +27,7 @@ name = 'Name' date = 'CreationTime' dimension = None + detail_spec = ('describe_tags', 'FileSystemId', 'FileSystemId', None) @ElasticFileSystem.action_registry.register('delete')
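Conceptually, the added `detail_spec` tells Custodian to enrich each enumerated file system with its tags before filters run. A rough boto3 sketch of the equivalent calls (not Custodian internals) looks like:

```python
import boto3

client = boto3.client("efs")
for fs in client.describe_file_systems()["FileSystems"]:
    # describe_tags is the per-resource detail call named in detail_spec
    fs["Tags"] = client.describe_tags(FileSystemId=fs["FileSystemId"])["Tags"]
    # tag:CostCenter-style filters can now see e.g. {"Key": "CostCenter", ...}
```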
{"golden_diff": "diff --git a/c7n/resources/efs.py b/c7n/resources/efs.py\n--- a/c7n/resources/efs.py\n+++ b/c7n/resources/efs.py\n@@ -27,6 +27,7 @@\n name = 'Name'\n date = 'CreationTime'\n dimension = None\n+ detail_spec = ('describe_tags', 'FileSystemId', 'FileSystemId', None)\n \n \n @ElasticFileSystem.action_registry.register('delete')\n", "issue": "efs tag support\nI am finding that searching for tagging of EFS resources does not consistently report the correct results. It did find an EFS that was incorrectly tagged, but after it was corrected it continues to report the same resource. I use the same filter for other resource types and do not see this behavior.\r\n\r\n```\r\n- name: efs-tag-compliance\r\n resource: efs\r\n description:\r\n Notify if an EFS does not comply with tagging best practices.\r\n mode:\r\n type: periodic\r\n schedule: \"rate(24 hours)\"\r\n role: arn:aws:iam::MYACCOUNT:role/cloud-custodian\r\n filters:\r\n - or:\r\n - \"tag:CostCenter\": absent\r\n - \"tag:POC\": absent\r\n - \"tag:Service\": absent\r\n - \"tag:Name\": absent\r\n...\r\n```\n", "before_files": [{"content": "# Copyright 2016 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom c7n.actions import Action\nfrom c7n.manager import resources\nfrom c7n.query import QueryResourceManager\nfrom c7n.utils import local_session, type_schema, get_retry\n\n\[email protected]('efs')\nclass ElasticFileSystem(QueryResourceManager):\n\n class resource_type(object):\n service = 'efs'\n enum_spec = ('describe_file_systems', 'FileSystems', None)\n id = 'FileSystemId'\n name = 'Name'\n date = 'CreationTime'\n dimension = None\n\n\[email protected]_registry.register('delete')\nclass Delete(Action):\n\n schema = type_schema('delete')\n permissions = ('efs:DescribeMountTargets',\n 'efs:DeleteMountTargets',\n 'efs:DeleteFileSystem')\n\n def process(self, resources):\n client = local_session(self.manager.session_factory).client('efs')\n self.unmount_filesystems(resources)\n retry = get_retry(('FileSystemInUse',), 12)\n for r in resources:\n retry(client.delete_file_system, FileSystemId=r['FileSystemId'])\n\n def unmount_filesystems(self, resources):\n client = local_session(self.manager.session_factory).client('efs')\n for r in resources:\n if not r['NumberOfMountTargets']:\n continue\n for t in client.describe_mount_targets(\n FileSystemId=r['FileSystemId'])['MountTargets']:\n client.delete_mount_target(MountTargetId=t['MountTargetId'])\n", "path": "c7n/resources/efs.py"}]}
num_tokens_prompt: 1,260
num_tokens_diff: 101

problem_id: gh_patches_debug_13132
source: rasdani/github-patches
task_type: git_diff
in_source_id: conan-io__conan-14185
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [bug] Can't call `conan upload --recipe-only` twice with backup sources enabled ### Steps to reproduce 1. Enable backup sources 2. Export a recipe that downloads file 3. Call conan upload only recipe for ref 4. Do it again, it fails due to KeyError Found while prepping for https://github.com/conan-io/conan-center-index/pull/18082 </issue> <code> [start of conans/client/downloaders/download_cache.py] 1 import json 2 import os 3 from contextlib import contextmanager 4 from threading import Lock 5 6 from conans.util.dates import timestamp_now 7 from conans.util.files import load, save 8 from conans.util.locks import SimpleLock 9 from conans.util.sha import sha256 as compute_sha256 10 11 12 class DownloadCache: 13 """ The download cache has 3 folders 14 - "s": SOURCE_BACKUP for the files.download(internet_url) backup sources feature 15 - "c": CONAN_CACHE: for caching Conan packages artifacts 16 - "locks": The LOCKS folder containing the file locks for concurrent access to the cache 17 """ 18 _LOCKS = "locks" 19 _SOURCE_BACKUP = "s" 20 _CONAN_CACHE = "c" 21 22 def __init__(self, path: str): 23 self._path: str = path 24 25 def source_path(self, sha256): 26 return os.path.join(self._path, self._SOURCE_BACKUP, sha256) 27 28 def cached_path(self, url): 29 h = compute_sha256(url.encode()) 30 return os.path.join(self._path, self._CONAN_CACHE, h), h 31 32 _thread_locks = {} # Needs to be shared among all instances 33 34 @contextmanager 35 def lock(self, lock_id): 36 lock = os.path.join(self._path, self._LOCKS, lock_id) 37 with SimpleLock(lock): 38 # Once the process has access, make sure multithread is locked too 39 # as SimpleLock doesn't work multithread 40 thread_lock = self._thread_locks.setdefault(lock, Lock()) 41 thread_lock.acquire() 42 try: 43 yield 44 finally: 45 thread_lock.release() 46 47 def get_backup_sources_files_to_upload(self, package_list, excluded_urls): 48 """ from a package_list of packages to upload, collect from the backup-sources cache 49 the matching references to upload those backups too 50 """ 51 def should_upload_sources(package): 52 return any(prev["upload"] for prev in package["revisions"].values()) 53 54 files_to_upload = [] 55 path_backups = os.path.join(self._path, self._SOURCE_BACKUP) 56 57 if not os.path.exists(path_backups): 58 return [] 59 60 if excluded_urls is None: 61 excluded_urls = [] 62 63 all_refs = {str(k) for k, ref in package_list.refs() 64 if ref.get("upload") or any(should_upload_sources(p) 65 for p in ref["packages"].values())} 66 for f in os.listdir(path_backups): 67 if f.endswith(".json"): 68 f = os.path.join(path_backups, f) 69 content = json.loads(load(f)) 70 refs = content["references"] 71 # unknown entries are not uploaded at this moment, the flow is not expected. 
72 for ref, urls in refs.items(): 73 is_excluded = all(any(url.startswith(excluded_url) 74 for excluded_url in excluded_urls) 75 for url in urls) 76 if not is_excluded and ref in all_refs: 77 files_to_upload.append(f) 78 files_to_upload.append(f[:-5]) 79 break 80 return files_to_upload 81 82 @staticmethod 83 def update_backup_sources_json(cached_path, conanfile, urls): 84 """ create or update the sha256.json file with the references and new urls used 85 """ 86 summary_path = cached_path + ".json" 87 if os.path.exists(summary_path): 88 summary = json.loads(load(summary_path)) 89 else: 90 summary = {"references": {}, "timestamp": timestamp_now()} 91 92 try: 93 summary_key = str(conanfile.ref) 94 except AttributeError: 95 # The recipe path would be different between machines 96 # So best we can do is to set this as unknown 97 summary_key = "unknown" 98 99 if not isinstance(urls, (list, tuple)): 100 urls = [urls] 101 existing_urls = summary["references"].setdefault(summary_key, []) 102 existing_urls.extend(url for url in urls if url not in existing_urls) 103 conanfile.output.verbose(f"Updating ${summary_path} summary file") 104 summary_dump = json.dumps(summary) 105 conanfile.output.debug(f"New summary: ${summary_dump}") 106 save(summary_path, json.dumps(summary)) 107 [end of conans/client/downloaders/download_cache.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/conans/client/downloaders/download_cache.py b/conans/client/downloaders/download_cache.py --- a/conans/client/downloaders/download_cache.py +++ b/conans/client/downloaders/download_cache.py @@ -60,9 +60,12 @@ if excluded_urls is None: excluded_urls = [] - all_refs = {str(k) for k, ref in package_list.refs() - if ref.get("upload") or any(should_upload_sources(p) - for p in ref["packages"].values())} + all_refs = set() + for k, ref in package_list.refs(): + packages = ref.get("packages", {}).values() + if ref.get("upload") or any(should_upload_sources(p) for p in packages): + all_refs.add(str(k)) + for f in os.listdir(path_backups): if f.endswith(".json"): f = os.path.join(path_backups, f)
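The root cause is visible with a toy reference entry: as the issue reports, a recipe-only upload produces a reference dict with no `"packages"` key at all, so subscripting raised `KeyError` on the second `conan upload --recipe-only`. The values below are illustrative, not real package-list contents:

```python
recipe_only_ref = {"upload": True}  # no "packages" key for --recipe-only uploads

# Before the patch:
# recipe_only_ref["packages"].values()          -> KeyError: 'packages'

# After the patch:
packages = recipe_only_ref.get("packages", {}).values()
print(list(packages))  # [] -- safely empty, the upload proceeds
```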
{"golden_diff": "diff --git a/conans/client/downloaders/download_cache.py b/conans/client/downloaders/download_cache.py\n--- a/conans/client/downloaders/download_cache.py\n+++ b/conans/client/downloaders/download_cache.py\n@@ -60,9 +60,12 @@\n if excluded_urls is None:\n excluded_urls = []\n \n- all_refs = {str(k) for k, ref in package_list.refs()\n- if ref.get(\"upload\") or any(should_upload_sources(p)\n- for p in ref[\"packages\"].values())}\n+ all_refs = set()\n+ for k, ref in package_list.refs():\n+ packages = ref.get(\"packages\", {}).values()\n+ if ref.get(\"upload\") or any(should_upload_sources(p) for p in packages):\n+ all_refs.add(str(k))\n+\n for f in os.listdir(path_backups):\n if f.endswith(\".json\"):\n f = os.path.join(path_backups, f)\n", "issue": "[bug] Can't call `conan upload --recipe-only` twice with backup sources enabled\n### Steps to reproduce\r\n\r\n1. Enable backup sources\r\n2. Export a recipe that downloads file\r\n3. Call conan upload only recipe for ref\r\n4. Do it again, it fails due to KeyError\r\n\r\n\r\nFound while prepping for https://github.com/conan-io/conan-center-index/pull/18082\n", "before_files": [{"content": "import json\nimport os\nfrom contextlib import contextmanager\nfrom threading import Lock\n\nfrom conans.util.dates import timestamp_now\nfrom conans.util.files import load, save\nfrom conans.util.locks import SimpleLock\nfrom conans.util.sha import sha256 as compute_sha256\n\n\nclass DownloadCache:\n \"\"\" The download cache has 3 folders\n - \"s\": SOURCE_BACKUP for the files.download(internet_url) backup sources feature\n - \"c\": CONAN_CACHE: for caching Conan packages artifacts\n - \"locks\": The LOCKS folder containing the file locks for concurrent access to the cache\n \"\"\"\n _LOCKS = \"locks\"\n _SOURCE_BACKUP = \"s\"\n _CONAN_CACHE = \"c\"\n\n def __init__(self, path: str):\n self._path: str = path\n\n def source_path(self, sha256):\n return os.path.join(self._path, self._SOURCE_BACKUP, sha256)\n\n def cached_path(self, url):\n h = compute_sha256(url.encode())\n return os.path.join(self._path, self._CONAN_CACHE, h), h\n\n _thread_locks = {} # Needs to be shared among all instances\n\n @contextmanager\n def lock(self, lock_id):\n lock = os.path.join(self._path, self._LOCKS, lock_id)\n with SimpleLock(lock):\n # Once the process has access, make sure multithread is locked too\n # as SimpleLock doesn't work multithread\n thread_lock = self._thread_locks.setdefault(lock, Lock())\n thread_lock.acquire()\n try:\n yield\n finally:\n thread_lock.release()\n\n def get_backup_sources_files_to_upload(self, package_list, excluded_urls):\n \"\"\" from a package_list of packages to upload, collect from the backup-sources cache\n the matching references to upload those backups too\n \"\"\"\n def should_upload_sources(package):\n return any(prev[\"upload\"] for prev in package[\"revisions\"].values())\n\n files_to_upload = []\n path_backups = os.path.join(self._path, self._SOURCE_BACKUP)\n\n if not os.path.exists(path_backups):\n return []\n\n if excluded_urls is None:\n excluded_urls = []\n\n all_refs = {str(k) for k, ref in package_list.refs()\n if ref.get(\"upload\") or any(should_upload_sources(p)\n for p in ref[\"packages\"].values())}\n for f in os.listdir(path_backups):\n if f.endswith(\".json\"):\n f = os.path.join(path_backups, f)\n content = json.loads(load(f))\n refs = content[\"references\"]\n # unknown entries are not uploaded at this moment, the flow is not expected.\n for ref, urls in refs.items():\n is_excluded = 
all(any(url.startswith(excluded_url)\n for excluded_url in excluded_urls)\n for url in urls)\n if not is_excluded and ref in all_refs:\n files_to_upload.append(f)\n files_to_upload.append(f[:-5])\n break\n return files_to_upload\n\n @staticmethod\n def update_backup_sources_json(cached_path, conanfile, urls):\n \"\"\" create or update the sha256.json file with the references and new urls used\n \"\"\"\n summary_path = cached_path + \".json\"\n if os.path.exists(summary_path):\n summary = json.loads(load(summary_path))\n else:\n summary = {\"references\": {}, \"timestamp\": timestamp_now()}\n\n try:\n summary_key = str(conanfile.ref)\n except AttributeError:\n # The recipe path would be different between machines\n # So best we can do is to set this as unknown\n summary_key = \"unknown\"\n\n if not isinstance(urls, (list, tuple)):\n urls = [urls]\n existing_urls = summary[\"references\"].setdefault(summary_key, [])\n existing_urls.extend(url for url in urls if url not in existing_urls)\n conanfile.output.verbose(f\"Updating ${summary_path} summary file\")\n summary_dump = json.dumps(summary)\n conanfile.output.debug(f\"New summary: ${summary_dump}\")\n save(summary_path, json.dumps(summary))\n", "path": "conans/client/downloaders/download_cache.py"}]}
num_tokens_prompt: 1,750
num_tokens_diff: 206

problem_id: gh_patches_debug_15574
source: rasdani/github-patches
task_type: git_diff
in_source_id: HypothesisWorks__hypothesis-872
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Verbose output not shown unless -s is specified I am running a test suite with hypothesis using py.test, when setting HYPOTHESIS_VERBOSITY_LEVEL=verbose environment variable I expected to see the intermediate results. However I need to specify -s when invokin py.test otherwise the intermediate results are suppressed. Python 3.6.0a1 py.test 2.9.2 hypothesis 3.4.2 </issue> <code> [start of docs/conf.py] 1 # coding=utf-8 2 # 3 # This file is part of Hypothesis, which may be found at 4 # https://github.com/HypothesisWorks/hypothesis-python 5 # 6 # Most of this work is copyright (C) 2013-2017 David R. MacIver 7 # ([email protected]), but it contains contributions by others. See 8 # CONTRIBUTING.rst for a full list of people who may hold copyright, and 9 # consult the git log if you need to determine who owns an individual 10 # contribution. 11 # 12 # This Source Code Form is subject to the terms of the Mozilla Public License, 13 # v. 2.0. If a copy of the MPL was not distributed with this file, You can 14 # obtain one at http://mozilla.org/MPL/2.0/. 15 # 16 # END HEADER 17 18 # -*- coding: utf-8 -*- 19 20 from __future__ import division, print_function, absolute_import 21 22 # on_rtd is whether we are on readthedocs.org 23 import os 24 import sys 25 import datetime 26 27 from hypothesis import __version__ 28 29 on_rtd = os.environ.get('READTHEDOCS', None) == 'True' 30 31 sys.path.append( 32 os.path.join(os.path.dirname(__file__), '..', 'src') 33 ) 34 35 36 autodoc_member_order = 'bysource' 37 38 extensions = [ 39 'sphinx.ext.autodoc', 40 'sphinx.ext.doctest', 41 'sphinx.ext.extlinks', 42 'sphinx.ext.viewcode', 43 'sphinx.ext.intersphinx', 44 ] 45 46 templates_path = ['_templates'] 47 48 source_suffix = '.rst' 49 50 # The master toctree document. 51 master_doc = 'index' 52 53 # General information about the project. 54 project = u'Hypothesis' 55 copyright = u'2013-%s, David R. MacIver' % datetime.datetime.utcnow().year 56 author = u'David R. MacIver' 57 58 version = __version__ 59 release = __version__ 60 61 language = None 62 63 exclude_patterns = ['_build'] 64 65 pygments_style = 'sphinx' 66 67 todo_include_todos = False 68 69 intersphinx_mapping = { 70 'python': ('https://docs.python.org/3/', None), 71 'numpy': ('https://docs.scipy.org/doc/numpy/', None), 72 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None) 73 } 74 75 autodoc_mock_imports = ['numpy', 'pandas'] 76 77 doctest_global_setup = ''' 78 # Some standard imports 79 from hypothesis import * 80 from hypothesis.strategies import * 81 # Ensure that output (including from strategies) is deterministic 82 import random 83 random.seed(0) 84 # don't save examples 85 settings.register_profile('doctests', settings(database=None)) 86 settings.load_profile('doctests') 87 import warnings 88 warnings.filterwarnings('error', category=HypothesisDeprecationWarning) 89 ''' 90 91 # This config value must be a dictionary of external sites, mapping unique 92 # short alias names to a base URL and a prefix. 
93 # See http://sphinx-doc.org/ext/extlinks.html 94 extlinks = { 95 'commit': ('https://github.com/HypothesisWorks/hypothesis-python/commit/%s', 'commit '), 96 'gh-file': ('https://github.com/HypothesisWorks/hypothesis-python/blob/master/%s', ''), 97 'gh-link': ('https://github.com/HypothesisWorks/hypothesis-python/%s', ''), 98 'issue': ('https://github.com/HypothesisWorks/hypothesis-python/issues/%s', 'issue #'), 99 'pull': ('https://github.com/HypothesisWorks/hypothesis-python/pulls/%s', 'pull request #'), 100 } 101 102 # -- Options for HTML output ---------------------------------------------- 103 104 if not on_rtd: # only import and set the theme if we're building docs locally 105 import sphinx_rtd_theme 106 html_theme = 'sphinx_rtd_theme' 107 html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 108 109 html_static_path = ['_static'] 110 111 htmlhelp_basename = 'Hypothesisdoc' 112 113 # -- Options for LaTeX output --------------------------------------------- 114 115 latex_elements = { 116 } 117 118 latex_documents = [ 119 (master_doc, 'Hypothesis.tex', u'Hypothesis Documentation', 120 u'David R. MacIver', 'manual'), 121 ] 122 123 man_pages = [ 124 (master_doc, 'hypothesis', u'Hypothesis Documentation', 125 [author], 1) 126 ] 127 128 texinfo_documents = [ 129 (master_doc, 'Hypothesis', u'Hypothesis Documentation', 130 author, 'Hypothesis', 'One line description of project.', 131 'Miscellaneous'), 132 ] 133 [end of docs/conf.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -69,7 +69,8 @@ intersphinx_mapping = { 'python': ('https://docs.python.org/3/', None), 'numpy': ('https://docs.scipy.org/doc/numpy/', None), - 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None) + 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None), + 'pytest': ('https://docs.pytest.org/en/stable/', None), } autodoc_mock_imports = ['numpy', 'pandas'] @@ -127,6 +128,6 @@ texinfo_documents = [ (master_doc, 'Hypothesis', u'Hypothesis Documentation', - author, 'Hypothesis', 'One line description of project.', + author, 'Hypothesis', 'Advanced property-based testing for Python.', 'Miscellaneous'), ]
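On the report itself: the behaviour comes from pytest's default stdout capturing rather than from Hypothesis, so disabling capture makes the verbose trace visible. A minimal reproduction harness, using only standard pytest options (`-s` is shorthand for `--capture=no`; the `tests/` path is a placeholder):

```python
import os
import subprocess

env = dict(os.environ, HYPOTHESIS_VERBOSITY_LEVEL="verbose")
# -s disables pytest's stdout capture, so the intermediate examples print.
subprocess.run(["pytest", "-s", "tests/"], env=env)
```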
{"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -69,7 +69,8 @@\n intersphinx_mapping = {\n 'python': ('https://docs.python.org/3/', None),\n 'numpy': ('https://docs.scipy.org/doc/numpy/', None),\n- 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None)\n+ 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None),\n+ 'pytest': ('https://docs.pytest.org/en/stable/', None),\n }\n \n autodoc_mock_imports = ['numpy', 'pandas']\n@@ -127,6 +128,6 @@\n \n texinfo_documents = [\n (master_doc, 'Hypothesis', u'Hypothesis Documentation',\n- author, 'Hypothesis', 'One line description of project.',\n+ author, 'Hypothesis', 'Advanced property-based testing for Python.',\n 'Miscellaneous'),\n ]\n", "issue": "Verbose output not shown unless -s is specified\nI am running a test suite with hypothesis using py.test, when setting HYPOTHESIS_VERBOSITY_LEVEL=verbose environment variable I expected to see the intermediate results. However I need to specify -s when invokin py.test otherwise the intermediate results are suppressed.\n\nPython 3.6.0a1\npy.test 2.9.2\nhypothesis 3.4.2\n\n", "before_files": [{"content": "# coding=utf-8\n#\n# This file is part of Hypothesis, which may be found at\n# https://github.com/HypothesisWorks/hypothesis-python\n#\n# Most of this work is copyright (C) 2013-2017 David R. MacIver\n# ([email protected]), but it contains contributions by others. See\n# CONTRIBUTING.rst for a full list of people who may hold copyright, and\n# consult the git log if you need to determine who owns an individual\n# contribution.\n#\n# This Source Code Form is subject to the terms of the Mozilla Public License,\n# v. 2.0. If a copy of the MPL was not distributed with this file, You can\n# obtain one at http://mozilla.org/MPL/2.0/.\n#\n# END HEADER\n\n# -*- coding: utf-8 -*-\n\nfrom __future__ import division, print_function, absolute_import\n\n# on_rtd is whether we are on readthedocs.org\nimport os\nimport sys\nimport datetime\n\nfrom hypothesis import __version__\n\non_rtd = os.environ.get('READTHEDOCS', None) == 'True'\n\nsys.path.append(\n os.path.join(os.path.dirname(__file__), '..', 'src')\n)\n\n\nautodoc_member_order = 'bysource'\n\nextensions = [\n 'sphinx.ext.autodoc',\n 'sphinx.ext.doctest',\n 'sphinx.ext.extlinks',\n 'sphinx.ext.viewcode',\n 'sphinx.ext.intersphinx',\n]\n\ntemplates_path = ['_templates']\n\nsource_suffix = '.rst'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = u'Hypothesis'\ncopyright = u'2013-%s, David R. MacIver' % datetime.datetime.utcnow().year\nauthor = u'David R. 
MacIver'\n\nversion = __version__\nrelease = __version__\n\nlanguage = None\n\nexclude_patterns = ['_build']\n\npygments_style = 'sphinx'\n\ntodo_include_todos = False\n\nintersphinx_mapping = {\n 'python': ('https://docs.python.org/3/', None),\n 'numpy': ('https://docs.scipy.org/doc/numpy/', None),\n 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None)\n}\n\nautodoc_mock_imports = ['numpy', 'pandas']\n\ndoctest_global_setup = '''\n# Some standard imports\nfrom hypothesis import *\nfrom hypothesis.strategies import *\n# Ensure that output (including from strategies) is deterministic\nimport random\nrandom.seed(0)\n# don't save examples\nsettings.register_profile('doctests', settings(database=None))\nsettings.load_profile('doctests')\nimport warnings\nwarnings.filterwarnings('error', category=HypothesisDeprecationWarning)\n'''\n\n# This config value must be a dictionary of external sites, mapping unique\n# short alias names to a base URL and a prefix.\n# See http://sphinx-doc.org/ext/extlinks.html\nextlinks = {\n 'commit': ('https://github.com/HypothesisWorks/hypothesis-python/commit/%s', 'commit '),\n 'gh-file': ('https://github.com/HypothesisWorks/hypothesis-python/blob/master/%s', ''),\n 'gh-link': ('https://github.com/HypothesisWorks/hypothesis-python/%s', ''),\n 'issue': ('https://github.com/HypothesisWorks/hypothesis-python/issues/%s', 'issue #'),\n 'pull': ('https://github.com/HypothesisWorks/hypothesis-python/pulls/%s', 'pull request #'),\n}\n\n# -- Options for HTML output ----------------------------------------------\n\nif not on_rtd: # only import and set the theme if we're building docs locally\n import sphinx_rtd_theme\n html_theme = 'sphinx_rtd_theme'\n html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\nhtml_static_path = ['_static']\n\nhtmlhelp_basename = 'Hypothesisdoc'\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n}\n\nlatex_documents = [\n (master_doc, 'Hypothesis.tex', u'Hypothesis Documentation',\n u'David R. MacIver', 'manual'),\n]\n\nman_pages = [\n (master_doc, 'hypothesis', u'Hypothesis Documentation',\n [author], 1)\n]\n\ntexinfo_documents = [\n (master_doc, 'Hypothesis', u'Hypothesis Documentation',\n author, 'Hypothesis', 'One line description of project.',\n 'Miscellaneous'),\n]\n", "path": "docs/conf.py"}]}
1916
229
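The hypothesis-python record above patches only Sphinx configuration. Pulled out of the golden diff into a standalone snippet (every value here is copied from the diff, nothing new is assumed), the two patched settings are:

```
# intersphinx gains a pytest entry so cross-references can resolve against
# the external pytest documentation.
intersphinx_mapping = {
    'python': ('https://docs.python.org/3/', None),
    'numpy': ('https://docs.scipy.org/doc/numpy/', None),
    'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None),
    'pytest': ('https://docs.pytest.org/en/stable/', None),
}

# The texinfo entry swaps the template placeholder for a real description.
texinfo_documents = [
    (master_doc, 'Hypothesis', u'Hypothesis Documentation',
     author, 'Hypothesis', 'Advanced property-based testing for Python.',
     'Miscellaneous'),
]
```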
gh_patches_debug_22014
rasdani/github-patches
git_diff
pytorch__text-361
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> MosesTokenizer has been moved out of NLTK due to licensing issues @jekbradbury great work here! Due to https://github.com/nltk/nltk/issues/2000, we had to remove MosesTokenizer out of NLTK but now it's hosted on https://github.com/alvations/sacremoses ``` pip install sacremoses ``` The silver lining is that the package comes with the data needed for tokenization so there's no need to keep the `nltk_data` directory =) ---- I would propose adding `sacremoses` on top of `nltk` because NLTK has another port of a nice tokenizer (by @jonsafari) that people overlook, https://github.com/nltk/nltk/blob/develop/nltk/tokenize/toktok.py (I think it's fast too) </issue> <code> [start of torchtext/data/utils.py] 1 import random 2 from contextlib import contextmanager 3 from copy import deepcopy 4 5 6 def get_tokenizer(tokenizer): 7 if callable(tokenizer): 8 return tokenizer 9 if tokenizer == "spacy": 10 try: 11 import spacy 12 spacy_en = spacy.load('en') 13 return lambda s: [tok.text for tok in spacy_en.tokenizer(s)] 14 except ImportError: 15 print("Please install SpaCy and the SpaCy English tokenizer. " 16 "See the docs at https://spacy.io for more information.") 17 raise 18 except AttributeError: 19 print("Please install SpaCy and the SpaCy English tokenizer. " 20 "See the docs at https://spacy.io for more information.") 21 raise 22 elif tokenizer == "moses": 23 try: 24 from nltk.tokenize.moses import MosesTokenizer 25 moses_tokenizer = MosesTokenizer() 26 return moses_tokenizer.tokenize 27 except ImportError: 28 print("Please install NLTK. " 29 "See the docs at http://nltk.org for more information.") 30 raise 31 except LookupError: 32 print("Please install the necessary NLTK corpora. " 33 "See the docs at http://nltk.org for more information.") 34 raise 35 elif tokenizer == 'revtok': 36 try: 37 import revtok 38 return revtok.tokenize 39 except ImportError: 40 print("Please install revtok.") 41 raise 42 elif tokenizer == 'subword': 43 try: 44 import revtok 45 return lambda x: revtok.tokenize(x, decap=True) 46 except ImportError: 47 print("Please install revtok.") 48 raise 49 raise ValueError("Requested tokenizer {}, valid choices are a " 50 "callable that takes a single string as input, " 51 "\"revtok\" for the revtok reversible tokenizer, " 52 "\"subword\" for the revtok caps-aware tokenizer, " 53 "\"spacy\" for the SpaCy English tokenizer, or " 54 "\"moses\" for the NLTK port of the Moses tokenization " 55 "script.".format(tokenizer)) 56 57 58 def interleave_keys(a, b): 59 """Interleave bits from two sort keys to form a joint sort key. 60 61 Examples that are similar in both of the provided keys will have similar 62 values for the key defined by this function. Useful for tasks with two 63 text fields like machine translation or natural language inference. 
64 """ 65 def interleave(args): 66 return ''.join([x for t in zip(*args) for x in t]) 67 return int(''.join(interleave(format(x, '016b') for x in (a, b))), base=2) 68 69 70 def get_torch_version(): 71 import torch 72 v = torch.__version__ 73 version_substrings = v.split('.') 74 major, minor = version_substrings[0], version_substrings[1] 75 return int(major), int(minor) 76 77 78 class RandomShuffler(object): 79 """Use random functions while keeping track of the random state to make it 80 reproducible and deterministic.""" 81 82 def __init__(self, random_state=None): 83 self._random_state = random_state 84 if self._random_state is None: 85 self._random_state = random.getstate() 86 87 @contextmanager 88 def use_internal_state(self): 89 """Use a specific RNG state.""" 90 old_state = random.getstate() 91 random.setstate(self._random_state) 92 yield 93 self._random_state = random.getstate() 94 random.setstate(old_state) 95 96 @property 97 def random_state(self): 98 return deepcopy(self._random_state) 99 100 @random_state.setter 101 def random_state(self, s): 102 self._random_state = s 103 104 def __call__(self, data): 105 """Shuffle and return a new list.""" 106 with self.use_internal_state(): 107 return random.sample(data, len(data)) 108 [end of torchtext/data/utils.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/torchtext/data/utils.py b/torchtext/data/utils.py --- a/torchtext/data/utils.py +++ b/torchtext/data/utils.py @@ -21,16 +21,22 @@ raise elif tokenizer == "moses": try: - from nltk.tokenize.moses import MosesTokenizer + from sacremoses import MosesTokenizer moses_tokenizer = MosesTokenizer() return moses_tokenizer.tokenize except ImportError: - print("Please install NLTK. " - "See the docs at http://nltk.org for more information.") + print("Please install SacreMoses. " + "See the docs at https://github.com/alvations/sacremoses " + "for more information.") raise - except LookupError: - print("Please install the necessary NLTK corpora. " - "See the docs at http://nltk.org for more information.") + elif tokenizer == "toktok": + try: + from nltk.tokenize.toktok import ToktokTokenizer + toktok = ToktokTokenizer() + return toktok.tokenize + except ImportError: + print("Please install NLTK. " + "See the docs at https://nltk.org for more information.") raise elif tokenizer == 'revtok': try:
{"golden_diff": "diff --git a/torchtext/data/utils.py b/torchtext/data/utils.py\n--- a/torchtext/data/utils.py\n+++ b/torchtext/data/utils.py\n@@ -21,16 +21,22 @@\n raise\n elif tokenizer == \"moses\":\n try:\n- from nltk.tokenize.moses import MosesTokenizer\n+ from sacremoses import MosesTokenizer\n moses_tokenizer = MosesTokenizer()\n return moses_tokenizer.tokenize\n except ImportError:\n- print(\"Please install NLTK. \"\n- \"See the docs at http://nltk.org for more information.\")\n+ print(\"Please install SacreMoses. \"\n+ \"See the docs at https://github.com/alvations/sacremoses \"\n+ \"for more information.\")\n raise\n- except LookupError:\n- print(\"Please install the necessary NLTK corpora. \"\n- \"See the docs at http://nltk.org for more information.\")\n+ elif tokenizer == \"toktok\":\n+ try:\n+ from nltk.tokenize.toktok import ToktokTokenizer\n+ toktok = ToktokTokenizer()\n+ return toktok.tokenize\n+ except ImportError:\n+ print(\"Please install NLTK. \"\n+ \"See the docs at https://nltk.org for more information.\")\n raise\n elif tokenizer == 'revtok':\n try:\n", "issue": "MosesTokenizer has been moved out of NLTK due to licensing issues\n@jekbradbury great work here!\r\n\r\nDue to https://github.com/nltk/nltk/issues/2000, we had to remove MosesTokenizer out of NLTK but now it's hosted on https://github.com/alvations/sacremoses \r\n\r\n```\r\npip install sacremoses\r\n```\r\n\r\nThe silver lining is that the package comes with the data needed for tokenization so there's no need to keep the `nltk_data` directory =)\r\n\r\n----\r\n\r\nI would propose adding `sacremoses` on top of `nltk` because NLTK has another port of a nice tokenizer (by @jonsafari) that people overlook, https://github.com/nltk/nltk/blob/develop/nltk/tokenize/toktok.py (I think it's fast too)\n", "before_files": [{"content": "import random\nfrom contextlib import contextmanager\nfrom copy import deepcopy\n\n\ndef get_tokenizer(tokenizer):\n if callable(tokenizer):\n return tokenizer\n if tokenizer == \"spacy\":\n try:\n import spacy\n spacy_en = spacy.load('en')\n return lambda s: [tok.text for tok in spacy_en.tokenizer(s)]\n except ImportError:\n print(\"Please install SpaCy and the SpaCy English tokenizer. \"\n \"See the docs at https://spacy.io for more information.\")\n raise\n except AttributeError:\n print(\"Please install SpaCy and the SpaCy English tokenizer. \"\n \"See the docs at https://spacy.io for more information.\")\n raise\n elif tokenizer == \"moses\":\n try:\n from nltk.tokenize.moses import MosesTokenizer\n moses_tokenizer = MosesTokenizer()\n return moses_tokenizer.tokenize\n except ImportError:\n print(\"Please install NLTK. \"\n \"See the docs at http://nltk.org for more information.\")\n raise\n except LookupError:\n print(\"Please install the necessary NLTK corpora. 
\"\n \"See the docs at http://nltk.org for more information.\")\n raise\n elif tokenizer == 'revtok':\n try:\n import revtok\n return revtok.tokenize\n except ImportError:\n print(\"Please install revtok.\")\n raise\n elif tokenizer == 'subword':\n try:\n import revtok\n return lambda x: revtok.tokenize(x, decap=True)\n except ImportError:\n print(\"Please install revtok.\")\n raise\n raise ValueError(\"Requested tokenizer {}, valid choices are a \"\n \"callable that takes a single string as input, \"\n \"\\\"revtok\\\" for the revtok reversible tokenizer, \"\n \"\\\"subword\\\" for the revtok caps-aware tokenizer, \"\n \"\\\"spacy\\\" for the SpaCy English tokenizer, or \"\n \"\\\"moses\\\" for the NLTK port of the Moses tokenization \"\n \"script.\".format(tokenizer))\n\n\ndef interleave_keys(a, b):\n \"\"\"Interleave bits from two sort keys to form a joint sort key.\n\n Examples that are similar in both of the provided keys will have similar\n values for the key defined by this function. Useful for tasks with two\n text fields like machine translation or natural language inference.\n \"\"\"\n def interleave(args):\n return ''.join([x for t in zip(*args) for x in t])\n return int(''.join(interleave(format(x, '016b') for x in (a, b))), base=2)\n\n\ndef get_torch_version():\n import torch\n v = torch.__version__\n version_substrings = v.split('.')\n major, minor = version_substrings[0], version_substrings[1]\n return int(major), int(minor)\n\n\nclass RandomShuffler(object):\n \"\"\"Use random functions while keeping track of the random state to make it\n reproducible and deterministic.\"\"\"\n\n def __init__(self, random_state=None):\n self._random_state = random_state\n if self._random_state is None:\n self._random_state = random.getstate()\n\n @contextmanager\n def use_internal_state(self):\n \"\"\"Use a specific RNG state.\"\"\"\n old_state = random.getstate()\n random.setstate(self._random_state)\n yield\n self._random_state = random.getstate()\n random.setstate(old_state)\n\n @property\n def random_state(self):\n return deepcopy(self._random_state)\n\n @random_state.setter\n def random_state(self, s):\n self._random_state = s\n\n def __call__(self, data):\n \"\"\"Shuffle and return a new list.\"\"\"\n with self.use_internal_state():\n return random.sample(data, len(data))\n", "path": "torchtext/data/utils.py"}]}
1745
300
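Condensed from the torchtext golden diff above: the "moses" branch now imports from the standalone sacremoses package, and NLTK's tok-tok port becomes a new "toktok" option. This sketch keeps only the two affected branches; the try/except error handling of the real patch is dropped and the closing error message is paraphrased, not copied.

```
def get_tokenizer(tokenizer):
    if tokenizer == "moses":
        # sacremoses bundles the tokenizer data, so no separate
        # nltk_data download step is needed anymore.
        from sacremoses import MosesTokenizer
        return MosesTokenizer().tokenize
    if tokenizer == "toktok":
        # Jon Safari's tok-tok tokenizer, as ported into NLTK.
        from nltk.tokenize.toktok import ToktokTokenizer
        return ToktokTokenizer().tokenize
    raise ValueError("unsupported tokenizer: {}".format(tokenizer))
```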
gh_patches_debug_4660
rasdani/github-patches
git_diff
bridgecrewio__checkov-2935
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> False positive for CKV_AZURE_43 when using the "random" provider resources **Describe the issue** Check ID: CKV_AZURE_43 When using any of the random_* resources from the [random provider](https://registry.terraform.io/providers/hashicorp/random/latest/docs) check CKV_AZURE_43 fails. StorageAccountName.py probably needs the VARIABLE_REFS list expanded to include the random_* resources. **Examples** ``` resource "random_string" "random" { length = 4 number = true lower = false special = false upper = false } resource "azurerm_storage_account" "vmstorageaccount" { name = "storage${random_string.random}" .... } ``` **Version:** - Checkov Version 2.0.113 </issue> <code> [start of checkov/terraform/checks/resource/azure/StorageAccountName.py] 1 import re 2 from typing import List, Dict, Any 3 4 from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck 5 from checkov.common.models.enums import CheckResult, CheckCategories 6 7 STO_NAME_REGEX = re.compile(r"^[a-z0-9]{3,24}$") 8 VARIABLE_REFS = ("local.", "module.", "var.") 9 10 11 class StorageAccountName(BaseResourceCheck): 12 def __init__(self) -> None: 13 name = "Ensure Storage Accounts adhere to the naming rules" 14 id = "CKV_AZURE_43" 15 supported_resources = ["azurerm_storage_account"] 16 categories = [CheckCategories.CONVENTION] 17 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) 18 19 def scan_resource_conf(self, conf: Dict[str, Any]) -> CheckResult: 20 """ 21 The Storage Account naming reference: 22 https://docs.microsoft.com/en-us/azure/storage/common/storage-account-overview#naming-storage-accounts 23 :param conf: azurerm_storage_account configuration 24 :return: <CheckResult> 25 """ 26 name = conf.get("name") 27 if name: 28 name = str(name[0]) 29 if any(x in name for x in VARIABLE_REFS): 30 # in the case we couldn't evaluate the name, just ignore 31 return CheckResult.UNKNOWN 32 if re.findall(STO_NAME_REGEX, str(conf["name"][0])): 33 return CheckResult.PASSED 34 35 return CheckResult.FAILED 36 37 def get_evaluated_keys(self) -> List[str]: 38 return ["name"] 39 40 41 check = StorageAccountName() 42 [end of checkov/terraform/checks/resource/azure/StorageAccountName.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/checkov/terraform/checks/resource/azure/StorageAccountName.py b/checkov/terraform/checks/resource/azure/StorageAccountName.py --- a/checkov/terraform/checks/resource/azure/StorageAccountName.py +++ b/checkov/terraform/checks/resource/azure/StorageAccountName.py @@ -5,7 +5,7 @@ from checkov.common.models.enums import CheckResult, CheckCategories STO_NAME_REGEX = re.compile(r"^[a-z0-9]{3,24}$") -VARIABLE_REFS = ("local.", "module.", "var.") +VARIABLE_REFS = ("local.", "module.", "var.", "random_string.", "random_id.", "random_integer.", "random_pet.") class StorageAccountName(BaseResourceCheck):
{"golden_diff": "diff --git a/checkov/terraform/checks/resource/azure/StorageAccountName.py b/checkov/terraform/checks/resource/azure/StorageAccountName.py\n--- a/checkov/terraform/checks/resource/azure/StorageAccountName.py\n+++ b/checkov/terraform/checks/resource/azure/StorageAccountName.py\n@@ -5,7 +5,7 @@\n from checkov.common.models.enums import CheckResult, CheckCategories\n \n STO_NAME_REGEX = re.compile(r\"^[a-z0-9]{3,24}$\")\n-VARIABLE_REFS = (\"local.\", \"module.\", \"var.\")\n+VARIABLE_REFS = (\"local.\", \"module.\", \"var.\", \"random_string.\", \"random_id.\", \"random_integer.\", \"random_pet.\")\n \n \n class StorageAccountName(BaseResourceCheck):\n", "issue": "False positive for CKV_AZURE_43 when using the \"random\" provider resources\n**Describe the issue**\r\nCheck ID: CKV_AZURE_43\r\nWhen using any of the random_* resources from the [random provider](https://registry.terraform.io/providers/hashicorp/random/latest/docs) check CKV_AZURE_43 fails.\r\n\r\nStorageAccountName.py probably needs the VARIABLE_REFS list expanded to include the random_* resources.\r\n\r\n**Examples**\r\n```\r\nresource \"random_string\" \"random\" {\r\n length = 4\r\n number = true\r\n lower = false\r\n special = false\r\n upper = false\r\n}\r\n\r\nresource \"azurerm_storage_account\" \"vmstorageaccount\" {\r\n name = \"storage${random_string.random}\"\r\n ....\r\n}\r\n```\r\n\r\n**Version:**\r\n - Checkov Version 2.0.113\n", "before_files": [{"content": "import re\nfrom typing import List, Dict, Any\n\nfrom checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\nfrom checkov.common.models.enums import CheckResult, CheckCategories\n\nSTO_NAME_REGEX = re.compile(r\"^[a-z0-9]{3,24}$\")\nVARIABLE_REFS = (\"local.\", \"module.\", \"var.\")\n\n\nclass StorageAccountName(BaseResourceCheck):\n def __init__(self) -> None:\n name = \"Ensure Storage Accounts adhere to the naming rules\"\n id = \"CKV_AZURE_43\"\n supported_resources = [\"azurerm_storage_account\"]\n categories = [CheckCategories.CONVENTION]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf: Dict[str, Any]) -> CheckResult:\n \"\"\"\n The Storage Account naming reference:\n https://docs.microsoft.com/en-us/azure/storage/common/storage-account-overview#naming-storage-accounts\n :param conf: azurerm_storage_account configuration\n :return: <CheckResult>\n \"\"\"\n name = conf.get(\"name\")\n if name:\n name = str(name[0])\n if any(x in name for x in VARIABLE_REFS):\n # in the case we couldn't evaluate the name, just ignore\n return CheckResult.UNKNOWN\n if re.findall(STO_NAME_REGEX, str(conf[\"name\"][0])):\n return CheckResult.PASSED\n\n return CheckResult.FAILED\n\n def get_evaluated_keys(self) -> List[str]:\n return [\"name\"]\n\n\ncheck = StorageAccountName()\n", "path": "checkov/terraform/checks/resource/azure/StorageAccountName.py"}]}
1170
168
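The checkov fix is a one-line tuple extension. A quick way to see why it removes the false positive is to run the same membership test that scan_resource_conf performs, using the storage-account name from the issue report (the two lines after the tuple are illustrative, not part of the patch):

```
# Names interpolated from random_* resources cannot be evaluated statically,
# so the check should return UNKNOWN for them rather than FAILED.
VARIABLE_REFS = ("local.", "module.", "var.",
                 "random_string.", "random_id.", "random_integer.", "random_pet.")

name = "storage${random_string.random}"
print(any(ref in name for ref in VARIABLE_REFS))  # True -> CheckResult.UNKNOWN
```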
gh_patches_debug_14368
rasdani/github-patches
git_diff
scrapy__scrapy-1131
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Unhandled error in Deferred (RobotsTxtMiddleware) **Dev story**: Let's say spider downloads all .zip files from http://habrahabr.ru/post/212029/ page Url with .zip files looks like this: http://layer6.jenkins.tox.im/job/qt_gui_win32/lastSuccessfulBuild/artifact/qt/build/release/TOX-Qt-GUI.zip It's a polite spider, so settings file contains: `ROBOTSTXT_OBEY = True` Middleware parses habrahabr.ru robots.txt file as well as 'external' robots.txt file from layer6.jenkins.tox.im. It's expected behaviour. But if request will be returned with error then the output would be: ``` 2015-04-02 17:06:16+0300 [habrahabr] DEBUG: Gave up retrying <GET http://layer6.jenkins.tox.im/robots.txt> (failed 1 times): DNS lookup failed: address 'layer6.jenkins.tox.im' not found: [Errno 8] nodename nor servname provided, or not known. 2015-04-02 17:06:16+0300 [-] ERROR: Unhandled error in Deferred: 2015-04-02 17:06:16+0300 [-] Unhandled Error Traceback (most recent call last): Failure: twisted.internet.error.DNSLookupError: DNS lookup failed: address 'layer6.jenkins.tox.im' not found: [Errno 8] nodename nor servname provided, or not known. ``` </issue> <code> [start of scrapy/contrib/downloadermiddleware/robotstxt.py] 1 """ 2 This is a middleware to respect robots.txt policies. To activate it you must 3 enable this middleware and enable the ROBOTSTXT_OBEY setting. 4 5 """ 6 7 from six.moves.urllib import robotparser 8 9 from scrapy import signals, log 10 from scrapy.exceptions import NotConfigured, IgnoreRequest 11 from scrapy.http import Request 12 from scrapy.utils.httpobj import urlparse_cached 13 14 15 class RobotsTxtMiddleware(object): 16 DOWNLOAD_PRIORITY = 1000 17 18 def __init__(self, crawler): 19 if not crawler.settings.getbool('ROBOTSTXT_OBEY'): 20 raise NotConfigured 21 22 self.crawler = crawler 23 self._useragent = crawler.settings.get('USER_AGENT') 24 self._parsers = {} 25 26 @classmethod 27 def from_crawler(cls, crawler): 28 return cls(crawler) 29 30 def process_request(self, request, spider): 31 if request.meta.get('dont_obey_robotstxt'): 32 return 33 rp = self.robot_parser(request, spider) 34 if rp and not rp.can_fetch(self._useragent, request.url): 35 log.msg(format="Forbidden by robots.txt: %(request)s", 36 level=log.DEBUG, request=request) 37 raise IgnoreRequest 38 39 def robot_parser(self, request, spider): 40 url = urlparse_cached(request) 41 netloc = url.netloc 42 if netloc not in self._parsers: 43 self._parsers[netloc] = None 44 robotsurl = "%s://%s/robots.txt" % (url.scheme, url.netloc) 45 robotsreq = Request( 46 robotsurl, 47 priority=self.DOWNLOAD_PRIORITY, 48 meta={'dont_obey_robotstxt': True} 49 ) 50 dfd = self.crawler.engine.download(robotsreq, spider) 51 dfd.addCallback(self._parse_robots) 52 return self._parsers[netloc] 53 54 def _parse_robots(self, response): 55 rp = robotparser.RobotFileParser(response.url) 56 rp.parse(response.body.splitlines()) 57 self._parsers[urlparse_cached(response).netloc] = rp 58 [end of scrapy/contrib/downloadermiddleware/robotstxt.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/scrapy/contrib/downloadermiddleware/robotstxt.py b/scrapy/contrib/downloadermiddleware/robotstxt.py --- a/scrapy/contrib/downloadermiddleware/robotstxt.py +++ b/scrapy/contrib/downloadermiddleware/robotstxt.py @@ -49,8 +49,14 @@ ) dfd = self.crawler.engine.download(robotsreq, spider) dfd.addCallback(self._parse_robots) + dfd.addErrback(self._logerror, robotsreq, spider) return self._parsers[netloc] + def _logerror(self, failure, request, spider): + if failure.type is not IgnoreRequest: + log.msg(format="Error downloading %%(request)s: %s" % failure.value, + level=log.ERROR, request=request, spider=spider) + def _parse_robots(self, response): rp = robotparser.RobotFileParser(response.url) rp.parse(response.body.splitlines())
{"golden_diff": "diff --git a/scrapy/contrib/downloadermiddleware/robotstxt.py b/scrapy/contrib/downloadermiddleware/robotstxt.py\n--- a/scrapy/contrib/downloadermiddleware/robotstxt.py\n+++ b/scrapy/contrib/downloadermiddleware/robotstxt.py\n@@ -49,8 +49,14 @@\n )\n dfd = self.crawler.engine.download(robotsreq, spider)\n dfd.addCallback(self._parse_robots)\n+ dfd.addErrback(self._logerror, robotsreq, spider)\n return self._parsers[netloc]\n \n+ def _logerror(self, failure, request, spider):\n+ if failure.type is not IgnoreRequest:\n+ log.msg(format=\"Error downloading %%(request)s: %s\" % failure.value,\n+ level=log.ERROR, request=request, spider=spider)\n+\n def _parse_robots(self, response):\n rp = robotparser.RobotFileParser(response.url)\n rp.parse(response.body.splitlines())\n", "issue": "Unhandled error in Deferred (RobotsTxtMiddleware)\n**Dev story**:\nLet's say spider downloads all .zip files from http://habrahabr.ru/post/212029/ page\nUrl with .zip files looks like this: http://layer6.jenkins.tox.im/job/qt_gui_win32/lastSuccessfulBuild/artifact/qt/build/release/TOX-Qt-GUI.zip\n\nIt's a polite spider, so settings file contains:\n`ROBOTSTXT_OBEY = True`\n\nMiddleware parses habrahabr.ru robots.txt file as well as 'external' robots.txt file from layer6.jenkins.tox.im. It's expected behaviour. \nBut if request will be returned with error then the output would be:\n\n```\n2015-04-02 17:06:16+0300 [habrahabr] DEBUG: Gave up retrying <GET http://layer6.jenkins.tox.im/robots.txt> (failed 1 times): DNS lookup failed: address 'layer6.jenkins.tox.im' not found: [Errno 8] nodename nor servname provided, or not known.\n\n2015-04-02 17:06:16+0300 [-] ERROR: Unhandled error in Deferred:\n2015-04-02 17:06:16+0300 [-] Unhandled Error\n Traceback (most recent call last):\n Failure: twisted.internet.error.DNSLookupError: DNS lookup failed: address 'layer6.jenkins.tox.im' not found: [Errno 8] nodename nor servname provided, or not known.\n```\n\n", "before_files": [{"content": "\"\"\"\nThis is a middleware to respect robots.txt policies. 
To activate it you must\nenable this middleware and enable the ROBOTSTXT_OBEY setting.\n\n\"\"\"\n\nfrom six.moves.urllib import robotparser\n\nfrom scrapy import signals, log\nfrom scrapy.exceptions import NotConfigured, IgnoreRequest\nfrom scrapy.http import Request\nfrom scrapy.utils.httpobj import urlparse_cached\n\n\nclass RobotsTxtMiddleware(object):\n DOWNLOAD_PRIORITY = 1000\n\n def __init__(self, crawler):\n if not crawler.settings.getbool('ROBOTSTXT_OBEY'):\n raise NotConfigured\n\n self.crawler = crawler\n self._useragent = crawler.settings.get('USER_AGENT')\n self._parsers = {}\n\n @classmethod\n def from_crawler(cls, crawler):\n return cls(crawler)\n\n def process_request(self, request, spider):\n if request.meta.get('dont_obey_robotstxt'):\n return\n rp = self.robot_parser(request, spider)\n if rp and not rp.can_fetch(self._useragent, request.url):\n log.msg(format=\"Forbidden by robots.txt: %(request)s\",\n level=log.DEBUG, request=request)\n raise IgnoreRequest\n\n def robot_parser(self, request, spider):\n url = urlparse_cached(request)\n netloc = url.netloc\n if netloc not in self._parsers:\n self._parsers[netloc] = None\n robotsurl = \"%s://%s/robots.txt\" % (url.scheme, url.netloc)\n robotsreq = Request(\n robotsurl,\n priority=self.DOWNLOAD_PRIORITY,\n meta={'dont_obey_robotstxt': True}\n )\n dfd = self.crawler.engine.download(robotsreq, spider)\n dfd.addCallback(self._parse_robots)\n return self._parsers[netloc]\n\n def _parse_robots(self, response):\n rp = robotparser.RobotFileParser(response.url)\n rp.parse(response.body.splitlines())\n self._parsers[urlparse_cached(response).netloc] = rp\n", "path": "scrapy/contrib/downloadermiddleware/robotstxt.py"}]}
1459
218
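Both hunks of the scrapy patch fit in one excerpt: an errback is attached right after the parse callback, and the new handler logs any failure that is not an IgnoreRequest. The code is lifted from the golden diff and is not self-contained (the rest of the middleware is elided):

```
class RobotsTxtMiddleware(object):
    def robot_parser(self, request, spider):
        ...
        dfd = self.crawler.engine.download(robotsreq, spider)
        dfd.addCallback(self._parse_robots)
        # Without an errback, a DNS failure while fetching robots.txt
        # surfaces as "Unhandled error in Deferred".
        dfd.addErrback(self._logerror, robotsreq, spider)
        return self._parsers[netloc]

    def _logerror(self, failure, request, spider):
        if failure.type is not IgnoreRequest:
            log.msg(format="Error downloading %%(request)s: %s" % failure.value,
                    level=log.ERROR, request=request, spider=spider)
```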
gh_patches_debug_3525
rasdani/github-patches
git_diff
microsoft__botbuilder-python-1747
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> port: turn memory scope includesnapshot to false (#5441) The changes in [turn memory scope includesnapshot to false (#5441)](https://github.com/microsoft/botbuilder-dotnet/pull/5441) may need to be ported to maintain parity with `microsoft/botbuilder-dotnet`. <blockquote> Fixes #5432 </blockquote> Please review and, if necessary, port the changes. </issue> <code> [start of libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/turn_memory_scope.py] 1 # Copyright (c) Microsoft Corporation. All rights reserved. 2 # Licensed under the MIT License. 3 4 from botbuilder.dialogs.memory import scope_path 5 6 from .memory_scope import MemoryScope 7 8 9 class CaseInsensitiveDict(dict): 10 # pylint: disable=protected-access 11 12 @classmethod 13 def _k(cls, key): 14 return key.lower() if isinstance(key, str) else key 15 16 def __init__(self, *args, **kwargs): 17 super(CaseInsensitiveDict, self).__init__(*args, **kwargs) 18 self._convert_keys() 19 20 def __getitem__(self, key): 21 return super(CaseInsensitiveDict, self).__getitem__(self.__class__._k(key)) 22 23 def __setitem__(self, key, value): 24 super(CaseInsensitiveDict, self).__setitem__(self.__class__._k(key), value) 25 26 def __delitem__(self, key): 27 return super(CaseInsensitiveDict, self).__delitem__(self.__class__._k(key)) 28 29 def __contains__(self, key): 30 return super(CaseInsensitiveDict, self).__contains__(self.__class__._k(key)) 31 32 def pop(self, key, *args, **kwargs): 33 return super(CaseInsensitiveDict, self).pop( 34 self.__class__._k(key), *args, **kwargs 35 ) 36 37 def get(self, key, *args, **kwargs): 38 return super(CaseInsensitiveDict, self).get( 39 self.__class__._k(key), *args, **kwargs 40 ) 41 42 def setdefault(self, key, *args, **kwargs): 43 return super(CaseInsensitiveDict, self).setdefault( 44 self.__class__._k(key), *args, **kwargs 45 ) 46 47 def update(self, e=None, **f): 48 if e is None: 49 e = {} 50 super(CaseInsensitiveDict, self).update(self.__class__(e)) 51 super(CaseInsensitiveDict, self).update(self.__class__(**f)) 52 53 def _convert_keys(self): 54 for k in list(self.keys()): 55 val = super(CaseInsensitiveDict, self).pop(k) 56 self.__setitem__(k, val) 57 58 59 class TurnMemoryScope(MemoryScope): 60 def __init__(self): 61 super().__init__(scope_path.TURN) 62 63 def get_memory(self, dialog_context: "DialogContext") -> object: 64 if not dialog_context: 65 raise TypeError(f"Expecting: DialogContext, but received None") 66 67 turn_value = dialog_context.context.turn_state.get(scope_path.TURN, None) 68 69 if not turn_value: 70 turn_value = CaseInsensitiveDict() 71 dialog_context.context.turn_state[scope_path.TURN] = turn_value 72 73 return turn_value 74 75 def set_memory(self, dialog_context: "DialogContext", memory: object): 76 if not dialog_context: 77 raise TypeError(f"Expecting: DialogContext, but received None") 78 79 dialog_context.context.turn_state[scope_path.TURN] = memory 80 [end of libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/turn_memory_scope.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/turn_memory_scope.py b/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/turn_memory_scope.py --- a/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/turn_memory_scope.py +++ b/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/turn_memory_scope.py @@ -58,7 +58,7 @@ class TurnMemoryScope(MemoryScope): def __init__(self): - super().__init__(scope_path.TURN) + super().__init__(scope_path.TURN, False) def get_memory(self, dialog_context: "DialogContext") -> object: if not dialog_context:
{"golden_diff": "diff --git a/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/turn_memory_scope.py b/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/turn_memory_scope.py\n--- a/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/turn_memory_scope.py\n+++ b/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/turn_memory_scope.py\n@@ -58,7 +58,7 @@\n \n class TurnMemoryScope(MemoryScope):\n def __init__(self):\n- super().__init__(scope_path.TURN)\n+ super().__init__(scope_path.TURN, False)\n \n def get_memory(self, dialog_context: \"DialogContext\") -> object:\n if not dialog_context:\n", "issue": "port: turn memory scope includesnapshot to false (#5441)\nThe changes in [turn memory scope includesnapshot to false (#5441)](https://github.com/microsoft/botbuilder-dotnet/pull/5441) may need to be ported to maintain parity with `microsoft/botbuilder-dotnet`.\n\n<blockquote>\nFixes #5432\n</blockquote>\n\nPlease review and, if necessary, port the changes.\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\n\nfrom botbuilder.dialogs.memory import scope_path\n\nfrom .memory_scope import MemoryScope\n\n\nclass CaseInsensitiveDict(dict):\n # pylint: disable=protected-access\n\n @classmethod\n def _k(cls, key):\n return key.lower() if isinstance(key, str) else key\n\n def __init__(self, *args, **kwargs):\n super(CaseInsensitiveDict, self).__init__(*args, **kwargs)\n self._convert_keys()\n\n def __getitem__(self, key):\n return super(CaseInsensitiveDict, self).__getitem__(self.__class__._k(key))\n\n def __setitem__(self, key, value):\n super(CaseInsensitiveDict, self).__setitem__(self.__class__._k(key), value)\n\n def __delitem__(self, key):\n return super(CaseInsensitiveDict, self).__delitem__(self.__class__._k(key))\n\n def __contains__(self, key):\n return super(CaseInsensitiveDict, self).__contains__(self.__class__._k(key))\n\n def pop(self, key, *args, **kwargs):\n return super(CaseInsensitiveDict, self).pop(\n self.__class__._k(key), *args, **kwargs\n )\n\n def get(self, key, *args, **kwargs):\n return super(CaseInsensitiveDict, self).get(\n self.__class__._k(key), *args, **kwargs\n )\n\n def setdefault(self, key, *args, **kwargs):\n return super(CaseInsensitiveDict, self).setdefault(\n self.__class__._k(key), *args, **kwargs\n )\n\n def update(self, e=None, **f):\n if e is None:\n e = {}\n super(CaseInsensitiveDict, self).update(self.__class__(e))\n super(CaseInsensitiveDict, self).update(self.__class__(**f))\n\n def _convert_keys(self):\n for k in list(self.keys()):\n val = super(CaseInsensitiveDict, self).pop(k)\n self.__setitem__(k, val)\n\n\nclass TurnMemoryScope(MemoryScope):\n def __init__(self):\n super().__init__(scope_path.TURN)\n\n def get_memory(self, dialog_context: \"DialogContext\") -> object:\n if not dialog_context:\n raise TypeError(f\"Expecting: DialogContext, but received None\")\n\n turn_value = dialog_context.context.turn_state.get(scope_path.TURN, None)\n\n if not turn_value:\n turn_value = CaseInsensitiveDict()\n dialog_context.context.turn_state[scope_path.TURN] = turn_value\n\n return turn_value\n\n def set_memory(self, dialog_context: \"DialogContext\", memory: object):\n if not dialog_context:\n raise TypeError(f\"Expecting: DialogContext, but received None\")\n\n dialog_context.context.turn_state[scope_path.TURN] = memory\n", "path": "libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/turn_memory_scope.py"}]}
1458
168
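The whole botbuilder port is one constructor call: the second positional argument of MemoryScope.__init__ flips from its default to False. Judging from the title of the linked .NET change ("turn memory scope includesnapshot to false"), that argument is the flag controlling whether the scope is included in dialog state snapshots; the code itself is verbatim from the diff.

```
class TurnMemoryScope(MemoryScope):
    def __init__(self):
        # False = leave turn memory out of snapshots, mirroring
        # botbuilder-dotnet PR #5441.
        super().__init__(scope_path.TURN, False)
```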
gh_patches_debug_30826
rasdani/github-patches
git_diff
freedomofpress__securedrop-4133
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> [xenial] Verify Trusty backup -> Xenial recovery story We should ensure that a SecureDrop backup completed on 14.04 can successfully be recovered on 16.04. Whether we ask admins to perform this step manually, or whether we automate it, it may be a required part of the Xenial migration and will certainly be highly recommended. If clean upgrades to Xenial are not yet implemented one should complete this ticket by following these steps instead: 1. Create a backup on 14.04 server 2. Create fresh install on 16.04 and then attempt to run the restore Part of #3204, may result in follow-up issues. </issue> <code> [start of install_files/ansible-base/roles/restore/files/restore.py] 1 #!/usr/bin/python2.7 2 """ 3 This script and backup archive should be copied to the App server and run by 4 the Ansible playbook. When run (as root), it restores the contents of the 0.3 5 backup file to the machine it's run on. 6 7 python restore.py sd-backup-TIMESTAMP.tar.gz 8 """ 9 10 import os 11 import subprocess 12 import sys 13 import tarfile 14 15 16 def verify_args(): 17 usage = """ 18 Usage: restore.py <backup file> 19 20 <backup file> Path to a SecureDrop 0.3 backup created by backup.py" 21 """ 22 if len(sys.argv) != 2: 23 print(usage) 24 sys.exit(1) 25 26 if not os.path.exists(sys.argv[1]): 27 print("<backup file> '{}' not found".format(sys.argv[1])) 28 sys.exit(1) 29 30 if os.geteuid() != 0: 31 print("This program must be run as root!") 32 sys.exit(1) 33 34 35 def main(): 36 verify_args() 37 38 with tarfile.open(sys.argv[1], 'r:*') as backup: 39 # This assumes that both the old installation (source of the backup) 40 # and the new installation (destination of the restore) used the 41 # default paths for various locations. 42 backup.extractall(path='/') 43 44 # Reload Tor and the web server so they pick up the new configuration 45 # If the process exits with a non-zero return code, raises an exception. 46 subprocess.check_call(['service', 'apache2', 'restart']) 47 subprocess.check_call(['service', 'tor', 'reload']) 48 # Apply database migrations (if backed-up version < version to restore) 49 subprocess.check_call(['dpkg-reconfigure', 'securedrop-app-code']) 50 51 52 if __name__ == "__main__": 53 main() 54 [end of install_files/ansible-base/roles/restore/files/restore.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/install_files/ansible-base/roles/restore/files/restore.py b/install_files/ansible-base/roles/restore/files/restore.py --- a/install_files/ansible-base/roles/restore/files/restore.py +++ b/install_files/ansible-base/roles/restore/files/restore.py @@ -8,6 +8,7 @@ """ import os +import shutil import subprocess import sys import tarfile @@ -35,18 +36,29 @@ def main(): verify_args() + # Remove the /var/lib/tor/services directories to purge values that may have been + # generated by running the ansible playbooks + for d in ['journalist', 'source']: + full_path = os.path.join('/var/lib/tor/services', d) + if os.path.exists(full_path): + shutil.rmtree(full_path) + with tarfile.open(sys.argv[1], 'r:*') as backup: # This assumes that both the old installation (source of the backup) # and the new installation (destination of the restore) used the # default paths for various locations. backup.extractall(path='/') + # Apply database migrations (if backed-up version < version to restore) + subprocess.check_call(['dpkg-reconfigure', 'securedrop-app-code']) + + # Update the configs + subprocess.check_call(['dpkg-reconfigure', 'securedrop-config']) + # Reload Tor and the web server so they pick up the new configuration # If the process exits with a non-zero return code, raises an exception. subprocess.check_call(['service', 'apache2', 'restart']) subprocess.check_call(['service', 'tor', 'reload']) - # Apply database migrations (if backed-up version < version to restore) - subprocess.check_call(['dpkg-reconfigure', 'securedrop-app-code']) if __name__ == "__main__":
{"golden_diff": "diff --git a/install_files/ansible-base/roles/restore/files/restore.py b/install_files/ansible-base/roles/restore/files/restore.py\n--- a/install_files/ansible-base/roles/restore/files/restore.py\n+++ b/install_files/ansible-base/roles/restore/files/restore.py\n@@ -8,6 +8,7 @@\n \"\"\"\n \n import os\n+import shutil\n import subprocess\n import sys\n import tarfile\n@@ -35,18 +36,29 @@\n def main():\n verify_args()\n \n+ # Remove the /var/lib/tor/services directories to purge values that may have been\n+ # generated by running the ansible playbooks\n+ for d in ['journalist', 'source']:\n+ full_path = os.path.join('/var/lib/tor/services', d)\n+ if os.path.exists(full_path):\n+ shutil.rmtree(full_path)\n+\n with tarfile.open(sys.argv[1], 'r:*') as backup:\n # This assumes that both the old installation (source of the backup)\n # and the new installation (destination of the restore) used the\n # default paths for various locations.\n backup.extractall(path='/')\n \n+ # Apply database migrations (if backed-up version < version to restore)\n+ subprocess.check_call(['dpkg-reconfigure', 'securedrop-app-code'])\n+\n+ # Update the configs\n+ subprocess.check_call(['dpkg-reconfigure', 'securedrop-config'])\n+\n # Reload Tor and the web server so they pick up the new configuration\n # If the process exits with a non-zero return code, raises an exception.\n subprocess.check_call(['service', 'apache2', 'restart'])\n subprocess.check_call(['service', 'tor', 'reload'])\n- # Apply database migrations (if backed-up version < version to restore)\n- subprocess.check_call(['dpkg-reconfigure', 'securedrop-app-code'])\n \n \n if __name__ == \"__main__\":\n", "issue": "[xenial] Verify Trusty backup -> Xenial recovery story\nWe should ensure that a SecureDrop backup completed on 14.04 can successfully be recovered on 16.04. Whether we ask admins to perform this step manually, or whether we automate it, it may be a required part of the Xenial migration and will certainly be highly recommended.\r\n\r\nIf clean upgrades to Xenial are not yet implemented one should complete this ticket by following these steps instead:\r\n\r\n1. Create a backup on 14.04 server\r\n2. Create fresh install on 16.04 and then attempt to run the restore\r\n\r\nPart of #3204, may result in follow-up issues.\n", "before_files": [{"content": "#!/usr/bin/python2.7\n\"\"\"\nThis script and backup archive should be copied to the App server and run by\nthe Ansible playbook. 
When run (as root), it restores the contents of the 0.3\nbackup file to the machine it's run on.\n\npython restore.py sd-backup-TIMESTAMP.tar.gz\n\"\"\"\n\nimport os\nimport subprocess\nimport sys\nimport tarfile\n\n\ndef verify_args():\n usage = \"\"\"\nUsage: restore.py <backup file>\n\n <backup file> Path to a SecureDrop 0.3 backup created by backup.py\"\n \"\"\"\n if len(sys.argv) != 2:\n print(usage)\n sys.exit(1)\n\n if not os.path.exists(sys.argv[1]):\n print(\"<backup file> '{}' not found\".format(sys.argv[1]))\n sys.exit(1)\n\n if os.geteuid() != 0:\n print(\"This program must be run as root!\")\n sys.exit(1)\n\n\ndef main():\n verify_args()\n\n with tarfile.open(sys.argv[1], 'r:*') as backup:\n # This assumes that both the old installation (source of the backup)\n # and the new installation (destination of the restore) used the\n # default paths for various locations.\n backup.extractall(path='/')\n\n # Reload Tor and the web server so they pick up the new configuration\n # If the process exits with a non-zero return code, raises an exception.\n subprocess.check_call(['service', 'apache2', 'restart'])\n subprocess.check_call(['service', 'tor', 'reload'])\n # Apply database migrations (if backed-up version < version to restore)\n subprocess.check_call(['dpkg-reconfigure', 'securedrop-app-code'])\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "install_files/ansible-base/roles/restore/files/restore.py"}]}
1186
416
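Two behavioral changes hide in the securedrop diff: stale Tor hidden-service directories are purged before the backup archive is unpacked, and the dpkg-reconfigure steps now run before the service restarts instead of after. The purge loop, verbatim apart from the imports pulled up for context:

```
import os
import shutil

# Remove /var/lib/tor/services/{journalist,source} so values generated by a
# fresh Ansible run don't shadow the ones about to be restored from backup.
for d in ['journalist', 'source']:
    full_path = os.path.join('/var/lib/tor/services', d)
    if os.path.exists(full_path):
        shutil.rmtree(full_path)
```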
gh_patches_debug_34300
rasdani/github-patches
git_diff
jupyterhub__jupyterhub-142
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Admin UI suggestions As I've been using the admin UI a bit for my class, I just had a few things that I thought would be nice to have/change in it: - move the "add user" button to the top -- it is annoying with lots of users to have to scroll all the way down to find it - add some simple statistics at the top of the page: number of running servers, number of currently active users, etc. - it would be awesome to be able to sort users by the different columns -- admin, alphabetically, by last seen - currently, if you shut down a user's server, it causes the page to refresh which often jumps you up to the top (or just somewhere else). It would be nice if the update could be done in the background without actually reloading the page. Obviously, none of these are urgent, but I think they would make the admin experience a little easier. I can open separate issues for them if so desired. </issue> <code> [start of jupyterhub/handlers/pages.py] 1 """Basic html-rendering handlers.""" 2 3 # Copyright (c) Jupyter Development Team. 4 # Distributed under the terms of the Modified BSD License. 5 6 from tornado import web 7 8 from .. import orm 9 from ..utils import admin_only, url_path_join 10 from .base import BaseHandler 11 12 13 class RootHandler(BaseHandler): 14 """Render the Hub root page. 15 16 Currently redirects to home if logged in, 17 shows big fat login button otherwise. 18 """ 19 def get(self): 20 if self.get_current_user(): 21 self.redirect( 22 url_path_join(self.hub.server.base_url, 'home'), 23 permanent=False, 24 ) 25 return 26 27 html = self.render_template('index.html', 28 login_url=self.settings['login_url'], 29 ) 30 self.finish(html) 31 32 class HomeHandler(BaseHandler): 33 """Render the user's home page.""" 34 35 @web.authenticated 36 def get(self): 37 html = self.render_template('home.html', 38 user=self.get_current_user(), 39 ) 40 self.finish(html) 41 42 43 class AdminHandler(BaseHandler): 44 """Render the admin page.""" 45 46 @admin_only 47 def get(self): 48 html = self.render_template('admin.html', 49 user=self.get_current_user(), 50 users=self.db.query(orm.User), 51 admin_access=self.settings.get('admin_access', False), 52 ) 53 self.finish(html) 54 55 56 default_handlers = [ 57 (r'/', RootHandler), 58 (r'/home', HomeHandler), 59 (r'/admin', AdminHandler), 60 ] 61 [end of jupyterhub/handlers/pages.py] [start of jupyterhub/apihandlers/base.py] 1 """Base API handlers""" 2 # Copyright (c) Jupyter Development Team. 3 # Distributed under the terms of the Modified BSD License. 
4 5 import json 6 7 from http.client import responses 8 9 from tornado import web 10 11 from ..handlers import BaseHandler 12 13 class APIHandler(BaseHandler): 14 def get_json_body(self): 15 """Return the body of the request as JSON data.""" 16 if not self.request.body: 17 return None 18 body = self.request.body.strip().decode('utf-8') 19 try: 20 model = json.loads(body) 21 except Exception: 22 self.log.debug("Bad JSON: %r", body) 23 self.log.error("Couldn't parse JSON", exc_info=True) 24 raise web.HTTPError(400, 'Invalid JSON in body of request') 25 return model 26 27 28 def write_error(self, status_code, **kwargs): 29 """Write JSON errors instead of HTML""" 30 exc_info = kwargs.get('exc_info') 31 message = '' 32 status_message = responses.get(status_code, 'Unknown Error') 33 if exc_info: 34 exception = exc_info[1] 35 # get the custom message, if defined 36 try: 37 message = exception.log_message % exception.args 38 except Exception: 39 pass 40 41 # construct the custom reason, if defined 42 reason = getattr(exception, 'reason', '') 43 if reason: 44 status_message = reason 45 46 self.write(json.dumps({ 47 'status': status_code, 48 'message': message or status_message, 49 })) 50 [end of jupyterhub/apihandlers/base.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/jupyterhub/apihandlers/base.py b/jupyterhub/apihandlers/base.py --- a/jupyterhub/apihandlers/base.py +++ b/jupyterhub/apihandlers/base.py @@ -42,7 +42,7 @@ reason = getattr(exception, 'reason', '') if reason: status_message = reason - + self.set_header('Content-Type', 'application/json') self.write(json.dumps({ 'status': status_code, 'message': message or status_message, diff --git a/jupyterhub/handlers/pages.py b/jupyterhub/handlers/pages.py --- a/jupyterhub/handlers/pages.py +++ b/jupyterhub/handlers/pages.py @@ -45,10 +45,52 @@ @admin_only def get(self): + available = {'name', 'admin', 'running', 'last_activity'} + default_sort = ['admin', 'name'] + mapping = { + 'running': '_server_id' + } + default_order = { + 'name': 'asc', + 'last_activity': 'desc', + 'admin': 'desc', + 'running': 'desc', + } + sorts = self.get_arguments('sort') or default_sort + orders = self.get_arguments('order') + + for bad in set(sorts).difference(available): + self.log.warn("ignoring invalid sort: %r", bad) + sorts.remove(bad) + for bad in set(orders).difference({'asc', 'desc'}): + self.log.warn("ignoring invalid order: %r", bad) + orders.remove(bad) + + # add default sort as secondary + for s in default_sort: + if s not in sorts: + sorts.append(s) + if len(orders) < len(sorts): + for col in sorts[len(orders):]: + orders.append(default_order[col]) + else: + orders = orders[:len(sorts)] + + # this could be one incomprehensible nested list comprehension + # get User columns + cols = [ getattr(orm.User, mapping.get(c, c)) for c in sorts ] + # get User.col.desc() order objects + ordered = [ getattr(c, o)() for c, o in zip(cols, orders) ] + + users = self.db.query(orm.User).order_by(*ordered) + running = users.filter(orm.User.server != None) + html = self.render_template('admin.html', user=self.get_current_user(), - users=self.db.query(orm.User), admin_access=self.settings.get('admin_access', False), + users=users, + running=running, + sort={s:o for s,o in zip(sorts, orders)}, ) self.finish(html)
{"golden_diff": "diff --git a/jupyterhub/apihandlers/base.py b/jupyterhub/apihandlers/base.py\n--- a/jupyterhub/apihandlers/base.py\n+++ b/jupyterhub/apihandlers/base.py\n@@ -42,7 +42,7 @@\n reason = getattr(exception, 'reason', '')\n if reason:\n status_message = reason\n- \n+ self.set_header('Content-Type', 'application/json')\n self.write(json.dumps({\n 'status': status_code,\n 'message': message or status_message,\ndiff --git a/jupyterhub/handlers/pages.py b/jupyterhub/handlers/pages.py\n--- a/jupyterhub/handlers/pages.py\n+++ b/jupyterhub/handlers/pages.py\n@@ -45,10 +45,52 @@\n \n @admin_only\n def get(self):\n+ available = {'name', 'admin', 'running', 'last_activity'}\n+ default_sort = ['admin', 'name']\n+ mapping = {\n+ 'running': '_server_id'\n+ }\n+ default_order = {\n+ 'name': 'asc',\n+ 'last_activity': 'desc',\n+ 'admin': 'desc',\n+ 'running': 'desc',\n+ }\n+ sorts = self.get_arguments('sort') or default_sort\n+ orders = self.get_arguments('order')\n+ \n+ for bad in set(sorts).difference(available):\n+ self.log.warn(\"ignoring invalid sort: %r\", bad)\n+ sorts.remove(bad)\n+ for bad in set(orders).difference({'asc', 'desc'}):\n+ self.log.warn(\"ignoring invalid order: %r\", bad)\n+ orders.remove(bad)\n+ \n+ # add default sort as secondary\n+ for s in default_sort:\n+ if s not in sorts:\n+ sorts.append(s)\n+ if len(orders) < len(sorts):\n+ for col in sorts[len(orders):]:\n+ orders.append(default_order[col])\n+ else:\n+ orders = orders[:len(sorts)]\n+ \n+ # this could be one incomprehensible nested list comprehension\n+ # get User columns\n+ cols = [ getattr(orm.User, mapping.get(c, c)) for c in sorts ]\n+ # get User.col.desc() order objects\n+ ordered = [ getattr(c, o)() for c, o in zip(cols, orders) ]\n+ \n+ users = self.db.query(orm.User).order_by(*ordered)\n+ running = users.filter(orm.User.server != None)\n+ \n html = self.render_template('admin.html',\n user=self.get_current_user(),\n- users=self.db.query(orm.User),\n admin_access=self.settings.get('admin_access', False),\n+ users=users,\n+ running=running,\n+ sort={s:o for s,o in zip(sorts, orders)},\n )\n self.finish(html)\n", "issue": "Admin UI suggestions\nAs I've been using the admin UI a bit for my class, I just had a few things that I thought would be nice to have/change in it:\n- move the \"add user\" button to the top -- it is annoying with lots of users to have to scroll all the way down to find it\n- add some simple statistics at the top of the page: number of running servers, number of currently active users, etc.\n- it would be awesome to be able to sort users by the different columns -- admin, alphabetically, by last seen\n- currently, if you shut down a user's server, it causes the page to refresh which often jumps you up to the top (or just somewhere else). It would be nice if the update could be done in the background without actually reloading the page.\n\nObviously, none of these are urgent, but I think they would make the admin experience a little easier. I can open separate issues for them if so desired.\n\n", "before_files": [{"content": "\"\"\"Basic html-rendering handlers.\"\"\"\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nfrom tornado import web\n\nfrom .. 
import orm\nfrom ..utils import admin_only, url_path_join\nfrom .base import BaseHandler\n\n\nclass RootHandler(BaseHandler):\n \"\"\"Render the Hub root page.\n \n Currently redirects to home if logged in,\n shows big fat login button otherwise.\n \"\"\"\n def get(self):\n if self.get_current_user():\n self.redirect(\n url_path_join(self.hub.server.base_url, 'home'),\n permanent=False,\n )\n return\n \n html = self.render_template('index.html',\n login_url=self.settings['login_url'],\n )\n self.finish(html)\n\nclass HomeHandler(BaseHandler):\n \"\"\"Render the user's home page.\"\"\"\n\n @web.authenticated\n def get(self):\n html = self.render_template('home.html',\n user=self.get_current_user(),\n )\n self.finish(html)\n\n\nclass AdminHandler(BaseHandler):\n \"\"\"Render the admin page.\"\"\"\n\n @admin_only\n def get(self):\n html = self.render_template('admin.html',\n user=self.get_current_user(),\n users=self.db.query(orm.User),\n admin_access=self.settings.get('admin_access', False),\n )\n self.finish(html)\n\n\ndefault_handlers = [\n (r'/', RootHandler),\n (r'/home', HomeHandler),\n (r'/admin', AdminHandler),\n]\n", "path": "jupyterhub/handlers/pages.py"}, {"content": "\"\"\"Base API handlers\"\"\"\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nimport json\n\nfrom http.client import responses\n\nfrom tornado import web\n\nfrom ..handlers import BaseHandler\n\nclass APIHandler(BaseHandler):\n def get_json_body(self):\n \"\"\"Return the body of the request as JSON data.\"\"\"\n if not self.request.body:\n return None\n body = self.request.body.strip().decode('utf-8')\n try:\n model = json.loads(body)\n except Exception:\n self.log.debug(\"Bad JSON: %r\", body)\n self.log.error(\"Couldn't parse JSON\", exc_info=True)\n raise web.HTTPError(400, 'Invalid JSON in body of request')\n return model\n \n \n def write_error(self, status_code, **kwargs):\n \"\"\"Write JSON errors instead of HTML\"\"\"\n exc_info = kwargs.get('exc_info')\n message = ''\n status_message = responses.get(status_code, 'Unknown Error')\n if exc_info:\n exception = exc_info[1]\n # get the custom message, if defined\n try:\n message = exception.log_message % exception.args\n except Exception:\n pass\n\n # construct the custom reason, if defined\n reason = getattr(exception, 'reason', '')\n if reason:\n status_message = reason\n \n self.write(json.dumps({\n 'status': status_code,\n 'message': message or status_message,\n }))\n", "path": "jupyterhub/apihandlers/base.py"}]}
1,598
630
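The jupyterhub patch above turns user-supplied `sort`/`order` query arguments into SQLAlchemy `order_by()` clauses only after whitelisting them. A minimal, self-contained sketch of that pattern — the `User` model and column set here are illustrative stand-ins, not JupyterHub's actual ORM:

```python
# Whitelist user-supplied sort columns, then build SQLAlchemy ordering
# clauses with getattr, mirroring the admin-page patch above.
from sqlalchemy import Column, DateTime, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String)
    last_activity = Column(DateTime)

AVAILABLE = {"name", "last_activity"}   # columns a client may sort on
VALID_ORDERS = {"asc", "desc"}

def build_ordering(sorts, orders):
    sorts = [s for s in sorts if s in AVAILABLE]        # drop invalid sorts
    orders = [o for o in orders if o in VALID_ORDERS]   # drop invalid orders
    orders += ["asc"] * (len(sorts) - len(orders))      # pad missing directions
    cols = [getattr(User, s) for s in sorts]
    return [getattr(col, order)() for col, order in zip(cols, orders)]

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([User(name="b"), User(name="a")])
query = session.query(User).order_by(*build_ordering(["name"], ["desc"]))
print([u.name for u in query])  # ['b', 'a']
```

The whitelist check before `getattr` is the load-bearing step: it keeps a crafted query string from reaching arbitrary model attributes.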
gh_patches_debug_23562
rasdani/github-patches
git_diff
internetarchive__openlibrary-6807
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> librarian merge queue fixes Closes #6807 - allows flexible sorting with ?order=asc or desc -- piggy backs on #6785 - adds total counts to Open and Closed - removes "All" - fixes bug where page? persists when switching modes -- fixes **half** of #6782 (i.e. mode part, not submitter!) <!-- What does this PR achieve? [feature|hotfix|fix|refactor] --> ### Stakeholders <!-- @ tag stakeholders of this bug --> @jimchamp <!-- Attribution Disclaimer: By proposing this pull request, I affirm to have made a best-effort and exercised my discretion to make sure relevant sections of this code which substantially leverage code suggestions, code generation, or code snippets from sources (e.g. Stack Overflow, GitHub) have been annotated with basic attribution so reviewers & contributors may have confidence and access to the correct context to evaluate and use this code. --> </issue> <code> [start of openlibrary/plugins/upstream/edits.py] 1 """Librarian Edits 2 """ 3 4 import json 5 import web 6 7 from openlibrary import accounts 8 from openlibrary.core.edits import CommunityEditsQueue, get_status_for_view 9 from infogami.utils import delegate 10 from infogami.utils.view import render_template 11 12 13 def create_request(olids: str, username: str, comment: str = None): 14 work_ids = olids.split(',') 15 return CommunityEditsQueue.submit_work_merge_request( 16 work_ids, 17 submitter=username, 18 comment=comment, 19 ) 20 21 22 class community_edits_queue(delegate.page): 23 path = '/merges' 24 25 def POST(self): 26 def response(status='ok', **kwargs): 27 return {'status': status, **kwargs} 28 29 i = web.input( 30 work_ids="", # Comma-separated OLIDs (OL1W,OL2W,OL3W,...,OL111W) 31 rtype="merge-works", 32 mrid=None, 33 action=None, # create, approve, decline, comment, unassign, create-merged 34 comment=None, 35 ) 36 user = accounts.get_current_user() 37 username = user['key'].split('/')[-1] 38 if i.mrid: # We are updating an existing merge request 39 if i.action == 'comment': 40 if i.comment: 41 CommunityEditsQueue.comment_request(i.mrid, username, i.comment) 42 return delegate.RawText( 43 json.dumps(response()), content_type="application/json" 44 ) 45 else: 46 return delegate.RawText( 47 json.dumps( 48 response( 49 status='error', error='No comment sent in request.' 
50 ) 51 ) 52 ) 53 elif i.action == 'claim': 54 result = CommunityEditsQueue.assign_request(i.mrid, username) 55 return delegate.RawText( 56 json.dumps(response(**result)), content_type="application/json" 57 ) 58 elif i.action == 'unassign': 59 CommunityEditsQueue.unassign_request(i.mrid) 60 status = get_status_for_view(CommunityEditsQueue.STATUS['PENDING']) 61 return delegate.RawText(json.dumps(response(newStatus=status))) 62 else: 63 if i.action == "decline": 64 status = CommunityEditsQueue.STATUS['DECLINED'] 65 elif i.action == 'approve': 66 status = CommunityEditsQueue.STATUS['MERGED'] 67 CommunityEditsQueue.update_request_status( 68 i.mrid, status, username, comment=i.comment 69 ) 70 return delegate.RawText( 71 json.dumps(response()), content_type="application/json" 72 ) 73 elif i.rtype == "merge-works": 74 if i.action == 'create': 75 result = create_request(i.work_ids, username, i.comment) 76 resp = ( 77 response(id=result) 78 if result 79 else response( 80 status='error', 81 error='A request to merge these works has already been submitted.', 82 ) 83 ) 84 return delegate.RawText( 85 json.dumps(resp), content_type="application/json" 86 ) 87 elif i.action == 'create-merged': 88 result = CommunityEditsQueue.submit_work_merge_request( 89 i.work_ids.split(','), 90 submitter=username, 91 reviewer=username, 92 status=CommunityEditsQueue.STATUS['MERGED'], 93 ) 94 return delegate.RawText( 95 json.dumps(response(id=result)), content_type='application/json' 96 ) 97 98 def GET(self): 99 i = web.input(page=1, limit=25, mode="open", submitter=None, reviewer=None) 100 merge_requests = CommunityEditsQueue.get_requests( 101 page=int(i.page), 102 limit=int(i.limit), 103 mode=i.mode, 104 submitter=i.submitter, 105 reviewer=i.reviewer, 106 order='created desc', 107 ).list() 108 109 total_found = CommunityEditsQueue.get_counts_by_mode( 110 mode=i.mode, submitter=i.submitter, reviewer=i.reviewer 111 ) 112 return render_template( 113 'merge_queue/merge_queue', 114 total_found, 115 merge_requests=merge_requests, 116 ) 117 118 def extract_olids(self, url): 119 query_string = url.split('?')[1] 120 split_params = query_string.split('&') 121 params = {} 122 for p in split_params: 123 kv = p.split('=') 124 params[kv[0]] = kv[1] 125 return params['records'].split(',') 126 127 128 class ui_partials(delegate.page): 129 path = '/merges/partials' 130 131 def GET(self): 132 i = web.input(type=None, comment='') 133 if i.type == 'comment': 134 component = render_template('merge_queue/comment', comment_str=i.comment) 135 return delegate.RawText(component) 136 137 138 def setup(): 139 pass 140 [end of openlibrary/plugins/upstream/edits.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
<patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/openlibrary/plugins/upstream/edits.py b/openlibrary/plugins/upstream/edits.py --- a/openlibrary/plugins/upstream/edits.py +++ b/openlibrary/plugins/upstream/edits.py @@ -96,19 +96,26 @@ ) def GET(self): - i = web.input(page=1, limit=25, mode="open", submitter=None, reviewer=None) + i = web.input( + page=1, limit=25, mode="open", submitter=None, reviewer=None, order='desc' + ) merge_requests = CommunityEditsQueue.get_requests( page=int(i.page), limit=int(i.limit), mode=i.mode, submitter=i.submitter, reviewer=i.reviewer, - order='created desc', + order=f'created {i.order}', ).list() - total_found = CommunityEditsQueue.get_counts_by_mode( - mode=i.mode, submitter=i.submitter, reviewer=i.reviewer - ) + total_found = { + "open": CommunityEditsQueue.get_counts_by_mode( + mode='open', submitter=i.submitter, reviewer=i.reviewer + ), + "closed": CommunityEditsQueue.get_counts_by_mode( + mode='closed', submitter=i.submitter, reviewer=i.reviewer + ), + } return render_template( 'merge_queue/merge_queue', total_found,
{"golden_diff": "diff --git a/openlibrary/plugins/upstream/edits.py b/openlibrary/plugins/upstream/edits.py\n--- a/openlibrary/plugins/upstream/edits.py\n+++ b/openlibrary/plugins/upstream/edits.py\n@@ -96,19 +96,26 @@\n )\n \n def GET(self):\n- i = web.input(page=1, limit=25, mode=\"open\", submitter=None, reviewer=None)\n+ i = web.input(\n+ page=1, limit=25, mode=\"open\", submitter=None, reviewer=None, order='desc'\n+ )\n merge_requests = CommunityEditsQueue.get_requests(\n page=int(i.page),\n limit=int(i.limit),\n mode=i.mode,\n submitter=i.submitter,\n reviewer=i.reviewer,\n- order='created desc',\n+ order=f'created {i.order}',\n ).list()\n \n- total_found = CommunityEditsQueue.get_counts_by_mode(\n- mode=i.mode, submitter=i.submitter, reviewer=i.reviewer\n- )\n+ total_found = {\n+ \"open\": CommunityEditsQueue.get_counts_by_mode(\n+ mode='open', submitter=i.submitter, reviewer=i.reviewer\n+ ),\n+ \"closed\": CommunityEditsQueue.get_counts_by_mode(\n+ mode='closed', submitter=i.submitter, reviewer=i.reviewer\n+ ),\n+ }\n return render_template(\n 'merge_queue/merge_queue',\n total_found,\n", "issue": "librarian merge queue fixes\nCloses #6807\r\n\r\n- allows flexible sorting with ?order=asc or desc -- piggy backs on #6785 \r\n- adds total counts to Open and Closed\r\n- removes \"All\"\r\n- fixes bug where page? persists when switching modes -- fixes **half** of #6782 (i.e. mode part, not submitter!)\r\n\r\n\r\n<!-- What does this PR achieve? [feature|hotfix|fix|refactor] -->\r\n\r\n### Stakeholders\r\n<!-- @ tag stakeholders of this bug -->\r\n@jimchamp \r\n\r\n<!-- Attribution Disclaimer: By proposing this pull request, I affirm to have made a best-effort and exercised my discretion to make sure relevant sections of this code which substantially leverage code suggestions, code generation, or code snippets from sources (e.g. Stack Overflow, GitHub) have been annotated with basic attribution so reviewers & contributors may have confidence and access to the correct context to evaluate and use this code. 
-->\r\n\n", "before_files": [{"content": "\"\"\"Librarian Edits\n\"\"\"\n\nimport json\nimport web\n\nfrom openlibrary import accounts\nfrom openlibrary.core.edits import CommunityEditsQueue, get_status_for_view\nfrom infogami.utils import delegate\nfrom infogami.utils.view import render_template\n\n\ndef create_request(olids: str, username: str, comment: str = None):\n work_ids = olids.split(',')\n return CommunityEditsQueue.submit_work_merge_request(\n work_ids,\n submitter=username,\n comment=comment,\n )\n\n\nclass community_edits_queue(delegate.page):\n path = '/merges'\n\n def POST(self):\n def response(status='ok', **kwargs):\n return {'status': status, **kwargs}\n\n i = web.input(\n work_ids=\"\", # Comma-separated OLIDs (OL1W,OL2W,OL3W,...,OL111W)\n rtype=\"merge-works\",\n mrid=None,\n action=None, # create, approve, decline, comment, unassign, create-merged\n comment=None,\n )\n user = accounts.get_current_user()\n username = user['key'].split('/')[-1]\n if i.mrid: # We are updating an existing merge request\n if i.action == 'comment':\n if i.comment:\n CommunityEditsQueue.comment_request(i.mrid, username, i.comment)\n return delegate.RawText(\n json.dumps(response()), content_type=\"application/json\"\n )\n else:\n return delegate.RawText(\n json.dumps(\n response(\n status='error', error='No comment sent in request.'\n )\n )\n )\n elif i.action == 'claim':\n result = CommunityEditsQueue.assign_request(i.mrid, username)\n return delegate.RawText(\n json.dumps(response(**result)), content_type=\"application/json\"\n )\n elif i.action == 'unassign':\n CommunityEditsQueue.unassign_request(i.mrid)\n status = get_status_for_view(CommunityEditsQueue.STATUS['PENDING'])\n return delegate.RawText(json.dumps(response(newStatus=status)))\n else:\n if i.action == \"decline\":\n status = CommunityEditsQueue.STATUS['DECLINED']\n elif i.action == 'approve':\n status = CommunityEditsQueue.STATUS['MERGED']\n CommunityEditsQueue.update_request_status(\n i.mrid, status, username, comment=i.comment\n )\n return delegate.RawText(\n json.dumps(response()), content_type=\"application/json\"\n )\n elif i.rtype == \"merge-works\":\n if i.action == 'create':\n result = create_request(i.work_ids, username, i.comment)\n resp = (\n response(id=result)\n if result\n else response(\n status='error',\n error='A request to merge these works has already been submitted.',\n )\n )\n return delegate.RawText(\n json.dumps(resp), content_type=\"application/json\"\n )\n elif i.action == 'create-merged':\n result = CommunityEditsQueue.submit_work_merge_request(\n i.work_ids.split(','),\n submitter=username,\n reviewer=username,\n status=CommunityEditsQueue.STATUS['MERGED'],\n )\n return delegate.RawText(\n json.dumps(response(id=result)), content_type='application/json'\n )\n\n def GET(self):\n i = web.input(page=1, limit=25, mode=\"open\", submitter=None, reviewer=None)\n merge_requests = CommunityEditsQueue.get_requests(\n page=int(i.page),\n limit=int(i.limit),\n mode=i.mode,\n submitter=i.submitter,\n reviewer=i.reviewer,\n order='created desc',\n ).list()\n\n total_found = CommunityEditsQueue.get_counts_by_mode(\n mode=i.mode, submitter=i.submitter, reviewer=i.reviewer\n )\n return render_template(\n 'merge_queue/merge_queue',\n total_found,\n merge_requests=merge_requests,\n )\n\n def extract_olids(self, url):\n query_string = url.split('?')[1]\n split_params = query_string.split('&')\n params = {}\n for p in split_params:\n kv = p.split('=')\n params[kv[0]] = kv[1]\n return 
params['records'].split(',')\n\n\nclass ui_partials(delegate.page):\n path = '/merges/partials'\n\n def GET(self):\n i = web.input(type=None, comment='')\n if i.type == 'comment':\n component = render_template('merge_queue/comment', comment_str=i.comment)\n return delegate.RawText(component)\n\n\ndef setup():\n pass\n", "path": "openlibrary/plugins/upstream/edits.py"}]}
2,036
324
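One thing the openlibrary diff leaves implicit: `order=f'created {i.order}'` interpolates a request parameter into an ORDER BY fragment, so the surrounding code has to guarantee the direction is one of `asc`/`desc` before it gets that far. A small illustrative guard — the function and whitelist names are hypothetical, not openlibrary's API:

```python
# Never interpolate a raw query parameter into an ORDER BY clause;
# normalize both column and direction to known tokens first.
def safe_order_clause(column: str, direction: str) -> str:
    allowed_columns = {"created", "updated"}      # hypothetical whitelist
    if column not in allowed_columns:
        raise ValueError(f"unsupported sort column: {column!r}")
    direction = direction.lower()
    if direction not in {"asc", "desc"}:
        direction = "desc"                        # fall back instead of failing
    return f"{column} {direction}"

assert safe_order_clause("created", "ASC") == "created asc"
assert safe_order_clause("created", "; DROP TABLE--") == "created desc"
```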
gh_patches_debug_15287
rasdani/github-patches
git_diff
cookiecutter__cookiecutter-642
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Expand Environment Variables in Cookiecutter Configuration File I set my cookiecutterrc file via an environment variable, like this: ``` export COOKIECUTTER_CONFIG="$XDG_CONFIG_HOME/cookiecutter/cookiecutterrc" ``` In my cookiecutterrc, I'd like to use those same environment variables to set paths, however they don't currently expand: ``` default_context: full_name: "Nathan Farrar" email: "[email protected]" github_username: "nfarrar" cookiecutters_dir: "$XDG_CACHE_HOME/cookiecutter/template" replay_dir: "$XDG_CACHE_HOME/cookiecutter/replay" abbreviations: pp: https://github.com/audreyr/cookiecutter-pypackage.git gh: https://github.com/{0}.git bb: https://bitbucket.org/{0} ``` For example: ``` $ cookiecutter pp $ ls ~/ ... drwxr-xr-x 3 nfarrar staff 102 Feb 28 07:37 '$XDG_CACHE_HOME' ... ``` </issue> <code> [start of cookiecutter/config.py] 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 4 """ 5 cookiecutter.config 6 ------------------- 7 8 Global configuration handling 9 """ 10 11 from __future__ import unicode_literals 12 import copy 13 import logging 14 import os 15 import io 16 17 import poyo 18 19 from .exceptions import ConfigDoesNotExistException 20 from .exceptions import InvalidConfiguration 21 22 23 logger = logging.getLogger(__name__) 24 25 USER_CONFIG_PATH = os.path.expanduser('~/.cookiecutterrc') 26 27 DEFAULT_CONFIG = { 28 'cookiecutters_dir': os.path.expanduser('~/.cookiecutters/'), 29 'replay_dir': os.path.expanduser('~/.cookiecutter_replay/'), 30 'default_context': {} 31 } 32 33 34 def get_config(config_path): 35 """ 36 Retrieve the config from the specified path, returning it as a config dict. 37 """ 38 39 if not os.path.exists(config_path): 40 raise ConfigDoesNotExistException 41 42 logger.debug('config_path is {0}'.format(config_path)) 43 with io.open(config_path, encoding='utf-8') as file_handle: 44 try: 45 yaml_dict = poyo.parse_string(file_handle.read()) 46 except poyo.exceptions.PoyoException as e: 47 raise InvalidConfiguration( 48 'Unable to parse YAML file {}. Error: {}' 49 ''.format(config_path, e) 50 ) 51 52 config_dict = copy.copy(DEFAULT_CONFIG) 53 config_dict.update(yaml_dict) 54 55 return config_dict 56 57 58 def get_user_config(config_file=USER_CONFIG_PATH): 59 """Retrieve the config from a file or return the defaults if None is 60 passed. If an environment variable `COOKIECUTTER_CONFIG` is set up, try 61 to load its value. Otherwise fall back to a default file or config. 62 """ 63 # Do NOT load a config. Return defaults instead. 64 if config_file is None: 65 return copy.copy(DEFAULT_CONFIG) 66 67 # Load the given config file 68 if config_file and config_file is not USER_CONFIG_PATH: 69 return get_config(config_file) 70 71 try: 72 # Does the user set up a config environment variable? 73 env_config_file = os.environ['COOKIECUTTER_CONFIG'] 74 except KeyError: 75 # Load an optional user config if it exists 76 # otherwise return the defaults 77 if os.path.exists(USER_CONFIG_PATH): 78 return get_config(USER_CONFIG_PATH) 79 else: 80 return copy.copy(DEFAULT_CONFIG) 81 else: 82 # There is a config environment variable. Try to load it. 83 # Do not check for existence, so invalid file paths raise an error. 84 return get_config(env_config_file) 85 [end of cookiecutter/config.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. 
Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/cookiecutter/config.py b/cookiecutter/config.py --- a/cookiecutter/config.py +++ b/cookiecutter/config.py @@ -31,6 +31,13 @@ } +def _expand_path(path): + """Expand both environment variables and user home in the given path.""" + path = os.path.expandvars(path) + path = os.path.expanduser(path) + return path + + def get_config(config_path): """ Retrieve the config from the specified path, returning it as a config dict. @@ -52,6 +59,12 @@ config_dict = copy.copy(DEFAULT_CONFIG) config_dict.update(yaml_dict) + raw_replay_dir = config_dict['replay_dir'] + config_dict['replay_dir'] = _expand_path(raw_replay_dir) + + raw_cookies_dir = config_dict['cookiecutters_dir'] + config_dict['cookiecutters_dir'] = _expand_path(raw_cookies_dir) + return config_dict
{"golden_diff": "diff --git a/cookiecutter/config.py b/cookiecutter/config.py\n--- a/cookiecutter/config.py\n+++ b/cookiecutter/config.py\n@@ -31,6 +31,13 @@\n }\n \n \n+def _expand_path(path):\n+ \"\"\"Expand both environment variables and user home in the given path.\"\"\"\n+ path = os.path.expandvars(path)\n+ path = os.path.expanduser(path)\n+ return path\n+\n+\n def get_config(config_path):\n \"\"\"\n Retrieve the config from the specified path, returning it as a config dict.\n@@ -52,6 +59,12 @@\n config_dict = copy.copy(DEFAULT_CONFIG)\n config_dict.update(yaml_dict)\n \n+ raw_replay_dir = config_dict['replay_dir']\n+ config_dict['replay_dir'] = _expand_path(raw_replay_dir)\n+\n+ raw_cookies_dir = config_dict['cookiecutters_dir']\n+ config_dict['cookiecutters_dir'] = _expand_path(raw_cookies_dir)\n+\n return config_dict\n", "issue": "Expand Environment Variables in Cookiecutter Configuration File\nI set my cookiecutterrc file via an environment variable, like this:\n\n```\nexport COOKIECUTTER_CONFIG=\"$XDG_CONFIG_HOME/cookiecutter/cookiecutterrc\"\n```\n\nIn my cookiecutterrc, I'd like to use those same environment variables to set paths, however they don't currently expand:\n\n```\ndefault_context:\n full_name: \"Nathan Farrar\"\n email: \"[email protected]\"\n github_username: \"nfarrar\"\ncookiecutters_dir: \"$XDG_CACHE_HOME/cookiecutter/template\"\nreplay_dir: \"$XDG_CACHE_HOME/cookiecutter/replay\"\nabbreviations:\n pp: https://github.com/audreyr/cookiecutter-pypackage.git\n gh: https://github.com/{0}.git\n bb: https://bitbucket.org/{0}\n```\n\nFor example:\n\n```\n$ cookiecutter pp\n$ ls ~/\n...\ndrwxr-xr-x 3 nfarrar staff 102 Feb 28 07:37 '$XDG_CACHE_HOME'\n...\n```\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\ncookiecutter.config\n-------------------\n\nGlobal configuration handling\n\"\"\"\n\nfrom __future__ import unicode_literals\nimport copy\nimport logging\nimport os\nimport io\n\nimport poyo\n\nfrom .exceptions import ConfigDoesNotExistException\nfrom .exceptions import InvalidConfiguration\n\n\nlogger = logging.getLogger(__name__)\n\nUSER_CONFIG_PATH = os.path.expanduser('~/.cookiecutterrc')\n\nDEFAULT_CONFIG = {\n 'cookiecutters_dir': os.path.expanduser('~/.cookiecutters/'),\n 'replay_dir': os.path.expanduser('~/.cookiecutter_replay/'),\n 'default_context': {}\n}\n\n\ndef get_config(config_path):\n \"\"\"\n Retrieve the config from the specified path, returning it as a config dict.\n \"\"\"\n\n if not os.path.exists(config_path):\n raise ConfigDoesNotExistException\n\n logger.debug('config_path is {0}'.format(config_path))\n with io.open(config_path, encoding='utf-8') as file_handle:\n try:\n yaml_dict = poyo.parse_string(file_handle.read())\n except poyo.exceptions.PoyoException as e:\n raise InvalidConfiguration(\n 'Unable to parse YAML file {}. Error: {}'\n ''.format(config_path, e)\n )\n\n config_dict = copy.copy(DEFAULT_CONFIG)\n config_dict.update(yaml_dict)\n\n return config_dict\n\n\ndef get_user_config(config_file=USER_CONFIG_PATH):\n \"\"\"Retrieve the config from a file or return the defaults if None is\n passed. If an environment variable `COOKIECUTTER_CONFIG` is set up, try\n to load its value. Otherwise fall back to a default file or config.\n \"\"\"\n # Do NOT load a config. 
Return defaults instead.\n if config_file is None:\n return copy.copy(DEFAULT_CONFIG)\n\n # Load the given config file\n if config_file and config_file is not USER_CONFIG_PATH:\n return get_config(config_file)\n\n try:\n # Does the user set up a config environment variable?\n env_config_file = os.environ['COOKIECUTTER_CONFIG']\n except KeyError:\n # Load an optional user config if it exists\n # otherwise return the defaults\n if os.path.exists(USER_CONFIG_PATH):\n return get_config(USER_CONFIG_PATH)\n else:\n return copy.copy(DEFAULT_CONFIG)\n else:\n # There is a config environment variable. Try to load it.\n # Do not check for existence, so invalid file paths raise an error.\n return get_config(env_config_file)\n", "path": "cookiecutter/config.py"}]}
1,487
227
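The heart of the cookiecutter fix is the order of the two expansions: `os.path.expandvars` runs first so `$XDG_CACHE_HOME` is substituted, and `os.path.expanduser` runs second so a `~` coming from the variable's value is still resolved. A quick self-contained check of that ordering:

```python
import os

def expand_path(path):
    # the same two-step expansion as the patch's _expand_path helper
    return os.path.expanduser(os.path.expandvars(path))

os.environ["XDG_CACHE_HOME"] = "~/.cache"   # a value that itself contains ~
print(expand_path("$XDG_CACHE_HOME/cookiecutter/replay"))
# e.g. /home/alice/.cache/cookiecutter/replay -- with the calls swapped,
# the ~ coming out of the variable would be left unexpanded
```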
gh_patches_debug_29313
rasdani/github-patches
git_diff
bokeh__bokeh-7934
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> flask_gunicorn_embed.py does not work with Tornado 5 ref: https://github.com/bokeh/bokeh/blob/master/examples/howto/server_embed/flask_gunicorn_embed.py Running as is gets: ``` Exception in thread Thread-1: Traceback (most recent call last): File "/Users/bryanv/anaconda/envs/01216/lib/python3.6/threading.py", line 916, in _bootstrap_inner self.run() File "/Users/bryanv/anaconda/envs/01216/lib/python3.6/threading.py", line 864, in run self._target(*self._args, **self._kwargs) File "/Users/bryanv/work/bokeh/examples/howto/server_embed/flask_gunicorn_embed.py", line 72, in bk_worker server.start() File "/Users/bryanv/anaconda/envs/01216/lib/python3.6/site-packages/bokeh/server/server.py", line 149, in start self._tornado.start() File "/Users/bryanv/anaconda/envs/01216/lib/python3.6/site-packages/bokeh/server/tornado.py", line 372, in start self._stats_job.start() File "/Users/bryanv/anaconda/envs/01216/lib/python3.6/site-packages/tornado/ioloop.py", line 1185, in start self.io_loop = IOLoop.current() File "/Use rs/bryanv/anaconda/envs/01216/lib/python3.6/site-packages/tornado/ioloop.py", line 282, in current loop = asyncio.get_event_loop() File "/Users/bryanv/anaconda/envs/01216/lib/python3.6/asyncio/events.py", line 694, in get_event_loop return get_event_loop_policy().get_event_loop() File "/Users/bryanv/anaconda/envs/01216/lib/python3.6/asyncio/events.py", line 602, in get_event_loop % threading.current_thread().name) RuntimeError: There is no current event loop in thread 'Thread-1'. ``` Tried changing worker to ``` def bk_worker(): io_loop = IOLoop()) server = BaseServer(io_loop, bokeh_tornado, bokeh_http) server.start() server.io_loop.start() ``` but then the http requests to the `HTTPServer` just hang (the workers are getting executed the right number of times though) cc @bdarnell any quick ideas? </issue> <code> [start of examples/howto/server_embed/flask_gunicorn_embed.py] 1 from flask import Flask, render_template 2 3 from tornado.httpserver import HTTPServer 4 from tornado.ioloop import IOLoop 5 6 from bokeh.application import Application 7 from bokeh.application.handlers import FunctionHandler 8 from bokeh.embed import server_document 9 from bokeh.layouts import column 10 from bokeh.models import ColumnDataSource, Slider 11 from bokeh.plotting import figure 12 from bokeh.server.server import BaseServer 13 from bokeh.server.tornado import BokehTornado 14 from bokeh.server.util import bind_sockets 15 from bokeh.themes import Theme 16 17 if __name__ == '__main__': 18 print('This script is intended to be run with gunicorn. 
e.g.') 19 print() 20 print(' gunicorn -w 4 flask_gunicorn_embed:app') 21 print() 22 print('will start the app on four processes') 23 import sys 24 sys.exit() 25 26 from bokeh.sampledata.sea_surface_temperature import sea_surface_temperature 27 28 app = Flask(__name__) 29 30 def modify_doc(doc): 31 df = sea_surface_temperature.copy() 32 source = ColumnDataSource(data=df) 33 34 plot = figure(x_axis_type='datetime', y_range=(0, 25), y_axis_label='Temperature (Celsius)', 35 title="Sea Surface Temperature at 43.18, -70.43") 36 plot.line('time', 'temperature', source=source) 37 38 def callback(attr, old, new): 39 if new == 0: 40 data = df 41 else: 42 data = df.rolling('{0}D'.format(new)).mean() 43 source.data = ColumnDataSource(data=data).data 44 45 slider = Slider(start=0, end=30, value=0, step=1, title="Smoothing by N Days") 46 slider.on_change('value', callback) 47 48 doc.add_root(column(slider, plot)) 49 50 doc.theme = Theme(filename="theme.yaml") 51 52 # can't use shortcuts here, since we are passing to low level BokehTornado 53 bkapp = Application(FunctionHandler(modify_doc)) 54 55 bokeh_tornado = BokehTornado({'/bkapp': bkapp}, extra_websocket_origins=["localhost:8000"]) 56 bokeh_http = HTTPServer(bokeh_tornado) 57 58 # This is so that if this app is run using something like "gunicorn -w 4" then 59 # each process will listen on its own port 60 sockets, port = bind_sockets("localhost", 0) 61 bokeh_http.add_sockets(sockets) 62 63 @app.route('/', methods=['GET']) 64 def bkapp_page(): 65 script = server_document('http://localhost:%d/bkapp' % port) 66 return render_template("embed.html", script=script, template="Flask") 67 68 def bk_worker(): 69 io_loop = IOLoop.current() 70 server = BaseServer(io_loop, bokeh_tornado, bokeh_http) 71 server.start() 72 server.io_loop.start() 73 74 from threading import Thread 75 Thread(target=bk_worker).start() 76 [end of examples/howto/server_embed/flask_gunicorn_embed.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/examples/howto/server_embed/flask_gunicorn_embed.py b/examples/howto/server_embed/flask_gunicorn_embed.py --- a/examples/howto/server_embed/flask_gunicorn_embed.py +++ b/examples/howto/server_embed/flask_gunicorn_embed.py @@ -1,3 +1,8 @@ +try: + import asyncio +except ImportError: + raise RuntimeError("This example requries Python3 / asyncio") + from flask import Flask, render_template from tornado.httpserver import HTTPServer @@ -52,13 +57,9 @@ # can't use shortcuts here, since we are passing to low level BokehTornado bkapp = Application(FunctionHandler(modify_doc)) -bokeh_tornado = BokehTornado({'/bkapp': bkapp}, extra_websocket_origins=["localhost:8000"]) -bokeh_http = HTTPServer(bokeh_tornado) - # This is so that if this app is run using something like "gunicorn -w 4" then # each process will listen on its own port sockets, port = bind_sockets("localhost", 0) -bokeh_http.add_sockets(sockets) @app.route('/', methods=['GET']) def bkapp_page(): @@ -66,8 +67,13 @@ return render_template("embed.html", script=script, template="Flask") def bk_worker(): - io_loop = IOLoop.current() - server = BaseServer(io_loop, bokeh_tornado, bokeh_http) + asyncio.set_event_loop(asyncio.new_event_loop()) + + bokeh_tornado = BokehTornado({'/bkapp': bkapp}, extra_websocket_origins=["localhost:8000"]) + bokeh_http = HTTPServer(bokeh_tornado) + bokeh_http.add_sockets(sockets) + + server = BaseServer(IOLoop.current(), bokeh_tornado, bokeh_http) server.start() server.io_loop.start()
{"golden_diff": "diff --git a/examples/howto/server_embed/flask_gunicorn_embed.py b/examples/howto/server_embed/flask_gunicorn_embed.py\n--- a/examples/howto/server_embed/flask_gunicorn_embed.py\n+++ b/examples/howto/server_embed/flask_gunicorn_embed.py\n@@ -1,3 +1,8 @@\n+try:\n+ import asyncio\n+except ImportError:\n+ raise RuntimeError(\"This example requries Python3 / asyncio\")\n+\n from flask import Flask, render_template\n \n from tornado.httpserver import HTTPServer\n@@ -52,13 +57,9 @@\n # can't use shortcuts here, since we are passing to low level BokehTornado\n bkapp = Application(FunctionHandler(modify_doc))\n \n-bokeh_tornado = BokehTornado({'/bkapp': bkapp}, extra_websocket_origins=[\"localhost:8000\"])\n-bokeh_http = HTTPServer(bokeh_tornado)\n-\n # This is so that if this app is run using something like \"gunicorn -w 4\" then\n # each process will listen on its own port\n sockets, port = bind_sockets(\"localhost\", 0)\n-bokeh_http.add_sockets(sockets)\n \n @app.route('/', methods=['GET'])\n def bkapp_page():\n@@ -66,8 +67,13 @@\n return render_template(\"embed.html\", script=script, template=\"Flask\")\n \n def bk_worker():\n- io_loop = IOLoop.current()\n- server = BaseServer(io_loop, bokeh_tornado, bokeh_http)\n+ asyncio.set_event_loop(asyncio.new_event_loop())\n+\n+ bokeh_tornado = BokehTornado({'/bkapp': bkapp}, extra_websocket_origins=[\"localhost:8000\"])\n+ bokeh_http = HTTPServer(bokeh_tornado)\n+ bokeh_http.add_sockets(sockets)\n+\n+ server = BaseServer(IOLoop.current(), bokeh_tornado, bokeh_http)\n server.start()\n server.io_loop.start()\n", "issue": "flask_gunicorn_embed.py does not work with Tornado 5\nref: https://github.com/bokeh/bokeh/blob/master/examples/howto/server_embed/flask_gunicorn_embed.py\r\n\r\nRunning as is gets:\r\n```\r\nException in thread Thread-1:\r\nTraceback (most recent call last):\r\n File \"/Users/bryanv/anaconda/envs/01216/lib/python3.6/threading.py\", line 916, in _bootstrap_inner\r\n self.run()\r\n File \"/Users/bryanv/anaconda/envs/01216/lib/python3.6/threading.py\", line 864, in run\r\n self._target(*self._args, **self._kwargs)\r\n File \"/Users/bryanv/work/bokeh/examples/howto/server_embed/flask_gunicorn_embed.py\", line 72, in bk_worker\r\n server.start()\r\n File \"/Users/bryanv/anaconda/envs/01216/lib/python3.6/site-packages/bokeh/server/server.py\", line 149, in start\r\n self._tornado.start()\r\n File \"/Users/bryanv/anaconda/envs/01216/lib/python3.6/site-packages/bokeh/server/tornado.py\", line 372, in start\r\n self._stats_job.start()\r\n File \"/Users/bryanv/anaconda/envs/01216/lib/python3.6/site-packages/tornado/ioloop.py\", line 1185, in start\r\n self.io_loop = IOLoop.current()\r\n File \"/Use\r\nrs/bryanv/anaconda/envs/01216/lib/python3.6/site-packages/tornado/ioloop.py\", line 282, in current\r\n loop = asyncio.get_event_loop()\r\n File \"/Users/bryanv/anaconda/envs/01216/lib/python3.6/asyncio/events.py\", line 694, in get_event_loop\r\n return get_event_loop_policy().get_event_loop()\r\n File \"/Users/bryanv/anaconda/envs/01216/lib/python3.6/asyncio/events.py\", line 602, in get_event_loop\r\n % threading.current_thread().name)\r\nRuntimeError: There is no current event loop in thread 'Thread-1'.\r\n```\r\n\r\nTried changing worker to \r\n```\r\ndef bk_worker():\r\n io_loop = IOLoop())\r\n server = BaseServer(io_loop, bokeh_tornado, bokeh_http)\r\n server.start()\r\n server.io_loop.start()\r\n```\r\n\r\n\r\nbut then the http requests to the `HTTPServer` just hang (the workers are getting executed the right number of times 
though)\r\n\r\ncc @bdarnell any quick ideas?\n", "before_files": [{"content": "from flask import Flask, render_template\n\nfrom tornado.httpserver import HTTPServer\nfrom tornado.ioloop import IOLoop\n\nfrom bokeh.application import Application\nfrom bokeh.application.handlers import FunctionHandler\nfrom bokeh.embed import server_document\nfrom bokeh.layouts import column\nfrom bokeh.models import ColumnDataSource, Slider\nfrom bokeh.plotting import figure\nfrom bokeh.server.server import BaseServer\nfrom bokeh.server.tornado import BokehTornado\nfrom bokeh.server.util import bind_sockets\nfrom bokeh.themes import Theme\n\nif __name__ == '__main__':\n print('This script is intended to be run with gunicorn. e.g.')\n print()\n print(' gunicorn -w 4 flask_gunicorn_embed:app')\n print()\n print('will start the app on four processes')\n import sys\n sys.exit()\n\nfrom bokeh.sampledata.sea_surface_temperature import sea_surface_temperature\n\napp = Flask(__name__)\n\ndef modify_doc(doc):\n df = sea_surface_temperature.copy()\n source = ColumnDataSource(data=df)\n\n plot = figure(x_axis_type='datetime', y_range=(0, 25), y_axis_label='Temperature (Celsius)',\n title=\"Sea Surface Temperature at 43.18, -70.43\")\n plot.line('time', 'temperature', source=source)\n\n def callback(attr, old, new):\n if new == 0:\n data = df\n else:\n data = df.rolling('{0}D'.format(new)).mean()\n source.data = ColumnDataSource(data=data).data\n\n slider = Slider(start=0, end=30, value=0, step=1, title=\"Smoothing by N Days\")\n slider.on_change('value', callback)\n\n doc.add_root(column(slider, plot))\n\n doc.theme = Theme(filename=\"theme.yaml\")\n\n# can't use shortcuts here, since we are passing to low level BokehTornado\nbkapp = Application(FunctionHandler(modify_doc))\n\nbokeh_tornado = BokehTornado({'/bkapp': bkapp}, extra_websocket_origins=[\"localhost:8000\"])\nbokeh_http = HTTPServer(bokeh_tornado)\n\n# This is so that if this app is run using something like \"gunicorn -w 4\" then\n# each process will listen on its own port\nsockets, port = bind_sockets(\"localhost\", 0)\nbokeh_http.add_sockets(sockets)\n\[email protected]('/', methods=['GET'])\ndef bkapp_page():\n script = server_document('http://localhost:%d/bkapp' % port)\n return render_template(\"embed.html\", script=script, template=\"Flask\")\n\ndef bk_worker():\n io_loop = IOLoop.current()\n server = BaseServer(io_loop, bokeh_tornado, bokeh_http)\n server.start()\n server.io_loop.start()\n\nfrom threading import Thread\nThread(target=bk_worker).start()\n", "path": "examples/howto/server_embed/flask_gunicorn_embed.py"}]}
1,921
429
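The root cause in the bokeh report is general Tornado 5 behaviour: `IOLoop.current()` defers to `asyncio.get_event_loop()`, which raises in any thread that has no loop installed. A minimal reproduction-and-fix sketch, independent of bokeh:

```python
# A worker thread must install its own asyncio loop before Tornado's
# IOLoop.current() will work; without set_event_loop this raises
# "RuntimeError: There is no current event loop in thread ...".
import asyncio
from threading import Thread
from tornado.ioloop import IOLoop

def worker():
    asyncio.set_event_loop(asyncio.new_event_loop())
    loop = IOLoop.current()
    loop.call_later(0.1, loop.stop)   # schedule a stop so the demo exits
    loop.start()

t = Thread(target=worker)
t.start()
t.join()
print("worker loop ran and exited cleanly")
```

This is exactly the `asyncio.set_event_loop(asyncio.new_event_loop())` line the accepted diff adds at the top of `bk_worker`.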
gh_patches_debug_9231
rasdani/github-patches
git_diff
privacyidea__privacyidea-2615
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> no serial in polling on /ttype/push We may have a bug in push polling. This exception occurs on polling https://gist.github.com/laclaro/743618d11f61f8a817e273db6b804a9a This may be related to #2534. </issue> <code> [start of privacyidea/api/ttype.py] 1 # -*- coding: utf-8 -*- 2 # 3 # http://www.privacyidea.org 4 # (c) Cornelius KΓΆlbel, privacyidea.org 5 # 6 # 2015-09-01 Cornelius KΓΆlbel, <[email protected]> 7 # Initial writeup 8 # 9 # This code is free software; you can redistribute it and/or 10 # modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE 11 # License as published by the Free Software Foundation; either 12 # version 3 of the License, or any later version. 13 # 14 # This code is distributed in the hope that it will be useful, 15 # but WITHOUT ANY WARRANTY; without even the implied warranty of 16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 17 # GNU AFFERO GENERAL PUBLIC LICENSE for more details. 18 # 19 # You should have received a copy of the GNU Affero General Public 20 # License along with this program. If not, see <http://www.gnu.org/licenses/>. 21 # 22 """ 23 This API endpoint is a generic endpoint that can be used by any token 24 type. 25 26 The tokentype needs to implement a classmethod *api_endpoint* and can then be 27 called by /ttype/<tokentype>. 28 This way, each tokentype can create its own API without the need to change 29 the core API. 30 31 The TiQR Token uses this API to implement its special functionalities. See 32 :ref:`code_tiqr_token`. 33 """ 34 from flask import (Blueprint, 35 request) 36 from .lib.utils import getParam 37 from ..lib.log import log_with 38 from flask import g, jsonify, current_app 39 import logging 40 from privacyidea.api.lib.utils import get_all_params 41 from privacyidea.lib.policy import PolicyClass 42 from privacyidea.lib.audit import getAudit 43 from privacyidea.lib.config import (get_token_class, get_from_config, 44 SYSCONF, ensure_no_config_object) 45 from privacyidea.lib.user import get_user_from_param 46 from privacyidea.lib.utils import get_client_ip 47 import json 48 49 log = logging.getLogger(__name__) 50 51 ttype_blueprint = Blueprint('ttype_blueprint', __name__) 52 53 54 @ttype_blueprint.before_request 55 def before_request(): 56 """ 57 This is executed before the request 58 """ 59 ensure_no_config_object() 60 request.all_data = get_all_params(request.values, request.data) 61 privacyidea_server = current_app.config.get("PI_AUDIT_SERVERNAME") or \ 62 request.host 63 # Create a policy_object, that reads the database audit settings 64 # and contains the complete policy definition during the request. 65 # This audit_object can be used in the postpolicy and prepolicy and it 66 # can be passed to the innerpolicies. 67 g.policy_object = PolicyClass() 68 g.audit_object = getAudit(current_app.config) 69 # access_route contains the ip adresses of all clients, hops and proxies. 70 g.client_ip = get_client_ip(request, 71 get_from_config(SYSCONF.OVERRIDECLIENT)) 72 g.audit_object.log({"success": False, 73 "action_detail": "", 74 "client": g.client_ip, 75 "client_user_agent": request.user_agent.browser, 76 "privacyidea_server": privacyidea_server, 77 "action": "{0!s} {1!s}".format(request.method, request.url_rule), 78 "info": ""}) 79 80 81 @ttype_blueprint.route('/<ttype>', methods=['POST', 'GET']) 82 @log_with(log) 83 def token(ttype=None): 84 """ 85 This is a special token function. 
Each token type can define an 86 additional API call, that does not need authentication on the REST API 87 level. 88 89 :return: Token Type dependent 90 """ 91 tokenc = get_token_class(ttype) 92 res = tokenc.api_endpoint(request, g) 93 serial = getParam(request.all_data, "serial") 94 user = get_user_from_param(request.all_data) 95 g.audit_object.log({"success": 1, 96 "user": user.login, 97 "realm": user.realm, 98 "serial": serial, 99 "token_type": ttype}) 100 if res[0] == "json": 101 return jsonify(res[1]) 102 elif res[0] in ["html", "plain"]: 103 return current_app.response_class(res[1], mimetype="text/{0!s}".format(res[0])) 104 elif len(res) == 2: 105 return current_app.response_class(json.dumps(res[1]), 106 mimetype="application/{0!s}".format(res[0])) 107 else: 108 return current_app.response_class(res[1], mimetype="application/octet-binary", 109 headers=res[2]) 110 [end of privacyidea/api/ttype.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/privacyidea/api/ttype.py b/privacyidea/api/ttype.py --- a/privacyidea/api/ttype.py +++ b/privacyidea/api/ttype.py @@ -69,6 +69,7 @@ # access_route contains the ip adresses of all clients, hops and proxies. g.client_ip = get_client_ip(request, get_from_config(SYSCONF.OVERRIDECLIENT)) + g.serial = getParam(request.all_data, "serial") or None g.audit_object.log({"success": False, "action_detail": "", "client": g.client_ip,
{"golden_diff": "diff --git a/privacyidea/api/ttype.py b/privacyidea/api/ttype.py\n--- a/privacyidea/api/ttype.py\n+++ b/privacyidea/api/ttype.py\n@@ -69,6 +69,7 @@\n # access_route contains the ip adresses of all clients, hops and proxies.\n g.client_ip = get_client_ip(request,\n get_from_config(SYSCONF.OVERRIDECLIENT))\n+ g.serial = getParam(request.all_data, \"serial\") or None\n g.audit_object.log({\"success\": False,\n \"action_detail\": \"\",\n \"client\": g.client_ip,\n", "issue": "no serial in polling on /ttype/push\nWe may have a bug in push polling. This exception occurs on polling\r\n\r\nhttps://gist.github.com/laclaro/743618d11f61f8a817e273db6b804a9a\r\n\r\nThis may be related to #2534.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# http://www.privacyidea.org\n# (c) Cornelius K\u00f6lbel, privacyidea.org\n#\n# 2015-09-01 Cornelius K\u00f6lbel, <[email protected]>\n# Initial writeup\n#\n# This code is free software; you can redistribute it and/or\n# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE\n# License as published by the Free Software Foundation; either\n# version 3 of the License, or any later version.\n#\n# This code is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU AFFERO GENERAL PUBLIC LICENSE for more details.\n#\n# You should have received a copy of the GNU Affero General Public\n# License along with this program. If not, see <http://www.gnu.org/licenses/>.\n#\n\"\"\"\nThis API endpoint is a generic endpoint that can be used by any token\ntype.\n\nThe tokentype needs to implement a classmethod *api_endpoint* and can then be\ncalled by /ttype/<tokentype>.\nThis way, each tokentype can create its own API without the need to change\nthe core API.\n\nThe TiQR Token uses this API to implement its special functionalities. 
See\n:ref:`code_tiqr_token`.\n\"\"\"\nfrom flask import (Blueprint,\n request)\nfrom .lib.utils import getParam\nfrom ..lib.log import log_with\nfrom flask import g, jsonify, current_app\nimport logging\nfrom privacyidea.api.lib.utils import get_all_params\nfrom privacyidea.lib.policy import PolicyClass\nfrom privacyidea.lib.audit import getAudit\nfrom privacyidea.lib.config import (get_token_class, get_from_config,\n SYSCONF, ensure_no_config_object)\nfrom privacyidea.lib.user import get_user_from_param\nfrom privacyidea.lib.utils import get_client_ip\nimport json\n\nlog = logging.getLogger(__name__)\n\nttype_blueprint = Blueprint('ttype_blueprint', __name__)\n\n\n@ttype_blueprint.before_request\ndef before_request():\n \"\"\"\n This is executed before the request\n \"\"\"\n ensure_no_config_object()\n request.all_data = get_all_params(request.values, request.data)\n privacyidea_server = current_app.config.get(\"PI_AUDIT_SERVERNAME\") or \\\n request.host\n # Create a policy_object, that reads the database audit settings\n # and contains the complete policy definition during the request.\n # This audit_object can be used in the postpolicy and prepolicy and it\n # can be passed to the innerpolicies.\n g.policy_object = PolicyClass()\n g.audit_object = getAudit(current_app.config)\n # access_route contains the ip adresses of all clients, hops and proxies.\n g.client_ip = get_client_ip(request,\n get_from_config(SYSCONF.OVERRIDECLIENT))\n g.audit_object.log({\"success\": False,\n \"action_detail\": \"\",\n \"client\": g.client_ip,\n \"client_user_agent\": request.user_agent.browser,\n \"privacyidea_server\": privacyidea_server,\n \"action\": \"{0!s} {1!s}\".format(request.method, request.url_rule),\n \"info\": \"\"})\n\n\n@ttype_blueprint.route('/<ttype>', methods=['POST', 'GET'])\n@log_with(log)\ndef token(ttype=None):\n \"\"\"\n This is a special token function. Each token type can define an\n additional API call, that does not need authentication on the REST API\n level.\n\n :return: Token Type dependent\n \"\"\"\n tokenc = get_token_class(ttype)\n res = tokenc.api_endpoint(request, g)\n serial = getParam(request.all_data, \"serial\")\n user = get_user_from_param(request.all_data)\n g.audit_object.log({\"success\": 1,\n \"user\": user.login,\n \"realm\": user.realm,\n \"serial\": serial,\n \"token_type\": ttype})\n if res[0] == \"json\":\n return jsonify(res[1])\n elif res[0] in [\"html\", \"plain\"]:\n return current_app.response_class(res[1], mimetype=\"text/{0!s}\".format(res[0]))\n elif len(res) == 2:\n return current_app.response_class(json.dumps(res[1]),\n mimetype=\"application/{0!s}\".format(res[0]))\n else:\n return current_app.response_class(res[1], mimetype=\"application/octet-binary\",\n headers=res[2])\n", "path": "privacyidea/api/ttype.py"}]}
1,812
135
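The privacyidea change is small but the shape is reusable: resolve an optional request parameter once, in a `before_request` hook, with an explicit `or None`, so later handlers and audit logging never depend on it having been set. A toy Flask version of that shape — routes and parameter handling here are illustrative, not privacyidea's real helpers:

```python
# Resolve optional parameters up front so downstream handlers never
# touch a missing attribute (mirrors `getParam(...) or None` above).
from flask import Flask, g, jsonify, request

app = Flask(__name__)

@app.before_request
def resolve_serial():
    g.serial = request.values.get("serial") or None

@app.route("/ttype/<ttype>")
def ttype(ttype):
    return jsonify({"token_type": ttype, "serial": g.serial})

if __name__ == "__main__":
    with app.test_client() as client:
        print(client.get("/ttype/push").json)                  # serial is None
        print(client.get("/ttype/push?serial=PIPU0001").json)  # serial is set
```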
gh_patches_debug_6576
rasdani/github-patches
git_diff
ephios-dev__ephios-757
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Make minors identifiable on event detail page As an Einsatzleiter, I want to quickly grasp which participants are younger than 18 years. For that purpose, I want to have the participation boxes on the event detail page/shift box to display a small warning/indication, e.g. a red corner or similar. </issue> <code> [start of ephios/core/signup/participants.py] 1 import dataclasses 2 import functools 3 from datetime import date 4 from typing import Optional 5 6 from django.contrib.auth import get_user_model 7 from django.db.models import QuerySet 8 from django.urls import reverse 9 from django.utils.safestring import mark_safe 10 from django.utils.translation import gettext_lazy as _ 11 12 from ephios.core.models import AbstractParticipation, LocalParticipation, Qualification 13 from ephios.core.models.events import PlaceholderParticipation 14 15 16 @dataclasses.dataclass(frozen=True) 17 class AbstractParticipant: 18 first_name: str 19 last_name: str 20 qualifications: QuerySet = dataclasses.field(hash=False) 21 date_of_birth: Optional[date] 22 email: Optional[str] # if set to None, no notifications are sent 23 24 def get_age(self, today: date = None): 25 if self.date_of_birth is None: 26 return None 27 today, born = today or date.today(), self.date_of_birth 28 return today.year - born.year - ((today.month, today.day) < (born.month, born.day)) 29 30 def __str__(self): 31 return f"{self.first_name} {self.last_name}" 32 33 def new_participation(self, shift): 34 raise NotImplementedError 35 36 def participation_for(self, shift): 37 """Return the participation object for a shift. Return None if it does not exist.""" 38 raise NotImplementedError 39 40 def all_participations(self): 41 """Return all participations for this participant""" 42 raise NotImplementedError 43 44 @functools.lru_cache(maxsize=64) 45 def collect_all_qualifications(self) -> set: 46 return Qualification.collect_all_included_qualifications(self.qualifications) 47 48 def has_qualifications(self, qualifications): 49 return set(qualifications) <= self.collect_all_qualifications() 50 51 def reverse_signup_action(self, shift): 52 raise NotImplementedError 53 54 def reverse_event_detail(self, event): 55 raise NotImplementedError 56 57 @property 58 def icon(self): 59 return mark_safe('<span class="fa fa-user"></span>') 60 61 62 @dataclasses.dataclass(frozen=True) 63 class LocalUserParticipant(AbstractParticipant): 64 user: get_user_model() 65 66 def new_participation(self, shift): 67 return LocalParticipation(shift=shift, user=self.user) 68 69 def participation_for(self, shift): 70 try: 71 return LocalParticipation.objects.get(shift=shift, user=self.user) 72 except LocalParticipation.DoesNotExist: 73 return None 74 75 def all_participations(self): 76 return LocalParticipation.objects.filter(user=self.user) 77 78 def reverse_signup_action(self, shift): 79 return reverse("core:signup_action", kwargs=dict(pk=shift.pk)) 80 81 def reverse_event_detail(self, event): 82 return event.get_absolute_url() 83 84 85 @dataclasses.dataclass(frozen=True) 86 class PlaceholderParticipant(AbstractParticipant): 87 def new_participation(self, shift): 88 return PlaceholderParticipation( 89 shift=shift, first_name=self.first_name, last_name=self.last_name 90 ) 91 92 def participation_for(self, shift): 93 try: 94 return PlaceholderParticipation.objects.get( 95 shift=shift, first_name=self.first_name, last_name=self.last_name 96 ) 97 except 
PlaceholderParticipation.DoesNotExist: 98 return None 99 100 def all_participations(self): 101 return AbstractParticipation.objects.none() 102 103 def reverse_signup_action(self, shift): 104 raise NotImplementedError 105 106 def reverse_event_detail(self, event): 107 raise NotImplementedError 108 109 @property 110 def icon(self): 111 return mark_safe( 112 f'<span class="fa fa-user-tag" data-toggle="tooltip" data-placement="left" title="{_("Placeholder")}"></span>' 113 ) 114 [end of ephios/core/signup/participants.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/ephios/core/signup/participants.py b/ephios/core/signup/participants.py --- a/ephios/core/signup/participants.py +++ b/ephios/core/signup/participants.py @@ -27,6 +27,12 @@ today, born = today or date.today(), self.date_of_birth return today.year - born.year - ((today.month, today.day) < (born.month, born.day)) + @property + def is_minor(self): + if age := self.get_age(): + return age < 18 + return False + def __str__(self): return f"{self.first_name} {self.last_name}"
{"golden_diff": "diff --git a/ephios/core/signup/participants.py b/ephios/core/signup/participants.py\n--- a/ephios/core/signup/participants.py\n+++ b/ephios/core/signup/participants.py\n@@ -27,6 +27,12 @@\n today, born = today or date.today(), self.date_of_birth\n return today.year - born.year - ((today.month, today.day) < (born.month, born.day))\n \n+ @property\n+ def is_minor(self):\n+ if age := self.get_age():\n+ return age < 18\n+ return False\n+\n def __str__(self):\n return f\"{self.first_name} {self.last_name}\"\n", "issue": "Make minors identifiable on event detail page\nAs an Einsatzleiter, I want to quickly grasp which participants are younger than 18 years. For that purpose, I want to have the participation boxes on the event detail page/shift box to display a small warning/indication, e.g. a red corner or similar.\n", "before_files": [{"content": "import dataclasses\nimport functools\nfrom datetime import date\nfrom typing import Optional\n\nfrom django.contrib.auth import get_user_model\nfrom django.db.models import QuerySet\nfrom django.urls import reverse\nfrom django.utils.safestring import mark_safe\nfrom django.utils.translation import gettext_lazy as _\n\nfrom ephios.core.models import AbstractParticipation, LocalParticipation, Qualification\nfrom ephios.core.models.events import PlaceholderParticipation\n\n\[email protected](frozen=True)\nclass AbstractParticipant:\n first_name: str\n last_name: str\n qualifications: QuerySet = dataclasses.field(hash=False)\n date_of_birth: Optional[date]\n email: Optional[str] # if set to None, no notifications are sent\n\n def get_age(self, today: date = None):\n if self.date_of_birth is None:\n return None\n today, born = today or date.today(), self.date_of_birth\n return today.year - born.year - ((today.month, today.day) < (born.month, born.day))\n\n def __str__(self):\n return f\"{self.first_name} {self.last_name}\"\n\n def new_participation(self, shift):\n raise NotImplementedError\n\n def participation_for(self, shift):\n \"\"\"Return the participation object for a shift. 
Return None if it does not exist.\"\"\"\n raise NotImplementedError\n\n def all_participations(self):\n \"\"\"Return all participations for this participant\"\"\"\n raise NotImplementedError\n\n @functools.lru_cache(maxsize=64)\n def collect_all_qualifications(self) -> set:\n return Qualification.collect_all_included_qualifications(self.qualifications)\n\n def has_qualifications(self, qualifications):\n return set(qualifications) <= self.collect_all_qualifications()\n\n def reverse_signup_action(self, shift):\n raise NotImplementedError\n\n def reverse_event_detail(self, event):\n raise NotImplementedError\n\n @property\n def icon(self):\n return mark_safe('<span class=\"fa fa-user\"></span>')\n\n\[email protected](frozen=True)\nclass LocalUserParticipant(AbstractParticipant):\n user: get_user_model()\n\n def new_participation(self, shift):\n return LocalParticipation(shift=shift, user=self.user)\n\n def participation_for(self, shift):\n try:\n return LocalParticipation.objects.get(shift=shift, user=self.user)\n except LocalParticipation.DoesNotExist:\n return None\n\n def all_participations(self):\n return LocalParticipation.objects.filter(user=self.user)\n\n def reverse_signup_action(self, shift):\n return reverse(\"core:signup_action\", kwargs=dict(pk=shift.pk))\n\n def reverse_event_detail(self, event):\n return event.get_absolute_url()\n\n\[email protected](frozen=True)\nclass PlaceholderParticipant(AbstractParticipant):\n def new_participation(self, shift):\n return PlaceholderParticipation(\n shift=shift, first_name=self.first_name, last_name=self.last_name\n )\n\n def participation_for(self, shift):\n try:\n return PlaceholderParticipation.objects.get(\n shift=shift, first_name=self.first_name, last_name=self.last_name\n )\n except PlaceholderParticipation.DoesNotExist:\n return None\n\n def all_participations(self):\n return AbstractParticipation.objects.none()\n\n def reverse_signup_action(self, shift):\n raise NotImplementedError\n\n def reverse_event_detail(self, event):\n raise NotImplementedError\n\n @property\n def icon(self):\n return mark_safe(\n f'<span class=\"fa fa-user-tag\" data-toggle=\"tooltip\" data-placement=\"left\" title=\"{_(\"Placeholder\")}\"></span>'\n )\n", "path": "ephios/core/signup/participants.py"}]}
1,608
153
gh_patches_debug_37692
rasdani/github-patches
git_diff
astronomer__astro-sdk-325
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Astro Build's Integration Test breaking on 0.8.1
broken on - 0.8.1 , but works with 0.7.0.

In this test dag, task_5 joins tables from task_3 (snowflake) and task_4 (postgres). The task’s print statement shows the joined table, suggesting successful ingestion and transformation. 
However, the error below suggests the the postgres output conn is expected to contain the database element of the snowflake connection.
In other words, the task fails becausepostgres_conn doesn’t have the database attribute associated with snowflake_conn.

```
import time
from datetime import datetime

import pandas as pd
from airflow.decorators import dag, task
from airflow.models import Variable
from airflow.utils import timezone
from airflow.utils.dates import days_ago
from astro import dataframe as df
from astro import sql as aql
from astro.sql.table import Table, TempTable

@df()
def task_1_func():
    return pd.DataFrame({'a':[1,2,3]})

@aql.transform(conn_id='postgres_conn')
def task_2_func(execution_date: Table):
    return """SELECT * FROM actor WHERE startdate < '{{ execution_date }}'"""

@aql.transform(conn_id='snowflake_conn')
def task_3_func():
    return """SELECT * FROM "ASTROBUILD"."BUILDSCHEMA"."MYTABLE" LIMIT 10;"""

@aql.transform(conn_id='postgres_conn')
def task_4_func():
    return """SELECT * FROM actor LIMIT 10;"""

@df(conn_id='postgres_conn')
def task_5_func(task_3: pd.DataFrame, task_4: pd.DataFrame):
    df=task_3.join(task_4)
    print(df)
    return df

@dag(schedule_interval='0 0 * * *', start_date=datetime(2022, 4, 15, 11, 28, 8), catchup=False, tags=['tag_1', 'tag_1'])
def dag_1():
    task_1 = task_1_func()
    task_2 = task_2_func(output_table=Table(conn_id='postgres_conn', schema='tmp_astro', table_name='tmp_astro_dag_1_task_2'), execution_date=Table(conn_id='postgres_conn', table_name='execution_date'))
    task_3 = task_3_func(output_table=Table(conn_id='snowflake_conn', schema='tmp_astro', table_name='tmp_astro_dag_1_task_3'))
    task_4 = task_4_func(output_table=Table(conn_id='postgres_conn', schema='tmp_astro', table_name='tmp_astro_dag_1_task_4'))
    task_5 = task_5_func(task_3, task_4, output_table=Table(conn_id='postgres_conn', schema='tmp_astro', table_name='tmp_astro_dag_1_task_5'))

dag_obj = dag_1()
```

Error:

```
INFO - Using connection to: id: postgres_conn. Host: 127.0.0.1, Port: 8999, Schema: postgres, Login: postgres, Password: ***, extra: {}
*** psycopg2.OperationalError: connection to server at "127.0.0.1", port 8999 failed: FATAL: database "ASTROBUILD" does not exist
```
</issue>
<code>
[start of src/astro/utils/table_handler.py]
1 import inspect
2 from typing import Optional
3 
4 import pandas
5 
6 from astro.settings import SCHEMA
7 from astro.sql.table import Table
8 
9 
10 class TableHandler:
11     def _set_variables_from_first_table(self):
12         """
13         When we create our SQL operation, we run with the assumption that the first table given is the "main table".
14         This means that a user doesn't need to define default conn_id, database, etc. in the function unless they want
15         to create default values.
16 """ 17 first_table: Optional[Table] = None 18 if self.op_args: 19 table_index = [x for x, t in enumerate(self.op_args) if type(t) == Table] 20 if table_index: 21 first_table = self.op_args[table_index[0]] 22 elif not first_table: 23 table_kwargs = [ 24 x 25 for x in inspect.signature(self.python_callable).parameters.values() 26 if ( 27 x.annotation == Table 28 and type(self.op_kwargs[x.name]) == Table 29 or x.annotation == pandas.DataFrame 30 and type(self.op_kwargs[x.name]) == Table 31 ) 32 ] 33 if table_kwargs: 34 first_table = self.op_kwargs[table_kwargs[0].name] 35 36 # If there is no first table via op_ags or kwargs, we check the parameters 37 elif not first_table: 38 if self.parameters: 39 param_tables = [t for t in self.parameters.values() if type(t) == Table] 40 if param_tables: 41 first_table = param_tables[0] 42 43 if first_table: 44 self.conn_id = first_table.conn_id or self.conn_id 45 self.database = first_table.database or self.database 46 self.schema = first_table.schema or self.schema 47 self.warehouse = first_table.warehouse or self.warehouse 48 self.role = first_table.role or self.role 49 50 def populate_output_table(self): 51 self.output_table.conn_id = self.output_table.conn_id or self.conn_id 52 self.output_table.database = self.output_table.database or self.database 53 self.output_table.warehouse = self.output_table.warehouse or self.warehouse 54 self.output_table.schema = self.output_table.schema or SCHEMA 55 [end of src/astro/utils/table_handler.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/src/astro/utils/table_handler.py b/src/astro/utils/table_handler.py --- a/src/astro/utils/table_handler.py +++ b/src/astro/utils/table_handler.py @@ -16,28 +16,51 @@ """ first_table: Optional[Table] = None if self.op_args: - table_index = [x for x, t in enumerate(self.op_args) if type(t) == Table] - if table_index: + table_index = [ + x for x, t in enumerate(self.op_args) if isinstance(t, Table) + ] + conn_id_set = {x.conn_id for x in self.op_args if isinstance(x, Table)} + # Check to see if all tables belong to same conn_id. Otherwise, we this can go wrong for cases + # 1. When we have tables from different DBs. + # 2. When we have tables from different conn_id, since they can be configured with different + # database/schema etc. + if table_index and len(conn_id_set) == 1: first_table = self.op_args[table_index[0]] - elif not first_table: + + if not first_table and self.op_kwargs and self.python_callable: table_kwargs = [ x for x in inspect.signature(self.python_callable).parameters.values() if ( x.annotation == Table - and type(self.op_kwargs[x.name]) == Table + and isinstance(self.op_kwargs[x.name], Table) or x.annotation == pandas.DataFrame - and type(self.op_kwargs[x.name]) == Table + and isinstance(self.op_kwargs[x.name], Table) ) ] - if table_kwargs: + conn_id_set = { + self.op_kwargs[x.name].conn_id + for x in inspect.signature(self.python_callable).parameters.values() + if ( + x.annotation == Table + and isinstance(self.op_kwargs[x.name], Table) + or x.annotation == pandas.DataFrame + and isinstance(self.op_kwargs[x.name], Table) + ) + } + if table_kwargs and len(conn_id_set) == 1: first_table = self.op_kwargs[table_kwargs[0].name] # If there is no first table via op_ags or kwargs, we check the parameters - elif not first_table: + if not first_table and self.parameters: if self.parameters: - param_tables = [t for t in self.parameters.values() if type(t) == Table] - if param_tables: + param_tables = [ + t for t in self.parameters.values() if isinstance(t, Table) + ] + conn_id_set = { + t.conn_id for t in self.parameters.values() if isinstance(t, Table) + } + if param_tables and len(conn_id_set) == 1: first_table = param_tables[0] if first_table:
{"golden_diff": "diff --git a/src/astro/utils/table_handler.py b/src/astro/utils/table_handler.py\n--- a/src/astro/utils/table_handler.py\n+++ b/src/astro/utils/table_handler.py\n@@ -16,28 +16,51 @@\n \"\"\"\n first_table: Optional[Table] = None\n if self.op_args:\n- table_index = [x for x, t in enumerate(self.op_args) if type(t) == Table]\n- if table_index:\n+ table_index = [\n+ x for x, t in enumerate(self.op_args) if isinstance(t, Table)\n+ ]\n+ conn_id_set = {x.conn_id for x in self.op_args if isinstance(x, Table)}\n+ # Check to see if all tables belong to same conn_id. Otherwise, we this can go wrong for cases\n+ # 1. When we have tables from different DBs.\n+ # 2. When we have tables from different conn_id, since they can be configured with different\n+ # database/schema etc.\n+ if table_index and len(conn_id_set) == 1:\n first_table = self.op_args[table_index[0]]\n- elif not first_table:\n+\n+ if not first_table and self.op_kwargs and self.python_callable:\n table_kwargs = [\n x\n for x in inspect.signature(self.python_callable).parameters.values()\n if (\n x.annotation == Table\n- and type(self.op_kwargs[x.name]) == Table\n+ and isinstance(self.op_kwargs[x.name], Table)\n or x.annotation == pandas.DataFrame\n- and type(self.op_kwargs[x.name]) == Table\n+ and isinstance(self.op_kwargs[x.name], Table)\n )\n ]\n- if table_kwargs:\n+ conn_id_set = {\n+ self.op_kwargs[x.name].conn_id\n+ for x in inspect.signature(self.python_callable).parameters.values()\n+ if (\n+ x.annotation == Table\n+ and isinstance(self.op_kwargs[x.name], Table)\n+ or x.annotation == pandas.DataFrame\n+ and isinstance(self.op_kwargs[x.name], Table)\n+ )\n+ }\n+ if table_kwargs and len(conn_id_set) == 1:\n first_table = self.op_kwargs[table_kwargs[0].name]\n \n # If there is no first table via op_ags or kwargs, we check the parameters\n- elif not first_table:\n+ if not first_table and self.parameters:\n if self.parameters:\n- param_tables = [t for t in self.parameters.values() if type(t) == Table]\n- if param_tables:\n+ param_tables = [\n+ t for t in self.parameters.values() if isinstance(t, Table)\n+ ]\n+ conn_id_set = {\n+ t.conn_id for t in self.parameters.values() if isinstance(t, Table)\n+ }\n+ if param_tables and len(conn_id_set) == 1:\n first_table = param_tables[0]\n \n if first_table:\n", "issue": "Astro Build's Integration Test breaking on 0.8.1\nbroken on - 0.8.1 , but works with 0.7.0.\n\nIn this test dag, task_5 joins tables from task_3 (snowflake) and task_4 (postgres). The task\u2019s print statement shows the joined table, suggesting successful ingestion and transformation. 
However, the error below suggests the the postgres output conn is expected to contain the database element of the snowflake connection.\nIn other words, the task fails becausepostgres_conn doesn\u2019t have the database attribute associated with snowflake_conn.\n\n```\nimport time\nfrom datetime import datetime\n\nimport pandas as pd\nfrom airflow.decorators import dag, task\nfrom airflow.models import Variable\nfrom airflow.utils import timezone\nfrom airflow.utils.dates import days_ago\nfrom astro import dataframe as df\nfrom astro import sql as aql\nfrom astro.sql.table import Table, TempTable\n\n@df()\ndef task_1_func():\n return pd.DataFrame({'a':[1,2,3]})\n\[email protected](conn_id='postgres_conn')\ndef task_2_func(execution_date: Table):\n return \"\"\"SELECT * FROM actor WHERE startdate < '{{ execution_date }}'\"\"\"\n\[email protected](conn_id='snowflake_conn')\ndef task_3_func():\n return \"\"\"SELECT * FROM \"ASTROBUILD\".\"BUILDSCHEMA\".\"MYTABLE\" LIMIT 10;\"\"\"\n\[email protected](conn_id='postgres_conn')\ndef task_4_func():\n return \"\"\"SELECT * FROM actor LIMIT 10;\"\"\"\n\n@df(conn_id='postgres_conn')\ndef task_5_func(task_3: pd.DataFrame, task_4: pd.DataFrame):\n df=task_3.join(task_4)\n print(df)\n return df\n\n@dag(schedule_interval='0 0 * * *', start_date=datetime(2022, 4, 15, 11, 28, 8), catchup=False, tags=['tag_1', 'tag_1'])\ndef dag_1():\n task_1 = task_1_func()\n task_2 = task_2_func(output_table=Table(conn_id='postgres_conn', schema='tmp_astro', table_name='tmp_astro_dag_1_task_2'), execution_date=Table(conn_id='postgres_conn', table_name='execution_date'))\n task_3 = task_3_func(output_table=Table(conn_id='snowflake_conn', schema='tmp_astro', table_name='tmp_astro_dag_1_task_3'))\n task_4 = task_4_func(output_table=Table(conn_id='postgres_conn', schema='tmp_astro', table_name='tmp_astro_dag_1_task_4'))\n task_5 = task_5_func(task_3, task_4, output_table=Table(conn_id='postgres_conn', schema='tmp_astro', table_name='tmp_astro_dag_1_task_5'))\n\ndag_obj = dag_1()\n```\n\nError:\n\n```\nINFO - Using connection to: id: postgres_conn. Host: 127.0.0.1, Port: 8999, Schema: postgres, Login: postgres, Password: ***, extra: {}\n*** psycopg2.OperationalError: connection to server at \"127.0.0.1\", port 8999 failed: FATAL: database \"ASTROBUILD\" does not exist\n```\n", "before_files": [{"content": "import inspect\nfrom typing import Optional\n\nimport pandas\n\nfrom astro.settings import SCHEMA\nfrom astro.sql.table import Table\n\n\nclass TableHandler:\n def _set_variables_from_first_table(self):\n \"\"\"\n When we create our SQL operation, we run with the assumption that the first table given is the \"main table\".\n This means that a user doesn't need to define default conn_id, database, etc. 
in the function unless they want\n to create default values.\n \"\"\"\n first_table: Optional[Table] = None\n if self.op_args:\n table_index = [x for x, t in enumerate(self.op_args) if type(t) == Table]\n if table_index:\n first_table = self.op_args[table_index[0]]\n elif not first_table:\n table_kwargs = [\n x\n for x in inspect.signature(self.python_callable).parameters.values()\n if (\n x.annotation == Table\n and type(self.op_kwargs[x.name]) == Table\n or x.annotation == pandas.DataFrame\n and type(self.op_kwargs[x.name]) == Table\n )\n ]\n if table_kwargs:\n first_table = self.op_kwargs[table_kwargs[0].name]\n\n # If there is no first table via op_ags or kwargs, we check the parameters\n elif not first_table:\n if self.parameters:\n param_tables = [t for t in self.parameters.values() if type(t) == Table]\n if param_tables:\n first_table = param_tables[0]\n\n if first_table:\n self.conn_id = first_table.conn_id or self.conn_id\n self.database = first_table.database or self.database\n self.schema = first_table.schema or self.schema\n self.warehouse = first_table.warehouse or self.warehouse\n self.role = first_table.role or self.role\n\n def populate_output_table(self):\n self.output_table.conn_id = self.output_table.conn_id or self.conn_id\n self.output_table.database = self.output_table.database or self.database\n self.output_table.warehouse = self.output_table.warehouse or self.warehouse\n self.output_table.schema = self.output_table.schema or SCHEMA\n", "path": "src/astro/utils/table_handler.py"}]}
1,818
636
gh_patches_debug_64110
rasdani/github-patches
git_diff
projectmesa__mesa-561
You will be provided with a partial code base and an issue statement explaining a problem to resolve. <issue> Update tests to use pytest, not nose Update tests to use pytest, not nose. nose is not maintained anymore. </issue> <code> [start of setup.py] 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 import re 4 5 from setuptools import setup, find_packages 6 from codecs import open 7 8 requires = [ 9 'click', 10 'cookiecutter', 11 'jupyter', 12 'networkx', 13 'numpy', 14 'pandas', 15 'tornado >= 4.2, < 5.0.0', 16 'tqdm', 17 ] 18 19 extras_require = { 20 'dev': [ 21 'coverage', 22 'flake8', 23 'nose', 24 'sphinx', 25 ], 26 'docs': [ 27 'sphinx', 28 ] 29 } 30 31 version = '' 32 with open('mesa/__init__.py', 'r') as fd: 33 version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', 34 fd.read(), re.MULTILINE).group(1) 35 36 with open('README.rst', 'rb', encoding='utf-8') as f: 37 readme = f.read() 38 39 setup( 40 name='Mesa', 41 version=version, 42 description="Agent-based modeling (ABM) in Python 3+", 43 long_description=readme, 44 author='Project Mesa Team', 45 author_email='[email protected]', 46 url='https://github.com/projectmesa/mesa', 47 packages=find_packages(), 48 package_data={'mesa': ['visualization/templates/*.html', 'visualization/templates/css/*', 49 'visualization/templates/fonts/*', 'visualization/templates/js/*'], 50 'cookiecutter-mesa': ['cookiecutter-mesa/*']}, 51 include_package_data=True, 52 install_requires=requires, 53 extras_require=extras_require, 54 keywords='agent based modeling model ABM simulation multi-agent', 55 license='Apache 2.0', 56 zip_safe=False, 57 classifiers=[ 58 'Topic :: Scientific/Engineering', 59 'Topic :: Scientific/Engineering :: Artificial Life', 60 'Topic :: Scientific/Engineering :: Artificial Intelligence', 61 'Intended Audience :: Science/Research', 62 'Programming Language :: Python :: 3 :: Only', 63 'License :: OSI Approved :: Apache Software License', 64 'Operating System :: OS Independent', 65 'Development Status :: 3 - Alpha', 66 'Natural Language :: English', 67 ], 68 entry_points=''' 69 [console_scripts] 70 mesa=mesa.main:cli 71 ''', 72 ) 73 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -20,7 +20,8 @@ 'dev': [ 'coverage', 'flake8', - 'nose', + 'pytest', + 'pytest-cov', 'sphinx', ], 'docs': [
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -20,7 +20,8 @@\n 'dev': [\n 'coverage',\n 'flake8',\n- 'nose',\n+ 'pytest',\n+ 'pytest-cov',\n 'sphinx',\n ],\n 'docs': [\n", "issue": "Update tests to use pytest, not nose\nUpdate tests to use pytest, not nose. nose is not maintained anymore. \n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport re\n\nfrom setuptools import setup, find_packages\nfrom codecs import open\n\nrequires = [\n 'click',\n 'cookiecutter',\n 'jupyter',\n 'networkx',\n 'numpy',\n 'pandas',\n 'tornado >= 4.2, < 5.0.0',\n 'tqdm',\n]\n\nextras_require = {\n 'dev': [\n 'coverage',\n 'flake8',\n 'nose',\n 'sphinx',\n ],\n 'docs': [\n 'sphinx',\n ]\n}\n\nversion = ''\nwith open('mesa/__init__.py', 'r') as fd:\n version = re.search(r'^__version__\\s*=\\s*[\\'\"]([^\\'\"]*)[\\'\"]',\n fd.read(), re.MULTILINE).group(1)\n\nwith open('README.rst', 'rb', encoding='utf-8') as f:\n readme = f.read()\n\nsetup(\n name='Mesa',\n version=version,\n description=\"Agent-based modeling (ABM) in Python 3+\",\n long_description=readme,\n author='Project Mesa Team',\n author_email='[email protected]',\n url='https://github.com/projectmesa/mesa',\n packages=find_packages(),\n package_data={'mesa': ['visualization/templates/*.html', 'visualization/templates/css/*',\n 'visualization/templates/fonts/*', 'visualization/templates/js/*'],\n 'cookiecutter-mesa': ['cookiecutter-mesa/*']},\n include_package_data=True,\n install_requires=requires,\n extras_require=extras_require,\n keywords='agent based modeling model ABM simulation multi-agent',\n license='Apache 2.0',\n zip_safe=False,\n classifiers=[\n 'Topic :: Scientific/Engineering',\n 'Topic :: Scientific/Engineering :: Artificial Life',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n 'Intended Audience :: Science/Research',\n 'Programming Language :: Python :: 3 :: Only',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: OS Independent',\n 'Development Status :: 3 - Alpha',\n 'Natural Language :: English',\n ],\n entry_points='''\n [console_scripts]\n mesa=mesa.main:cli\n ''',\n)\n", "path": "setup.py"}]}
1,194
77
gh_patches_debug_3606
rasdani/github-patches
git_diff
OBOFoundry__OBOFoundry.github.io-802
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
travis on master failing, due to metadata violations from new jsonschema checks
There are two things wrong:

 - the validate script assumes a util/reports folder
 - hp is failing; we already know that hp has a custom license and this should be reported elsewhere and is not a schema violation
</issue>
<code>
[start of util/validate-metadata.py]
1 #!/usr/bin/env python3
2 
3 import ast
4 import sys
5 import json
6 import jsonschema
7 import re
8 
9 # file paths
10 data_file = "../registry/ontologies.jsonld"
11 schema_file = "metadata-schema.json"
12 schema_lite_file = "metadata-schema-lite.json"
13 report_file = "reports/metadata-violations.csv"
14 
15 # ultra-escaped regex strings
16 email_sub = 'does not match \'\\^\\[\\^@\\]\\+\\$\''
17 fmt_sub = ('does not match \'\\^\\[0\\-9A\\-Za\\-z\\-_\\\\\\\\/\\]\\+'
18            '\\\\\\\\.\\(owl\\|obo\\|json\\|omn\\|ofn\\|owx\\|ttl\\|owl'
19            '\\\\\\\\.gz\\)\\$\'')
20 
21 def validate():
22 	"""
23 	Validate registry metadata.
24 	"""
25 	print("--- validating metadata against {0} ---".format(schema_file))
26 	data = load_data()
27 	schema = load_schema()
28 	# validate each object
29 	errors = {}
30 	for item in data["ontologies"]:
31 		if 'is_obsolete' in item and item["is_obsolete"] is True:
32 			continue
33 		# skip any 'validate: false' ontologies
34 		if 'validate' in item and item["validate"] is False:
35 			continue
36 		ont_id = item["id"]
37 		try:
38 			jsonschema.validate(item, schema)
39 		except jsonschema.exceptions.ValidationError as ve:
40 			print("ERROR in {0}".format(ont_id))
41 			errors[ont_id] = format_msg(ve)
42 	if errors:
43 		write_errors(errors)
44 	else:
45 		print("SUCCESS - no errors found in metadata")
46 		sys.exit(0)
47 
48 def format_msg(ve):
49 	"""
50 	Format exception message from jsonchema.validate(...).
51 	"""
52 	# replace u characters
53 	replace_u = re.sub('u\'', '\'', ve.message)
54 	# replace scary regex strings
55 	replace_email = re.sub(
56 		email_sub, 'is not valid for \'contact.label\'', replace_u)
57 	msg = re.sub(fmt_sub, 'is not valid for \'products.id\'', replace_email)
58 
59 	# check if output is for license error
60 	is_license = re.search('({\'url\'.+?\'label\'.+?})', msg)
61 	if is_license:
62 		return format_license_msg(is_license.group(1))
63 
64 	# check if output is for list error
65 	is_list = re.search('(\\[.+?\\]) is not of type \'string\'', msg)
66 	if is_list:
67 		return format_list_msg(is_list.group(1), ve)
68 
69 	# otherwise return the message
70 	return msg
71 
72 def format_license_msg(substr):
73 	"""
74 	Format an exception message for a license issue.
75 	"""
76 	# process to dict
77 	d = json.loads(substr.replace('\'', '"'))
78 	url = d['url']
79 	label = d['label']
80 	return '\'{0}\' <{1}> is not valid for \'license\''.format(label, url)
81 
82 def format_list_msg(substr, ve):
83 	"""
84 	Format an exception for an unexpected list.
85 	"""
86 	l = json.loads(substr.replace('\'', '"'))
87 	# use the full message to find the violating property
88 	prop_find = re.search('On instance\\[(\'.+?\')\\]', str(ve))
89 	if prop_find:
90 		prop = prop_find.group(1)
91 		return '{0} expects one value, got {1}'.format(prop, len(l))
92 	else:
93 		return substr
94 
95 def load_schema():
96 	"""
97 	Load the schema to validate against.
98 	"""
99 	# read the schema
100 	with open(schema_file) as f:
101 		schema = json.load(f)
102 	return schema
103 
104 def load_data():
105 	"""
106 	Load the data to validate.
107 """ 108 # read the JSON-LD data 109 with open(data_file) as f: 110 data = json.load(f) 111 return data 112 113 def write_errors(errors): 114 """ 115 Write validation errors to a user-friendly report. 116 """ 117 with open(report_file, 'w+') as f: 118 f.write("ID,ERROR\n") 119 for ont_id, msg in errors.items(): 120 f.write('"' + ont_id + '","' + msg + '"\n') 121 print( 122 "VALIDATION FAILED: {0} errors - see {1} for details".format( 123 len(errors), report_file)) 124 sys.exit(1) 125 126 # run the process! 127 if __name__ == '__main__': 128 validate() 129 [end of util/validate-metadata.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/util/validate-metadata.py b/util/validate-metadata.py --- a/util/validate-metadata.py +++ b/util/validate-metadata.py @@ -7,9 +7,9 @@ import re # file paths -data_file = "../registry/ontologies.jsonld" -schema_file = "metadata-schema.json" -schema_lite_file = "metadata-schema-lite.json" +data_file = "registry/ontologies.jsonld" +schema_file = "util/metadata-schema.json" +schema_lite_file = "util/metadata-schema-lite.json" report_file = "reports/metadata-violations.csv" # ultra-escaped regex strings
{"golden_diff": "diff --git a/util/validate-metadata.py b/util/validate-metadata.py\n--- a/util/validate-metadata.py\n+++ b/util/validate-metadata.py\n@@ -7,9 +7,9 @@\n import re\n \n # file paths\n-data_file = \"../registry/ontologies.jsonld\"\n-schema_file = \"metadata-schema.json\"\n-schema_lite_file = \"metadata-schema-lite.json\"\n+data_file = \"registry/ontologies.jsonld\"\n+schema_file = \"util/metadata-schema.json\"\n+schema_lite_file = \"util/metadata-schema-lite.json\"\n report_file = \"reports/metadata-violations.csv\"\n \n # ultra-escaped regex strings\n", "issue": "travis on master failing, due to metadata violations from new jsonschema checks\nThere are two things wrong:\r\n\r\n - the validate script assumes a util/reports folder\r\n - hp is failing; we already know that hp has a custom license and this should be reported elsewhere and is not a schema violation\n", "before_files": [{"content": "#!/usr/bin/env python3\n\nimport ast\nimport sys\nimport json\nimport jsonschema\nimport re\n\n# file paths\ndata_file = \"../registry/ontologies.jsonld\"\nschema_file = \"metadata-schema.json\"\nschema_lite_file = \"metadata-schema-lite.json\"\nreport_file = \"reports/metadata-violations.csv\"\n\n# ultra-escaped regex strings\nemail_sub = 'does not match \\'\\\\^\\\\[\\\\^@\\\\]\\\\+\\\\$\\''\nfmt_sub = ('does not match \\'\\\\^\\\\[0\\\\-9A\\\\-Za\\\\-z\\\\-_\\\\\\\\\\\\\\\\/\\\\]\\\\+'\n '\\\\\\\\\\\\\\\\.\\\\(owl\\\\|obo\\\\|json\\\\|omn\\\\|ofn\\\\|owx\\\\|ttl\\\\|owl'\n '\\\\\\\\\\\\\\\\.gz\\\\)\\\\$\\'')\n\ndef validate():\n\t\"\"\"\n\tValidate registry metadata.\n\t\"\"\"\n\tprint(\"--- validating metadata against {0} ---\".format(schema_file))\n\tdata = load_data()\n\tschema = load_schema()\n\t# validate each object\n\terrors = {}\n\tfor item in data[\"ontologies\"]:\n\t\tif 'is_obsolete' in item and item[\"is_obsolete\"] is True:\n\t\t\tcontinue\n\t\t# skip any 'validate: false' ontologies\n\t\tif 'validate' in item and item[\"validate\"] is False:\n\t\t\tcontinue\n\t\tont_id = item[\"id\"]\n\t\ttry:\n\t\t\tjsonschema.validate(item, schema)\n\t\texcept jsonschema.exceptions.ValidationError as ve:\n\t\t\tprint(\"ERROR in {0}\".format(ont_id))\n\t\t\terrors[ont_id] = format_msg(ve)\n\tif errors:\n\t\twrite_errors(errors)\n\telse:\n\t\tprint(\"SUCCESS - no errors found in metadata\")\n\t\tsys.exit(0)\n\ndef format_msg(ve):\n\t\"\"\"\n\tFormat exception message from jsonchema.validate(...).\n\t\"\"\"\n\t# replace u characters\n\treplace_u = re.sub('u\\'', '\\'', ve.message)\n\t# replace scary regex strings\n\treplace_email = re.sub(\n\t\temail_sub, 'is not valid for \\'contact.label\\'', replace_u)\n\tmsg = re.sub(fmt_sub, 'is not valid for \\'products.id\\'', replace_email)\n\n\t# check if output is for license error\n\tis_license = re.search('({\\'url\\'.+?\\'label\\'.+?})', msg)\n\tif is_license:\n\t\treturn format_license_msg(is_license.group(1))\n\n\t# check if output is for list error\n\tis_list = re.search('(\\\\[.+?\\\\]) is not of type \\'string\\'', msg)\n\tif is_list:\n\t\treturn format_list_msg(is_list.group(1), ve)\n\n\t# otherwise return the message\n\treturn msg\n\ndef format_license_msg(substr):\n\t\"\"\"\n\tFormat an exception message for a license issue.\n\t\"\"\"\n\t# process to dict\n\td = json.loads(substr.replace('\\'', '\"'))\n\turl = d['url']\n\tlabel = d['label']\n\treturn '\\'{0}\\' <{1}> is not valid for \\'license\\''.format(label, url)\n\ndef format_list_msg(substr, ve):\n\t\"\"\"\n\tFormat an exception for an unexpected list.\n\t\"\"\"\n\tl = 
json.loads(substr.replace('\\'', '\"'))\n\t# use the full message to find the violating property\n\tprop_find = re.search('On instance\\\\[(\\'.+?\\')\\\\]', str(ve))\n\tif prop_find:\n\t\tprop = prop_find.group(1)\n\t\treturn '{0} expects one value, got {1}'.format(prop, len(l))\n\telse:\n\t\treturn substr\n\ndef load_schema():\n\t\"\"\"\n\tLoad the schema to validate against.\n\t\"\"\"\n\t# read the schema\n\twith open(schema_file) as f:\n\t\tschema = json.load(f)\n\treturn schema\n\ndef load_data():\n\t\"\"\"\n\tLoad the data to validate.\n\t\"\"\"\n\t# read the JSON-LD data\n\twith open(data_file) as f:\n\t\tdata = json.load(f)\n\treturn data\n\ndef write_errors(errors):\n\t\"\"\"\n\tWrite validation errors to a user-friendly report.\n\t\"\"\"\n\twith open(report_file, 'w+') as f:\n\t\tf.write(\"ID,ERROR\\n\")\n\t\tfor ont_id, msg in errors.items():\n\t\t\tf.write('\"' + ont_id + '\",\"' + msg + '\"\\n')\n\tprint(\n\t\t\"VALIDATION FAILED: {0} errors - see {1} for details\".format(\n\t\t\tlen(errors), report_file))\n\tsys.exit(1)\n\n# run the process!\nif __name__ == '__main__':\n\tvalidate()\n", "path": "util/validate-metadata.py"}]}
1,887
134
gh_patches_debug_437
rasdani/github-patches
git_diff
pypa__setuptools-2584
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add mechanism for side-by-side comparison of setup.py and its equivalent setup.cfg
We have many documentation examples that are purely declarative and are either documented as `setup.py` or `setup.cfg`. It would be really awesome if, for each of these, we had the option to have either both versions side-by-side or, even better, in a sort of "tabbed container", like the one in the [code sample at the bottom of this example](https://leetcode.com/articles/median-of-two-sorted-arrays/).

Requirements for this:

1. Cannot *link to* any third-party javascript dependencies. Ideally we wouldn't use any at all, but if you do they must be vendored in the documentation.
2. If javascript is disabled, it has to fall back to something intelligible.

Ideally it would be implemented in pure CSS / HTML if that's at all possible.
</issue>
<code>
[start of docs/conf.py]
1 extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker']
2 
3 master_doc = "index"
4 
5 link_files = {
6     '../CHANGES.rst': dict(
7         using=dict(
8             BB='https://bitbucket.org',
9             GH='https://github.com',
10         ),
11         replace=[
12             dict(
13                 pattern=r'(Issue )?#(?P<issue>\d+)',
14                 url='{package_url}/issues/{issue}',
15             ),
16             dict(
17                 pattern=r'BB Pull Request ?#(?P<bb_pull_request>\d+)',
18                 url='{BB}/pypa/setuptools/pull-request/{bb_pull_request}',
19             ),
20             dict(
21                 pattern=r'Distribute #(?P<distribute>\d+)',
22                 url='{BB}/tarek/distribute/issue/{distribute}',
23             ),
24             dict(
25                 pattern=r'Buildout #(?P<buildout>\d+)',
26                 url='{GH}/buildout/buildout/issues/{buildout}',
27             ),
28             dict(
29                 pattern=r'Old Setuptools #(?P<old_setuptools>\d+)',
30                 url='http://bugs.python.org/setuptools/issue{old_setuptools}',
31             ),
32             dict(
33                 pattern=r'Jython #(?P<jython>\d+)',
34                 url='http://bugs.jython.org/issue{jython}',
35             ),
36             dict(
37                 pattern=r'(Python #|bpo-)(?P<python>\d+)',
38                 url='http://bugs.python.org/issue{python}',
39             ),
40             dict(
41                 pattern=r'Interop #(?P<interop>\d+)',
42                 url='{GH}/pypa/interoperability-peps/issues/{interop}',
43             ),
44             dict(
45                 pattern=r'Pip #(?P<pip>\d+)',
46                 url='{GH}/pypa/pip/issues/{pip}',
47             ),
48             dict(
49                 pattern=r'Packaging #(?P<packaging>\d+)',
50                 url='{GH}/pypa/packaging/issues/{packaging}',
51             ),
52             dict(
53                 pattern=r'[Pp]ackaging (?P<packaging_ver>\d+(\.\d+)+)',
54                 url='{GH}/pypa/packaging/blob/{packaging_ver}/CHANGELOG.rst',
55             ),
56             dict(
57                 pattern=r'PEP[- ](?P<pep_number>\d+)',
58                 url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
59             ),
60             dict(
61                 pattern=r'setuptools_svn #(?P<setuptools_svn>\d+)',
62                 url='{GH}/jaraco/setuptools_svn/issues/{setuptools_svn}',
63             ),
64             dict(
65                 pattern=r'pypa/distutils#(?P<distutils>\d+)',
66                 url='{GH}/pypa/distutils/issues/{distutils}',
67             ),
68             dict(
69                 pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
70                 with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
71             ),
72         ],
73     ),
74 }
75 
76 intersphinx_mapping = {
77     'pypa-build': ('https://pypa-build.readthedocs.io/en/latest/', None)
78 }
79 
80 # Add support for linking usernames
81 github_url = 'https://github.com'
82 github_sponsors_url = f'{github_url}/sponsors'
83 extlinks = {
84     'user': (f'{github_sponsors_url}/%s', '@'),  # noqa: WPS323
85 }
86 extensions += ['sphinx.ext.extlinks', 'sphinx.ext.intersphinx']
87 
88 # Be strict about any broken references:
89 nitpicky = True
90 
91 # Ref: https://github.com/python-attrs/attrs/pull/571/files\
92 # #diff-85987f48f1258d9ee486e3191495582dR82
93 default_role = 'any'
94 
95 # Custom sidebar templates, maps document names to template names.
96 html_theme = 'alabaster'
97 templates_path = ['_templates']
98 html_sidebars = {'index': ['tidelift-sidebar.html']}
[end of docs/conf.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
 def euclidean(a, b):
-    while b:
-        a, b = b, a % b
-    return a
+    if b == 0:
+        return a
+    return euclidean(b, a % b)
 
 
 def bresenham(x0, y0, x1, y1):
     points = []
     dx = abs(x1 - x0)
     dy = abs(y1 - y0)
-    sx = 1 if x0 < x1 else -1
-    sy = 1 if y0 < y1 else -1
-    err = dx - dy
+    x, y = x0, y0
+    sx = -1 if x0 > x1 else 1
+    sy = -1 if y0 > y1 else 1
 
-    while True:
-        points.append((x0, y0))
-        if x0 == x1 and y0 == y1:
-            break
-        e2 = 2 * err
-        if e2 > -dy:
-            err -= dy
-            x0 += sx
-        if e2 < dx:
-            err += dx
-            y0 += sy
+    if dx > dy:
+        err = dx / 2.0
+        while x != x1:
+            points.append((x, y))
+            err -= dy
+            if err < 0:
+                y += sy
+                err += dx
+            x += sx
+    else:
+        err = dy / 2.0
+        while y != y1:
+            points.append((x, y))
+            err -= dx
+            if err < 0:
+                x += sx
+                err += dy
+            y += sy
+
+    points.append((x, y))
    return points
</patch>
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -93,3 +93,6 @@ html_theme = 'alabaster' templates_path = ['_templates'] html_sidebars = {'index': ['tidelift-sidebar.html']} + +# Add support for inline tabs +extensions += ['sphinx_inline_tabs']
{"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -93,3 +93,6 @@\n html_theme = 'alabaster'\n templates_path = ['_templates']\n html_sidebars = {'index': ['tidelift-sidebar.html']}\n+\n+# Add support for inline tabs\n+extensions += ['sphinx_inline_tabs']\n", "issue": "Add mechanism for side-by-side comparison of setup.py and its equivalent setup.cfg\nWe have many documentation examples that are purely declarative and are either documented as `setup.py` or `setup.cfg`. It would be really awesome if, for each of these, we had the option to have either both versions side-by-side or, even better, in a sort of \"tabbed container\", like the one in the [code sample at the bottom of this example](https://leetcode.com/articles/median-of-two-sorted-arrays/).\r\n\r\nRequirements for this:\r\n\r\n1. Cannot *link to* any third-party javascript dependencies. Ideally we wouldn't use any at all, but if you do they must be vendored in the documentation.\r\n2. If javascript is disabled, it has to fall back to something intelligible.\r\n\r\nIdeally it would be implemented in pure CSS / HTML if that's at all possible.\n", "before_files": [{"content": "extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker']\n\nmaster_doc = \"index\"\n\nlink_files = {\n '../CHANGES.rst': dict(\n using=dict(\n BB='https://bitbucket.org',\n GH='https://github.com',\n ),\n replace=[\n dict(\n pattern=r'(Issue )?#(?P<issue>\\d+)',\n url='{package_url}/issues/{issue}',\n ),\n dict(\n pattern=r'BB Pull Request ?#(?P<bb_pull_request>\\d+)',\n url='{BB}/pypa/setuptools/pull-request/{bb_pull_request}',\n ),\n dict(\n pattern=r'Distribute #(?P<distribute>\\d+)',\n url='{BB}/tarek/distribute/issue/{distribute}',\n ),\n dict(\n pattern=r'Buildout #(?P<buildout>\\d+)',\n url='{GH}/buildout/buildout/issues/{buildout}',\n ),\n dict(\n pattern=r'Old Setuptools #(?P<old_setuptools>\\d+)',\n url='http://bugs.python.org/setuptools/issue{old_setuptools}',\n ),\n dict(\n pattern=r'Jython #(?P<jython>\\d+)',\n url='http://bugs.jython.org/issue{jython}',\n ),\n dict(\n pattern=r'(Python #|bpo-)(?P<python>\\d+)',\n url='http://bugs.python.org/issue{python}',\n ),\n dict(\n pattern=r'Interop #(?P<interop>\\d+)',\n url='{GH}/pypa/interoperability-peps/issues/{interop}',\n ),\n dict(\n pattern=r'Pip #(?P<pip>\\d+)',\n url='{GH}/pypa/pip/issues/{pip}',\n ),\n dict(\n pattern=r'Packaging #(?P<packaging>\\d+)',\n url='{GH}/pypa/packaging/issues/{packaging}',\n ),\n dict(\n pattern=r'[Pp]ackaging (?P<packaging_ver>\\d+(\\.\\d+)+)',\n url='{GH}/pypa/packaging/blob/{packaging_ver}/CHANGELOG.rst',\n ),\n dict(\n pattern=r'PEP[- ](?P<pep_number>\\d+)',\n url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',\n ),\n dict(\n pattern=r'setuptools_svn #(?P<setuptools_svn>\\d+)',\n url='{GH}/jaraco/setuptools_svn/issues/{setuptools_svn}',\n ),\n dict(\n pattern=r'pypa/distutils#(?P<distutils>\\d+)',\n url='{GH}/pypa/distutils/issues/{distutils}',\n ),\n dict(\n pattern=r'^(?m)((?P<scm_version>v?\\d+(\\.\\d+){1,2}))\\n[-=]+\\n',\n with_scm='{text}\\n{rev[timestamp]:%d %b %Y}\\n',\n ),\n ],\n ),\n}\n\nintersphinx_mapping = {\n 'pypa-build': ('https://pypa-build.readthedocs.io/en/latest/', None)\n}\n\n# Add support for linking usernames\ngithub_url = 'https://github.com'\ngithub_sponsors_url = f'{github_url}/sponsors'\nextlinks = {\n 'user': (f'{github_sponsors_url}/%s', '@'), # noqa: WPS323\n}\nextensions += ['sphinx.ext.extlinks', 'sphinx.ext.intersphinx']\n\n# Be strict about any broken 
references:\nnitpicky = True\n\n# Ref: https://github.com/python-attrs/attrs/pull/571/files\\\n# #diff-85987f48f1258d9ee486e3191495582dR82\ndefault_role = 'any'\n\n# Custom sidebar templates, maps document names to template names.\nhtml_theme = 'alabaster'\ntemplates_path = ['_templates']\nhtml_sidebars = {'index': ['tidelift-sidebar.html']}\n", "path": "docs/conf.py"}]}
1,814
83
gh_patches_debug_22113
rasdani/github-patches
git_diff
rlworkgroup__garage-1879
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
TF 2.3.0 incompatibility
TF 2.3.0 was released yesterday, and seems to be incompatible with TFP <0.11.0 and breaks imports (https://travis-ci.com/github/rlworkgroup/garage/jobs/365922927#L3061). We pin TFP to <=0.10.0 in the first place to resolve cloudpickle version mismatch (https://github.com/rlworkgroup/garage/issues/1758). Since TFP 0.11.0 still pins cloudpickle to 1.3.0 while gym needs 1.2.x, unpinning TFP won't work. So for now, TF needs to be pinned to <2.3.0
</issue>
<code>
[start of setup.py]
1 """setuptools based setup module."""
2 import os
3 
4 from setuptools import find_packages, setup
5 
6 GARAGE_GH_TOKEN = os.environ.get('GARAGE_GH_TOKEN') or 'git'
7 GYM_VERSION = '0.15.4'
8 
9 # Required dependencies
10 REQUIRED = [
11     # Please keep alphabetized
12     'akro',
13     'click>=2.0',
14     'cloudpickle<1.5',
15     'cma==2.7.0',
16     'dowel==0.0.3',
17     f'gym[atari,box2d,classic_control]=={GYM_VERSION}',
18     'numpy>=1.14.5',
19     'psutil',
20     # Pyglet 1.4.0 introduces some api change which breaks some
21     # gym environments
22     # See: https://github.com/openai/gym/issues/1588
23     'pyglet<1.4.0,>=1.3.0',
24     'python-dateutil',
25     'ray',
26     'scikit-image',
27     'scipy',
28     'setproctitle>=1.0',
29     'tensorflow>=1.14,<2.3.0',
30     'tensorflow-probability<=0.10.0',
31     'torch>=1.0.0,!=1.5.0,<1.6.0',
32     'torchvision>=0.2.1,<0.7.0',
33 ]
34 
35 # Dependencies for optional features
36 EXTRAS = {}
37 
38 EXTRAS['mujoco'] = [
39     'mujoco-py<2.1,>=2.0',
40     f'gym[all]=={GYM_VERSION}',
41 ]
42 
43 EXTRAS['dm_control'] = [
44     # dm_control throws an error during install about not being able to
45     # find a build dependency (absl-py). Later pip executes the `install`
46     # command again and the install succeeds because absl-py has been
47     # installed. This is stupid, but harmless.
48 'dm_control', 49 ] 50 51 EXTRAS['bullet'] = ['mpi4py', 'pybullet'] 52 53 EXTRAS['all'] = list(set(sum(EXTRAS.values(), []))) 54 55 # Development dependencies (*not* included in 'all') 56 EXTRAS['dev'] = [ 57 # Please keep alphabetized 58 'flake8', 59 'flake8-docstrings>=1.5.0', 60 'flake8-import-order', 61 f'metaworld @ https://{GARAGE_GH_TOKEN}@api.github.com/repos/rlworkgroup/metaworld/tarball/861ae8d8c4bef80a7ed86f47f47acaa494d4ab77', # noqa: E501 62 'isort>=4.3.21,<5.0.0', 63 'pep8-naming==0.7.0', 64 'pre-commit', 65 'pycodestyle>=2.5.0', 66 'pydocstyle>=4.0.0', 67 'pylint>=2.5.3', 68 'pytest>=4.5.0', # Required for strict-markers 69 'pytest-cov', 70 'pytest-timeout', 71 'pytest-xdist', 72 'recommonmark', 73 'sphinx', 74 'sphinx-autoapi>=1.4.0', 75 'sphinx_rtd_theme', 76 'sphinxcontrib-bibtex', 77 'yapf==0.30.0', 78 ] # yapf: disable 79 80 with open('README.md') as f: 81 README = f.read() 82 83 # Get the package version dynamically 84 with open('VERSION') as v: 85 VERSION = v.read().strip() 86 87 setup( 88 name='garage', 89 version=VERSION, 90 author='Reinforcement Learning Working Group', 91 description='A toolkit for reproducible reinforcement learning research', 92 url='https://github.com/rlworkgroup/garage', 93 packages=find_packages(where='src'), 94 package_dir={'': 'src'}, 95 scripts=['scripts/garage'], 96 python_requires='>=3.6', 97 install_requires=REQUIRED, 98 extras_require=EXTRAS, 99 license='MIT', 100 long_description=README, 101 long_description_content_type='text/markdown', 102 classifiers=[ 103 'Development Status :: 4 - Beta', 104 'Intended Audience :: Developers', 105 'Intended Audience :: Education', 106 'Intended Audience :: Science/Research', 107 'License :: OSI Approved :: MIT License', 108 'Programming Language :: Python :: 3.6', 109 'Programming Language :: Python :: 3.7', 110 'Programming Language :: Python :: 3 :: Only', 111 'Topic :: Scientific/Engineering :: Artificial Intelligence', 112 'Topic :: Scientific/Engineering :: Mathematics', 113 'Topic :: Software Development :: Libraries', 114 ], 115 ) 116 [end of setup.py] </code> I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. <patch> diff --git a/file.py b/file.py --- a/file.py +++ b/file.py @@ -1,27 +1,35 @@ def euclidean(a, b): - while b: - a, b = b, a % b - return a + if b == 0: + return a + return euclidean(b, a % b) def bresenham(x0, y0, x1, y1): points = [] dx = abs(x1 - x0) dy = abs(y1 - y0) - sx = 1 if x0 < x1 else -1 - sy = 1 if y0 < y1 else -1 - err = dx - dy + x, y = x0, y0 + sx = -1 if x0 > x1 else 1 + sy = -1 if y0 > y1 else 1 - while True: - points.append((x0, y0)) - if x0 == x1 and y0 == y1: - break - e2 = 2 * err - if e2 > -dy: - err -= dy - x0 += sx - if e2 < dx: - err += dx - y0 += sy + if dx > dy: + err = dx / 2.0 + while x != x1: + points.append((x, y)) + err -= dy + if err < 0: + y += sy + err += dx + x += sx + else: + err = dy / 2.0 + while y != y1: + points.append((x, y)) + err -= dx + if err < 0: + x += sx + err += dy + y += sy + + points.append((x, y)) return points </patch>
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -4,30 +4,26 @@ from setuptools import find_packages, setup GARAGE_GH_TOKEN = os.environ.get('GARAGE_GH_TOKEN') or 'git' -GYM_VERSION = '0.15.4' +GYM_VERSION = '0.17.2' # Required dependencies REQUIRED = [ # Please keep alphabetized 'akro', 'click>=2.0', - 'cloudpickle<1.5', + 'cloudpickle==1.3', 'cma==2.7.0', 'dowel==0.0.3', f'gym[atari,box2d,classic_control]=={GYM_VERSION}', 'numpy>=1.14.5', 'psutil', - # Pyglet 1.4.0 introduces some api change which breaks some - # gym environments - # See: https://github.com/openai/gym/issues/1588 - 'pyglet<1.4.0,>=1.3.0', 'python-dateutil', 'ray', 'scikit-image', 'scipy', 'setproctitle>=1.0', - 'tensorflow>=1.14,<2.3.0', - 'tensorflow-probability<=0.10.0', + 'tensorflow>=1.14', + 'tensorflow-probability>=0.11.0', 'torch>=1.0.0,!=1.5.0,<1.6.0', 'torchvision>=0.2.1,<0.7.0', ]
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -4,30 +4,26 @@\n from setuptools import find_packages, setup\n \n GARAGE_GH_TOKEN = os.environ.get('GARAGE_GH_TOKEN') or 'git'\n-GYM_VERSION = '0.15.4'\n+GYM_VERSION = '0.17.2'\n \n # Required dependencies\n REQUIRED = [\n # Please keep alphabetized\n 'akro',\n 'click>=2.0',\n- 'cloudpickle<1.5',\n+ 'cloudpickle==1.3',\n 'cma==2.7.0',\n 'dowel==0.0.3',\n f'gym[atari,box2d,classic_control]=={GYM_VERSION}',\n 'numpy>=1.14.5',\n 'psutil',\n- # Pyglet 1.4.0 introduces some api change which breaks some\n- # gym environments\n- # See: https://github.com/openai/gym/issues/1588\n- 'pyglet<1.4.0,>=1.3.0',\n 'python-dateutil',\n 'ray',\n 'scikit-image',\n 'scipy',\n 'setproctitle>=1.0',\n- 'tensorflow>=1.14,<2.3.0',\n- 'tensorflow-probability<=0.10.0',\n+ 'tensorflow>=1.14',\n+ 'tensorflow-probability>=0.11.0',\n 'torch>=1.0.0,!=1.5.0,<1.6.0',\n 'torchvision>=0.2.1,<0.7.0',\n ]\n", "issue": "TF 2.3.0 incompatibility\nTF 2.3.0 was released yesterday, and seems to be incompatible with TFP <0.11.0 and breaks imports (https://travis-ci.com/github/rlworkgroup/garage/jobs/365922927#L3061). We pin TFP to <=0.10.0 in the first place to resolve cloudpickle version mismatch (https://github.com/rlworkgroup/garage/issues/1758). Since TFP 0.11.0 still pins cloudpickle to 1.3.0 while gym needs 1.2.x, unpinning TFP won't work. So for now, TF needs to be pinned to <2.3.0\n", "before_files": [{"content": "\"\"\"setuptools based setup module.\"\"\"\nimport os\n\nfrom setuptools import find_packages, setup\n\nGARAGE_GH_TOKEN = os.environ.get('GARAGE_GH_TOKEN') or 'git'\nGYM_VERSION = '0.15.4'\n\n# Required dependencies\nREQUIRED = [\n # Please keep alphabetized\n 'akro',\n 'click>=2.0',\n 'cloudpickle<1.5',\n 'cma==2.7.0',\n 'dowel==0.0.3',\n f'gym[atari,box2d,classic_control]=={GYM_VERSION}',\n 'numpy>=1.14.5',\n 'psutil',\n # Pyglet 1.4.0 introduces some api change which breaks some\n # gym environments\n # See: https://github.com/openai/gym/issues/1588\n 'pyglet<1.4.0,>=1.3.0',\n 'python-dateutil',\n 'ray',\n 'scikit-image',\n 'scipy',\n 'setproctitle>=1.0',\n 'tensorflow>=1.14,<2.3.0',\n 'tensorflow-probability<=0.10.0',\n 'torch>=1.0.0,!=1.5.0,<1.6.0',\n 'torchvision>=0.2.1,<0.7.0',\n]\n\n# Dependencies for optional features\nEXTRAS = {}\n\nEXTRAS['mujoco'] = [\n 'mujoco-py<2.1,>=2.0',\n f'gym[all]=={GYM_VERSION}',\n]\n\nEXTRAS['dm_control'] = [\n # dm_control throws an error during install about not being able to\n # find a build dependency (absl-py). Later pip executes the `install`\n # command again and the install succeeds because absl-py has been\n # installed. 
This is stupid, but harmless.\n 'dm_control',\n]\n\nEXTRAS['bullet'] = ['mpi4py', 'pybullet']\n\nEXTRAS['all'] = list(set(sum(EXTRAS.values(), [])))\n\n# Development dependencies (*not* included in 'all')\nEXTRAS['dev'] = [\n # Please keep alphabetized\n 'flake8',\n 'flake8-docstrings>=1.5.0',\n 'flake8-import-order',\n f'metaworld @ https://{GARAGE_GH_TOKEN}@api.github.com/repos/rlworkgroup/metaworld/tarball/861ae8d8c4bef80a7ed86f47f47acaa494d4ab77', # noqa: E501\n 'isort>=4.3.21,<5.0.0',\n 'pep8-naming==0.7.0',\n 'pre-commit',\n 'pycodestyle>=2.5.0',\n 'pydocstyle>=4.0.0',\n 'pylint>=2.5.3',\n 'pytest>=4.5.0', # Required for strict-markers\n 'pytest-cov',\n 'pytest-timeout',\n 'pytest-xdist',\n 'recommonmark',\n 'sphinx',\n 'sphinx-autoapi>=1.4.0',\n 'sphinx_rtd_theme',\n 'sphinxcontrib-bibtex',\n 'yapf==0.30.0',\n] # yapf: disable\n\nwith open('README.md') as f:\n README = f.read()\n\n# Get the package version dynamically\nwith open('VERSION') as v:\n VERSION = v.read().strip()\n\nsetup(\n name='garage',\n version=VERSION,\n author='Reinforcement Learning Working Group',\n description='A toolkit for reproducible reinforcement learning research',\n url='https://github.com/rlworkgroup/garage',\n packages=find_packages(where='src'),\n package_dir={'': 'src'},\n scripts=['scripts/garage'],\n python_requires='>=3.6',\n install_requires=REQUIRED,\n extras_require=EXTRAS,\n license='MIT',\n long_description=README,\n long_description_content_type='text/markdown',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n 'Topic :: Scientific/Engineering :: Mathematics',\n 'Topic :: Software Development :: Libraries',\n ],\n)\n", "path": "setup.py"}]}
1,991
388