problem_id (stringlengths 18-22) | source (stringclasses 1) | task_type (stringclasses 1) | in_source_id (stringlengths 13-58) | prompt (stringlengths 1.1k-10.2k) | golden_diff (stringlengths 151-4.94k) | verification_info (stringlengths 582-21k) | num_tokens (int64 271-2.05k) | num_tokens_diff (int64 47-1.02k) |
---|---|---|---|---|---|---|---|---|
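
The schema above is easier to work with programmatically. Below is a minimal sketch of decoding one record, assuming the rows are reachable through the Hugging Face `datasets` library under the name shown in the source column (the dataset id and split name are assumptions; the JSON keys match the verification_info cells in the rows that follow):

```python
import json
from datasets import load_dataset  # assumed tooling; any JSON/CSV reader works

# Dataset id and split are assumptions inferred from the "source" column.
ds = load_dataset("rasdani/github-patches", split="train")
row = ds[0]

print(row["problem_id"], row["task_type"], row["num_tokens"])
info = json.loads(row["verification_info"])
# Keys observed in the rows below: golden_diff, issue, before_files, after_files.
for f in info["before_files"]:
    print(f["path"], "->", len(f["content"]), "characters before the patch")
```
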
gh_patches_debug_4889 | rasdani/github-patches | git_diff | readthedocs__readthedocs.org-4990 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove $ before shell commands in docs
The developers have a [preference](https://github.com/rtfd/readthedocs.org/pull/4676#discussion_r221400605) to not have a `$` before shell commands in the docs. This makes it easier to copy and paste from our docs. We should remove it everywhere. The following command should show it everywhere.
grep -Ri " $ " docs/*.rst docs/*/*.rst
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/conf.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 from __future__ import division, print_function, unicode_literals
4
5 import os
6 import sys
7
8 import sphinx_rtd_theme
9 from recommonmark.parser import CommonMarkParser
10
11 sys.path.insert(0, os.path.abspath('..'))
12 sys.path.append(os.path.dirname(__file__))
13 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "readthedocs.settings.dev")
14
15 from django.conf import settings
16 from django.utils import timezone
17
18 import django
19 django.setup()
20
21
22 sys.path.append(os.path.abspath('_ext'))
23 extensions = [
24 'sphinx.ext.autosectionlabel',
25 'sphinx.ext.autodoc',
26 'sphinx.ext.intersphinx',
27 'sphinxcontrib.httpdomain',
28 'djangodocs',
29 'doc_extensions',
30 'sphinx_tabs.tabs',
31 ]
32 templates_path = ['_templates']
33
34 source_suffix = ['.rst', '.md']
35 source_parsers = {
36 '.md': CommonMarkParser,
37 }
38
39 master_doc = 'index'
40 project = u'Read the Docs'
41 copyright = '2010-{}, Read the Docs, Inc & contributors'.format(
42 timezone.now().year
43 )
44 version = '2.7'
45 release = version
46 exclude_patterns = ['_build']
47 default_role = 'obj'
48 intersphinx_mapping = {
49 'python': ('http://python.readthedocs.io/en/latest/', None),
50 'django': ('http://django.readthedocs.io/en/1.9.x/', None),
51 'sphinx': ('http://sphinx.readthedocs.io/en/latest/', None),
52 }
53 htmlhelp_basename = 'ReadTheDocsdoc'
54 latex_documents = [
55 ('index', 'ReadTheDocs.tex', u'Read the Docs Documentation',
56 u'Eric Holscher, Charlie Leifer, Bobby Grace', 'manual'),
57 ]
58 man_pages = [
59 ('index', 'read-the-docs', u'Read the Docs Documentation',
60 [u'Eric Holscher, Charlie Leifer, Bobby Grace'], 1)
61 ]
62
63 exclude_patterns = [
64 # 'api' # needed for ``make gettext`` to not die.
65 ]
66
67 language = 'en'
68
69 locale_dirs = [
70 'locale/',
71 ]
72 gettext_compact = False
73
74 html_theme = 'sphinx_rtd_theme'
75 html_static_path = ['_static']
76 html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
77 html_logo = 'img/logo.svg'
78 html_theme_options = {
79 'logo_only': True,
80 'display_version': False,
81 }
82
83 # Activate autosectionlabel plugin
84 autosectionlabel_prefix_document = True
85
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -28,6 +28,7 @@
'djangodocs',
'doc_extensions',
'sphinx_tabs.tabs',
+ 'sphinx-prompt',
]
templates_path = ['_templates']
@@ -82,3 +83,7 @@
# Activate autosectionlabel plugin
autosectionlabel_prefix_document = True
+
+
+def setup(app):
+ app.add_stylesheet('css/sphinx_prompt_css.css')
| {"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -28,6 +28,7 @@\n 'djangodocs',\n 'doc_extensions',\n 'sphinx_tabs.tabs',\n+ 'sphinx-prompt',\n ]\n templates_path = ['_templates']\n \n@@ -82,3 +83,7 @@\n \n # Activate autosectionlabel plugin\n autosectionlabel_prefix_document = True\n+\n+\n+def setup(app):\n+ app.add_stylesheet('css/sphinx_prompt_css.css')\n", "issue": "Remove $ before shell commands in docs\nThe developers have a [preference](https://github.com/rtfd/readthedocs.org/pull/4676#discussion_r221400605) to not have a `$` before shell commands in the docs. This makes it easier to copy and paste from our docs. We should remove it everywhere. The following command should show it everywhere.\r\n\r\n grep -Ri \" $ \" docs/*.rst docs/*/*.rst\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nfrom __future__ import division, print_function, unicode_literals\n\nimport os\nimport sys\n\nimport sphinx_rtd_theme\nfrom recommonmark.parser import CommonMarkParser\n\nsys.path.insert(0, os.path.abspath('..'))\nsys.path.append(os.path.dirname(__file__))\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"readthedocs.settings.dev\")\n\nfrom django.conf import settings\nfrom django.utils import timezone\n\nimport django\ndjango.setup()\n\n\nsys.path.append(os.path.abspath('_ext'))\nextensions = [\n 'sphinx.ext.autosectionlabel',\n 'sphinx.ext.autodoc',\n 'sphinx.ext.intersphinx',\n 'sphinxcontrib.httpdomain',\n 'djangodocs',\n 'doc_extensions',\n 'sphinx_tabs.tabs',\n]\ntemplates_path = ['_templates']\n\nsource_suffix = ['.rst', '.md']\nsource_parsers = {\n '.md': CommonMarkParser,\n}\n\nmaster_doc = 'index'\nproject = u'Read the Docs'\ncopyright = '2010-{}, Read the Docs, Inc & contributors'.format(\n timezone.now().year\n)\nversion = '2.7'\nrelease = version\nexclude_patterns = ['_build']\ndefault_role = 'obj'\nintersphinx_mapping = {\n 'python': ('http://python.readthedocs.io/en/latest/', None),\n 'django': ('http://django.readthedocs.io/en/1.9.x/', None),\n 'sphinx': ('http://sphinx.readthedocs.io/en/latest/', None),\n}\nhtmlhelp_basename = 'ReadTheDocsdoc'\nlatex_documents = [\n ('index', 'ReadTheDocs.tex', u'Read the Docs Documentation',\n u'Eric Holscher, Charlie Leifer, Bobby Grace', 'manual'),\n]\nman_pages = [\n ('index', 'read-the-docs', u'Read the Docs Documentation',\n [u'Eric Holscher, Charlie Leifer, Bobby Grace'], 1)\n]\n\nexclude_patterns = [\n # 'api' # needed for ``make gettext`` to not die.\n]\n\nlanguage = 'en'\n\nlocale_dirs = [\n 'locale/',\n]\ngettext_compact = False\n\nhtml_theme = 'sphinx_rtd_theme'\nhtml_static_path = ['_static']\nhtml_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\nhtml_logo = 'img/logo.svg'\nhtml_theme_options = {\n 'logo_only': True,\n 'display_version': False,\n}\n\n# Activate autosectionlabel plugin\nautosectionlabel_prefix_document = True\n", "path": "docs/conf.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\nfrom __future__ import division, print_function, unicode_literals\n\nimport os\nimport sys\n\nimport sphinx_rtd_theme\nfrom recommonmark.parser import CommonMarkParser\n\nsys.path.insert(0, os.path.abspath('..'))\nsys.path.append(os.path.dirname(__file__))\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"readthedocs.settings.dev\")\n\nfrom django.conf import settings\nfrom django.utils import timezone\n\nimport django\ndjango.setup()\n\n\nsys.path.append(os.path.abspath('_ext'))\nextensions = [\n 'sphinx.ext.autosectionlabel',\n 'sphinx.ext.autodoc',\n 
'sphinx.ext.intersphinx',\n 'sphinxcontrib.httpdomain',\n 'djangodocs',\n 'doc_extensions',\n 'sphinx_tabs.tabs',\n 'sphinx-prompt',\n]\ntemplates_path = ['_templates']\n\nsource_suffix = ['.rst', '.md']\nsource_parsers = {\n '.md': CommonMarkParser,\n}\n\nmaster_doc = 'index'\nproject = u'Read the Docs'\ncopyright = '2010-{}, Read the Docs, Inc & contributors'.format(\n timezone.now().year\n)\nversion = '2.7'\nrelease = version\nexclude_patterns = ['_build']\ndefault_role = 'obj'\nintersphinx_mapping = {\n 'python': ('http://python.readthedocs.io/en/latest/', None),\n 'django': ('http://django.readthedocs.io/en/1.9.x/', None),\n 'sphinx': ('http://sphinx.readthedocs.io/en/latest/', None),\n}\nhtmlhelp_basename = 'ReadTheDocsdoc'\nlatex_documents = [\n ('index', 'ReadTheDocs.tex', u'Read the Docs Documentation',\n u'Eric Holscher, Charlie Leifer, Bobby Grace', 'manual'),\n]\nman_pages = [\n ('index', 'read-the-docs', u'Read the Docs Documentation',\n [u'Eric Holscher, Charlie Leifer, Bobby Grace'], 1)\n]\n\nexclude_patterns = [\n # 'api' # needed for ``make gettext`` to not die.\n]\n\nlanguage = 'en'\n\nlocale_dirs = [\n 'locale/',\n]\ngettext_compact = False\n\nhtml_theme = 'sphinx_rtd_theme'\nhtml_static_path = ['_static']\nhtml_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\nhtml_logo = 'img/logo.svg'\nhtml_theme_options = {\n 'logo_only': True,\n 'display_version': False,\n}\n\n# Activate autosectionlabel plugin\nautosectionlabel_prefix_document = True\n\n\ndef setup(app):\n app.add_stylesheet('css/sphinx_prompt_css.css')\n", "path": "docs/conf.py"}]} | 1,070 | 122 |
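
For context on the record above: registering `sphinx-prompt` gives the docs a `prompt` directive that renders the shell prompt outside the selectable text, which is what lets readers copy commands without a leading `$`. A minimal sketch of the relevant pieces of the patched `docs/conf.py` follows (the stylesheet path is project-specific, and the directive behaviour comes from the sphinx-prompt package, not from this record):

```python
# Sketch of the patched Sphinx configuration from the golden diff above.
extensions = [
    'sphinx.ext.autosectionlabel',
    'sphinx_tabs.tabs',
    'sphinx-prompt',  # supplies ".. prompt:: bash $" for copy-safe prompts
]

def setup(app):
    # Stylesheet hook added by the golden diff; the CSS ships with the docs.
    app.add_stylesheet('css/sphinx_prompt_css.css')
```
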
gh_patches_debug_19225 | rasdani/github-patches | git_diff | docker__docker-py-971 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
tlsv1 alert protocol version on 1.7.1 and 1.7.2 but not on 1.7.0
Similar to #949 I'm discovering issues with latest versions of `docker-py` running against docker 1.10.2 instance. I'm using `docker.utils.kwargs_from_env(assert_hostname=False)`. Things work fine with version 1.7.0.
Docker client is initialized via
```
client = docker.Client(
version='auto',
**docker.utils.kwargs_from_env(assert_hostname=False))
```
with docker environment variables being set to the following (via docker-machine)
```
DOCKER_HOST=tcp://192.168.156.137:2376
DOCKER_MACHINE_NAME=dev2
DOCKER_TLS_VERIFY=1
DOCKER_CERT_PATH=/Users/benjixx/.docker/machine/machines/dev2
```
docker-py 1.7.1 and 1.7.2 now raise the following exception:
```
DockerException: Error while fetching server API version: [Errno 1] _ssl.c:507: error:1407742E:SSL routines:SSL23_GET_SERVER_HELLO:tlsv1 alert protocol version
```
Any idea what's happening here?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docker/tls.py`
Content:
```
1 import os
2
3 from . import errors
4 from .ssladapter import ssladapter
5
6
7 class TLSConfig(object):
8 cert = None
9 ca_cert = None
10 verify = None
11 ssl_version = None
12
13 def __init__(self, client_cert=None, ca_cert=None, verify=None,
14 ssl_version=None, assert_hostname=None,
15 assert_fingerprint=None):
16 # Argument compatibility/mapping with
17 # https://docs.docker.com/engine/articles/https/
18 # This diverges from the Docker CLI in that users can specify 'tls'
19 # here, but also disable any public/default CA pool verification by
20 # leaving tls_verify=False
21
22 self.ssl_version = ssl_version
23 self.assert_hostname = assert_hostname
24 self.assert_fingerprint = assert_fingerprint
25
26 # "tls" and "tls_verify" must have both or neither cert/key files
27 # In either case, Alert the user when both are expected, but any are
28 # missing.
29
30 if client_cert:
31 try:
32 tls_cert, tls_key = client_cert
33 except ValueError:
34 raise errors.TLSParameterError(
35 'client_config must be a tuple of'
36 ' (client certificate, key file)'
37 )
38
39 if not (tls_cert and tls_key) or (not os.path.isfile(tls_cert) or
40 not os.path.isfile(tls_key)):
41 raise errors.TLSParameterError(
42 'Path to a certificate and key files must be provided'
43 ' through the client_config param'
44 )
45 self.cert = (tls_cert, tls_key)
46
47 # If verify is set, make sure the cert exists
48 self.verify = verify
49 self.ca_cert = ca_cert
50 if self.verify and self.ca_cert and not os.path.isfile(self.ca_cert):
51 raise errors.TLSParameterError(
52 'Invalid CA certificate provided for `tls_ca_cert`.'
53 )
54
55 def configure_client(self, client):
56 client.ssl_version = self.ssl_version
57
58 if self.verify and self.ca_cert:
59 client.verify = self.ca_cert
60 else:
61 client.verify = self.verify
62
63 if self.cert:
64 client.cert = self.cert
65
66 client.mount('https://', ssladapter.SSLAdapter(
67 ssl_version=self.ssl_version,
68 assert_hostname=self.assert_hostname,
69 assert_fingerprint=self.assert_fingerprint,
70 ))
71
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docker/tls.py b/docker/tls.py
--- a/docker/tls.py
+++ b/docker/tls.py
@@ -1,4 +1,5 @@
import os
+import ssl
from . import errors
from .ssladapter import ssladapter
@@ -19,10 +20,14 @@
# here, but also disable any public/default CA pool verification by
# leaving tls_verify=False
- self.ssl_version = ssl_version
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
+ # TLS v1.0 seems to be the safest default; SSLv23 fails in mysterious
+ # ways: https://github.com/docker/docker-py/issues/963
+
+ self.ssl_version = ssl_version or ssl.PROTOCOL_TLSv1
+
# "tls" and "tls_verify" must have both or neither cert/key files
# In either case, Alert the user when both are expected, but any are
# missing.
| {"golden_diff": "diff --git a/docker/tls.py b/docker/tls.py\n--- a/docker/tls.py\n+++ b/docker/tls.py\n@@ -1,4 +1,5 @@\n import os\n+import ssl\n \n from . import errors\n from .ssladapter import ssladapter\n@@ -19,10 +20,14 @@\n # here, but also disable any public/default CA pool verification by\n # leaving tls_verify=False\n \n- self.ssl_version = ssl_version\n self.assert_hostname = assert_hostname\n self.assert_fingerprint = assert_fingerprint\n \n+ # TLS v1.0 seems to be the safest default; SSLv23 fails in mysterious\n+ # ways: https://github.com/docker/docker-py/issues/963\n+\n+ self.ssl_version = ssl_version or ssl.PROTOCOL_TLSv1\n+\n # \"tls\" and \"tls_verify\" must have both or neither cert/key files\n # In either case, Alert the user when both are expected, but any are\n # missing.\n", "issue": "tlsv1 alert protocol version on 1.7.1 and 1.7.2 but not on 1.7.0\nSimilar to #949 I'm discovering issues with latest versions of `docker-py` running against docker 1.10.2 instance. I'm using `docker.utils.kwargs_from_env(assert_hostname=False)`. Things work fine with version 1.7.0.\n\nDocker client is initialized via\n\n```\nclient = docker.Client(\n version='auto',\n **docker.utils.kwargs_from_env(assert_hostname=False))\n```\n\nwith docker environment variables being set to the following (via docker-machine)\n\n```\nDOCKER_HOST=tcp://192.168.156.137:2376\nDOCKER_MACHINE_NAME=dev2\nDOCKER_TLS_VERIFY=1\nDOCKER_CERT_PATH=/Users/benjixx/.docker/machine/machines/dev2\n```\n\ndocker-py 1.7.1 and 1.7.2 now raise the following exception:\n\n```\nDockerException: Error while fetching server API version: [Errno 1] _ssl.c:507: error:1407742E:SSL routines:SSL23_GET_SERVER_HELLO:tlsv1 alert protocol version\n```\n\nAny idea what's happening here?\n\n", "before_files": [{"content": "import os\n\nfrom . 
import errors\nfrom .ssladapter import ssladapter\n\n\nclass TLSConfig(object):\n cert = None\n ca_cert = None\n verify = None\n ssl_version = None\n\n def __init__(self, client_cert=None, ca_cert=None, verify=None,\n ssl_version=None, assert_hostname=None,\n assert_fingerprint=None):\n # Argument compatibility/mapping with\n # https://docs.docker.com/engine/articles/https/\n # This diverges from the Docker CLI in that users can specify 'tls'\n # here, but also disable any public/default CA pool verification by\n # leaving tls_verify=False\n\n self.ssl_version = ssl_version\n self.assert_hostname = assert_hostname\n self.assert_fingerprint = assert_fingerprint\n\n # \"tls\" and \"tls_verify\" must have both or neither cert/key files\n # In either case, Alert the user when both are expected, but any are\n # missing.\n\n if client_cert:\n try:\n tls_cert, tls_key = client_cert\n except ValueError:\n raise errors.TLSParameterError(\n 'client_config must be a tuple of'\n ' (client certificate, key file)'\n )\n\n if not (tls_cert and tls_key) or (not os.path.isfile(tls_cert) or\n not os.path.isfile(tls_key)):\n raise errors.TLSParameterError(\n 'Path to a certificate and key files must be provided'\n ' through the client_config param'\n )\n self.cert = (tls_cert, tls_key)\n\n # If verify is set, make sure the cert exists\n self.verify = verify\n self.ca_cert = ca_cert\n if self.verify and self.ca_cert and not os.path.isfile(self.ca_cert):\n raise errors.TLSParameterError(\n 'Invalid CA certificate provided for `tls_ca_cert`.'\n )\n\n def configure_client(self, client):\n client.ssl_version = self.ssl_version\n\n if self.verify and self.ca_cert:\n client.verify = self.ca_cert\n else:\n client.verify = self.verify\n\n if self.cert:\n client.cert = self.cert\n\n client.mount('https://', ssladapter.SSLAdapter(\n ssl_version=self.ssl_version,\n assert_hostname=self.assert_hostname,\n assert_fingerprint=self.assert_fingerprint,\n ))\n", "path": "docker/tls.py"}], "after_files": [{"content": "import os\nimport ssl\n\nfrom . 
import errors\nfrom .ssladapter import ssladapter\n\n\nclass TLSConfig(object):\n cert = None\n ca_cert = None\n verify = None\n ssl_version = None\n\n def __init__(self, client_cert=None, ca_cert=None, verify=None,\n ssl_version=None, assert_hostname=None,\n assert_fingerprint=None):\n # Argument compatibility/mapping with\n # https://docs.docker.com/engine/articles/https/\n # This diverges from the Docker CLI in that users can specify 'tls'\n # here, but also disable any public/default CA pool verification by\n # leaving tls_verify=False\n\n self.assert_hostname = assert_hostname\n self.assert_fingerprint = assert_fingerprint\n\n # TLS v1.0 seems to be the safest default; SSLv23 fails in mysterious\n # ways: https://github.com/docker/docker-py/issues/963\n\n self.ssl_version = ssl_version or ssl.PROTOCOL_TLSv1\n\n # \"tls\" and \"tls_verify\" must have both or neither cert/key files\n # In either case, Alert the user when both are expected, but any are\n # missing.\n\n if client_cert:\n try:\n tls_cert, tls_key = client_cert\n except ValueError:\n raise errors.TLSParameterError(\n 'client_config must be a tuple of'\n ' (client certificate, key file)'\n )\n\n if not (tls_cert and tls_key) or (not os.path.isfile(tls_cert) or\n not os.path.isfile(tls_key)):\n raise errors.TLSParameterError(\n 'Path to a certificate and key files must be provided'\n ' through the client_config param'\n )\n self.cert = (tls_cert, tls_key)\n\n # If verify is set, make sure the cert exists\n self.verify = verify\n self.ca_cert = ca_cert\n if self.verify and self.ca_cert and not os.path.isfile(self.ca_cert):\n raise errors.TLSParameterError(\n 'Invalid CA certificate provided for `tls_ca_cert`.'\n )\n\n def configure_client(self, client):\n client.ssl_version = self.ssl_version\n\n if self.verify and self.ca_cert:\n client.verify = self.ca_cert\n else:\n client.verify = self.verify\n\n if self.cert:\n client.cert = self.cert\n\n client.mount('https://', ssladapter.SSLAdapter(\n ssl_version=self.ssl_version,\n assert_hostname=self.assert_hostname,\n assert_fingerprint=self.assert_fingerprint,\n ))\n", "path": "docker/tls.py"}]} | 1,174 | 223 |
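
To make the behaviour change in the record above concrete, here is a small sketch of the patched default selection in `docker.tls.TLSConfig` (constant availability depends on the local Python/OpenSSL build):

```python
import ssl

# An explicit ssl_version still wins; otherwise the patch pins TLS v1.0
# instead of letting OpenSSL's SSLv23 auto-negotiation run, which is what
# produced the SSL23_GET_SERVER_HELLO failure reported in the issue.
def effective_ssl_version(ssl_version=None):
    return ssl_version or ssl.PROTOCOL_TLSv1

assert effective_ssl_version() == ssl.PROTOCOL_TLSv1
# Callers that need a newer protocol can still opt in explicitly:
assert effective_ssl_version(ssl.PROTOCOL_TLSv1_2) == ssl.PROTOCOL_TLSv1_2
```
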
gh_patches_debug_12926 | rasdani/github-patches | git_diff | bokeh__bokeh-6804 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Automatic configuration of Slider.format
Integer sliders should use integer formatting.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bokeh/models/widgets/sliders.py`
Content:
```
1 """ Various kinds of slider widgets.
2
3 """
4 from __future__ import absolute_import
5
6 from ...core.has_props import abstract
7 from ...core.properties import Bool, Int, Float, String, Date, Enum, Tuple, Instance, Color, Override
8 from ...core.enums import SliderCallbackPolicy
9 from ..callbacks import Callback
10 from .widget import Widget
11
12 @abstract
13 class AbstractSlider(Widget):
14 """ """
15
16 title = String(default="", help="""
17 Slider's label.
18 """)
19
20 show_value = Bool(default=True, help="""
21 Whether or not show slider's value.
22 """)
23
24 format = String(help="""
25 """)
26
27 orientation = Enum("horizontal", "vertical", help="""
28 Orient the slider either horizontally (default) or vertically.
29 """)
30
31 direction = Enum("ltr", "rtl", help="""
32 """)
33
34 tooltips = Bool(default=True, help="""
35 """)
36
37 callback = Instance(Callback, help="""
38 A callback to run in the browser whenever the current Slider value changes.
39 """)
40
41 callback_throttle = Float(default=200, help="""
42 Number of millseconds to pause between callback calls as the slider is moved.
43 """)
44
45 callback_policy = Enum(SliderCallbackPolicy, default="throttle", help="""
46 When the callback is initiated. This parameter can take on only one of three options:
47
48 * "continuous": the callback will be executed immediately for each movement of the slider
49 * "throttle": the callback will be executed at most every ``callback_throttle`` milliseconds.
50 * "mouseup": the callback will be executed only once when the slider is released.
51
52 The "mouseup" policy is intended for scenarios in which the callback is expensive in time.
53 """)
54
55 bar_color = Color(default="#3fb8af", help="""
56 """)
57
58 class Slider(AbstractSlider):
59 """ Slider-based number selection widget. """
60
61 start = Float(help="""
62 The minimum allowable value.
63 """)
64
65 end = Float(help="""
66 The maximum allowable value.
67 """)
68
69 value = Float(help="""
70 Initial or selected value.
71 """)
72
73 step = Float(default=1, help="""
74 The step between consecutive values.
75 """)
76
77 format = Override(default="0,0.00")
78
79 class RangeSlider(AbstractSlider):
80 """ Range-slider based number range selection widget. """
81
82 value = Tuple(Float, Float, help="""
83 Initial or selected range.
84 """)
85
86 start = Float(help="""
87 The minimum allowable value.
88 """)
89
90 end = Float(help="""
91 The maximum allowable value.
92 """)
93
94 step = Float(default=1, help="""
95 The step between consecutive values.
96 """)
97
98 format = Override(default="0,0.00")
99
100 class DateSlider(AbstractSlider):
101 """ Slider-based date selection widget. """
102
103 value = Date(help="""
104 Initial or selected value.
105 """)
106
107 start = Date(help="""
108 The minimum allowable value.
109 """)
110
111 end = Date(help="""
112 The maximum allowable value.
113 """)
114
115 step = Int(default=1, help="""
116 The step between consecutive values.
117 """)
118
119 format = Override(default="%d %b %G")
120
121 class DateRangeSlider(AbstractSlider):
122 """ Slider-based date range selection widget. """
123
124 value = Tuple(Date, Date, help="""
125 Initial or selected range.
126 """)
127
128 start = Date(help="""
129 The minimum allowable value.
130 """)
131
132 end = Date(help="""
133 The maximum allowable value.
134 """)
135
136 step = Int(default=1, help="""
137 The step between consecutive values.
138 """)
139
140 format = Override(default="%d %b %G")
141
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bokeh/models/widgets/sliders.py b/bokeh/models/widgets/sliders.py
--- a/bokeh/models/widgets/sliders.py
+++ b/bokeh/models/widgets/sliders.py
@@ -74,7 +74,7 @@
The step between consecutive values.
""")
- format = Override(default="0,0.00")
+ format = Override(default="0[.]00")
class RangeSlider(AbstractSlider):
""" Range-slider based number range selection widget. """
@@ -95,7 +95,7 @@
The step between consecutive values.
""")
- format = Override(default="0,0.00")
+ format = Override(default="0[.]00")
class DateSlider(AbstractSlider):
""" Slider-based date selection widget. """
| {"golden_diff": "diff --git a/bokeh/models/widgets/sliders.py b/bokeh/models/widgets/sliders.py\n--- a/bokeh/models/widgets/sliders.py\n+++ b/bokeh/models/widgets/sliders.py\n@@ -74,7 +74,7 @@\n The step between consecutive values.\n \"\"\")\n \n- format = Override(default=\"0,0.00\")\n+ format = Override(default=\"0[.]00\")\n \n class RangeSlider(AbstractSlider):\n \"\"\" Range-slider based number range selection widget. \"\"\"\n@@ -95,7 +95,7 @@\n The step between consecutive values.\n \"\"\")\n \n- format = Override(default=\"0,0.00\")\n+ format = Override(default=\"0[.]00\")\n \n class DateSlider(AbstractSlider):\n \"\"\" Slider-based date selection widget. \"\"\"\n", "issue": "Automatic configuration of Slider.format\nInteger sliders should use integer formatting.\r\n\n", "before_files": [{"content": "\"\"\" Various kinds of slider widgets.\n\n\"\"\"\nfrom __future__ import absolute_import\n\nfrom ...core.has_props import abstract\nfrom ...core.properties import Bool, Int, Float, String, Date, Enum, Tuple, Instance, Color, Override\nfrom ...core.enums import SliderCallbackPolicy\nfrom ..callbacks import Callback\nfrom .widget import Widget\n\n@abstract\nclass AbstractSlider(Widget):\n \"\"\" \"\"\"\n\n title = String(default=\"\", help=\"\"\"\n Slider's label.\n \"\"\")\n\n show_value = Bool(default=True, help=\"\"\"\n Whether or not show slider's value.\n \"\"\")\n\n format = String(help=\"\"\"\n \"\"\")\n\n orientation = Enum(\"horizontal\", \"vertical\", help=\"\"\"\n Orient the slider either horizontally (default) or vertically.\n \"\"\")\n\n direction = Enum(\"ltr\", \"rtl\", help=\"\"\"\n \"\"\")\n\n tooltips = Bool(default=True, help=\"\"\"\n \"\"\")\n\n callback = Instance(Callback, help=\"\"\"\n A callback to run in the browser whenever the current Slider value changes.\n \"\"\")\n\n callback_throttle = Float(default=200, help=\"\"\"\n Number of millseconds to pause between callback calls as the slider is moved.\n \"\"\")\n\n callback_policy = Enum(SliderCallbackPolicy, default=\"throttle\", help=\"\"\"\n When the callback is initiated. This parameter can take on only one of three options:\n\n * \"continuous\": the callback will be executed immediately for each movement of the slider\n * \"throttle\": the callback will be executed at most every ``callback_throttle`` milliseconds.\n * \"mouseup\": the callback will be executed only once when the slider is released.\n\n The \"mouseup\" policy is intended for scenarios in which the callback is expensive in time.\n \"\"\")\n\n bar_color = Color(default=\"#3fb8af\", help=\"\"\"\n \"\"\")\n\nclass Slider(AbstractSlider):\n \"\"\" Slider-based number selection widget. \"\"\"\n\n start = Float(help=\"\"\"\n The minimum allowable value.\n \"\"\")\n\n end = Float(help=\"\"\"\n The maximum allowable value.\n \"\"\")\n\n value = Float(help=\"\"\"\n Initial or selected value.\n \"\"\")\n\n step = Float(default=1, help=\"\"\"\n The step between consecutive values.\n \"\"\")\n\n format = Override(default=\"0,0.00\")\n\nclass RangeSlider(AbstractSlider):\n \"\"\" Range-slider based number range selection widget. 
\"\"\"\n\n value = Tuple(Float, Float, help=\"\"\"\n Initial or selected range.\n \"\"\")\n\n start = Float(help=\"\"\"\n The minimum allowable value.\n \"\"\")\n\n end = Float(help=\"\"\"\n The maximum allowable value.\n \"\"\")\n\n step = Float(default=1, help=\"\"\"\n The step between consecutive values.\n \"\"\")\n\n format = Override(default=\"0,0.00\")\n\nclass DateSlider(AbstractSlider):\n \"\"\" Slider-based date selection widget. \"\"\"\n\n value = Date(help=\"\"\"\n Initial or selected value.\n \"\"\")\n\n start = Date(help=\"\"\"\n The minimum allowable value.\n \"\"\")\n\n end = Date(help=\"\"\"\n The maximum allowable value.\n \"\"\")\n\n step = Int(default=1, help=\"\"\"\n The step between consecutive values.\n \"\"\")\n\n format = Override(default=\"%d %b %G\")\n\nclass DateRangeSlider(AbstractSlider):\n \"\"\" Slider-based date range selection widget. \"\"\"\n\n value = Tuple(Date, Date, help=\"\"\"\n Initial or selected range.\n \"\"\")\n\n start = Date(help=\"\"\"\n The minimum allowable value.\n \"\"\")\n\n end = Date(help=\"\"\"\n The maximum allowable value.\n \"\"\")\n\n step = Int(default=1, help=\"\"\"\n The step between consecutive values.\n \"\"\")\n\n format = Override(default=\"%d %b %G\")\n", "path": "bokeh/models/widgets/sliders.py"}], "after_files": [{"content": "\"\"\" Various kinds of slider widgets.\n\n\"\"\"\nfrom __future__ import absolute_import\n\nfrom ...core.has_props import abstract\nfrom ...core.properties import Bool, Int, Float, String, Date, Enum, Tuple, Instance, Color, Override\nfrom ...core.enums import SliderCallbackPolicy\nfrom ..callbacks import Callback\nfrom .widget import Widget\n\n@abstract\nclass AbstractSlider(Widget):\n \"\"\" \"\"\"\n\n title = String(default=\"\", help=\"\"\"\n Slider's label.\n \"\"\")\n\n show_value = Bool(default=True, help=\"\"\"\n Whether or not show slider's value.\n \"\"\")\n\n format = String(help=\"\"\"\n \"\"\")\n\n orientation = Enum(\"horizontal\", \"vertical\", help=\"\"\"\n Orient the slider either horizontally (default) or vertically.\n \"\"\")\n\n direction = Enum(\"ltr\", \"rtl\", help=\"\"\"\n \"\"\")\n\n tooltips = Bool(default=True, help=\"\"\"\n \"\"\")\n\n callback = Instance(Callback, help=\"\"\"\n A callback to run in the browser whenever the current Slider value changes.\n \"\"\")\n\n callback_throttle = Float(default=200, help=\"\"\"\n Number of millseconds to pause between callback calls as the slider is moved.\n \"\"\")\n\n callback_policy = Enum(SliderCallbackPolicy, default=\"throttle\", help=\"\"\"\n When the callback is initiated. This parameter can take on only one of three options:\n\n * \"continuous\": the callback will be executed immediately for each movement of the slider\n * \"throttle\": the callback will be executed at most every ``callback_throttle`` milliseconds.\n * \"mouseup\": the callback will be executed only once when the slider is released.\n\n The \"mouseup\" policy is intended for scenarios in which the callback is expensive in time.\n \"\"\")\n\n bar_color = Color(default=\"#3fb8af\", help=\"\"\"\n \"\"\")\n\nclass Slider(AbstractSlider):\n \"\"\" Slider-based number selection widget. 
\"\"\"\n\n start = Float(help=\"\"\"\n The minimum allowable value.\n \"\"\")\n\n end = Float(help=\"\"\"\n The maximum allowable value.\n \"\"\")\n\n value = Float(help=\"\"\"\n Initial or selected value.\n \"\"\")\n\n step = Float(default=1, help=\"\"\"\n The step between consecutive values.\n \"\"\")\n\n format = Override(default=\"0[.]00\")\n\nclass RangeSlider(AbstractSlider):\n \"\"\" Range-slider based number range selection widget. \"\"\"\n\n value = Tuple(Float, Float, help=\"\"\"\n Initial or selected range.\n \"\"\")\n\n start = Float(help=\"\"\"\n The minimum allowable value.\n \"\"\")\n\n end = Float(help=\"\"\"\n The maximum allowable value.\n \"\"\")\n\n step = Float(default=1, help=\"\"\"\n The step between consecutive values.\n \"\"\")\n\n format = Override(default=\"0[.]00\")\n\nclass DateSlider(AbstractSlider):\n \"\"\" Slider-based date selection widget. \"\"\"\n\n value = Date(help=\"\"\"\n Initial or selected value.\n \"\"\")\n\n start = Date(help=\"\"\"\n The minimum allowable value.\n \"\"\")\n\n end = Date(help=\"\"\"\n The maximum allowable value.\n \"\"\")\n\n step = Int(default=1, help=\"\"\"\n The step between consecutive values.\n \"\"\")\n\n format = Override(default=\"%d %b %G\")\n\nclass DateRangeSlider(AbstractSlider):\n \"\"\" Slider-based date range selection widget. \"\"\"\n\n value = Tuple(Date, Date, help=\"\"\"\n Initial or selected range.\n \"\"\")\n\n start = Date(help=\"\"\"\n The minimum allowable value.\n \"\"\")\n\n end = Date(help=\"\"\"\n The maximum allowable value.\n \"\"\")\n\n step = Int(default=1, help=\"\"\"\n The step between consecutive values.\n \"\"\")\n\n format = Override(default=\"%d %b %G\")\n", "path": "bokeh/models/widgets/sliders.py"}]} | 1,411 | 178 |
gh_patches_debug_11006 | rasdani/github-patches | git_diff | urllib3__urllib3-818 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
UnicodeDecodeError in format_header_params
This issue was discussed here: https://github.com/kennethreitz/requests/issues/2639 and it seemed like the consensus was that this should be fixed in urllib3.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `urllib3/fields.py`
Content:
```
1 from __future__ import absolute_import
2 import email.utils
3 import mimetypes
4
5 from .packages import six
6
7
8 def guess_content_type(filename, default='application/octet-stream'):
9 """
10 Guess the "Content-Type" of a file.
11
12 :param filename:
13 The filename to guess the "Content-Type" of using :mod:`mimetypes`.
14 :param default:
15 If no "Content-Type" can be guessed, default to `default`.
16 """
17 if filename:
18 return mimetypes.guess_type(filename)[0] or default
19 return default
20
21
22 def format_header_param(name, value):
23 """
24 Helper function to format and quote a single header parameter.
25
26 Particularly useful for header parameters which might contain
27 non-ASCII values, like file names. This follows RFC 2231, as
28 suggested by RFC 2388 Section 4.4.
29
30 :param name:
31 The name of the parameter, a string expected to be ASCII only.
32 :param value:
33 The value of the parameter, provided as a unicode string.
34 """
35 if not any(ch in value for ch in '"\\\r\n'):
36 result = '%s="%s"' % (name, value)
37 try:
38 result.encode('ascii')
39 except UnicodeEncodeError:
40 pass
41 else:
42 return result
43 if not six.PY3: # Python 2:
44 value = value.encode('utf-8')
45 value = email.utils.encode_rfc2231(value, 'utf-8')
46 value = '%s*=%s' % (name, value)
47 return value
48
49
50 class RequestField(object):
51 """
52 A data container for request body parameters.
53
54 :param name:
55 The name of this request field.
56 :param data:
57 The data/value body.
58 :param filename:
59 An optional filename of the request field.
60 :param headers:
61 An optional dict-like object of headers to initially use for the field.
62 """
63 def __init__(self, name, data, filename=None, headers=None):
64 self._name = name
65 self._filename = filename
66 self.data = data
67 self.headers = {}
68 if headers:
69 self.headers = dict(headers)
70
71 @classmethod
72 def from_tuples(cls, fieldname, value):
73 """
74 A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
75
76 Supports constructing :class:`~urllib3.fields.RequestField` from
77 parameter of key/value strings AND key/filetuple. A filetuple is a
78 (filename, data, MIME type) tuple where the MIME type is optional.
79 For example::
80
81 'foo': 'bar',
82 'fakefile': ('foofile.txt', 'contents of foofile'),
83 'realfile': ('barfile.txt', open('realfile').read()),
84 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
85 'nonamefile': 'contents of nonamefile field',
86
87 Field names and filenames must be unicode.
88 """
89 if isinstance(value, tuple):
90 if len(value) == 3:
91 filename, data, content_type = value
92 else:
93 filename, data = value
94 content_type = guess_content_type(filename)
95 else:
96 filename = None
97 content_type = None
98 data = value
99
100 request_param = cls(fieldname, data, filename=filename)
101 request_param.make_multipart(content_type=content_type)
102
103 return request_param
104
105 def _render_part(self, name, value):
106 """
107 Overridable helper function to format a single header parameter.
108
109 :param name:
110 The name of the parameter, a string expected to be ASCII only.
111 :param value:
112 The value of the parameter, provided as a unicode string.
113 """
114 return format_header_param(name, value)
115
116 def _render_parts(self, header_parts):
117 """
118 Helper function to format and quote a single header.
119
120 Useful for single headers that are composed of multiple items. E.g.,
121 'Content-Disposition' fields.
122
123 :param header_parts:
124 A sequence of (k, v) typles or a :class:`dict` of (k, v) to format
125 as `k1="v1"; k2="v2"; ...`.
126 """
127 parts = []
128 iterable = header_parts
129 if isinstance(header_parts, dict):
130 iterable = header_parts.items()
131
132 for name, value in iterable:
133 if value:
134 parts.append(self._render_part(name, value))
135
136 return '; '.join(parts)
137
138 def render_headers(self):
139 """
140 Renders the headers for this request field.
141 """
142 lines = []
143
144 sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
145 for sort_key in sort_keys:
146 if self.headers.get(sort_key, False):
147 lines.append('%s: %s' % (sort_key, self.headers[sort_key]))
148
149 for header_name, header_value in self.headers.items():
150 if header_name not in sort_keys:
151 if header_value:
152 lines.append('%s: %s' % (header_name, header_value))
153
154 lines.append('\r\n')
155 return '\r\n'.join(lines)
156
157 def make_multipart(self, content_disposition=None, content_type=None,
158 content_location=None):
159 """
160 Makes this request field into a multipart request field.
161
162 This method overrides "Content-Disposition", "Content-Type" and
163 "Content-Location" headers to the request parameter.
164
165 :param content_type:
166 The 'Content-Type' of the request body.
167 :param content_location:
168 The 'Content-Location' of the request body.
169
170 """
171 self.headers['Content-Disposition'] = content_disposition or 'form-data'
172 self.headers['Content-Disposition'] += '; '.join([
173 '', self._render_parts(
174 (('name', self._name), ('filename', self._filename))
175 )
176 ])
177 self.headers['Content-Type'] = content_type
178 self.headers['Content-Location'] = content_location
179
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/urllib3/fields.py b/urllib3/fields.py
--- a/urllib3/fields.py
+++ b/urllib3/fields.py
@@ -36,11 +36,11 @@
result = '%s="%s"' % (name, value)
try:
result.encode('ascii')
- except UnicodeEncodeError:
+ except (UnicodeEncodeError, UnicodeDecodeError):
pass
else:
return result
- if not six.PY3: # Python 2:
+ if not six.PY3 and isinstance(value, six.text_type): # Python 2:
value = value.encode('utf-8')
value = email.utils.encode_rfc2231(value, 'utf-8')
value = '%s*=%s' % (name, value)
| {"golden_diff": "diff --git a/urllib3/fields.py b/urllib3/fields.py\n--- a/urllib3/fields.py\n+++ b/urllib3/fields.py\n@@ -36,11 +36,11 @@\n result = '%s=\"%s\"' % (name, value)\n try:\n result.encode('ascii')\n- except UnicodeEncodeError:\n+ except (UnicodeEncodeError, UnicodeDecodeError):\n pass\n else:\n return result\n- if not six.PY3: # Python 2:\n+ if not six.PY3 and isinstance(value, six.text_type): # Python 2:\n value = value.encode('utf-8')\n value = email.utils.encode_rfc2231(value, 'utf-8')\n value = '%s*=%s' % (name, value)\n", "issue": "UnicodeDecodeError in format_header_params\nThis issue was discussed here: https://github.com/kennethreitz/requests/issues/2639 and it seemed like the consensus was that this should be fixed in urllib3. \n\n", "before_files": [{"content": "from __future__ import absolute_import\nimport email.utils\nimport mimetypes\n\nfrom .packages import six\n\n\ndef guess_content_type(filename, default='application/octet-stream'):\n \"\"\"\n Guess the \"Content-Type\" of a file.\n\n :param filename:\n The filename to guess the \"Content-Type\" of using :mod:`mimetypes`.\n :param default:\n If no \"Content-Type\" can be guessed, default to `default`.\n \"\"\"\n if filename:\n return mimetypes.guess_type(filename)[0] or default\n return default\n\n\ndef format_header_param(name, value):\n \"\"\"\n Helper function to format and quote a single header parameter.\n\n Particularly useful for header parameters which might contain\n non-ASCII values, like file names. This follows RFC 2231, as\n suggested by RFC 2388 Section 4.4.\n\n :param name:\n The name of the parameter, a string expected to be ASCII only.\n :param value:\n The value of the parameter, provided as a unicode string.\n \"\"\"\n if not any(ch in value for ch in '\"\\\\\\r\\n'):\n result = '%s=\"%s\"' % (name, value)\n try:\n result.encode('ascii')\n except UnicodeEncodeError:\n pass\n else:\n return result\n if not six.PY3: # Python 2:\n value = value.encode('utf-8')\n value = email.utils.encode_rfc2231(value, 'utf-8')\n value = '%s*=%s' % (name, value)\n return value\n\n\nclass RequestField(object):\n \"\"\"\n A data container for request body parameters.\n\n :param name:\n The name of this request field.\n :param data:\n The data/value body.\n :param filename:\n An optional filename of the request field.\n :param headers:\n An optional dict-like object of headers to initially use for the field.\n \"\"\"\n def __init__(self, name, data, filename=None, headers=None):\n self._name = name\n self._filename = filename\n self.data = data\n self.headers = {}\n if headers:\n self.headers = dict(headers)\n\n @classmethod\n def from_tuples(cls, fieldname, value):\n \"\"\"\n A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.\n\n Supports constructing :class:`~urllib3.fields.RequestField` from\n parameter of key/value strings AND key/filetuple. 
A filetuple is a\n (filename, data, MIME type) tuple where the MIME type is optional.\n For example::\n\n 'foo': 'bar',\n 'fakefile': ('foofile.txt', 'contents of foofile'),\n 'realfile': ('barfile.txt', open('realfile').read()),\n 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),\n 'nonamefile': 'contents of nonamefile field',\n\n Field names and filenames must be unicode.\n \"\"\"\n if isinstance(value, tuple):\n if len(value) == 3:\n filename, data, content_type = value\n else:\n filename, data = value\n content_type = guess_content_type(filename)\n else:\n filename = None\n content_type = None\n data = value\n\n request_param = cls(fieldname, data, filename=filename)\n request_param.make_multipart(content_type=content_type)\n\n return request_param\n\n def _render_part(self, name, value):\n \"\"\"\n Overridable helper function to format a single header parameter.\n\n :param name:\n The name of the parameter, a string expected to be ASCII only.\n :param value:\n The value of the parameter, provided as a unicode string.\n \"\"\"\n return format_header_param(name, value)\n\n def _render_parts(self, header_parts):\n \"\"\"\n Helper function to format and quote a single header.\n\n Useful for single headers that are composed of multiple items. E.g.,\n 'Content-Disposition' fields.\n\n :param header_parts:\n A sequence of (k, v) typles or a :class:`dict` of (k, v) to format\n as `k1=\"v1\"; k2=\"v2\"; ...`.\n \"\"\"\n parts = []\n iterable = header_parts\n if isinstance(header_parts, dict):\n iterable = header_parts.items()\n\n for name, value in iterable:\n if value:\n parts.append(self._render_part(name, value))\n\n return '; '.join(parts)\n\n def render_headers(self):\n \"\"\"\n Renders the headers for this request field.\n \"\"\"\n lines = []\n\n sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']\n for sort_key in sort_keys:\n if self.headers.get(sort_key, False):\n lines.append('%s: %s' % (sort_key, self.headers[sort_key]))\n\n for header_name, header_value in self.headers.items():\n if header_name not in sort_keys:\n if header_value:\n lines.append('%s: %s' % (header_name, header_value))\n\n lines.append('\\r\\n')\n return '\\r\\n'.join(lines)\n\n def make_multipart(self, content_disposition=None, content_type=None,\n content_location=None):\n \"\"\"\n Makes this request field into a multipart request field.\n\n This method overrides \"Content-Disposition\", \"Content-Type\" and\n \"Content-Location\" headers to the request parameter.\n\n :param content_type:\n The 'Content-Type' of the request body.\n :param content_location:\n The 'Content-Location' of the request body.\n\n \"\"\"\n self.headers['Content-Disposition'] = content_disposition or 'form-data'\n self.headers['Content-Disposition'] += '; '.join([\n '', self._render_parts(\n (('name', self._name), ('filename', self._filename))\n )\n ])\n self.headers['Content-Type'] = content_type\n self.headers['Content-Location'] = content_location\n", "path": "urllib3/fields.py"}], "after_files": [{"content": "from __future__ import absolute_import\nimport email.utils\nimport mimetypes\n\nfrom .packages import six\n\n\ndef guess_content_type(filename, default='application/octet-stream'):\n \"\"\"\n Guess the \"Content-Type\" of a file.\n\n :param filename:\n The filename to guess the \"Content-Type\" of using :mod:`mimetypes`.\n :param default:\n If no \"Content-Type\" can be guessed, default to `default`.\n \"\"\"\n if filename:\n return mimetypes.guess_type(filename)[0] or default\n return 
default\n\n\ndef format_header_param(name, value):\n \"\"\"\n Helper function to format and quote a single header parameter.\n\n Particularly useful for header parameters which might contain\n non-ASCII values, like file names. This follows RFC 2231, as\n suggested by RFC 2388 Section 4.4.\n\n :param name:\n The name of the parameter, a string expected to be ASCII only.\n :param value:\n The value of the parameter, provided as a unicode string.\n \"\"\"\n if not any(ch in value for ch in '\"\\\\\\r\\n'):\n result = '%s=\"%s\"' % (name, value)\n try:\n result.encode('ascii')\n except (UnicodeEncodeError, UnicodeDecodeError):\n pass\n else:\n return result\n if not six.PY3 and isinstance(value, six.text_type): # Python 2:\n value = value.encode('utf-8')\n value = email.utils.encode_rfc2231(value, 'utf-8')\n value = '%s*=%s' % (name, value)\n return value\n\n\nclass RequestField(object):\n \"\"\"\n A data container for request body parameters.\n\n :param name:\n The name of this request field.\n :param data:\n The data/value body.\n :param filename:\n An optional filename of the request field.\n :param headers:\n An optional dict-like object of headers to initially use for the field.\n \"\"\"\n def __init__(self, name, data, filename=None, headers=None):\n self._name = name\n self._filename = filename\n self.data = data\n self.headers = {}\n if headers:\n self.headers = dict(headers)\n\n @classmethod\n def from_tuples(cls, fieldname, value):\n \"\"\"\n A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.\n\n Supports constructing :class:`~urllib3.fields.RequestField` from\n parameter of key/value strings AND key/filetuple. A filetuple is a\n (filename, data, MIME type) tuple where the MIME type is optional.\n For example::\n\n 'foo': 'bar',\n 'fakefile': ('foofile.txt', 'contents of foofile'),\n 'realfile': ('barfile.txt', open('realfile').read()),\n 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),\n 'nonamefile': 'contents of nonamefile field',\n\n Field names and filenames must be unicode.\n \"\"\"\n if isinstance(value, tuple):\n if len(value) == 3:\n filename, data, content_type = value\n else:\n filename, data = value\n content_type = guess_content_type(filename)\n else:\n filename = None\n content_type = None\n data = value\n\n request_param = cls(fieldname, data, filename=filename)\n request_param.make_multipart(content_type=content_type)\n\n return request_param\n\n def _render_part(self, name, value):\n \"\"\"\n Overridable helper function to format a single header parameter.\n\n :param name:\n The name of the parameter, a string expected to be ASCII only.\n :param value:\n The value of the parameter, provided as a unicode string.\n \"\"\"\n return format_header_param(name, value)\n\n def _render_parts(self, header_parts):\n \"\"\"\n Helper function to format and quote a single header.\n\n Useful for single headers that are composed of multiple items. 
E.g.,\n 'Content-Disposition' fields.\n\n :param header_parts:\n A sequence of (k, v) typles or a :class:`dict` of (k, v) to format\n as `k1=\"v1\"; k2=\"v2\"; ...`.\n \"\"\"\n parts = []\n iterable = header_parts\n if isinstance(header_parts, dict):\n iterable = header_parts.items()\n\n for name, value in iterable:\n if value:\n parts.append(self._render_part(name, value))\n\n return '; '.join(parts)\n\n def render_headers(self):\n \"\"\"\n Renders the headers for this request field.\n \"\"\"\n lines = []\n\n sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']\n for sort_key in sort_keys:\n if self.headers.get(sort_key, False):\n lines.append('%s: %s' % (sort_key, self.headers[sort_key]))\n\n for header_name, header_value in self.headers.items():\n if header_name not in sort_keys:\n if header_value:\n lines.append('%s: %s' % (header_name, header_value))\n\n lines.append('\\r\\n')\n return '\\r\\n'.join(lines)\n\n def make_multipart(self, content_disposition=None, content_type=None,\n content_location=None):\n \"\"\"\n Makes this request field into a multipart request field.\n\n This method overrides \"Content-Disposition\", \"Content-Type\" and\n \"Content-Location\" headers to the request parameter.\n\n :param content_type:\n The 'Content-Type' of the request body.\n :param content_location:\n The 'Content-Location' of the request body.\n\n \"\"\"\n self.headers['Content-Disposition'] = content_disposition or 'form-data'\n self.headers['Content-Disposition'] += '; '.join([\n '', self._render_parts(\n (('name', self._name), ('filename', self._filename))\n )\n ])\n self.headers['Content-Type'] = content_type\n self.headers['Content-Location'] = content_location\n", "path": "urllib3/fields.py"}]} | 2,046 | 187 |
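
To make the two behavioural changes in the record above concrete, here is the patched function reassembled as standalone code; it mirrors the after_files content, with `six` standing in for the compatibility shim urllib3 vendors:

```python
import email.utils
import six

def format_header_param(name, value):
    if not any(ch in value for ch in '"\\\r\n'):
        result = '%s="%s"' % (name, value)
        try:
            result.encode('ascii')
        except (UnicodeEncodeError, UnicodeDecodeError):
            # On Python 2 a non-ASCII byte string raises UnicodeDecodeError
            # here; the old code caught only UnicodeEncodeError and crashed.
            pass
        else:
            return result
    if not six.PY3 and isinstance(value, six.text_type):
        # Re-encode only *text* on Python 2; byte strings pass through.
        value = value.encode('utf-8')
    value = email.utils.encode_rfc2231(value, 'utf-8')
    return '%s*=%s' % (name, value)

# On Python 3 this prints: filename*=utf-8''r%C3%A9sum%C3%A9.txt
print(format_header_param('filename', 'r\u00e9sum\u00e9.txt'))
```
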
gh_patches_debug_22902 | rasdani/github-patches | git_diff | Lightning-AI__pytorch-lightning-720 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
W&B: Allow for passing experiment into the WandbLogger (and logging semantics)
Currently, the WandbLogger will automatically create a new internal experiment (run) whenever you create a new WandbLogger.
# Issue
If I instantiate a wandb experiment outside of the logger, then I will have two experiments when I train my model since there is no way to set the internal experiment of the WandbLogger to my current external experiment.
# Potential Solution
Allow for passing an experiment into the WandbLogger:
```
class WandbLogger(LightningLoggerBase):
def __init__(self, name=None, save_dir=None, offline=False, id=None, anonymous=False,
version=None, project=None, tags=None, experiment=None):
.
.
.
self._experiment = experiment
```
Then I can do this:
```
experiment = wandb.init(.......)
wandb_logger = WandbLogger(experiment=experiment)
```
I made this change locally, however, I wasn't sure if this was something you also wanted to implement as well. It works for me.
# Another small note
In the `WandbLogger.log_metrics` function, I would change:
`self.experiment.history.add(metrics)` --> `self.experiment.log(metrics)`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pytorch_lightning/logging/wandb.py`
Content:
```
1 import os
2
3 try:
4 import wandb
5 except ImportError:
6 raise ImportError('Missing wandb package.')
7
8 from .base import LightningLoggerBase, rank_zero_only
9
10
11 class WandbLogger(LightningLoggerBase):
12 """
13 Logger for W&B.
14
15 Args:
16 name (str): display name for the run.
17 save_dir (str): path where data is saved.
18 offline (bool): run offline (data can be streamed later to wandb servers).
19 id or version (str): sets the version, mainly used to resume a previous run.
20 anonymous (bool): enables or explicitly disables anonymous logging.
21 project (str): the name of the project to which this run will belong.
22 tags (list of str): tags associated with this run.
23
24 Example
25 --------
26 .. code-block:: python
27
28 from pytorch_lightning.logging import WandbLogger
29 from pytorch_lightning import Trainer
30
31 wandb_logger = WandbLogger()
32 trainer = Trainer(logger=wandb_logger)
33 """
34
35 def __init__(self, name=None, save_dir=None, offline=False, id=None, anonymous=False,
36 version=None, project=None, tags=None):
37 super().__init__()
38 self._name = name
39 self._save_dir = save_dir
40 self._anonymous = "allow" if anonymous else None
41 self._id = version or id
42 self._tags = tags
43 self._project = project
44 self._experiment = None
45 self._offline = offline
46
47 def __getstate__(self):
48 state = self.__dict__.copy()
49 # cannot be pickled
50 state['_experiment'] = None
51 # args needed to reload correct experiment
52 state['_id'] = self.experiment.id
53 return state
54
55 @property
56 def experiment(self):
57 r"""
58
59 Actual wandb object. To use wandb features do the following.
60
61 Example::
62
63 self.logger.experiment.some_wandb_function()
64
65 """
66 if self._experiment is None:
67 if self._offline:
68 os.environ["WANDB_MODE"] = "dryrun"
69 self._experiment = wandb.init(
70 name=self._name, dir=self._save_dir, project=self._project, anonymous=self._anonymous,
71 id=self._id, resume="allow", tags=self._tags)
72 return self._experiment
73
74 def watch(self, model, log="gradients", log_freq=100):
75 wandb.watch(model, log, log_freq)
76
77 @rank_zero_only
78 def log_hyperparams(self, params):
79 self.experiment.config.update(params)
80
81 @rank_zero_only
82 def log_metrics(self, metrics, step=None):
83 metrics["global_step"] = step
84 self.experiment.history.add(metrics)
85
86 def save(self):
87 pass
88
89 @rank_zero_only
90 def finalize(self, status='success'):
91 try:
92 exit_code = 0 if status == 'success' else 1
93 wandb.join(exit_code)
94 except TypeError:
95 wandb.join()
96
97 @property
98 def name(self):
99 return self.experiment.project_name()
100
101 @property
102 def version(self):
103 return self.experiment.id
104
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pytorch_lightning/logging/wandb.py b/pytorch_lightning/logging/wandb.py
--- a/pytorch_lightning/logging/wandb.py
+++ b/pytorch_lightning/logging/wandb.py
@@ -33,7 +33,7 @@
"""
def __init__(self, name=None, save_dir=None, offline=False, id=None, anonymous=False,
- version=None, project=None, tags=None):
+ version=None, project=None, tags=None, experiment=None):
super().__init__()
self._name = name
self._save_dir = save_dir
@@ -41,7 +41,7 @@
self._id = version or id
self._tags = tags
self._project = project
- self._experiment = None
+ self._experiment = experiment
self._offline = offline
def __getstate__(self):
@@ -81,7 +81,7 @@
@rank_zero_only
def log_metrics(self, metrics, step=None):
metrics["global_step"] = step
- self.experiment.history.add(metrics)
+ self.experiment.log(metrics)
def save(self):
pass
| {"golden_diff": "diff --git a/pytorch_lightning/logging/wandb.py b/pytorch_lightning/logging/wandb.py\n--- a/pytorch_lightning/logging/wandb.py\n+++ b/pytorch_lightning/logging/wandb.py\n@@ -33,7 +33,7 @@\n \"\"\"\n \n def __init__(self, name=None, save_dir=None, offline=False, id=None, anonymous=False,\n- version=None, project=None, tags=None):\n+ version=None, project=None, tags=None, experiment=None):\n super().__init__()\n self._name = name\n self._save_dir = save_dir\n@@ -41,7 +41,7 @@\n self._id = version or id\n self._tags = tags\n self._project = project\n- self._experiment = None\n+ self._experiment = experiment\n self._offline = offline\n \n def __getstate__(self):\n@@ -81,7 +81,7 @@\n @rank_zero_only\n def log_metrics(self, metrics, step=None):\n metrics[\"global_step\"] = step\n- self.experiment.history.add(metrics)\n+ self.experiment.log(metrics)\n \n def save(self):\n pass\n", "issue": "W&B: Allow for passing experiment into the WandbLogger (and logging semantics)\nCurrently, the WandbLogger will automatically create a new internal experiment (run) whenever you create a new WandbLogger.\r\n\r\n# Issue \r\n\r\nIf I instantiate a wandb experiment outside of the logger, then I will have two experiments when I train my model since there is no way to set the internal experiment of the WandbLogger to my current external experiment.\r\n\r\n# Potential Solution\r\nAllow for passing an experiment into the WandbLogger:\r\n\r\n```\r\nclass WandbLogger(LightningLoggerBase):\r\n def __init__(self, name=None, save_dir=None, offline=False, id=None, anonymous=False,\r\n version=None, project=None, tags=None, experiment=None):\r\n .\r\n .\r\n .\r\n self._experiment = experiment\r\n```\r\nThen I can do this:\r\n\r\n```\r\nexperiment = wandb.init(.......)\r\nwandb_logger = WandbLogger(experiment=experiment)\r\n```\r\nI made this change locally, however, I wasn't sure if this was something you also wanted to implement as well. It works for me.\r\n\r\n# Another small note\r\n\r\nIn the `WandbLogger.log_metrics` function, I would change:\r\n\r\n`self.experiment.history.add(metrics)` --> `self.experiment.log(metrics)`\n", "before_files": [{"content": "import os\n\ntry:\n import wandb\nexcept ImportError:\n raise ImportError('Missing wandb package.')\n\nfrom .base import LightningLoggerBase, rank_zero_only\n\n\nclass WandbLogger(LightningLoggerBase):\n \"\"\"\n Logger for W&B.\n\n Args:\n name (str): display name for the run.\n save_dir (str): path where data is saved.\n offline (bool): run offline (data can be streamed later to wandb servers).\n id or version (str): sets the version, mainly used to resume a previous run.\n anonymous (bool): enables or explicitly disables anonymous logging.\n project (str): the name of the project to which this run will belong.\n tags (list of str): tags associated with this run.\n\n Example\n --------\n .. 
code-block:: python\n\n from pytorch_lightning.logging import WandbLogger\n from pytorch_lightning import Trainer\n\n wandb_logger = WandbLogger()\n trainer = Trainer(logger=wandb_logger)\n \"\"\"\n\n def __init__(self, name=None, save_dir=None, offline=False, id=None, anonymous=False,\n version=None, project=None, tags=None):\n super().__init__()\n self._name = name\n self._save_dir = save_dir\n self._anonymous = \"allow\" if anonymous else None\n self._id = version or id\n self._tags = tags\n self._project = project\n self._experiment = None\n self._offline = offline\n\n def __getstate__(self):\n state = self.__dict__.copy()\n # cannot be pickled\n state['_experiment'] = None\n # args needed to reload correct experiment\n state['_id'] = self.experiment.id\n return state\n\n @property\n def experiment(self):\n r\"\"\"\n\n Actual wandb object. To use wandb features do the following.\n\n Example::\n\n self.logger.experiment.some_wandb_function()\n\n \"\"\"\n if self._experiment is None:\n if self._offline:\n os.environ[\"WANDB_MODE\"] = \"dryrun\"\n self._experiment = wandb.init(\n name=self._name, dir=self._save_dir, project=self._project, anonymous=self._anonymous,\n id=self._id, resume=\"allow\", tags=self._tags)\n return self._experiment\n\n def watch(self, model, log=\"gradients\", log_freq=100):\n wandb.watch(model, log, log_freq)\n\n @rank_zero_only\n def log_hyperparams(self, params):\n self.experiment.config.update(params)\n\n @rank_zero_only\n def log_metrics(self, metrics, step=None):\n metrics[\"global_step\"] = step\n self.experiment.history.add(metrics)\n\n def save(self):\n pass\n\n @rank_zero_only\n def finalize(self, status='success'):\n try:\n exit_code = 0 if status == 'success' else 1\n wandb.join(exit_code)\n except TypeError:\n wandb.join()\n\n @property\n def name(self):\n return self.experiment.project_name()\n\n @property\n def version(self):\n return self.experiment.id\n", "path": "pytorch_lightning/logging/wandb.py"}], "after_files": [{"content": "import os\n\ntry:\n import wandb\nexcept ImportError:\n raise ImportError('Missing wandb package.')\n\nfrom .base import LightningLoggerBase, rank_zero_only\n\n\nclass WandbLogger(LightningLoggerBase):\n \"\"\"\n Logger for W&B.\n\n Args:\n name (str): display name for the run.\n save_dir (str): path where data is saved.\n offline (bool): run offline (data can be streamed later to wandb servers).\n id or version (str): sets the version, mainly used to resume a previous run.\n anonymous (bool): enables or explicitly disables anonymous logging.\n project (str): the name of the project to which this run will belong.\n tags (list of str): tags associated with this run.\n\n Example\n --------\n .. 
code-block:: python\n\n from pytorch_lightning.logging import WandbLogger\n from pytorch_lightning import Trainer\n\n wandb_logger = WandbLogger()\n trainer = Trainer(logger=wandb_logger)\n \"\"\"\n\n def __init__(self, name=None, save_dir=None, offline=False, id=None, anonymous=False,\n version=None, project=None, tags=None, experiment=None):\n super().__init__()\n self._name = name\n self._save_dir = save_dir\n self._anonymous = \"allow\" if anonymous else None\n self._id = version or id\n self._tags = tags\n self._project = project\n self._experiment = experiment\n self._offline = offline\n\n def __getstate__(self):\n state = self.__dict__.copy()\n # cannot be pickled\n state['_experiment'] = None\n # args needed to reload correct experiment\n state['_id'] = self.experiment.id\n return state\n\n @property\n def experiment(self):\n r\"\"\"\n\n Actual wandb object. To use wandb features do the following.\n\n Example::\n\n self.logger.experiment.some_wandb_function()\n\n \"\"\"\n if self._experiment is None:\n if self._offline:\n os.environ[\"WANDB_MODE\"] = \"dryrun\"\n self._experiment = wandb.init(\n name=self._name, dir=self._save_dir, project=self._project, anonymous=self._anonymous,\n id=self._id, resume=\"allow\", tags=self._tags)\n return self._experiment\n\n def watch(self, model, log=\"gradients\", log_freq=100):\n wandb.watch(model, log, log_freq)\n\n @rank_zero_only\n def log_hyperparams(self, params):\n self.experiment.config.update(params)\n\n @rank_zero_only\n def log_metrics(self, metrics, step=None):\n metrics[\"global_step\"] = step\n self.experiment.log(metrics)\n\n def save(self):\n pass\n\n @rank_zero_only\n def finalize(self, status='success'):\n try:\n exit_code = 0 if status == 'success' else 1\n wandb.join(exit_code)\n except TypeError:\n wandb.join()\n\n @property\n def name(self):\n return self.experiment.project_name()\n\n @property\n def version(self):\n return self.experiment.id\n", "path": "pytorch_lightning/logging/wandb.py"}]} | 1,425 | 262 |
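For quick reference, a minimal usage sketch of the patched logger follows. It assumes the `experiment=` keyword from the golden diff above; the `wandb.init` arguments are illustrative placeholders, not values from the repository.

```python
# Sketch only: the project name passed to wandb.init is an assumption.
import wandb
from pytorch_lightning.logging import WandbLogger
from pytorch_lightning import Trainer

# Create (or resume) a run outside the logger...
experiment = wandb.init(project="my-project")

# ...and hand it in, so the logger reuses it instead of opening a second run.
wandb_logger = WandbLogger(experiment=experiment)
trainer = Trainer(logger=wandb_logger)
# With the patch applied, log_metrics() routes through experiment.log(metrics).
```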
gh_patches_debug_8178 | rasdani/github-patches | git_diff | kartoza__prj.app-1077 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Please include project name in certifying org admin
<img width="895" alt="image" src="https://user-images.githubusercontent.com/178003/67474466-6ab98000-f64c-11e9-8a69-95e12e8bd404.png">
We have two Kartoza entries there - one for InaSAFE and one for QGIS - I nearly deleted the first not realising it was for InaSAFE
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `django_project/certification/admin.py`
Content:
```
1 # coding=UTF-8
2 """Model admin class definitions."""
3
4 from django.contrib.gis import admin
5 from simple_history.admin import SimpleHistoryAdmin
6 from certification.models.certificate import Certificate
7 from certification.models.course import Course
8 from certification.models.training_center import TrainingCenter
9 from certification.models.course_convener import CourseConvener
10 from certification.models.course_type import CourseType
11 from certification.models.attendee import Attendee
12 from certification.models.course_attendee import CourseAttendee
13 from certification.models.certifying_organisation import CertifyingOrganisation
14 from certification.models.organisation_certificate import \
15 CertifyingOrganisationCertificate
16 from certification.models.status import Status
17
18
19 class CertificateAdmin(admin.ModelAdmin):
20 """Certificate admin model."""
21
22 list_display = ('__unicode__', 'course')
23 search_fields = ('certificateID', 'course__name',)
24
25 def queryset(self, request):
26 """Ensure we use the correct manager.
27
28 :param request: HttpRequest object
29 """
30 query_set = self.model.objects
31 ordering = self.get_ordering(request)
32 if ordering:
33 query_set = query_set.order_by(*ordering)
34 return query_set
35
36
37 class AttendeeAdmin(admin.ModelAdmin):
38 """Attendee admin model."""
39 list_display = ('firstname', 'surname', 'email', 'certifying_organisation')
40 search_fields = ['firstname', 'surname']
41
42 def queryset(self, request):
43 """Ensure we use the correct manager.
44
45 :param request: HttpRequest object
46 """
47 query_set = self.model.objects
48 ordering = self.get_ordering(request)
49 if ordering:
50 query_set = query_set.order_by(*ordering)
51 return query_set
52
53
54 class CourseAttendeeAdmin(admin.ModelAdmin):
55 """Certificate admin model."""
56 list_display = ('course', 'attendee', 'author')
57
58 def queryset(self, request):
59 """Ensure we use the correct manager.
60
61 :param request: HttpRequest object
62 """
63 query_set = self.model.objects
64 ordering = self.get_ordering(request)
65 if ordering:
66 query_set = query_set.order_by(*ordering)
67 return query_set
68
69
70 class CourseAdmin(admin.ModelAdmin):
71 """Course admin model."""
72
73 def queryset(self, request):
74 """Ensure we use the correct manager.
75
76 :param request: HttpRequest object
77 """
78 query_set = self.model.objects
79 ordering = self.get_ordering(request)
80 if ordering:
81 query_set = query_set.order_by(*ordering)
82 return query_set
83
84
85 class CourseTypeAdmin(admin.ModelAdmin):
86 """Course type admin model."""
87
88 def queryset(self, request):
89 """Ensure we use the correct manager.
90
91 :param request: HttpRequest object
92 """
93 query_set = self.model.objects
94 ordering = self.get_ordering(request)
95 if ordering:
96 query_set = query_set.order_by(*ordering)
97 return query_set
98
99
100 class TrainingCenterAdmin(admin.GeoModelAdmin):
101 """Training center admin model."""
102
103 def queryset(self, request):
104 """Ensure we use the correct manager.
105
106 :param request: HttpRequest object
107 """
108 query_set = self.model.objects
109 ordering = self.get_ordering(request)
110 if ordering:
111 query_set = query_set.order_by(*ordering)
112 return query_set
113
114
115 class CourseConvenerAdmin(admin.ModelAdmin):
116 """Course convener admin model."""
117
118 def queryset(self, request):
119 """Ensure we use the correct manager.
120
121 :param request: HttpRequest object
122 """
123 query_set = self.model.objects
124 ordering = self.get_ordering(request)
125 if ordering:
126 query_set = query_set.order_by(*ordering)
127 return query_set
128
129
130 class CertifyingOrganisationCertificateAdminInline(admin.TabularInline):
131 model = CertifyingOrganisationCertificate
132 extra = 0
133
134
135 class CertifyingOrganisationCertificateAdmin(SimpleHistoryAdmin):
136 history_list_display = ['issued', 'valid']
137
138
139 class CertifyingOrganisationAdmin(SimpleHistoryAdmin):
140 """Certifying organisation admin model."""
141
142 filter_horizontal = ('organisation_owners',)
143 search_fields = ['name']
144 list_display = ('name', 'country', 'approved', 'rejected')
145 list_filter = ('country', 'approved', 'rejected', 'status')
146 inlines = (CertifyingOrganisationCertificateAdminInline, )
147 history_list_display = ['status', 'remarks']
148
149 def queryset(self, request):
150 """Ensure we use the correct manager.
151
152 :param request: HttpRequest object
153 """
154 query_set = self.model.objects
155 ordering = self.get_ordering(request)
156 if ordering:
157 query_set = query_set.order_by(*ordering)
158 return query_set
159
160
161 class StatusAdmin(admin.ModelAdmin):
162 list_display = ('name', 'project', 'order')
163
164
165 admin.site.register(Certificate, CertificateAdmin)
166 admin.site.register(Attendee, AttendeeAdmin)
167 admin.site.register(Course, CourseAdmin)
168 admin.site.register(CourseType, CourseTypeAdmin)
169 admin.site.register(TrainingCenter, TrainingCenterAdmin)
170 admin.site.register(CourseConvener, CourseConvenerAdmin)
171 admin.site.register(CertifyingOrganisation, CertifyingOrganisationAdmin)
172 admin.site.register(CourseAttendee, CourseAttendeeAdmin)
173 admin.site.register(
174 CertifyingOrganisationCertificate, CertifyingOrganisationCertificateAdmin)
175 admin.site.register(Status, StatusAdmin)
176
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/django_project/certification/admin.py b/django_project/certification/admin.py
--- a/django_project/certification/admin.py
+++ b/django_project/certification/admin.py
@@ -141,7 +141,7 @@
filter_horizontal = ('organisation_owners',)
search_fields = ['name']
- list_display = ('name', 'country', 'approved', 'rejected')
+ list_display = ('name', 'project', 'country', 'approved', 'rejected')
list_filter = ('country', 'approved', 'rejected', 'status')
inlines = (CertifyingOrganisationCertificateAdminInline, )
history_list_display = ['status', 'remarks']
| {"golden_diff": "diff --git a/django_project/certification/admin.py b/django_project/certification/admin.py\n--- a/django_project/certification/admin.py\n+++ b/django_project/certification/admin.py\n@@ -141,7 +141,7 @@\n \n filter_horizontal = ('organisation_owners',)\n search_fields = ['name']\n- list_display = ('name', 'country', 'approved', 'rejected')\n+ list_display = ('name', 'project', 'country', 'approved', 'rejected')\n list_filter = ('country', 'approved', 'rejected', 'status')\n inlines = (CertifyingOrganisationCertificateAdminInline, )\n history_list_display = ['status', 'remarks']\n", "issue": "Please include project name in certifying org admin\n<img width=\"895\" alt=\"image\" src=\"https://user-images.githubusercontent.com/178003/67474466-6ab98000-f64c-11e9-8a69-95e12e8bd404.png\">\r\n\r\n\r\nWe have two Kartoza entries there - one for InaSAFE and one for QGIS - I nearly deleted the first not realising it was for InaSAFE\n", "before_files": [{"content": "# coding=UTF-8\n\"\"\"Model admin class definitions.\"\"\"\n\nfrom django.contrib.gis import admin\nfrom simple_history.admin import SimpleHistoryAdmin\nfrom certification.models.certificate import Certificate\nfrom certification.models.course import Course\nfrom certification.models.training_center import TrainingCenter\nfrom certification.models.course_convener import CourseConvener\nfrom certification.models.course_type import CourseType\nfrom certification.models.attendee import Attendee\nfrom certification.models.course_attendee import CourseAttendee\nfrom certification.models.certifying_organisation import CertifyingOrganisation\nfrom certification.models.organisation_certificate import \\\n CertifyingOrganisationCertificate\nfrom certification.models.status import Status\n\n\nclass CertificateAdmin(admin.ModelAdmin):\n \"\"\"Certificate admin model.\"\"\"\n\n list_display = ('__unicode__', 'course')\n search_fields = ('certificateID', 'course__name',)\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass AttendeeAdmin(admin.ModelAdmin):\n \"\"\"Attendee admin model.\"\"\"\n list_display = ('firstname', 'surname', 'email', 'certifying_organisation')\n search_fields = ['firstname', 'surname']\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass CourseAttendeeAdmin(admin.ModelAdmin):\n \"\"\"Certificate admin model.\"\"\"\n list_display = ('course', 'attendee', 'author')\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass CourseAdmin(admin.ModelAdmin):\n \"\"\"Course admin model.\"\"\"\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass CourseTypeAdmin(admin.ModelAdmin):\n \"\"\"Course type admin 
model.\"\"\"\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass TrainingCenterAdmin(admin.GeoModelAdmin):\n \"\"\"Training center admin model.\"\"\"\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass CourseConvenerAdmin(admin.ModelAdmin):\n \"\"\"Course convener admin model.\"\"\"\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass CertifyingOrganisationCertificateAdminInline(admin.TabularInline):\n model = CertifyingOrganisationCertificate\n extra = 0\n\n\nclass CertifyingOrganisationCertificateAdmin(SimpleHistoryAdmin):\n history_list_display = ['issued', 'valid']\n\n\nclass CertifyingOrganisationAdmin(SimpleHistoryAdmin):\n \"\"\"Certifying organisation admin model.\"\"\"\n\n filter_horizontal = ('organisation_owners',)\n search_fields = ['name']\n list_display = ('name', 'country', 'approved', 'rejected')\n list_filter = ('country', 'approved', 'rejected', 'status')\n inlines = (CertifyingOrganisationCertificateAdminInline, )\n history_list_display = ['status', 'remarks']\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass StatusAdmin(admin.ModelAdmin):\n list_display = ('name', 'project', 'order')\n\n\nadmin.site.register(Certificate, CertificateAdmin)\nadmin.site.register(Attendee, AttendeeAdmin)\nadmin.site.register(Course, CourseAdmin)\nadmin.site.register(CourseType, CourseTypeAdmin)\nadmin.site.register(TrainingCenter, TrainingCenterAdmin)\nadmin.site.register(CourseConvener, CourseConvenerAdmin)\nadmin.site.register(CertifyingOrganisation, CertifyingOrganisationAdmin)\nadmin.site.register(CourseAttendee, CourseAttendeeAdmin)\nadmin.site.register(\n CertifyingOrganisationCertificate, CertifyingOrganisationCertificateAdmin)\nadmin.site.register(Status, StatusAdmin)\n", "path": "django_project/certification/admin.py"}], "after_files": [{"content": "# coding=UTF-8\n\"\"\"Model admin class definitions.\"\"\"\n\nfrom django.contrib.gis import admin\nfrom simple_history.admin import SimpleHistoryAdmin\nfrom certification.models.certificate import Certificate\nfrom certification.models.course import Course\nfrom certification.models.training_center import TrainingCenter\nfrom certification.models.course_convener import CourseConvener\nfrom certification.models.course_type import CourseType\nfrom certification.models.attendee import Attendee\nfrom certification.models.course_attendee import CourseAttendee\nfrom certification.models.certifying_organisation import CertifyingOrganisation\nfrom certification.models.organisation_certificate import \\\n CertifyingOrganisationCertificate\nfrom certification.models.status import Status\n\n\nclass 
CertificateAdmin(admin.ModelAdmin):\n \"\"\"Certificate admin model.\"\"\"\n\n list_display = ('__unicode__', 'course')\n search_fields = ('certificateID', 'course__name',)\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass AttendeeAdmin(admin.ModelAdmin):\n \"\"\"Attendee admin model.\"\"\"\n list_display = ('firstname', 'surname', 'email', 'certifying_organisation')\n search_fields = ['firstname', 'surname']\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass CourseAttendeeAdmin(admin.ModelAdmin):\n \"\"\"Certificate admin model.\"\"\"\n list_display = ('course', 'attendee', 'author')\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass CourseAdmin(admin.ModelAdmin):\n \"\"\"Course admin model.\"\"\"\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass CourseTypeAdmin(admin.ModelAdmin):\n \"\"\"Course type admin model.\"\"\"\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass TrainingCenterAdmin(admin.GeoModelAdmin):\n \"\"\"Training center admin model.\"\"\"\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass CourseConvenerAdmin(admin.ModelAdmin):\n \"\"\"Course convener admin model.\"\"\"\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass CertifyingOrganisationCertificateAdminInline(admin.TabularInline):\n model = CertifyingOrganisationCertificate\n extra = 0\n\n\nclass CertifyingOrganisationCertificateAdmin(SimpleHistoryAdmin):\n history_list_display = ['issued', 'valid']\n\n\nclass CertifyingOrganisationAdmin(SimpleHistoryAdmin):\n \"\"\"Certifying organisation admin model.\"\"\"\n\n filter_horizontal = ('organisation_owners',)\n search_fields = ['name']\n list_display = ('name', 'project', 'country', 'approved', 'rejected')\n list_filter = ('country', 'approved', 'rejected', 'status')\n inlines = (CertifyingOrganisationCertificateAdminInline, )\n history_list_display = ['status', 'remarks']\n\n def queryset(self, request):\n \"\"\"Ensure we use the correct manager.\n\n :param 
request: HttpRequest object\n \"\"\"\n query_set = self.model.objects\n ordering = self.get_ordering(request)\n if ordering:\n query_set = query_set.order_by(*ordering)\n return query_set\n\n\nclass StatusAdmin(admin.ModelAdmin):\n list_display = ('name', 'project', 'order')\n\n\nadmin.site.register(Certificate, CertificateAdmin)\nadmin.site.register(Attendee, AttendeeAdmin)\nadmin.site.register(Course, CourseAdmin)\nadmin.site.register(CourseType, CourseTypeAdmin)\nadmin.site.register(TrainingCenter, TrainingCenterAdmin)\nadmin.site.register(CourseConvener, CourseConvenerAdmin)\nadmin.site.register(CertifyingOrganisation, CertifyingOrganisationAdmin)\nadmin.site.register(CourseAttendee, CourseAttendeeAdmin)\nadmin.site.register(\n CertifyingOrganisationCertificate, CertifyingOrganisationCertificateAdmin)\nadmin.site.register(Status, StatusAdmin)\n", "path": "django_project/certification/admin.py"}]} | 1,921 | 153 |
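The accepted fix above is a single changelist tweak. A hedged sketch of the resulting admin class is shown below; it assumes, as the diff implies, that `CertifyingOrganisation` exposes a `project` field Django's changelist can render.

```python
from simple_history.admin import SimpleHistoryAdmin

class CertifyingOrganisationAdmin(SimpleHistoryAdmin):
    """Certifying organisation admin model."""

    # Showing the project column disambiguates organisations that share a
    # name across projects (e.g. two "Kartoza" rows, one per project).
    list_display = ('name', 'project', 'country', 'approved', 'rejected')
    list_filter = ('country', 'approved', 'rejected', 'status')
```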
gh_patches_debug_26619 | rasdani/github-patches | git_diff | benoitc__gunicorn-826 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Upcoming Tornado change breaks gunicorn.workers.gtornado
The next release of Tornado (will be 4.0 when released, although the current master branch hasn't been updated to change all mentions of 3.3 to 4.0) makes some major changes to the HTTP internals and breaks gunicorn's monkey-patching of HTTPConnection.finish. Fortunately, there is now a cleaner way to do the tracking that gunicorn wants to do here, since the interface between HTTPServer and Application is more formally defined by the HTTPServerConnectionDelegate and HTTPMessageDelegate interfaces, so you should be able to wrap the Application (which implements/subclasses HTTPServerConnectionDelegate) and the HTTPMessageDelegate its start_request method returns.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `gunicorn/workers/gtornado.py`
Content:
```
1 # -*- coding: utf-8 -
2 #
3 # This file is part of gunicorn released under the MIT license.
4 # See the NOTICE for more information.
5
6 import os
7 import sys
8
9 try:
10 import tornado.web
11 except ImportError:
12 raise RuntimeError("You need tornado installed to use this worker.")
13 import tornado.httpserver
14 from tornado.ioloop import IOLoop, PeriodicCallback
15 from tornado.wsgi import WSGIContainer
16 from gunicorn.workers.base import Worker
17 from gunicorn import __version__ as gversion
18
19
20 class TornadoWorker(Worker):
21
22 @classmethod
23 def setup(cls):
24 web = sys.modules.pop("tornado.web")
25 old_clear = web.RequestHandler.clear
26
27 def clear(self):
28 old_clear(self)
29 self._headers["Server"] += " (Gunicorn/%s)" % gversion
30 web.RequestHandler.clear = clear
31 sys.modules["tornado.web"] = web
32
33 def handle_exit(self, sig, frame):
34 if self.alive:
35 super(TornadoWorker, self).handle_exit(sig, frame)
36 self.stop()
37
38 def handle_request(self):
39 self.nr += 1
40 if self.alive and self.nr >= self.max_requests:
41 self.alive = False
42 self.log.info("Autorestarting worker after current request.")
43 self.stop()
44
45 def watchdog(self):
46 if self.alive:
47 self.notify()
48
49 if self.ppid != os.getppid():
50 self.log.info("Parent changed, shutting down: %s", self)
51 self.stop()
52
53 def run(self):
54 self.ioloop = IOLoop.instance()
55 self.alive = True
56 PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
57
58 # Assume the app is a WSGI callable if its not an
59 # instance of tornado.web.Application or is an
60 # instance of tornado.wsgi.WSGIApplication
61 app = self.wsgi
62 if not isinstance(app, tornado.web.Application) or \
63 isinstance(app, tornado.wsgi.WSGIApplication):
64 app = WSGIContainer(app)
65
66 # Monkey-patching HTTPConnection.finish to count the
67 # number of requests being handled by Tornado. This
68 # will help gunicorn shutdown the worker if max_requests
69 # is exceeded.
70 httpserver = sys.modules["tornado.httpserver"]
71 old_connection_finish = httpserver.HTTPConnection.finish
72
73 def finish(other):
74 self.handle_request()
75 old_connection_finish(other)
76 httpserver.HTTPConnection.finish = finish
77 sys.modules["tornado.httpserver"] = httpserver
78
79 if self.cfg.is_ssl:
80 server = tornado.httpserver.HTTPServer(app, io_loop=self.ioloop,
81 ssl_options=self.cfg.ssl_options)
82 else:
83 server = tornado.httpserver.HTTPServer(app,
84 io_loop=self.ioloop)
85
86 self.server = server
87
88 for s in self.sockets:
89 s.setblocking(0)
90 if hasattr(server, "add_socket"): # tornado > 2.0
91 server.add_socket(s)
92 elif hasattr(server, "_sockets"): # tornado 2.0
93 server._sockets[s.fileno()] = s
94
95 server.no_keep_alive = self.cfg.keepalive <= 0
96 server.start(num_processes=1)
97
98 self.ioloop.start()
99
100 def stop(self):
101 if hasattr(self, 'server'):
102 try:
103 self.server.stop()
104 except Exception:
105 pass
106 PeriodicCallback(self.stop_ioloop, 1000, io_loop=self.ioloop).start()
107
108 def stop_ioloop(self):
109 if not self.ioloop._callbacks and len(self.ioloop._timeouts) <= 1:
110 self.ioloop.stop()
111
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/gunicorn/workers/gtornado.py b/gunicorn/workers/gtornado.py
--- a/gunicorn/workers/gtornado.py
+++ b/gunicorn/workers/gtornado.py
@@ -68,20 +68,31 @@
# will help gunicorn shutdown the worker if max_requests
# is exceeded.
httpserver = sys.modules["tornado.httpserver"]
- old_connection_finish = httpserver.HTTPConnection.finish
+ if hasattr(httpserver, 'HTTPConnection'):
+ old_connection_finish = httpserver.HTTPConnection.finish
- def finish(other):
- self.handle_request()
- old_connection_finish(other)
- httpserver.HTTPConnection.finish = finish
- sys.modules["tornado.httpserver"] = httpserver
+ def finish(other):
+ self.handle_request()
+ old_connection_finish(other)
+ httpserver.HTTPConnection.finish = finish
+ sys.modules["tornado.httpserver"] = httpserver
+
+ server_class = tornado.httpserver.HTTPServer
+ else:
+
+ class _HTTPServer(tornado.httpserver.HTTPServer):
+
+ def on_close(instance, server_conn):
+ self.handle_request()
+ super(_HTTPServer, instance).on_close(server_conn)
+
+ server_class = _HTTPServer
if self.cfg.is_ssl:
- server = tornado.httpserver.HTTPServer(app, io_loop=self.ioloop,
+ server = server_class(app, io_loop=self.ioloop,
ssl_options=self.cfg.ssl_options)
else:
- server = tornado.httpserver.HTTPServer(app,
- io_loop=self.ioloop)
+ server = server_class(app, io_loop=self.ioloop)
self.server = server
| {"golden_diff": "diff --git a/gunicorn/workers/gtornado.py b/gunicorn/workers/gtornado.py\n--- a/gunicorn/workers/gtornado.py\n+++ b/gunicorn/workers/gtornado.py\n@@ -68,20 +68,31 @@\n # will help gunicorn shutdown the worker if max_requests\n # is exceeded.\n httpserver = sys.modules[\"tornado.httpserver\"]\n- old_connection_finish = httpserver.HTTPConnection.finish\n+ if hasattr(httpserver, 'HTTPConnection'):\n+ old_connection_finish = httpserver.HTTPConnection.finish\n \n- def finish(other):\n- self.handle_request()\n- old_connection_finish(other)\n- httpserver.HTTPConnection.finish = finish\n- sys.modules[\"tornado.httpserver\"] = httpserver\n+ def finish(other):\n+ self.handle_request()\n+ old_connection_finish(other)\n+ httpserver.HTTPConnection.finish = finish\n+ sys.modules[\"tornado.httpserver\"] = httpserver\n+\n+ server_class = tornado.httpserver.HTTPServer\n+ else:\n+\n+ class _HTTPServer(tornado.httpserver.HTTPServer):\n+\n+ def on_close(instance, server_conn):\n+ self.handle_request()\n+ super(_HTTPServer, instance).on_close(server_conn)\n+\n+ server_class = _HTTPServer\n \n if self.cfg.is_ssl:\n- server = tornado.httpserver.HTTPServer(app, io_loop=self.ioloop,\n+ server = server_class(app, io_loop=self.ioloop,\n ssl_options=self.cfg.ssl_options)\n else:\n- server = tornado.httpserver.HTTPServer(app,\n- io_loop=self.ioloop)\n+ server = server_class(app, io_loop=self.ioloop)\n \n self.server = server\n", "issue": "Upcoming Tornado change breaks gunicorn.workers.gtornado\nThe next release of Tornado (will be 4.0 when released, although the current master branch hasn't been updated to change all mentions of 3.3 to 4.0) makes some major changes to the HTTP internals and breaks gunicorn's monkey-patching of HTTPConnection.finish. Fortunately, there is now a cleaner way to do the tracking that gunicorn wants to do here, since the interface between HTTPServer and Application is more formally defined by the HTTPServerConnectionDelegate and HTTPMessageDelegate interfaces, so you should be able to wrap the Application (which implements/subclasses HTTPServerConnectionDelegate) and the HTTPMessageDelegate its start_request method returns. 
\n\n", "before_files": [{"content": "# -*- coding: utf-8 -\n#\n# This file is part of gunicorn released under the MIT license.\n# See the NOTICE for more information.\n\nimport os\nimport sys\n\ntry:\n import tornado.web\nexcept ImportError:\n raise RuntimeError(\"You need tornado installed to use this worker.\")\nimport tornado.httpserver\nfrom tornado.ioloop import IOLoop, PeriodicCallback\nfrom tornado.wsgi import WSGIContainer\nfrom gunicorn.workers.base import Worker\nfrom gunicorn import __version__ as gversion\n\n\nclass TornadoWorker(Worker):\n\n @classmethod\n def setup(cls):\n web = sys.modules.pop(\"tornado.web\")\n old_clear = web.RequestHandler.clear\n\n def clear(self):\n old_clear(self)\n self._headers[\"Server\"] += \" (Gunicorn/%s)\" % gversion\n web.RequestHandler.clear = clear\n sys.modules[\"tornado.web\"] = web\n\n def handle_exit(self, sig, frame):\n if self.alive:\n super(TornadoWorker, self).handle_exit(sig, frame)\n self.stop()\n\n def handle_request(self):\n self.nr += 1\n if self.alive and self.nr >= self.max_requests:\n self.alive = False\n self.log.info(\"Autorestarting worker after current request.\")\n self.stop()\n\n def watchdog(self):\n if self.alive:\n self.notify()\n\n if self.ppid != os.getppid():\n self.log.info(\"Parent changed, shutting down: %s\", self)\n self.stop()\n\n def run(self):\n self.ioloop = IOLoop.instance()\n self.alive = True\n PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()\n\n # Assume the app is a WSGI callable if its not an\n # instance of tornado.web.Application or is an\n # instance of tornado.wsgi.WSGIApplication\n app = self.wsgi\n if not isinstance(app, tornado.web.Application) or \\\n isinstance(app, tornado.wsgi.WSGIApplication):\n app = WSGIContainer(app)\n\n # Monkey-patching HTTPConnection.finish to count the\n # number of requests being handled by Tornado. 
This\n # will help gunicorn shutdown the worker if max_requests\n # is exceeded.\n httpserver = sys.modules[\"tornado.httpserver\"]\n old_connection_finish = httpserver.HTTPConnection.finish\n\n def finish(other):\n self.handle_request()\n old_connection_finish(other)\n httpserver.HTTPConnection.finish = finish\n sys.modules[\"tornado.httpserver\"] = httpserver\n\n if self.cfg.is_ssl:\n server = tornado.httpserver.HTTPServer(app, io_loop=self.ioloop,\n ssl_options=self.cfg.ssl_options)\n else:\n server = tornado.httpserver.HTTPServer(app,\n io_loop=self.ioloop)\n\n self.server = server\n\n for s in self.sockets:\n s.setblocking(0)\n if hasattr(server, \"add_socket\"): # tornado > 2.0\n server.add_socket(s)\n elif hasattr(server, \"_sockets\"): # tornado 2.0\n server._sockets[s.fileno()] = s\n\n server.no_keep_alive = self.cfg.keepalive <= 0\n server.start(num_processes=1)\n\n self.ioloop.start()\n\n def stop(self):\n if hasattr(self, 'server'):\n try:\n self.server.stop()\n except Exception:\n pass\n PeriodicCallback(self.stop_ioloop, 1000, io_loop=self.ioloop).start()\n\n def stop_ioloop(self):\n if not self.ioloop._callbacks and len(self.ioloop._timeouts) <= 1:\n self.ioloop.stop()\n", "path": "gunicorn/workers/gtornado.py"}], "after_files": [{"content": "# -*- coding: utf-8 -\n#\n# This file is part of gunicorn released under the MIT license.\n# See the NOTICE for more information.\n\nimport os\nimport sys\n\ntry:\n import tornado.web\nexcept ImportError:\n raise RuntimeError(\"You need tornado installed to use this worker.\")\nimport tornado.httpserver\nfrom tornado.ioloop import IOLoop, PeriodicCallback\nfrom tornado.wsgi import WSGIContainer\nfrom gunicorn.workers.base import Worker\nfrom gunicorn import __version__ as gversion\n\n\nclass TornadoWorker(Worker):\n\n @classmethod\n def setup(cls):\n web = sys.modules.pop(\"tornado.web\")\n old_clear = web.RequestHandler.clear\n\n def clear(self):\n old_clear(self)\n self._headers[\"Server\"] += \" (Gunicorn/%s)\" % gversion\n web.RequestHandler.clear = clear\n sys.modules[\"tornado.web\"] = web\n\n def handle_exit(self, sig, frame):\n if self.alive:\n super(TornadoWorker, self).handle_exit(sig, frame)\n self.stop()\n\n def handle_request(self):\n self.nr += 1\n if self.alive and self.nr >= self.max_requests:\n self.alive = False\n self.log.info(\"Autorestarting worker after current request.\")\n self.stop()\n\n def watchdog(self):\n if self.alive:\n self.notify()\n\n if self.ppid != os.getppid():\n self.log.info(\"Parent changed, shutting down: %s\", self)\n self.stop()\n\n def run(self):\n self.ioloop = IOLoop.instance()\n self.alive = True\n PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()\n\n # Assume the app is a WSGI callable if its not an\n # instance of tornado.web.Application or is an\n # instance of tornado.wsgi.WSGIApplication\n app = self.wsgi\n if not isinstance(app, tornado.web.Application) or \\\n isinstance(app, tornado.wsgi.WSGIApplication):\n app = WSGIContainer(app)\n\n # Monkey-patching HTTPConnection.finish to count the\n # number of requests being handled by Tornado. 
This\n # will help gunicorn shutdown the worker if max_requests\n # is exceeded.\n httpserver = sys.modules[\"tornado.httpserver\"]\n if hasattr(httpserver, 'HTTPConnection'):\n old_connection_finish = httpserver.HTTPConnection.finish\n\n def finish(other):\n self.handle_request()\n old_connection_finish(other)\n httpserver.HTTPConnection.finish = finish\n sys.modules[\"tornado.httpserver\"] = httpserver\n\n server_class = tornado.httpserver.HTTPServer\n else:\n\n class _HTTPServer(tornado.httpserver.HTTPServer):\n\n def on_close(instance, server_conn):\n self.handle_request()\n super(_HTTPServer, instance).on_close(server_conn)\n\n server_class = _HTTPServer\n\n if self.cfg.is_ssl:\n server = server_class(app, io_loop=self.ioloop,\n ssl_options=self.cfg.ssl_options)\n else:\n server = server_class(app, io_loop=self.ioloop)\n\n self.server = server\n\n for s in self.sockets:\n s.setblocking(0)\n if hasattr(server, \"add_socket\"): # tornado > 2.0\n server.add_socket(s)\n elif hasattr(server, \"_sockets\"): # tornado 2.0\n server._sockets[s.fileno()] = s\n\n server.no_keep_alive = self.cfg.keepalive <= 0\n server.start(num_processes=1)\n\n self.ioloop.start()\n\n def stop(self):\n if hasattr(self, 'server'):\n try:\n self.server.stop()\n except Exception:\n pass\n PeriodicCallback(self.stop_ioloop, 1000, io_loop=self.ioloop).start()\n\n def stop_ioloop(self):\n if not self.ioloop._callbacks and len(self.ioloop._timeouts) <= 1:\n self.ioloop.stop()\n", "path": "gunicorn/workers/gtornado.py"}]} | 1,455 | 373 |
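The key idea in this patch is a feature test: `HTTPConnection` exists only on Tornado versions before 4.0, so its absence selects the delegate-era hook. A trimmed sketch of that branch logic follows; the worker callback is reduced to a comment here, since in the real diff it closes over the enclosing `TornadoWorker` instance.

```python
import sys
import tornado.httpserver

httpserver = sys.modules["tornado.httpserver"]
if hasattr(httpserver, "HTTPConnection"):
    # Tornado < 4.0: HTTPConnection exists, so the old monkey-patch of
    # HTTPConnection.finish still works for counting finished requests.
    server_class = tornado.httpserver.HTTPServer
else:
    # Tornado >= 4.0: HTTPConnection is gone; count requests by hooking
    # HTTPServer.on_close, which the delegate-based server calls when a
    # connection closes.
    class _HTTPServer(tornado.httpserver.HTTPServer):
        def on_close(self, server_conn):
            # In the real patch this calls the worker's handle_request();
            # left as a placeholder in this standalone sketch.
            super().on_close(server_conn)

    server_class = _HTTPServer
```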
gh_patches_debug_4286 | rasdani/github-patches | git_diff | mozilla__pontoon-2379 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Redirected to a non-existing page while trying to translate to en-US
## Description
When I try to translate an application to `en-US`, I am redirected (`301`) to a non-existing page:

→ `/en-US/my-project/` is redirected to `/my-project/`
## Steps to reproduce
* Add the `en-US` locale to a project
* Browse project's language
* Click on English (en-US)
* → 404
## More...
This seems to be caused by some legacy code in `pontoon/urls.py:26`:
```python
urlpatterns = [
# Legacy: Locale redirect for compatibility with i18n ready URL scheme
path("en-US<path:url>", RedirectView.as_view(url="%(url)s", permanent=True)),
```
Removing this line fixes the problem and does not seem to cause additional issues (I am still testing on a production instance).
* This issue may be related to → #2192
* Another issue to look at about `en-US` locale → #2260
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pontoon/urls.py`
Content:
```
1 from django.urls import include, path, register_converter
2 from django.urls.converters import StringConverter
3 from django.contrib import admin
4 from django.contrib.auth import logout
5 from django.views.generic import RedirectView, TemplateView
6
7 from pontoon.teams.views import team
8
9
10 class LocaleConverter(StringConverter):
11 regex = r"[A-Za-z0-9\-\@\.]+"
12
13
14 register_converter(LocaleConverter, "locale")
15
16 pontoon_js_view = TemplateView.as_view(
17 template_name="js/pontoon.js", content_type="text/javascript"
18 )
19
20 permission_denied_view = TemplateView.as_view(template_name="403.html")
21 page_not_found_view = TemplateView.as_view(template_name="404.html")
22 server_error_view = TemplateView.as_view(template_name="500.html")
23
24 urlpatterns = [
25 # Legacy: Locale redirect for compatibility with i18n ready URL scheme
26 path("en-US<path:url>", RedirectView.as_view(url="%(url)s", permanent=True)),
27 # Redirect legacy Aurora projects
28 path(
29 "projects/firefox-aurora/<path:url>",
30 RedirectView.as_view(url="/projects/firefox/%(url)s", permanent=True),
31 ),
32 path(
33 "projects/firefox-for-android-aurora/<path:url>",
34 RedirectView.as_view(
35 url="/projects/firefox-for-android/%(url)s", permanent=True
36 ),
37 ),
38 path(
39 "projects/thunderbird-aurora/<path:url>",
40 RedirectView.as_view(url="/projects/thunderbird/%(url)s", permanent=True),
41 ),
42 path(
43 "projects/lightning-aurora/<path:url>",
44 RedirectView.as_view(url="/projects/lightning/%(url)s", permanent=True),
45 ),
46 path(
47 "projects/seamonkey-aurora/<path:url>",
48 RedirectView.as_view(url="/projects/seamonkey/%(url)s", permanent=True),
49 ),
50 path(
51 "<locale:locale>/firefox-aurora/<path:url>",
52 RedirectView.as_view(url="/%(locale)s/firefox/%(url)s", permanent=True),
53 ),
54 path(
55 "<locale:locale>/firefox-for-android-aurora/<path:url>",
56 RedirectView.as_view(
57 url="/%(locale)s/firefox-for-android/%(url)s", permanent=True
58 ),
59 ),
60 path(
61 "<locale:locale>/thunderbird-aurora/<path:url>",
62 RedirectView.as_view(url="/%(locale)s/thunderbird/%(url)s", permanent=True),
63 ),
64 path(
65 "<locale:locale>/lightning-aurora/<path:url>",
66 RedirectView.as_view(url="/%(locale)s/lightning/%(url)s", permanent=True),
67 ),
68 path(
69 "<locale:locale>/seamonkey-aurora/<path:url>",
70 RedirectView.as_view(url="/%(locale)s/seamonkey/%(url)s", permanent=True),
71 ),
72 # Accounts
73 path("accounts/", include("pontoon.allauth_urls")),
74 # Admin
75 path("admin/", include("pontoon.administration.urls")),
76 # Django admin: Disable the login form
77 path("a/login/", permission_denied_view),
78 # Django admin
79 path("a/", admin.site.urls),
80 # Logout
81 path("signout/", logout, {"next_page": "/"}, name="signout"),
82 # Error pages
83 path("403/", permission_denied_view),
84 path("404/", page_not_found_view),
85 path("500/", server_error_view),
86 # Robots.txt
87 path(
88 "robots.txt",
89 TemplateView.as_view(template_name="robots.txt", content_type="text/plain"),
90 ),
91 # contribute.json
92 path(
93 "contribute.json",
94 TemplateView.as_view(
95 template_name="contribute.json", content_type="text/plain"
96 ),
97 ),
98 # Favicon
99 path(
100 "favicon.ico",
101 RedirectView.as_view(url="/static/img/favicon.ico", permanent=True),
102 ),
103 # Include script
104 path("pontoon.js", pontoon_js_view),
105 path("static/js/pontoon.js", pontoon_js_view),
106 # Include URL configurations from installed apps
107 path("terminology/", include("pontoon.terminology.urls")),
108 path("translations/", include("pontoon.translations.urls")),
109 path("", include("pontoon.teams.urls")),
110 path("", include("pontoon.tour.urls")),
111 path("", include("pontoon.tags.urls")),
112 path("", include("pontoon.sync.urls")),
113 path("", include("pontoon.projects.urls")),
114 path("", include("pontoon.machinery.urls")),
115 path("", include("pontoon.contributors.urls")),
116 path("", include("pontoon.localizations.urls")),
117 path("", include("pontoon.base.urls")),
118 path("", include("pontoon.translate.urls")),
119 path("", include("pontoon.batch.urls")),
120 path("", include("pontoon.api.urls")),
121 path("", include("pontoon.homepage.urls")),
122 path("", include("pontoon.in_context.urls")),
123 path("", include("pontoon.uxactionlog.urls")),
124 # Team page: Must be at the end
125 path("<locale:locale>/", team, name="pontoon.teams.team"),
126 ]
127
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pontoon/urls.py b/pontoon/urls.py
--- a/pontoon/urls.py
+++ b/pontoon/urls.py
@@ -22,8 +22,6 @@
server_error_view = TemplateView.as_view(template_name="500.html")
urlpatterns = [
- # Legacy: Locale redirect for compatibility with i18n ready URL scheme
- path("en-US<path:url>", RedirectView.as_view(url="%(url)s", permanent=True)),
# Redirect legacy Aurora projects
path(
"projects/firefox-aurora/<path:url>",
| {"golden_diff": "diff --git a/pontoon/urls.py b/pontoon/urls.py\n--- a/pontoon/urls.py\n+++ b/pontoon/urls.py\n@@ -22,8 +22,6 @@\n server_error_view = TemplateView.as_view(template_name=\"500.html\")\n \n urlpatterns = [\n- # Legacy: Locale redirect for compatibility with i18n ready URL scheme\n- path(\"en-US<path:url>\", RedirectView.as_view(url=\"%(url)s\", permanent=True)),\n # Redirect legacy Aurora projects\n path(\n \"projects/firefox-aurora/<path:url>\",\n", "issue": "Redirected to a non-existing page while tring to translate to en-US\n## Description\r\n\r\nWhen I try to translate an application to `en-US`, I am redirected (`301`) to a non-existing page:\r\n\r\n\r\n\r\n\u2192 `/en-US/my-project/` is redirected to `/my-project/`\r\n\r\n## Steps to reproduce\r\n\r\n* Add the `en-US` locale to a project\r\n* Browse project's language\r\n* Click on English (en-US)\r\n* \u2192 404\r\n\r\n## More...\r\n\r\nThis seems to be caused by some legacy code in `pontoon/urls.py:26`:\r\n\r\n```python\r\nurlpatterns = [\r\n # Legacy: Locale redirect for compatibility with i18n ready URL scheme\r\n path(\"en-US<path:url>\", RedirectView.as_view(url=\"%(url)s\", permanent=True)),\r\n```\r\n\r\nRemoving this line fixes the problem and does not seem to cause additional issues (I am still testing on a production instance).\r\n\r\n* This issue may be related to \u2192 #2192 \r\n* Another issue to look at about `en-US` locale \u2192 #2260\r\n\n", "before_files": [{"content": "from django.urls import include, path, register_converter\nfrom django.urls.converters import StringConverter\nfrom django.contrib import admin\nfrom django.contrib.auth import logout\nfrom django.views.generic import RedirectView, TemplateView\n\nfrom pontoon.teams.views import team\n\n\nclass LocaleConverter(StringConverter):\n regex = r\"[A-Za-z0-9\\-\\@\\.]+\"\n\n\nregister_converter(LocaleConverter, \"locale\")\n\npontoon_js_view = TemplateView.as_view(\n template_name=\"js/pontoon.js\", content_type=\"text/javascript\"\n)\n\npermission_denied_view = TemplateView.as_view(template_name=\"403.html\")\npage_not_found_view = TemplateView.as_view(template_name=\"404.html\")\nserver_error_view = TemplateView.as_view(template_name=\"500.html\")\n\nurlpatterns = [\n # Legacy: Locale redirect for compatibility with i18n ready URL scheme\n path(\"en-US<path:url>\", RedirectView.as_view(url=\"%(url)s\", permanent=True)),\n # Redirect legacy Aurora projects\n path(\n \"projects/firefox-aurora/<path:url>\",\n RedirectView.as_view(url=\"/projects/firefox/%(url)s\", permanent=True),\n ),\n path(\n \"projects/firefox-for-android-aurora/<path:url>\",\n RedirectView.as_view(\n url=\"/projects/firefox-for-android/%(url)s\", permanent=True\n ),\n ),\n path(\n \"projects/thunderbird-aurora/<path:url>\",\n RedirectView.as_view(url=\"/projects/thunderbird/%(url)s\", permanent=True),\n ),\n path(\n \"projects/lightning-aurora/<path:url>\",\n RedirectView.as_view(url=\"/projects/lightning/%(url)s\", permanent=True),\n ),\n path(\n \"projects/seamonkey-aurora/<path:url>\",\n RedirectView.as_view(url=\"/projects/seamonkey/%(url)s\", permanent=True),\n ),\n path(\n \"<locale:locale>/firefox-aurora/<path:url>\",\n RedirectView.as_view(url=\"/%(locale)s/firefox/%(url)s\", permanent=True),\n ),\n path(\n \"<locale:locale>/firefox-for-android-aurora/<path:url>\",\n RedirectView.as_view(\n url=\"/%(locale)s/firefox-for-android/%(url)s\", permanent=True\n ),\n ),\n path(\n \"<locale:locale>/thunderbird-aurora/<path:url>\",\n 
RedirectView.as_view(url=\"/%(locale)s/thunderbird/%(url)s\", permanent=True),\n ),\n path(\n \"<locale:locale>/lightning-aurora/<path:url>\",\n RedirectView.as_view(url=\"/%(locale)s/lightning/%(url)s\", permanent=True),\n ),\n path(\n \"<locale:locale>/seamonkey-aurora/<path:url>\",\n RedirectView.as_view(url=\"/%(locale)s/seamonkey/%(url)s\", permanent=True),\n ),\n # Accounts\n path(\"accounts/\", include(\"pontoon.allauth_urls\")),\n # Admin\n path(\"admin/\", include(\"pontoon.administration.urls\")),\n # Django admin: Disable the login form\n path(\"a/login/\", permission_denied_view),\n # Django admin\n path(\"a/\", admin.site.urls),\n # Logout\n path(\"signout/\", logout, {\"next_page\": \"/\"}, name=\"signout\"),\n # Error pages\n path(\"403/\", permission_denied_view),\n path(\"404/\", page_not_found_view),\n path(\"500/\", server_error_view),\n # Robots.txt\n path(\n \"robots.txt\",\n TemplateView.as_view(template_name=\"robots.txt\", content_type=\"text/plain\"),\n ),\n # contribute.json\n path(\n \"contribute.json\",\n TemplateView.as_view(\n template_name=\"contribute.json\", content_type=\"text/plain\"\n ),\n ),\n # Favicon\n path(\n \"favicon.ico\",\n RedirectView.as_view(url=\"/static/img/favicon.ico\", permanent=True),\n ),\n # Include script\n path(\"pontoon.js\", pontoon_js_view),\n path(\"static/js/pontoon.js\", pontoon_js_view),\n # Include URL configurations from installed apps\n path(\"terminology/\", include(\"pontoon.terminology.urls\")),\n path(\"translations/\", include(\"pontoon.translations.urls\")),\n path(\"\", include(\"pontoon.teams.urls\")),\n path(\"\", include(\"pontoon.tour.urls\")),\n path(\"\", include(\"pontoon.tags.urls\")),\n path(\"\", include(\"pontoon.sync.urls\")),\n path(\"\", include(\"pontoon.projects.urls\")),\n path(\"\", include(\"pontoon.machinery.urls\")),\n path(\"\", include(\"pontoon.contributors.urls\")),\n path(\"\", include(\"pontoon.localizations.urls\")),\n path(\"\", include(\"pontoon.base.urls\")),\n path(\"\", include(\"pontoon.translate.urls\")),\n path(\"\", include(\"pontoon.batch.urls\")),\n path(\"\", include(\"pontoon.api.urls\")),\n path(\"\", include(\"pontoon.homepage.urls\")),\n path(\"\", include(\"pontoon.in_context.urls\")),\n path(\"\", include(\"pontoon.uxactionlog.urls\")),\n # Team page: Must be at the end\n path(\"<locale:locale>/\", team, name=\"pontoon.teams.team\"),\n]\n", "path": "pontoon/urls.py"}], "after_files": [{"content": "from django.urls import include, path, register_converter\nfrom django.urls.converters import StringConverter\nfrom django.contrib import admin\nfrom django.contrib.auth import logout\nfrom django.views.generic import RedirectView, TemplateView\n\nfrom pontoon.teams.views import team\n\n\nclass LocaleConverter(StringConverter):\n regex = r\"[A-Za-z0-9\\-\\@\\.]+\"\n\n\nregister_converter(LocaleConverter, \"locale\")\n\npontoon_js_view = TemplateView.as_view(\n template_name=\"js/pontoon.js\", content_type=\"text/javascript\"\n)\n\npermission_denied_view = TemplateView.as_view(template_name=\"403.html\")\npage_not_found_view = TemplateView.as_view(template_name=\"404.html\")\nserver_error_view = TemplateView.as_view(template_name=\"500.html\")\n\nurlpatterns = [\n # Redirect legacy Aurora projects\n path(\n \"projects/firefox-aurora/<path:url>\",\n RedirectView.as_view(url=\"/projects/firefox/%(url)s\", permanent=True),\n ),\n path(\n \"projects/firefox-for-android-aurora/<path:url>\",\n RedirectView.as_view(\n url=\"/projects/firefox-for-android/%(url)s\", permanent=True\n ),\n 
),\n path(\n \"projects/thunderbird-aurora/<path:url>\",\n RedirectView.as_view(url=\"/projects/thunderbird/%(url)s\", permanent=True),\n ),\n path(\n \"projects/lightning-aurora/<path:url>\",\n RedirectView.as_view(url=\"/projects/lightning/%(url)s\", permanent=True),\n ),\n path(\n \"projects/seamonkey-aurora/<path:url>\",\n RedirectView.as_view(url=\"/projects/seamonkey/%(url)s\", permanent=True),\n ),\n path(\n \"<locale:locale>/firefox-aurora/<path:url>\",\n RedirectView.as_view(url=\"/%(locale)s/firefox/%(url)s\", permanent=True),\n ),\n path(\n \"<locale:locale>/firefox-for-android-aurora/<path:url>\",\n RedirectView.as_view(\n url=\"/%(locale)s/firefox-for-android/%(url)s\", permanent=True\n ),\n ),\n path(\n \"<locale:locale>/thunderbird-aurora/<path:url>\",\n RedirectView.as_view(url=\"/%(locale)s/thunderbird/%(url)s\", permanent=True),\n ),\n path(\n \"<locale:locale>/lightning-aurora/<path:url>\",\n RedirectView.as_view(url=\"/%(locale)s/lightning/%(url)s\", permanent=True),\n ),\n path(\n \"<locale:locale>/seamonkey-aurora/<path:url>\",\n RedirectView.as_view(url=\"/%(locale)s/seamonkey/%(url)s\", permanent=True),\n ),\n # Accounts\n path(\"accounts/\", include(\"pontoon.allauth_urls\")),\n # Admin\n path(\"admin/\", include(\"pontoon.administration.urls\")),\n # Django admin: Disable the login form\n path(\"a/login/\", permission_denied_view),\n # Django admin\n path(\"a/\", admin.site.urls),\n # Logout\n path(\"signout/\", logout, {\"next_page\": \"/\"}, name=\"signout\"),\n # Error pages\n path(\"403/\", permission_denied_view),\n path(\"404/\", page_not_found_view),\n path(\"500/\", server_error_view),\n # Robots.txt\n path(\n \"robots.txt\",\n TemplateView.as_view(template_name=\"robots.txt\", content_type=\"text/plain\"),\n ),\n # contribute.json\n path(\n \"contribute.json\",\n TemplateView.as_view(\n template_name=\"contribute.json\", content_type=\"text/plain\"\n ),\n ),\n # Favicon\n path(\n \"favicon.ico\",\n RedirectView.as_view(url=\"/static/img/favicon.ico\", permanent=True),\n ),\n # Include script\n path(\"pontoon.js\", pontoon_js_view),\n path(\"static/js/pontoon.js\", pontoon_js_view),\n # Include URL configurations from installed apps\n path(\"terminology/\", include(\"pontoon.terminology.urls\")),\n path(\"translations/\", include(\"pontoon.translations.urls\")),\n path(\"\", include(\"pontoon.teams.urls\")),\n path(\"\", include(\"pontoon.tour.urls\")),\n path(\"\", include(\"pontoon.tags.urls\")),\n path(\"\", include(\"pontoon.sync.urls\")),\n path(\"\", include(\"pontoon.projects.urls\")),\n path(\"\", include(\"pontoon.machinery.urls\")),\n path(\"\", include(\"pontoon.contributors.urls\")),\n path(\"\", include(\"pontoon.localizations.urls\")),\n path(\"\", include(\"pontoon.base.urls\")),\n path(\"\", include(\"pontoon.translate.urls\")),\n path(\"\", include(\"pontoon.batch.urls\")),\n path(\"\", include(\"pontoon.api.urls\")),\n path(\"\", include(\"pontoon.homepage.urls\")),\n path(\"\", include(\"pontoon.in_context.urls\")),\n path(\"\", include(\"pontoon.uxactionlog.urls\")),\n # Team page: Must be at the end\n path(\"<locale:locale>/\", team, name=\"pontoon.teams.team\"),\n]\n", "path": "pontoon/urls.py"}]} | 1,952 | 130 |
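A possible regression check for this fix is sketched below. The URL comes from the issue screenshot and the test itself is an assumption, not part of the repository's suite.

```python
# Hypothetical Django test; the path is taken from the issue report.
from django.test import Client

def test_en_us_is_not_stripped():
    client = Client()
    response = client.get("/en-US/my-project/")
    # Before the fix the legacy pattern answered 301 -> /my-project/ (a 404);
    # after removing it, en-US should resolve like any other locale.
    assert response.status_code != 301
```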
gh_patches_debug_61381 | rasdani/github-patches | git_diff | tensorflow__addons-1213 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Proposal: Upload the nightlies at each commit on the master branch
We already build the wheels already anyway. It's just missing the push to pypi. Yes/No?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 # ==============================================================================
15 """TensorFlow Addons.
16
17 TensorFlow Addons is a repository of contributions that conform to well-
18 established API patterns, but implement new functionality not available
19 in core TensorFlow. TensorFlow natively supports a large number of
20 operators, layers, metrics, losses, and optimizers. However, in a fast
21 moving field like ML, there are many interesting new developments that
22 cannot be integrated into core TensorFlow (because their broad
23 applicability is not yet clear, or it is mostly used by a smaller subset
24 of the community).
25 """
26
27 import os
28 import sys
29
30 from datetime import datetime
31 from setuptools import find_packages
32 from setuptools import setup
33 from setuptools.dist import Distribution
34 from setuptools import Extension
35
36 DOCLINES = __doc__.split("\n")
37
38 TFA_NIGHTLY = "tfa-nightly"
39 TFA_RELEASE = "tensorflow-addons"
40
41 if "--nightly" in sys.argv:
42 project_name = TFA_NIGHTLY
43 nightly_idx = sys.argv.index("--nightly")
44 sys.argv.pop(nightly_idx)
45 else:
46 project_name = TFA_RELEASE
47
48 # Version
49 version = {}
50 base_dir = os.path.dirname(os.path.abspath(__file__))
51 with open(os.path.join(base_dir, "tensorflow_addons", "version.py")) as fp:
52 exec(fp.read(), version)
53
54 if project_name == TFA_NIGHTLY:
55 version["__version__"] += datetime.strftime(datetime.today(), "%Y%m%d")
56
57 with open("requirements.txt") as f:
58 required_pkgs = f.read().splitlines()
59
60 # Manylinux2010 requires a patch for platlib
61 if (
62 sys.platform.startswith("linux")
63 and os.environ.get("TF_ADDONS_NO_BUILD", "0") == "0"
64 ):
65 ext_modules = [Extension("_foo", ["stub.cc"])]
66 else:
67 ext_modules = []
68
69
70 class BinaryDistribution(Distribution):
71 """This class is needed in order to create OS specific wheels."""
72
73 def has_ext_modules(self):
74 return True
75
76
77 setup(
78 name=project_name,
79 version=version["__version__"],
80 description=DOCLINES[0],
81 long_description="\n".join(DOCLINES[2:]),
82 author="Google Inc.",
83 author_email="[email protected]",
84 packages=find_packages(),
85 ext_modules=ext_modules,
86 install_requires=required_pkgs,
87 include_package_data=True,
88 zip_safe=False,
89 distclass=BinaryDistribution,
90 classifiers=[
91 "Development Status :: 4 - Beta",
92 "Intended Audience :: Developers",
93 "Intended Audience :: Education",
94 "Intended Audience :: Science/Research",
95 "License :: OSI Approved :: Apache Software License",
96 "Programming Language :: Python :: 3.5",
97 "Programming Language :: Python :: 3.6",
98 "Programming Language :: Python :: 3.7",
99 "Topic :: Scientific/Engineering :: Mathematics",
100 "Topic :: Software Development :: Libraries :: Python Modules",
101 "Topic :: Software Development :: Libraries",
102 ],
103 license="Apache 2.0",
104 keywords="tensorflow addons machine learning",
105 )
106
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -52,7 +52,7 @@
exec(fp.read(), version)
if project_name == TFA_NIGHTLY:
- version["__version__"] += datetime.strftime(datetime.today(), "%Y%m%d")
+ version["__version__"] += datetime.now().strftime("%Y%m%d%H%M%S")
with open("requirements.txt") as f:
required_pkgs = f.read().splitlines()
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -52,7 +52,7 @@\n exec(fp.read(), version)\n \n if project_name == TFA_NIGHTLY:\n- version[\"__version__\"] += datetime.strftime(datetime.today(), \"%Y%m%d\")\n+ version[\"__version__\"] += datetime.now().strftime(\"%Y%m%d%H%M%S\")\n \n with open(\"requirements.txt\") as f:\n required_pkgs = f.read().splitlines()\n", "issue": "Proposal: Upload the nightlies at each commit on the master branch\nWe already build the wheels already anyway. It's just missing the push to pypi. Yes/No?\n", "before_files": [{"content": "# Copyright 2019 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"TensorFlow Addons.\n\nTensorFlow Addons is a repository of contributions that conform to well-\nestablished API patterns, but implement new functionality not available\nin core TensorFlow. TensorFlow natively supports a large number of\noperators, layers, metrics, losses, and optimizers. However, in a fast\nmoving field like ML, there are many interesting new developments that\ncannot be integrated into core TensorFlow (because their broad\napplicability is not yet clear, or it is mostly used by a smaller subset\nof the community).\n\"\"\"\n\nimport os\nimport sys\n\nfrom datetime import datetime\nfrom setuptools import find_packages\nfrom setuptools import setup\nfrom setuptools.dist import Distribution\nfrom setuptools import Extension\n\nDOCLINES = __doc__.split(\"\\n\")\n\nTFA_NIGHTLY = \"tfa-nightly\"\nTFA_RELEASE = \"tensorflow-addons\"\n\nif \"--nightly\" in sys.argv:\n project_name = TFA_NIGHTLY\n nightly_idx = sys.argv.index(\"--nightly\")\n sys.argv.pop(nightly_idx)\nelse:\n project_name = TFA_RELEASE\n\n# Version\nversion = {}\nbase_dir = os.path.dirname(os.path.abspath(__file__))\nwith open(os.path.join(base_dir, \"tensorflow_addons\", \"version.py\")) as fp:\n exec(fp.read(), version)\n\nif project_name == TFA_NIGHTLY:\n version[\"__version__\"] += datetime.strftime(datetime.today(), \"%Y%m%d\")\n\nwith open(\"requirements.txt\") as f:\n required_pkgs = f.read().splitlines()\n\n# Manylinux2010 requires a patch for platlib\nif (\n sys.platform.startswith(\"linux\")\n and os.environ.get(\"TF_ADDONS_NO_BUILD\", \"0\") == \"0\"\n):\n ext_modules = [Extension(\"_foo\", [\"stub.cc\"])]\nelse:\n ext_modules = []\n\n\nclass BinaryDistribution(Distribution):\n \"\"\"This class is needed in order to create OS specific wheels.\"\"\"\n\n def has_ext_modules(self):\n return True\n\n\nsetup(\n name=project_name,\n version=version[\"__version__\"],\n description=DOCLINES[0],\n long_description=\"\\n\".join(DOCLINES[2:]),\n author=\"Google Inc.\",\n author_email=\"[email protected]\",\n packages=find_packages(),\n ext_modules=ext_modules,\n install_requires=required_pkgs,\n include_package_data=True,\n zip_safe=False,\n distclass=BinaryDistribution,\n classifiers=[\n \"Development Status :: 4 - 
Beta\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Education\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Topic :: Scientific/Engineering :: Mathematics\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n \"Topic :: Software Development :: Libraries\",\n ],\n license=\"Apache 2.0\",\n keywords=\"tensorflow addons machine learning\",\n)\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright 2019 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"TensorFlow Addons.\n\nTensorFlow Addons is a repository of contributions that conform to well-\nestablished API patterns, but implement new functionality not available\nin core TensorFlow. TensorFlow natively supports a large number of\noperators, layers, metrics, losses, and optimizers. However, in a fast\nmoving field like ML, there are many interesting new developments that\ncannot be integrated into core TensorFlow (because their broad\napplicability is not yet clear, or it is mostly used by a smaller subset\nof the community).\n\"\"\"\n\nimport os\nimport sys\n\nfrom datetime import datetime\nfrom setuptools import find_packages\nfrom setuptools import setup\nfrom setuptools.dist import Distribution\nfrom setuptools import Extension\n\nDOCLINES = __doc__.split(\"\\n\")\n\nTFA_NIGHTLY = \"tfa-nightly\"\nTFA_RELEASE = \"tensorflow-addons\"\n\nif \"--nightly\" in sys.argv:\n project_name = TFA_NIGHTLY\n nightly_idx = sys.argv.index(\"--nightly\")\n sys.argv.pop(nightly_idx)\nelse:\n project_name = TFA_RELEASE\n\n# Version\nversion = {}\nbase_dir = os.path.dirname(os.path.abspath(__file__))\nwith open(os.path.join(base_dir, \"tensorflow_addons\", \"version.py\")) as fp:\n exec(fp.read(), version)\n\nif project_name == TFA_NIGHTLY:\n version[\"__version__\"] += datetime.now().strftime(\"%Y%m%d%H%M%S\")\n\nwith open(\"requirements.txt\") as f:\n required_pkgs = f.read().splitlines()\n\n# Manylinux2010 requires a patch for platlib\nif (\n sys.platform.startswith(\"linux\")\n and os.environ.get(\"TF_ADDONS_NO_BUILD\", \"0\") == \"0\"\n):\n ext_modules = [Extension(\"_foo\", [\"stub.cc\"])]\nelse:\n ext_modules = []\n\n\nclass BinaryDistribution(Distribution):\n \"\"\"This class is needed in order to create OS specific wheels.\"\"\"\n\n def has_ext_modules(self):\n return True\n\n\nsetup(\n name=project_name,\n version=version[\"__version__\"],\n description=DOCLINES[0],\n long_description=\"\\n\".join(DOCLINES[2:]),\n author=\"Google Inc.\",\n author_email=\"[email protected]\",\n packages=find_packages(),\n ext_modules=ext_modules,\n install_requires=required_pkgs,\n include_package_data=True,\n zip_safe=False,\n distclass=BinaryDistribution,\n classifiers=[\n \"Development 
Status :: 4 - Beta\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Education\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Topic :: Scientific/Engineering :: Mathematics\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n \"Topic :: Software Development :: Libraries\",\n ],\n license=\"Apache 2.0\",\n keywords=\"tensorflow addons machine learning\",\n)\n", "path": "setup.py"}]} | 1,293 | 111 |
gh_patches_debug_34411 | rasdani/github-patches | git_diff | ESMCI__cime-3079 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Branch a single instance case from a multi-instance case
When a multi-instance CAM forecast fails, I want to use one of the failed instances
as an exact restart in a single instance case, to speed debugging.
Building it as hybrid or startup won't work because for CAM
those are not exact restarts; they use the CAM initial file
I first tried building a single instance branch run with the multi-instance
as the refcase. That branched the entire multi-instance run.
Then I tried copying all of the restart files for 1 instance into a new directory,
"Restarts", with instance numbers removed from the file names.
I built the single instance case with Restarts as the RUN_REFDIR.
It built, but when I tried to run it it complained about a mozart file
already existing in the run directory. I believe that an earlier stage
of the submit process created that file, so I'm stumped about how
to fix this problem. I've played with GET_REFCASE and CONTINUE_RUN
and other things, but have not found the magic combination.
Am I missing something, or is this a new capability that hasn't been implemented?
$CASEROOT = /gpfs/fs1/work/raeder/Exp/Debug_lwdn4
$RUNDIR = /gpfs/fs1/scratch/raeder/Debug_lwdn4/run
$CESMROOT = /glade/work/raeder/Models/cesm2_2_maint-5.6
Thanks
Kevin
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/lib/CIME/Servers/wget.py`
Content:
```
1 """
2 WGET Server class. Interact with a server using WGET protocol
3 """
4 # pylint: disable=super-init-not-called
5 from CIME.XML.standard_module_setup import *
6 from CIME.Servers.generic_server import GenericServer
7 logger = logging.getLogger(__name__)
8
9 class WGET(GenericServer):
10 def __init__(self, address, user='', passwd=''):
11 self._args = ''
12 if user:
13 self._args += "--user {} ".format(user)
14 if passwd:
15 self._args += "--password {} ".format(passwd)
16 self._server_loc = address
17
18 err = run_cmd("wget {} --spider {}".format(self._args, address))[0]
19 expect(err == 0,"Could not connect to repo '{0}'\nThis is most likely either a proxy, or network issue .")
20
21
22 def fileexists(self, rel_path):
23 full_url = os.path.join(self._server_loc, rel_path)
24 stat, out, err = run_cmd("wget {} --spider {}".format(self._args, full_url))
25 if (stat != 0):
26 logging.warning("FAIL: Repo '{}' does not have file '{}'\nReason:{}\n{}\n".format(self._server_loc, full_url, out.encode('utf-8'), err.encode('utf-8')))
27 return False
28 return True
29
30 def getfile(self, rel_path, full_path):
31 full_url = os.path.join(self._server_loc, rel_path)
32 stat, output, errput = \
33 run_cmd("wget {} {} -nc --output-document {}".format(self._args, full_url, full_path))
34 if (stat != 0):
35 logging.warning("wget failed with output: {} and errput {}\n".format(output, errput))
36 # wget puts an empty file if it fails.
37 try:
38 os.remove(full_path)
39 except OSError:
40 pass
41 return False
42 else:
43 logging.info("SUCCESS\n")
44 return True
45
46 def getdirectory(self, rel_path, full_path):
47 full_url = os.path.join(self._server_loc, rel_path)
48 stat, output, errput = \
49 run_cmd("wget {} {} -r -N --no-directories ".format(self._args, full_url+os.sep), from_dir=full_path)
50 logger.debug(output)
51 logger.debug(errput)
52 if (stat != 0):
53 logging.warning("wget failed with output: {} and errput {}\n".format(output, errput))
54 # wget puts an empty file if it fails.
55 try:
56 os.remove(full_path)
57 except OSError:
58 pass
59 return False
60 else:
61 logging.info("SUCCESS\n")
62 return True
63
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scripts/lib/CIME/Servers/wget.py b/scripts/lib/CIME/Servers/wget.py
--- a/scripts/lib/CIME/Servers/wget.py
+++ b/scripts/lib/CIME/Servers/wget.py
@@ -15,9 +15,9 @@
self._args += "--password {} ".format(passwd)
self._server_loc = address
- err = run_cmd("wget {} --spider {}".format(self._args, address))[0]
- expect(err == 0,"Could not connect to repo '{0}'\nThis is most likely either a proxy, or network issue .")
-
+ cmd = "wget {} --no-check-certificate --spider {}".format(self._args, address)
+ err, output, _ = run_cmd(cmd, combine_output=True)
+ expect(err == 0,"Could not connect to repo via '{}'\nThis is most likely either a proxy, or network issue.\nOutput:\n{}".format(cmd, output.encode('utf-8')))
def fileexists(self, rel_path):
full_url = os.path.join(self._server_loc, rel_path)
@@ -30,9 +30,9 @@
def getfile(self, rel_path, full_path):
full_url = os.path.join(self._server_loc, rel_path)
stat, output, errput = \
- run_cmd("wget {} {} -nc --output-document {}".format(self._args, full_url, full_path))
+ run_cmd("wget {} {} -nc --no-check-certificate --output-document {}".format(self._args, full_url, full_path))
if (stat != 0):
- logging.warning("wget failed with output: {} and errput {}\n".format(output, errput))
+ logging.warning("wget failed with output: {} and errput {}\n".format(output.encode('utf-8'), errput.encode('utf-8')))
# wget puts an empty file if it fails.
try:
os.remove(full_path)
@@ -46,7 +46,7 @@
def getdirectory(self, rel_path, full_path):
full_url = os.path.join(self._server_loc, rel_path)
stat, output, errput = \
- run_cmd("wget {} {} -r -N --no-directories ".format(self._args, full_url+os.sep), from_dir=full_path)
+ run_cmd("wget {} {} -r -N --no-check-certificate --no-directories ".format(self._args, full_url+os.sep), from_dir=full_path)
logger.debug(output)
logger.debug(errput)
if (stat != 0):
| {"golden_diff": "diff --git a/scripts/lib/CIME/Servers/wget.py b/scripts/lib/CIME/Servers/wget.py\n--- a/scripts/lib/CIME/Servers/wget.py\n+++ b/scripts/lib/CIME/Servers/wget.py\n@@ -15,9 +15,9 @@\n self._args += \"--password {} \".format(passwd)\n self._server_loc = address\n \n- err = run_cmd(\"wget {} --spider {}\".format(self._args, address))[0]\n- expect(err == 0,\"Could not connect to repo '{0}'\\nThis is most likely either a proxy, or network issue .\")\n-\n+ cmd = \"wget {} --no-check-certificate --spider {}\".format(self._args, address)\n+ err, output, _ = run_cmd(cmd, combine_output=True)\n+ expect(err == 0,\"Could not connect to repo via '{}'\\nThis is most likely either a proxy, or network issue.\\nOutput:\\n{}\".format(cmd, output.encode('utf-8')))\n \n def fileexists(self, rel_path):\n full_url = os.path.join(self._server_loc, rel_path)\n@@ -30,9 +30,9 @@\n def getfile(self, rel_path, full_path):\n full_url = os.path.join(self._server_loc, rel_path)\n stat, output, errput = \\\n- run_cmd(\"wget {} {} -nc --output-document {}\".format(self._args, full_url, full_path))\n+ run_cmd(\"wget {} {} -nc --no-check-certificate --output-document {}\".format(self._args, full_url, full_path))\n if (stat != 0):\n- logging.warning(\"wget failed with output: {} and errput {}\\n\".format(output, errput))\n+ logging.warning(\"wget failed with output: {} and errput {}\\n\".format(output.encode('utf-8'), errput.encode('utf-8')))\n # wget puts an empty file if it fails.\n try:\n os.remove(full_path)\n@@ -46,7 +46,7 @@\n def getdirectory(self, rel_path, full_path):\n full_url = os.path.join(self._server_loc, rel_path)\n stat, output, errput = \\\n- run_cmd(\"wget {} {} -r -N --no-directories \".format(self._args, full_url+os.sep), from_dir=full_path)\n+ run_cmd(\"wget {} {} -r -N --no-check-certificate --no-directories \".format(self._args, full_url+os.sep), from_dir=full_path)\n logger.debug(output)\n logger.debug(errput)\n if (stat != 0):\n", "issue": "Branch a single instance case from a multi-instance case\nWhen a multi-instance CAM forecast fails, I want to use one of the failed instances\r\nas an exact restart in a single instance case, to speed debugging.\r\nBuilding it as hybrid or startup won't work because for CAM \r\nthose are not exact restarts; they use the CAM initial file\r\nI first tried building a single instance branch run with the multi-instance\r\nas the refcase. That branched the entire multi-instance run.\r\n\r\nThen I tried copying all of the restart files for 1 instance into a new directory, \r\n\"Restarts\", with instance numbers removed from the file names.\r\nI built the single instance case with Restarts as the RUN_REFDIR.\r\nIt built, but when I tried to run it it complained about a mozart file\r\nalready existing in the run directory. I believe that an earlier stage\r\nof the submit process created that file, so I'm stumped about how\r\nto fix this problem. I've played with GET_REFCASE and CONTINUE_RUN\r\nand other things, but have not found the magic combination.\r\nAm I missing something, or is this a new capability that hasn't been implemented?\r\n\r\n$CASEROOT = /gpfs/fs1/work/raeder/Exp/Debug_lwdn4\r\n$RUNDIR = /gpfs/fs1/scratch/raeder/Debug_lwdn4/run\r\n$CESMROOT = /glade/work/raeder/Models/cesm2_2_maint-5.6\r\n\r\nThanks\r\nKevin\r\n\n", "before_files": [{"content": "\"\"\"\nWGET Server class. 
Interact with a server using WGET protocol\n\"\"\"\n# pylint: disable=super-init-not-called\nfrom CIME.XML.standard_module_setup import *\nfrom CIME.Servers.generic_server import GenericServer\nlogger = logging.getLogger(__name__)\n\nclass WGET(GenericServer):\n def __init__(self, address, user='', passwd=''):\n self._args = ''\n if user:\n self._args += \"--user {} \".format(user)\n if passwd:\n self._args += \"--password {} \".format(passwd)\n self._server_loc = address\n\n err = run_cmd(\"wget {} --spider {}\".format(self._args, address))[0]\n expect(err == 0,\"Could not connect to repo '{0}'\\nThis is most likely either a proxy, or network issue .\")\n\n\n def fileexists(self, rel_path):\n full_url = os.path.join(self._server_loc, rel_path)\n stat, out, err = run_cmd(\"wget {} --spider {}\".format(self._args, full_url))\n if (stat != 0):\n logging.warning(\"FAIL: Repo '{}' does not have file '{}'\\nReason:{}\\n{}\\n\".format(self._server_loc, full_url, out.encode('utf-8'), err.encode('utf-8')))\n return False\n return True\n\n def getfile(self, rel_path, full_path):\n full_url = os.path.join(self._server_loc, rel_path)\n stat, output, errput = \\\n run_cmd(\"wget {} {} -nc --output-document {}\".format(self._args, full_url, full_path))\n if (stat != 0):\n logging.warning(\"wget failed with output: {} and errput {}\\n\".format(output, errput))\n # wget puts an empty file if it fails.\n try:\n os.remove(full_path)\n except OSError:\n pass\n return False\n else:\n logging.info(\"SUCCESS\\n\")\n return True\n\n def getdirectory(self, rel_path, full_path):\n full_url = os.path.join(self._server_loc, rel_path)\n stat, output, errput = \\\n run_cmd(\"wget {} {} -r -N --no-directories \".format(self._args, full_url+os.sep), from_dir=full_path)\n logger.debug(output)\n logger.debug(errput)\n if (stat != 0):\n logging.warning(\"wget failed with output: {} and errput {}\\n\".format(output, errput))\n # wget puts an empty file if it fails.\n try:\n os.remove(full_path)\n except OSError:\n pass\n return False\n else:\n logging.info(\"SUCCESS\\n\")\n return True\n", "path": "scripts/lib/CIME/Servers/wget.py"}], "after_files": [{"content": "\"\"\"\nWGET Server class. 
Interact with a server using WGET protocol\n\"\"\"\n# pylint: disable=super-init-not-called\nfrom CIME.XML.standard_module_setup import *\nfrom CIME.Servers.generic_server import GenericServer\nlogger = logging.getLogger(__name__)\n\nclass WGET(GenericServer):\n def __init__(self, address, user='', passwd=''):\n self._args = ''\n if user:\n self._args += \"--user {} \".format(user)\n if passwd:\n self._args += \"--password {} \".format(passwd)\n self._server_loc = address\n\n cmd = \"wget {} --no-check-certificate --spider {}\".format(self._args, address)\n err, output, _ = run_cmd(cmd, combine_output=True)\n expect(err == 0,\"Could not connect to repo via '{}'\\nThis is most likely either a proxy, or network issue.\\nOutput:\\n{}\".format(cmd, output.encode('utf-8')))\n\n def fileexists(self, rel_path):\n full_url = os.path.join(self._server_loc, rel_path)\n stat, out, err = run_cmd(\"wget {} --spider {}\".format(self._args, full_url))\n if (stat != 0):\n logging.warning(\"FAIL: Repo '{}' does not have file '{}'\\nReason:{}\\n{}\\n\".format(self._server_loc, full_url, out.encode('utf-8'), err.encode('utf-8')))\n return False\n return True\n\n def getfile(self, rel_path, full_path):\n full_url = os.path.join(self._server_loc, rel_path)\n stat, output, errput = \\\n run_cmd(\"wget {} {} -nc --no-check-certificate --output-document {}\".format(self._args, full_url, full_path))\n if (stat != 0):\n logging.warning(\"wget failed with output: {} and errput {}\\n\".format(output.encode('utf-8'), errput.encode('utf-8')))\n # wget puts an empty file if it fails.\n try:\n os.remove(full_path)\n except OSError:\n pass\n return False\n else:\n logging.info(\"SUCCESS\\n\")\n return True\n\n def getdirectory(self, rel_path, full_path):\n full_url = os.path.join(self._server_loc, rel_path)\n stat, output, errput = \\\n run_cmd(\"wget {} {} -r -N --no-check-certificate --no-directories \".format(self._args, full_url+os.sep), from_dir=full_path)\n logger.debug(output)\n logger.debug(errput)\n if (stat != 0):\n logging.warning(\"wget failed with output: {} and errput {}\\n\".format(output, errput))\n # wget puts an empty file if it fails.\n try:\n os.remove(full_path)\n except OSError:\n pass\n return False\n else:\n logging.info(\"SUCCESS\\n\")\n return True\n", "path": "scripts/lib/CIME/Servers/wget.py"}]} | 1,280 | 572 |
gh_patches_debug_14006 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-3341 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Spider northern_california_breweries is broken
During the global build at 2021-06-23-14-42-18, spider **northern_california_breweries** failed with **0 features** and **1 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-06-23-14-42-18/logs/northern_california_breweries.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-06-23-14-42-18/output/northern_california_breweries.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-06-23-14-42-18/output/northern_california_breweries.geojson))
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/spiders/northern_california_breweries.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 import scrapy
3 from locations.items import GeojsonPointItem
4 import json
5 import re
6
7 class NorthernCaliforniaBreweriesSpider(scrapy.Spider):
8 name = "northern_california_breweries"
9 allowed_domains = ["projects.sfchronicle.com"]
10 start_urls = (
11 'http://projects.sfchronicle.com/2017/brewery-map/',
12 )
13
14 def parse(self, response):
15 beerData = response.xpath("//*[text()[contains(.,'beerData')]]").extract_first()
16 matches = re.search(r"var beerData = (\[(.*)\])", beerData)
17 jsonData = matches.group(0).replace("var beerData = ","")
18 breweryList = json.loads(jsonData)
19
20 for item in breweryList:
21 yield GeojsonPointItem(
22 ref=item.get('Brewery'),
23 lat=float(item.get('Latitude')),
24 lon=float(item.get('Longitude')),
25 addr_full=item.get('Address'),
26 city=item.get('City'),
27 state="CA",
28 website=item.get('Website'),
29 )
30
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/locations/spiders/northern_california_breweries.py b/locations/spiders/northern_california_breweries.py
--- a/locations/spiders/northern_california_breweries.py
+++ b/locations/spiders/northern_california_breweries.py
@@ -18,10 +18,19 @@
breweryList = json.loads(jsonData)
for item in breweryList:
+ latitude = None
+ longitude = None
+
+ if item.get('Latitude') is not None:
+ latitude = float(item.get('Latitude'))
+
+ if item.get('Longitude') is not None:
+ longitude = float(item.get('Longitude'))
+
yield GeojsonPointItem(
ref=item.get('Brewery'),
- lat=float(item.get('Latitude')),
- lon=float(item.get('Longitude')),
+ lat=latitude,
+ lon=longitude,
addr_full=item.get('Address'),
city=item.get('City'),
state="CA",
| {"golden_diff": "diff --git a/locations/spiders/northern_california_breweries.py b/locations/spiders/northern_california_breweries.py\n--- a/locations/spiders/northern_california_breweries.py\n+++ b/locations/spiders/northern_california_breweries.py\n@@ -18,10 +18,19 @@\n breweryList = json.loads(jsonData)\n \n for item in breweryList:\n+ latitude = None\n+ longitude = None\n+\n+ if item.get('Latitude') is not None:\n+ latitude = float(item.get('Latitude'))\n+\n+ if item.get('Longitude') is not None:\n+ longitude = float(item.get('Longitude'))\n+ \n yield GeojsonPointItem(\n ref=item.get('Brewery'),\n- lat=float(item.get('Latitude')),\n- lon=float(item.get('Longitude')),\n+ lat=latitude,\n+ lon=longitude,\n addr_full=item.get('Address'),\n city=item.get('City'),\n state=\"CA\",\n", "issue": "Spider northern_california_breweries is broken\nDuring the global build at 2021-06-23-14-42-18, spider **northern_california_breweries** failed with **0 features** and **1 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-06-23-14-42-18/logs/northern_california_breweries.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-06-23-14-42-18/output/northern_california_breweries.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-06-23-14-42-18/output/northern_california_breweries.geojson))\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom locations.items import GeojsonPointItem\nimport json\nimport re\n\nclass NorthernCaliforniaBreweriesSpider(scrapy.Spider):\n name = \"northern_california_breweries\"\n allowed_domains = [\"projects.sfchronicle.com\"]\n start_urls = (\n 'http://projects.sfchronicle.com/2017/brewery-map/',\n )\n\n def parse(self, response):\n beerData = response.xpath(\"//*[text()[contains(.,'beerData')]]\").extract_first()\n matches = re.search(r\"var beerData = (\\[(.*)\\])\", beerData)\n jsonData = matches.group(0).replace(\"var beerData = \",\"\")\n breweryList = json.loads(jsonData)\n\n for item in breweryList:\n yield GeojsonPointItem(\n ref=item.get('Brewery'),\n lat=float(item.get('Latitude')),\n lon=float(item.get('Longitude')),\n addr_full=item.get('Address'),\n city=item.get('City'),\n state=\"CA\",\n website=item.get('Website'),\n )\n", "path": "locations/spiders/northern_california_breweries.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom locations.items import GeojsonPointItem\nimport json\nimport re\n\nclass NorthernCaliforniaBreweriesSpider(scrapy.Spider):\n name = \"northern_california_breweries\"\n allowed_domains = [\"projects.sfchronicle.com\"]\n start_urls = (\n 'http://projects.sfchronicle.com/2017/brewery-map/',\n )\n\n def parse(self, response):\n beerData = response.xpath(\"//*[text()[contains(.,'beerData')]]\").extract_first()\n matches = re.search(r\"var beerData = (\\[(.*)\\])\", beerData)\n jsonData = matches.group(0).replace(\"var beerData = \",\"\")\n breweryList = json.loads(jsonData)\n\n for item in breweryList:\n latitude = None\n longitude = None\n\n if item.get('Latitude') is not None:\n latitude = float(item.get('Latitude'))\n\n if item.get('Longitude') is not None:\n longitude = float(item.get('Longitude'))\n \n yield GeojsonPointItem(\n ref=item.get('Brewery'),\n lat=latitude,\n lon=longitude,\n addr_full=item.get('Address'),\n city=item.get('City'),\n state=\"CA\",\n website=item.get('Website'),\n )\n", "path": "locations/spiders/northern_california_breweries.py"}]} | 756 | 221 |
gh_patches_debug_59836 | rasdani/github-patches | git_diff | angr__angr-4105 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Duplicate member docs on subclasses
### Description
e.g. the documentation on SimCC's members is also present on SimCCUsercall. This is a huge problem considering that the api docs page is already fucking gigantic, this is just making it multiplicatively bigger.
### Steps to reproduce the bug
_No response_
### Environment
_No response_
### Additional context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/conf.py`
Content:
```
1 # Configuration file for the Sphinx documentation builder.
2 #
3 # For the full list of built-in configuration values, see the documentation:
4 # https://www.sphinx-doc.org/en/master/usage/configuration.html
5
6 import datetime
7
8 # -- Project information -----------------------------------------------------
9 # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
10
11 project = "angr"
12 project_copyright = f"{datetime.datetime.now().year}, The angr Project contributors"
13 author = "The angr Project"
14
15 # -- General configuration ---------------------------------------------------
16 # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
17
18 extensions = [
19 "sphinx.ext.autodoc",
20 "sphinx.ext.autosectionlabel",
21 "sphinx.ext.autosummary",
22 "sphinx.ext.coverage",
23 "sphinx.ext.intersphinx",
24 "sphinx.ext.napoleon",
25 "sphinx.ext.todo",
26 "sphinx.ext.viewcode",
27 "sphinx_autodoc_typehints",
28 "myst_parser",
29 ]
30
31 templates_path = ["_templates"]
32 exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
33
34 # -- Options for autodoc -----------------------------------------------------
35 # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration
36 autoclass_content = "class"
37 autodoc_default_options = {
38 "members": True,
39 "member-order": "bysource",
40 "inherited-members": True,
41 "show-inheritance": True,
42 "special-members": "__init__",
43 "undoc-members": True,
44 }
45 autodoc_inherit_docstrings = True
46 autodoc_typehints = "both"
47
48 # -- Options for coverage ----------------------------------------------------
49 # https://www.sphinx-doc.org/en/master/usage/extensions/coverage.html
50 coverage_write_headline = False
51
52 coverage_ignore_pyobjects = [
53 "angr.analyses.decompiler.structured_codegen.c.StructuredCodeGenerator", # Alias to CStructuredCodeGenerator
54 "angr.sim_type.SimTypeFixedSizeArray", # Alias to SimTypeArray
55 ]
56
57 # -- Options for intersphinx -------------------------------------------------
58 # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html
59 intersphinx_mapping = {
60 "python": ("https://docs.python.org/3", None),
61 "ailment": ("https://docs.angr.io/projects/ailment/en/latest/", None),
62 "archinfo": ("https://docs.angr.io/projects/archinfo/en/latest/", None),
63 "claripy": ("https://docs.angr.io/projects/claripy/en/latest/", None),
64 "cle": ("https://docs.angr.io/projects/cle/en/latest/", None),
65 "pypcode": ("https://docs.angr.io/projects/pypcode/en/latest/", None),
66 "pyvex": ("https://docs.angr.io/projects/pyvex/en/latest/", None),
67 }
68
69 # -- Options for todos -------------------------------------------------------
70 # https://www.sphinx-doc.org/en/master/usage/extensions/todo.html
71 todo_include_todos = True
72
73 # -- Options for HTML output -------------------------------------------------
74 # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
75
76 html_theme = "furo"
77 html_static_path = ["_static"]
78
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -37,7 +37,6 @@
autodoc_default_options = {
"members": True,
"member-order": "bysource",
- "inherited-members": True,
"show-inheritance": True,
"special-members": "__init__",
"undoc-members": True,
| {"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -37,7 +37,6 @@\n autodoc_default_options = {\n \"members\": True,\n \"member-order\": \"bysource\",\n- \"inherited-members\": True,\n \"show-inheritance\": True,\n \"special-members\": \"__init__\",\n \"undoc-members\": True,\n", "issue": "Duplicate member docs on subclasses\n### Description\n\ne.g. the documentation on SimCC's members is also present on SimCCUsercall. This is a huge problem considering that the api docs page is already fucking gigantic, this is just making it multiplicatively bigger.\n\n### Steps to reproduce the bug\n\n_No response_\n\n### Environment\n\n_No response_\n\n### Additional context\n\n_No response_\n", "before_files": [{"content": "# Configuration file for the Sphinx documentation builder.\n#\n# For the full list of built-in configuration values, see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\nimport datetime\n\n# -- Project information -----------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information\n\nproject = \"angr\"\nproject_copyright = f\"{datetime.datetime.now().year}, The angr Project contributors\"\nauthor = \"The angr Project\"\n\n# -- General configuration ---------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration\n\nextensions = [\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.autosectionlabel\",\n \"sphinx.ext.autosummary\",\n \"sphinx.ext.coverage\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.napoleon\",\n \"sphinx.ext.todo\",\n \"sphinx.ext.viewcode\",\n \"sphinx_autodoc_typehints\",\n \"myst_parser\",\n]\n\ntemplates_path = [\"_templates\"]\nexclude_patterns = [\"_build\", \"Thumbs.db\", \".DS_Store\"]\n\n# -- Options for autodoc -----------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration\nautoclass_content = \"class\"\nautodoc_default_options = {\n \"members\": True,\n \"member-order\": \"bysource\",\n \"inherited-members\": True,\n \"show-inheritance\": True,\n \"special-members\": \"__init__\",\n \"undoc-members\": True,\n}\nautodoc_inherit_docstrings = True\nautodoc_typehints = \"both\"\n\n# -- Options for coverage ----------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/extensions/coverage.html\ncoverage_write_headline = False\n\ncoverage_ignore_pyobjects = [\n \"angr.analyses.decompiler.structured_codegen.c.StructuredCodeGenerator\", # Alias to CStructuredCodeGenerator\n \"angr.sim_type.SimTypeFixedSizeArray\", # Alias to SimTypeArray\n]\n\n# -- Options for intersphinx -------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html\nintersphinx_mapping = {\n \"python\": (\"https://docs.python.org/3\", None),\n \"ailment\": (\"https://docs.angr.io/projects/ailment/en/latest/\", None),\n \"archinfo\": (\"https://docs.angr.io/projects/archinfo/en/latest/\", None),\n \"claripy\": (\"https://docs.angr.io/projects/claripy/en/latest/\", None),\n \"cle\": (\"https://docs.angr.io/projects/cle/en/latest/\", None),\n \"pypcode\": (\"https://docs.angr.io/projects/pypcode/en/latest/\", None),\n \"pyvex\": (\"https://docs.angr.io/projects/pyvex/en/latest/\", None),\n}\n\n# -- Options for todos -------------------------------------------------------\n# 
https://www.sphinx-doc.org/en/master/usage/extensions/todo.html\ntodo_include_todos = True\n\n# -- Options for HTML output -------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output\n\nhtml_theme = \"furo\"\nhtml_static_path = [\"_static\"]\n", "path": "docs/conf.py"}], "after_files": [{"content": "# Configuration file for the Sphinx documentation builder.\n#\n# For the full list of built-in configuration values, see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\nimport datetime\n\n# -- Project information -----------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information\n\nproject = \"angr\"\nproject_copyright = f\"{datetime.datetime.now().year}, The angr Project contributors\"\nauthor = \"The angr Project\"\n\n# -- General configuration ---------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration\n\nextensions = [\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.autosectionlabel\",\n \"sphinx.ext.autosummary\",\n \"sphinx.ext.coverage\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.napoleon\",\n \"sphinx.ext.todo\",\n \"sphinx.ext.viewcode\",\n \"sphinx_autodoc_typehints\",\n \"myst_parser\",\n]\n\ntemplates_path = [\"_templates\"]\nexclude_patterns = [\"_build\", \"Thumbs.db\", \".DS_Store\"]\n\n# -- Options for autodoc -----------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration\nautoclass_content = \"class\"\nautodoc_default_options = {\n \"members\": True,\n \"member-order\": \"bysource\",\n \"show-inheritance\": True,\n \"special-members\": \"__init__\",\n \"undoc-members\": True,\n}\nautodoc_inherit_docstrings = True\nautodoc_typehints = \"both\"\n\n# -- Options for coverage ----------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/extensions/coverage.html\ncoverage_write_headline = False\n\ncoverage_ignore_pyobjects = [\n \"angr.analyses.decompiler.structured_codegen.c.StructuredCodeGenerator\", # Alias to CStructuredCodeGenerator\n \"angr.sim_type.SimTypeFixedSizeArray\", # Alias to SimTypeArray\n]\n\n# -- Options for intersphinx -------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html\nintersphinx_mapping = {\n \"python\": (\"https://docs.python.org/3\", None),\n \"ailment\": (\"https://docs.angr.io/projects/ailment/en/latest/\", None),\n \"archinfo\": (\"https://docs.angr.io/projects/archinfo/en/latest/\", None),\n \"claripy\": (\"https://docs.angr.io/projects/claripy/en/latest/\", None),\n \"cle\": (\"https://docs.angr.io/projects/cle/en/latest/\", None),\n \"pypcode\": (\"https://docs.angr.io/projects/pypcode/en/latest/\", None),\n \"pyvex\": (\"https://docs.angr.io/projects/pyvex/en/latest/\", None),\n}\n\n# -- Options for todos -------------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/extensions/todo.html\ntodo_include_todos = True\n\n# -- Options for HTML output -------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output\n\nhtml_theme = \"furo\"\nhtml_static_path = [\"_static\"]\n", "path": "docs/conf.py"}]} | 1,179 | 93 |
gh_patches_debug_40226 | rasdani/github-patches | git_diff | hpcaitech__ColossalAI-3113 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[tensor] fix some unittests
[tensor] fix some unittests
[tensor] fix some unittests
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `applications/ChatGPT/chatgpt/models/gpt/gpt_critic.py`
Content:
```
1 from typing import Optional
2
3 import torch.nn as nn
4 from transformers.models.gpt2.configuration_gpt2 import GPT2Config
5 from transformers.models.gpt2.modeling_gpt2 import GPT2Model
6
7 from ..base import Critic
8
9
10 class GPTCritic(Critic):
11 """
12 GPT Critic model.
13
14 Args:
15 pretrained (str): Pretrained model name or path.
16 config (GPT2Config): Model config.
17 checkpoint (bool): Enable gradient checkpointing.
18 """
19
20 def __init__(self,
21 pretrained: Optional[str] = None,
22 config: Optional[GPT2Config] = None,
23 checkpoint: bool = False,
24 **kwargs) -> None:
25 if pretrained is not None:
26 model = GPT2Model.from_pretrained(pretrained)
27 elif config is not None:
28 model = GPT2Model(config)
29 else:
30 model = GPT2Model(GPT2Config())
31 if checkpoint:
32 model.gradient_checkpointing_enable()
33 value_head = nn.Linear(model.config.n_embd, 1)
34 super().__init__(model, value_head, **kwargs)
35
```
Path: `applications/ChatGPT/chatgpt/models/opt/opt_critic.py`
Content:
```
1 from typing import Optional
2
3 import torch.nn as nn
4 from transformers.models.opt.configuration_opt import OPTConfig
5 from transformers.models.opt.modeling_opt import OPTModel
6
7 from ..base import Critic
8
9
10 class OPTCritic(Critic):
11 """
12 OPT Critic model.
13
14 Args:
15 pretrained (str): Pretrained model name or path.
16 config (OPTConfig): Model config.
17 checkpoint (bool): Enable gradient checkpointing.
18 lora_rank (int): Rank of the low-rank approximation.
19 lora_train_bias (str): LoRA bias training mode.
20 """
21
22 def __init__(self,
23 pretrained: Optional[str] = None,
24 config: Optional[OPTConfig] = None,
25 checkpoint: bool = False,
26 lora_rank: int = 0,
27 lora_train_bias: str = 'none',
28 **kwargs) -> None:
29 if pretrained is not None:
30 model = OPTModel.from_pretrained(pretrained)
31 elif config is not None:
32 model = OPTModel(config)
33 else:
34 model = OPTModel(OPTConfig())
35 if checkpoint:
36 model.gradient_checkpointing_enable()
37 value_head = nn.Linear(model.config.hidden_size, 1)
38 super().__init__(model, value_head, lora_rank, lora_train_bias, **kwargs)
39
```
Path: `applications/ChatGPT/chatgpt/models/gpt/gpt_actor.py`
Content:
```
1 from typing import Optional
2
3 from transformers.models.gpt2.configuration_gpt2 import GPT2Config
4 from transformers.models.gpt2.modeling_gpt2 import GPT2LMHeadModel
5
6 from ..base import Actor
7
8
9 class GPTActor(Actor):
10 """
11 GPT Actor model.
12
13 Args:
14 pretrained (str): Pretrained model name or path.
15 config (GPT2Config): Model config.
16 checkpoint (bool): Enable gradient checkpointing.
17 """
18
19 def __init__(self,
20 pretrained: Optional[str] = None,
21 config: Optional[GPT2Config] = None,
22 checkpoint: bool = False) -> None:
23 if pretrained is not None:
24 model = GPT2LMHeadModel.from_pretrained(pretrained)
25 elif config is not None:
26 model = GPT2LMHeadModel(config)
27 else:
28 model = GPT2LMHeadModel(GPT2Config())
29 if checkpoint:
30 model.gradient_checkpointing_enable()
31 super().__init__(model)
32
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/applications/ChatGPT/chatgpt/models/gpt/gpt_actor.py b/applications/ChatGPT/chatgpt/models/gpt/gpt_actor.py
--- a/applications/ChatGPT/chatgpt/models/gpt/gpt_actor.py
+++ b/applications/ChatGPT/chatgpt/models/gpt/gpt_actor.py
@@ -14,12 +14,16 @@
pretrained (str): Pretrained model name or path.
config (GPT2Config): Model config.
checkpoint (bool): Enable gradient checkpointing.
+ lora_rank (int): Rank of the LoRa layer.
+ lora_train_bias (str): Bias training strategy for the LoRa layer.
"""
def __init__(self,
pretrained: Optional[str] = None,
config: Optional[GPT2Config] = None,
- checkpoint: bool = False) -> None:
+ checkpoint: bool = False,
+ lora_rank: int = 0,
+ lora_train_bias: str = 'none') -> None:
if pretrained is not None:
model = GPT2LMHeadModel.from_pretrained(pretrained)
elif config is not None:
@@ -28,4 +32,4 @@
model = GPT2LMHeadModel(GPT2Config())
if checkpoint:
model.gradient_checkpointing_enable()
- super().__init__(model)
+ super().__init__(model, lora_rank, lora_train_bias)
diff --git a/applications/ChatGPT/chatgpt/models/gpt/gpt_critic.py b/applications/ChatGPT/chatgpt/models/gpt/gpt_critic.py
--- a/applications/ChatGPT/chatgpt/models/gpt/gpt_critic.py
+++ b/applications/ChatGPT/chatgpt/models/gpt/gpt_critic.py
@@ -15,13 +15,16 @@
pretrained (str): Pretrained model name or path.
config (GPT2Config): Model config.
checkpoint (bool): Enable gradient checkpointing.
+ lora_rank (int): Rank of the LO-RA decomposition.
+ lora_train_bias (str): LoRA bias training mode.
"""
def __init__(self,
pretrained: Optional[str] = None,
config: Optional[GPT2Config] = None,
checkpoint: bool = False,
- **kwargs) -> None:
+ lora_rank: int = 0,
+ lora_train_bias: str = 'none') -> None:
if pretrained is not None:
model = GPT2Model.from_pretrained(pretrained)
elif config is not None:
@@ -31,4 +34,4 @@
if checkpoint:
model.gradient_checkpointing_enable()
value_head = nn.Linear(model.config.n_embd, 1)
- super().__init__(model, value_head, **kwargs)
+ super().__init__(model, value_head, lora_rank, lora_train_bias)
diff --git a/applications/ChatGPT/chatgpt/models/opt/opt_critic.py b/applications/ChatGPT/chatgpt/models/opt/opt_critic.py
--- a/applications/ChatGPT/chatgpt/models/opt/opt_critic.py
+++ b/applications/ChatGPT/chatgpt/models/opt/opt_critic.py
@@ -34,5 +34,5 @@
model = OPTModel(OPTConfig())
if checkpoint:
model.gradient_checkpointing_enable()
- value_head = nn.Linear(model.config.hidden_size, 1)
+ value_head = nn.Linear(model.config.word_embed_proj_dim, 1)
super().__init__(model, value_head, lora_rank, lora_train_bias, **kwargs)
| {"golden_diff": "diff --git a/applications/ChatGPT/chatgpt/models/gpt/gpt_actor.py b/applications/ChatGPT/chatgpt/models/gpt/gpt_actor.py\n--- a/applications/ChatGPT/chatgpt/models/gpt/gpt_actor.py\n+++ b/applications/ChatGPT/chatgpt/models/gpt/gpt_actor.py\n@@ -14,12 +14,16 @@\n pretrained (str): Pretrained model name or path.\n config (GPT2Config): Model config.\n checkpoint (bool): Enable gradient checkpointing.\n+ lora_rank (int): Rank of the LoRa layer.\n+ lora_train_bias (str): Bias training strategy for the LoRa layer.\n \"\"\"\n \n def __init__(self,\n pretrained: Optional[str] = None,\n config: Optional[GPT2Config] = None,\n- checkpoint: bool = False) -> None:\n+ checkpoint: bool = False,\n+ lora_rank: int = 0,\n+ lora_train_bias: str = 'none') -> None:\n if pretrained is not None:\n model = GPT2LMHeadModel.from_pretrained(pretrained)\n elif config is not None:\n@@ -28,4 +32,4 @@\n model = GPT2LMHeadModel(GPT2Config())\n if checkpoint:\n model.gradient_checkpointing_enable()\n- super().__init__(model)\n+ super().__init__(model, lora_rank, lora_train_bias)\ndiff --git a/applications/ChatGPT/chatgpt/models/gpt/gpt_critic.py b/applications/ChatGPT/chatgpt/models/gpt/gpt_critic.py\n--- a/applications/ChatGPT/chatgpt/models/gpt/gpt_critic.py\n+++ b/applications/ChatGPT/chatgpt/models/gpt/gpt_critic.py\n@@ -15,13 +15,16 @@\n pretrained (str): Pretrained model name or path.\n config (GPT2Config): Model config.\n checkpoint (bool): Enable gradient checkpointing.\n+ lora_rank (int): Rank of the LO-RA decomposition.\n+ lora_train_bias (str): LoRA bias training mode.\n \"\"\"\n \n def __init__(self,\n pretrained: Optional[str] = None,\n config: Optional[GPT2Config] = None,\n checkpoint: bool = False,\n- **kwargs) -> None:\n+ lora_rank: int = 0,\n+ lora_train_bias: str = 'none') -> None:\n if pretrained is not None:\n model = GPT2Model.from_pretrained(pretrained)\n elif config is not None:\n@@ -31,4 +34,4 @@\n if checkpoint:\n model.gradient_checkpointing_enable()\n value_head = nn.Linear(model.config.n_embd, 1)\n- super().__init__(model, value_head, **kwargs)\n+ super().__init__(model, value_head, lora_rank, lora_train_bias)\ndiff --git a/applications/ChatGPT/chatgpt/models/opt/opt_critic.py b/applications/ChatGPT/chatgpt/models/opt/opt_critic.py\n--- a/applications/ChatGPT/chatgpt/models/opt/opt_critic.py\n+++ b/applications/ChatGPT/chatgpt/models/opt/opt_critic.py\n@@ -34,5 +34,5 @@\n model = OPTModel(OPTConfig())\n if checkpoint:\n model.gradient_checkpointing_enable()\n- value_head = nn.Linear(model.config.hidden_size, 1)\n+ value_head = nn.Linear(model.config.word_embed_proj_dim, 1)\n super().__init__(model, value_head, lora_rank, lora_train_bias, **kwargs)\n", "issue": "[tensor] fix some unittests\n\n[tensor] fix some unittests\n\n[tensor] fix some unittests\n\n", "before_files": [{"content": "from typing import Optional\n\nimport torch.nn as nn\nfrom transformers.models.gpt2.configuration_gpt2 import GPT2Config\nfrom transformers.models.gpt2.modeling_gpt2 import GPT2Model\n\nfrom ..base import Critic\n\n\nclass GPTCritic(Critic):\n \"\"\"\n GPT Critic model.\n\n Args:\n pretrained (str): Pretrained model name or path.\n config (GPT2Config): Model config.\n checkpoint (bool): Enable gradient checkpointing.\n \"\"\"\n\n def __init__(self,\n pretrained: Optional[str] = None,\n config: Optional[GPT2Config] = None,\n checkpoint: bool = False,\n **kwargs) -> None:\n if pretrained is not None:\n model = GPT2Model.from_pretrained(pretrained)\n elif config is not None:\n model = 
GPT2Model(config)\n else:\n model = GPT2Model(GPT2Config())\n if checkpoint:\n model.gradient_checkpointing_enable()\n value_head = nn.Linear(model.config.n_embd, 1)\n super().__init__(model, value_head, **kwargs)\n", "path": "applications/ChatGPT/chatgpt/models/gpt/gpt_critic.py"}, {"content": "from typing import Optional\n\nimport torch.nn as nn\nfrom transformers.models.opt.configuration_opt import OPTConfig\nfrom transformers.models.opt.modeling_opt import OPTModel\n\nfrom ..base import Critic\n\n\nclass OPTCritic(Critic):\n \"\"\"\n OPT Critic model.\n\n Args:\n pretrained (str): Pretrained model name or path.\n config (OPTConfig): Model config.\n checkpoint (bool): Enable gradient checkpointing.\n lora_rank (int): Rank of the low-rank approximation.\n lora_train_bias (str): LoRA bias training mode.\n \"\"\"\n\n def __init__(self,\n pretrained: Optional[str] = None,\n config: Optional[OPTConfig] = None,\n checkpoint: bool = False,\n lora_rank: int = 0,\n lora_train_bias: str = 'none',\n **kwargs) -> None:\n if pretrained is not None:\n model = OPTModel.from_pretrained(pretrained)\n elif config is not None:\n model = OPTModel(config)\n else:\n model = OPTModel(OPTConfig())\n if checkpoint:\n model.gradient_checkpointing_enable()\n value_head = nn.Linear(model.config.hidden_size, 1)\n super().__init__(model, value_head, lora_rank, lora_train_bias, **kwargs)\n", "path": "applications/ChatGPT/chatgpt/models/opt/opt_critic.py"}, {"content": "from typing import Optional\n\nfrom transformers.models.gpt2.configuration_gpt2 import GPT2Config\nfrom transformers.models.gpt2.modeling_gpt2 import GPT2LMHeadModel\n\nfrom ..base import Actor\n\n\nclass GPTActor(Actor):\n \"\"\"\n GPT Actor model.\n\n Args:\n pretrained (str): Pretrained model name or path.\n config (GPT2Config): Model config.\n checkpoint (bool): Enable gradient checkpointing.\n \"\"\"\n\n def __init__(self,\n pretrained: Optional[str] = None,\n config: Optional[GPT2Config] = None,\n checkpoint: bool = False) -> None:\n if pretrained is not None:\n model = GPT2LMHeadModel.from_pretrained(pretrained)\n elif config is not None:\n model = GPT2LMHeadModel(config)\n else:\n model = GPT2LMHeadModel(GPT2Config())\n if checkpoint:\n model.gradient_checkpointing_enable()\n super().__init__(model)\n", "path": "applications/ChatGPT/chatgpt/models/gpt/gpt_actor.py"}], "after_files": [{"content": "from typing import Optional\n\nimport torch.nn as nn\nfrom transformers.models.gpt2.configuration_gpt2 import GPT2Config\nfrom transformers.models.gpt2.modeling_gpt2 import GPT2Model\n\nfrom ..base import Critic\n\n\nclass GPTCritic(Critic):\n \"\"\"\n GPT Critic model.\n\n Args:\n pretrained (str): Pretrained model name or path.\n config (GPT2Config): Model config.\n checkpoint (bool): Enable gradient checkpointing.\n lora_rank (int): Rank of the LO-RA decomposition.\n lora_train_bias (str): LoRA bias training mode.\n \"\"\"\n\n def __init__(self,\n pretrained: Optional[str] = None,\n config: Optional[GPT2Config] = None,\n checkpoint: bool = False,\n lora_rank: int = 0,\n lora_train_bias: str = 'none') -> None:\n if pretrained is not None:\n model = GPT2Model.from_pretrained(pretrained)\n elif config is not None:\n model = GPT2Model(config)\n else:\n model = GPT2Model(GPT2Config())\n if checkpoint:\n model.gradient_checkpointing_enable()\n value_head = nn.Linear(model.config.n_embd, 1)\n super().__init__(model, value_head, lora_rank, lora_train_bias)\n", "path": "applications/ChatGPT/chatgpt/models/gpt/gpt_critic.py"}, {"content": "from typing import 
Optional\n\nimport torch.nn as nn\nfrom transformers.models.opt.configuration_opt import OPTConfig\nfrom transformers.models.opt.modeling_opt import OPTModel\n\nfrom ..base import Critic\n\n\nclass OPTCritic(Critic):\n \"\"\"\n OPT Critic model.\n\n Args:\n pretrained (str): Pretrained model name or path.\n config (OPTConfig): Model config.\n checkpoint (bool): Enable gradient checkpointing.\n lora_rank (int): Rank of the low-rank approximation.\n lora_train_bias (str): LoRA bias training mode.\n \"\"\"\n\n def __init__(self,\n pretrained: Optional[str] = None,\n config: Optional[OPTConfig] = None,\n checkpoint: bool = False,\n lora_rank: int = 0,\n lora_train_bias: str = 'none',\n **kwargs) -> None:\n if pretrained is not None:\n model = OPTModel.from_pretrained(pretrained)\n elif config is not None:\n model = OPTModel(config)\n else:\n model = OPTModel(OPTConfig())\n if checkpoint:\n model.gradient_checkpointing_enable()\n value_head = nn.Linear(model.config.word_embed_proj_dim, 1)\n super().__init__(model, value_head, lora_rank, lora_train_bias, **kwargs)\n", "path": "applications/ChatGPT/chatgpt/models/opt/opt_critic.py"}, {"content": "from typing import Optional\n\nfrom transformers.models.gpt2.configuration_gpt2 import GPT2Config\nfrom transformers.models.gpt2.modeling_gpt2 import GPT2LMHeadModel\n\nfrom ..base import Actor\n\n\nclass GPTActor(Actor):\n \"\"\"\n GPT Actor model.\n\n Args:\n pretrained (str): Pretrained model name or path.\n config (GPT2Config): Model config.\n checkpoint (bool): Enable gradient checkpointing.\n lora_rank (int): Rank of the LoRa layer.\n lora_train_bias (str): Bias training strategy for the LoRa layer.\n \"\"\"\n\n def __init__(self,\n pretrained: Optional[str] = None,\n config: Optional[GPT2Config] = None,\n checkpoint: bool = False,\n lora_rank: int = 0,\n lora_train_bias: str = 'none') -> None:\n if pretrained is not None:\n model = GPT2LMHeadModel.from_pretrained(pretrained)\n elif config is not None:\n model = GPT2LMHeadModel(config)\n else:\n model = GPT2LMHeadModel(GPT2Config())\n if checkpoint:\n model.gradient_checkpointing_enable()\n super().__init__(model, lora_rank, lora_train_bias)\n", "path": "applications/ChatGPT/chatgpt/models/gpt/gpt_actor.py"}]} | 1,270 | 810 |
gh_patches_debug_21007 | rasdani/github-patches | git_diff | joke2k__faker-213 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`.prefix`/`.suffix` returns a tuple instead of a single value
`.prefix` (and `.suffix`) can occasionally return a tuple of values instead of a single value when `prefixes_male` and `prefixes_female` (or `suffixes_*`) are present in the provider.
[See here for the code responsible.](https://github.com/joke2k/faker/blob/2af330e09d84306d10921fed00ad2e5cc8e3d36f/faker/providers/person/__init__.py#L93-L94)
I wasn't sure if this was intentional (it's documented to do so -- then again, the documentation is autogenerated, isn't it?), so I didn't make a PR yet, but it's certainly counterintuitive.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `faker/providers/person/__init__.py`
Content:
```
1 localized = True
2
3 from .. import BaseProvider
4
5
6 class Provider(BaseProvider):
7 formats = ['{{first_name}} {{last_name}}', ]
8
9 first_names = ['John', 'Jane']
10
11 last_names = ['Doe', ]
12
13 def name(self):
14 """
15 :example 'John Doe'
16 """
17 pattern = self.random_element(self.formats)
18 return self.generator.parse(pattern)
19
20 @classmethod
21 def first_name(cls):
22 return cls.random_element(cls.first_names)
23
24 @classmethod
25 def last_name(cls):
26 return cls.random_element(cls.last_names)
27
28 def name_male(self):
29 if hasattr(self, 'formats_male'):
30 formats = self.formats_male
31 else:
32 formats = self.formats
33 pattern = self.random_element(formats)
34 return self.generator.parse(pattern)
35
36 def name_female(self):
37 if hasattr(self, 'formats_female'):
38 formats = self.formats_female
39 else:
40 formats = self.formats
41 pattern = self.random_element(formats)
42 return self.generator.parse(pattern)
43
44 @classmethod
45 def first_name_male(cls):
46 if hasattr(cls, 'first_names_male'):
47 return cls.random_element(cls.first_names_male)
48 return cls.first_name()
49
50 @classmethod
51 def first_name_female(cls):
52 if hasattr(cls, 'first_names_female'):
53 return cls.random_element(cls.first_names_female)
54 return cls.first_name()
55
56 @classmethod
57 def last_name_male(cls):
58 if hasattr(cls, 'last_names_male'):
59 return cls.random_element(cls.last_names_male)
60 return cls.last_name()
61
62 @classmethod
63 def last_name_female(cls):
64 if hasattr(cls, 'last_names_female'):
65 return cls.random_element(cls.last_names_female)
66 return cls.last_name()
67
68
69 @classmethod
70 def prefix(cls):
71 if hasattr(cls, 'prefixes'):
72 return cls.random_element(cls.prefixes)
73 if hasattr(cls, 'prefixes_male') and hasattr(cls, 'prefixes_female'):
74 return cls.random_element((cls.prefixes_male, cls.prefixes_female))
75 return ''
76
77 @classmethod
78 def prefix_male(cls):
79 if hasattr(cls, 'prefixes_male'):
80 return cls.random_element(cls.prefixes_male)
81 return cls.prefix()
82
83 @classmethod
84 def prefix_female(cls):
85 if hasattr(cls, 'prefixes_female'):
86 return cls.random_element(cls.prefixes_female)
87 return cls.prefix()
88
89 @classmethod
90 def suffix(cls):
91 if hasattr(cls, 'suffixes'):
92 return cls.random_element(cls.suffixes)
93 if hasattr(cls, 'suffixes_male') and hasattr(cls, 'suffixes_female'):
94 return cls.random_element((cls.suffixes_male, cls.suffixes_female))
95 return ''
96
97 @classmethod
98 def suffix_male(cls):
99 if hasattr(cls, 'suffixes_male'):
100 return cls.random_element(cls.suffixes_male)
101 return cls.suffix()
102
103 @classmethod
104 def suffix_female(cls):
105 if hasattr(cls, 'suffixes_female'):
106 return cls.random_element(cls.suffixes_female)
107 return cls.suffix()
108
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/faker/providers/person/__init__.py b/faker/providers/person/__init__.py
--- a/faker/providers/person/__init__.py
+++ b/faker/providers/person/__init__.py
@@ -71,7 +71,8 @@
if hasattr(cls, 'prefixes'):
return cls.random_element(cls.prefixes)
if hasattr(cls, 'prefixes_male') and hasattr(cls, 'prefixes_female'):
- return cls.random_element((cls.prefixes_male, cls.prefixes_female))
+ prefixes = cls.random_element((cls.prefixes_male, cls.prefixes_female))
+ return cls.random_element(prefixes)
return ''
@classmethod
@@ -91,7 +92,8 @@
if hasattr(cls, 'suffixes'):
return cls.random_element(cls.suffixes)
if hasattr(cls, 'suffixes_male') and hasattr(cls, 'suffixes_female'):
- return cls.random_element((cls.suffixes_male, cls.suffixes_female))
+ suffixes = cls.random_element((cls.suffixes_male, cls.suffixes_female))
+ return cls.random_element(suffixes)
return ''
@classmethod
| {"golden_diff": "diff --git a/faker/providers/person/__init__.py b/faker/providers/person/__init__.py\n--- a/faker/providers/person/__init__.py\n+++ b/faker/providers/person/__init__.py\n@@ -71,7 +71,8 @@\n if hasattr(cls, 'prefixes'):\n return cls.random_element(cls.prefixes)\n if hasattr(cls, 'prefixes_male') and hasattr(cls, 'prefixes_female'):\n- return cls.random_element((cls.prefixes_male, cls.prefixes_female))\n+ prefixes = cls.random_element((cls.prefixes_male, cls.prefixes_female))\n+ return cls.random_element(prefixes)\n return ''\n \n @classmethod\n@@ -91,7 +92,8 @@\n if hasattr(cls, 'suffixes'):\n return cls.random_element(cls.suffixes)\n if hasattr(cls, 'suffixes_male') and hasattr(cls, 'suffixes_female'):\n- return cls.random_element((cls.suffixes_male, cls.suffixes_female))\n+ suffixes = cls.random_element((cls.suffixes_male, cls.suffixes_female))\n+ return cls.random_element(suffixes)\n return ''\n \n @classmethod\n", "issue": "`.prefix`/`.suffix` returns a tuple instead of a single value\n`.prefix` (and `.suffix`) can occasionally return a tuple of values instead of a single value when `prefixes_male` and `prefixes_female` (or `suffixes_*`) are present in the provider.\n\n[See here for the code responsible.](https://github.com/joke2k/faker/blob/2af330e09d84306d10921fed00ad2e5cc8e3d36f/faker/providers/person/__init__.py#L93-L94)\n\nI wasn't sure if this was intentional (it's documented to do so -- then again, the documentation is autogenerated, isn't it?), so I didn't make a PR yet, but it's certainly counterintuitive.\n\n", "before_files": [{"content": "localized = True\n\nfrom .. import BaseProvider\n\n\nclass Provider(BaseProvider):\n formats = ['{{first_name}} {{last_name}}', ]\n\n first_names = ['John', 'Jane']\n\n last_names = ['Doe', ]\n\n def name(self):\n \"\"\"\n :example 'John Doe'\n \"\"\"\n pattern = self.random_element(self.formats)\n return self.generator.parse(pattern)\n\n @classmethod\n def first_name(cls):\n return cls.random_element(cls.first_names)\n\n @classmethod\n def last_name(cls):\n return cls.random_element(cls.last_names)\n\n def name_male(self):\n if hasattr(self, 'formats_male'):\n formats = self.formats_male\n else:\n formats = self.formats\n pattern = self.random_element(formats)\n return self.generator.parse(pattern)\n\n def name_female(self):\n if hasattr(self, 'formats_female'):\n formats = self.formats_female\n else:\n formats = self.formats\n pattern = self.random_element(formats)\n return self.generator.parse(pattern)\n\n @classmethod\n def first_name_male(cls):\n if hasattr(cls, 'first_names_male'):\n return cls.random_element(cls.first_names_male)\n return cls.first_name()\n\n @classmethod\n def first_name_female(cls):\n if hasattr(cls, 'first_names_female'):\n return cls.random_element(cls.first_names_female)\n return cls.first_name()\n\n @classmethod\n def last_name_male(cls):\n if hasattr(cls, 'last_names_male'):\n return cls.random_element(cls.last_names_male)\n return cls.last_name()\n\n @classmethod\n def last_name_female(cls):\n if hasattr(cls, 'last_names_female'):\n return cls.random_element(cls.last_names_female)\n return cls.last_name()\n\n\n @classmethod\n def prefix(cls):\n if hasattr(cls, 'prefixes'):\n return cls.random_element(cls.prefixes)\n if hasattr(cls, 'prefixes_male') and hasattr(cls, 'prefixes_female'):\n return cls.random_element((cls.prefixes_male, cls.prefixes_female))\n return ''\n\n @classmethod\n def prefix_male(cls):\n if hasattr(cls, 'prefixes_male'):\n return cls.random_element(cls.prefixes_male)\n 
return cls.prefix()\n\n @classmethod\n def prefix_female(cls):\n if hasattr(cls, 'prefixes_female'):\n return cls.random_element(cls.prefixes_female)\n return cls.prefix()\n\n @classmethod\n def suffix(cls):\n if hasattr(cls, 'suffixes'):\n return cls.random_element(cls.suffixes)\n if hasattr(cls, 'suffixes_male') and hasattr(cls, 'suffixes_female'):\n return cls.random_element((cls.suffixes_male, cls.suffixes_female))\n return ''\n\n @classmethod\n def suffix_male(cls):\n if hasattr(cls, 'suffixes_male'):\n return cls.random_element(cls.suffixes_male)\n return cls.suffix()\n\n @classmethod\n def suffix_female(cls):\n if hasattr(cls, 'suffixes_female'):\n return cls.random_element(cls.suffixes_female)\n return cls.suffix()\n", "path": "faker/providers/person/__init__.py"}], "after_files": [{"content": "localized = True\n\nfrom .. import BaseProvider\n\n\nclass Provider(BaseProvider):\n formats = ['{{first_name}} {{last_name}}', ]\n\n first_names = ['John', 'Jane']\n\n last_names = ['Doe', ]\n\n def name(self):\n \"\"\"\n :example 'John Doe'\n \"\"\"\n pattern = self.random_element(self.formats)\n return self.generator.parse(pattern)\n\n @classmethod\n def first_name(cls):\n return cls.random_element(cls.first_names)\n\n @classmethod\n def last_name(cls):\n return cls.random_element(cls.last_names)\n\n def name_male(self):\n if hasattr(self, 'formats_male'):\n formats = self.formats_male\n else:\n formats = self.formats\n pattern = self.random_element(formats)\n return self.generator.parse(pattern)\n\n def name_female(self):\n if hasattr(self, 'formats_female'):\n formats = self.formats_female\n else:\n formats = self.formats\n pattern = self.random_element(formats)\n return self.generator.parse(pattern)\n\n @classmethod\n def first_name_male(cls):\n if hasattr(cls, 'first_names_male'):\n return cls.random_element(cls.first_names_male)\n return cls.first_name()\n\n @classmethod\n def first_name_female(cls):\n if hasattr(cls, 'first_names_female'):\n return cls.random_element(cls.first_names_female)\n return cls.first_name()\n\n @classmethod\n def last_name_male(cls):\n if hasattr(cls, 'last_names_male'):\n return cls.random_element(cls.last_names_male)\n return cls.last_name()\n\n @classmethod\n def last_name_female(cls):\n if hasattr(cls, 'last_names_female'):\n return cls.random_element(cls.last_names_female)\n return cls.last_name()\n\n\n @classmethod\n def prefix(cls):\n if hasattr(cls, 'prefixes'):\n return cls.random_element(cls.prefixes)\n if hasattr(cls, 'prefixes_male') and hasattr(cls, 'prefixes_female'):\n prefixes = cls.random_element((cls.prefixes_male, cls.prefixes_female))\n return cls.random_element(prefixes)\n return ''\n\n @classmethod\n def prefix_male(cls):\n if hasattr(cls, 'prefixes_male'):\n return cls.random_element(cls.prefixes_male)\n return cls.prefix()\n\n @classmethod\n def prefix_female(cls):\n if hasattr(cls, 'prefixes_female'):\n return cls.random_element(cls.prefixes_female)\n return cls.prefix()\n\n @classmethod\n def suffix(cls):\n if hasattr(cls, 'suffixes'):\n return cls.random_element(cls.suffixes)\n if hasattr(cls, 'suffixes_male') and hasattr(cls, 'suffixes_female'):\n suffixes = cls.random_element((cls.suffixes_male, cls.suffixes_female))\n return cls.random_element(suffixes)\n return ''\n\n @classmethod\n def suffix_male(cls):\n if hasattr(cls, 'suffixes_male'):\n return cls.random_element(cls.suffixes_male)\n return cls.suffix()\n\n @classmethod\n def suffix_female(cls):\n if hasattr(cls, 'suffixes_female'):\n return 
cls.random_element(cls.suffixes_female)\n return cls.suffix()\n", "path": "faker/providers/person/__init__.py"}]} | 1,318 | 252 |
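For context on the faker record above: `random_element` ultimately behaves like `random.choice`, so passing it a tuple *of lists* yields a whole list. A minimal sketch (using `random.choice` as a stand-in for `cls.random_element`, with made-up prefix lists) contrasts the buggy one-step draw with the patched two-step draw:

```python
import random

# Hypothetical sample data standing in for the provider's class attributes.
prefixes_male = ["Mr.", "Dr."]
prefixes_female = ["Mrs.", "Ms.", "Dr."]

# Buggy one-step draw: choosing from a tuple *of lists* returns a whole list.
picked = random.choice((prefixes_male, prefixes_female))
print(picked)  # e.g. ['Mrs.', 'Ms.', 'Dr.'] -- a sequence, not a single prefix

# Patched two-step draw: first pick a gendered list, then pick one element.
gendered = random.choice((prefixes_male, prefixes_female))
print(random.choice(gendered))  # e.g. 'Ms.' -- a single value
```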
gh_patches_debug_24821 | rasdani/github-patches | git_diff | nilearn__nilearn-3077 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
nilearn.plotting overrides the matplotlib backend, causing troubles on remote SSH development
I am working in a remote setting, accessing a machine (drago/margaret in the INRIA context) through a PyCharm Remote Interpreter.
I have been struggling to display nilearn figures in the scientific view of PyCharm, leading to a painful process of saving the image => rsync the figures dir => visualize the figures once they are on my personal laptop.
I ran a simple experiment: first drawing a plain `plot(list(range(10)))`, which is displayed, and then a nilearn figure, which does not show up (script at the end of the issue).
I found out the reason for this: in `nilearn.plotting.__init__`, the matplotlib backend is set to `agg` if it is not in `[inline, agg]`, but the backend used initially (which works for me) is `module://backend_interagg`.
Setting the backend to the initial value after importing `nilearn.datasets` fixes the problem.
I don't know exactly how these matplotlib backends work, whether the list of accepted backends in `nilearn.plotting.__init__` could be extended, or whether we could have a more robust list of valid matplotlib backends. Also, I feel that nilearn shouldn't override the matplotlib backend silently.
<!--Please fill in the following information, to the best of your ability.-->
Nilearn version: 0.7.1
### Expected behavior
Nilearn does not override matplotlib backend silently.
### Actual behavior
Nilearn sets backend to "agg" if not in ["inline", "agg"], causing trouble with remote SSH development.
### Steps and code to reproduce bug
Run the following script through SSH remote interpreter
```python
import matplotlib
initial_backend = matplotlib.get_backend().lower()
print(initial_backend)
import matplotlib.pyplot as plt
import numpy as np
arr = np.zeros((100, 100))
plt.figure()
plt.plot(list(range(10)))
plt.show()
# >> The show is displayed in PyCharm
from nilearn import datasets, plotting
print(matplotlib.get_backend().lower())
# >> Backend has switched to "agg"
sample_brain_map = datasets.fetch_neurovault_motor_task().images[0]
plotting.plot_stat_map(
sample_brain_map,
threshold=3,
title="Before setting back the backend",
)
plt.show()
# >> Does not show up
matplotlib.use(initial_backend)
plotting.plot_stat_map(
sample_brain_map,
threshold=3,
title="After setting back the backend",
)
plt.show()
# >> Shows up
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nilearn/plotting/__init__.py`
Content:
```
1 """
2 Plotting code for nilearn
3 """
4 # Original Authors: Chris Filo Gorgolewski, Gael Varoquaux
5 import os
6 import sys
7 import importlib
8
9
10 ###############################################################################
11 # Make sure that we don't get DISPLAY problems when running without X on
12 # unices
13 def _set_mpl_backend():
14 # We are doing local imports here to avoid polluting our namespace
15 try:
16 import matplotlib
17 except ImportError:
18 if importlib.util.find_spec("pytest") is not None:
19 from .._utils.testing import skip_if_running_tests
20 # No need to fail when running tests
21 skip_if_running_tests('matplotlib not installed')
22 raise
23 else:
24 from ..version import (_import_module_with_version_check,
25 OPTIONAL_MATPLOTLIB_MIN_VERSION)
26 # When matplotlib was successfully imported we need to check
27 # that the version is greater that the minimum required one
28 _import_module_with_version_check('matplotlib',
29 OPTIONAL_MATPLOTLIB_MIN_VERSION)
30 current_backend = matplotlib.get_backend().lower()
31
32 if 'inline' in current_backend or 'nbagg' in current_backend:
33 return
34 # Set the backend to a non-interactive one for unices without X
35 # (see gh-2560)
36 if (sys.platform not in ('darwin', 'win32') and
37 'DISPLAY' not in os.environ):
38 matplotlib.use('Agg')
39
40
41 _set_mpl_backend()
42
43 ###############################################################################
44 from . import cm
45 from .img_plotting import (
46 plot_img, plot_anat, plot_epi, plot_roi, plot_stat_map,
47 plot_glass_brain, plot_connectome, plot_connectome_strength,
48 plot_markers, plot_prob_atlas, plot_carpet, plot_img_comparison, show)
49 from .find_cuts import find_xyz_cut_coords, find_cut_slices, \
50 find_parcellation_cut_coords, find_probabilistic_atlas_cut_coords
51 from .matrix_plotting import (plot_matrix, plot_contrast_matrix,
52 plot_design_matrix, plot_event)
53 from .html_surface import view_surf, view_img_on_surf
54 from .html_stat_map import view_img
55 from .html_connectome import view_connectome, view_markers
56 from .surf_plotting import (plot_surf, plot_surf_stat_map, plot_surf_roi,
57 plot_img_on_surf, plot_surf_contours)
58
59 __all__ = ['cm', 'plot_img', 'plot_anat', 'plot_epi',
60 'plot_roi', 'plot_stat_map', 'plot_glass_brain',
61 'plot_markers', 'plot_connectome', 'plot_prob_atlas',
62 'find_xyz_cut_coords', 'find_cut_slices',
63 'plot_img_comparison',
64 'show', 'plot_matrix',
65 'plot_design_matrix', 'plot_contrast_matrix', 'plot_event',
66 'view_surf', 'view_img_on_surf',
67 'view_img', 'view_connectome', 'view_markers',
68 'find_parcellation_cut_coords',
69 'find_probabilistic_atlas_cut_coords',
70 'plot_surf', 'plot_surf_stat_map', 'plot_surf_roi',
71 'plot_img_on_surf', 'plot_connectome_strength', 'plot_carpet',
72 'plot_surf_contours']
73
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nilearn/plotting/__init__.py b/nilearn/plotting/__init__.py
--- a/nilearn/plotting/__init__.py
+++ b/nilearn/plotting/__init__.py
@@ -2,9 +2,8 @@
Plotting code for nilearn
"""
# Original Authors: Chris Filo Gorgolewski, Gael Varoquaux
-import os
-import sys
import importlib
+import warnings
###############################################################################
@@ -29,13 +28,17 @@
OPTIONAL_MATPLOTLIB_MIN_VERSION)
current_backend = matplotlib.get_backend().lower()
- if 'inline' in current_backend or 'nbagg' in current_backend:
- return
- # Set the backend to a non-interactive one for unices without X
- # (see gh-2560)
- if (sys.platform not in ('darwin', 'win32') and
- 'DISPLAY' not in os.environ):
- matplotlib.use('Agg')
+ try:
+ # Making sure the current backend is usable by matplotlib
+ matplotlib.use(current_backend)
+ except Exception:
+ # If not, switching to default agg backend
+ matplotlib.use("Agg")
+ new_backend = matplotlib.get_backend().lower()
+
+ if new_backend != current_backend:
+ # Matplotlib backend has been changed, let's warn the user
+ warnings.warn(f"Backend changed to {new_backend}...")
_set_mpl_backend()
| {"golden_diff": "diff --git a/nilearn/plotting/__init__.py b/nilearn/plotting/__init__.py\n--- a/nilearn/plotting/__init__.py\n+++ b/nilearn/plotting/__init__.py\n@@ -2,9 +2,8 @@\n Plotting code for nilearn\n \"\"\"\n # Original Authors: Chris Filo Gorgolewski, Gael Varoquaux\n-import os\n-import sys\n import importlib\n+import warnings\n \n \n ###############################################################################\n@@ -29,13 +28,17 @@\n OPTIONAL_MATPLOTLIB_MIN_VERSION)\n current_backend = matplotlib.get_backend().lower()\n \n- if 'inline' in current_backend or 'nbagg' in current_backend:\n- return\n- # Set the backend to a non-interactive one for unices without X\n- # (see gh-2560)\n- if (sys.platform not in ('darwin', 'win32') and\n- 'DISPLAY' not in os.environ):\n- matplotlib.use('Agg')\n+ try:\n+ # Making sure the current backend is usable by matplotlib\n+ matplotlib.use(current_backend)\n+ except Exception:\n+ # If not, switching to default agg backend\n+ matplotlib.use(\"Agg\")\n+ new_backend = matplotlib.get_backend().lower()\n+\n+ if new_backend != current_backend:\n+ # Matplotlib backend has been changed, let's warn the user\n+ warnings.warn(f\"Backend changed to {new_backend}...\")\n \n \n _set_mpl_backend()\n", "issue": "nilearn.plotting overrides the matplotlib backend, causing troubles on remote SSH development\nI am working on a remote settings, accessing a machine (drago/margaret for INRIA context) through a PyCharm Remote Interpreter.\r\nI have been struggling to display nilearn figures in the scientific view of PyCharm, leading to a painful process of saving the image => rsync figures dir => visualize figures once there are on personal laptop.\r\n\r\nI did a simple experiment drawing first a simple `plot(list(range(10)))` which is displayed and then a nilearn figure which does not show up (script at the end of the issue).\r\n\r\nI found out the reason for this, which is that in `nilearn.plotting.__init__`, the matplotlib backend is set to `agg` if not in `[inline, agg]`, but the backend used initially (which is working for me) is `module://backend_interagg`.\r\n\r\nSetting the backend to the initial value after importing `nilearn.datasets` fixes the problem.\r\n\r\nI don't know exactly how those matplotlib backends work, if the list of accepted backends in `nilearn.datasets.__init__` could be extended or if we could have a more robust list of valid matplotlib backend. Also, I feel that nilearn shouldn't override matplotlib backend silently? 
\r\n\r\n<!--Please fill in the following information, to the best of your ability.-->\r\nNilearn version: 0.7.1\r\n\r\n### Expected behavior\r\n\r\nNilearn does not override matplotlib backend silently.\r\n\r\n### Actual behavior\r\n\r\nNilearn sets backend to \"agg\" if not in [\"inline\", \"agg\"], causing trouble with remote SSH development.\r\n\r\n### Steps and code to reproduce bug\r\n\r\nRun the following script through SSH remote interpreter\r\n\r\n```python\r\nimport matplotlib\r\ninitial_backend = matplotlib.get_backend().lower()\r\nprint(initial_backend)\r\n\r\nimport matplotlib.pyplot as plt\r\nimport numpy as np\r\n\r\narr = np.zeros((100, 100))\r\nplt.figure()\r\nplt.plot(list(range(10)))\r\nplt.show()\r\n# >> The show is displayed in PyCharm\r\n\r\nfrom nilearn import datasets, plotting\r\nprint(matplotlib.get_backend().lower())\r\n# >> Backend has switched to \"agg\"\r\n\r\nsample_brain_map = datasets.fetch_neurovault_motor_task().images[0]\r\nplotting.plot_stat_map(\r\n sample_brain_map,\r\n threshold=3,\r\n title=\"Before setting back the backend\",\r\n)\r\n\r\nplt.show()\r\n# >> Does not show up\r\n\r\nmatplotlib.use(initial_backend)\r\nplotting.plot_stat_map(\r\n sample_brain_map,\r\n threshold=3,\r\n title=\"After setting back the backend\",\r\n)\r\n\r\nplt.show()\r\n# >> Shows up\r\n\r\n```\r\n\n", "before_files": [{"content": "\"\"\"\nPlotting code for nilearn\n\"\"\"\n# Original Authors: Chris Filo Gorgolewski, Gael Varoquaux\nimport os\nimport sys\nimport importlib\n\n\n###############################################################################\n# Make sure that we don't get DISPLAY problems when running without X on\n# unices\ndef _set_mpl_backend():\n # We are doing local imports here to avoid polluting our namespace\n try:\n import matplotlib\n except ImportError:\n if importlib.util.find_spec(\"pytest\") is not None:\n from .._utils.testing import skip_if_running_tests\n # No need to fail when running tests\n skip_if_running_tests('matplotlib not installed')\n raise\n else:\n from ..version import (_import_module_with_version_check,\n OPTIONAL_MATPLOTLIB_MIN_VERSION)\n # When matplotlib was successfully imported we need to check\n # that the version is greater that the minimum required one\n _import_module_with_version_check('matplotlib',\n OPTIONAL_MATPLOTLIB_MIN_VERSION)\n current_backend = matplotlib.get_backend().lower()\n\n if 'inline' in current_backend or 'nbagg' in current_backend:\n return\n # Set the backend to a non-interactive one for unices without X\n # (see gh-2560)\n if (sys.platform not in ('darwin', 'win32') and\n 'DISPLAY' not in os.environ):\n matplotlib.use('Agg')\n\n\n_set_mpl_backend()\n\n###############################################################################\nfrom . 
import cm\nfrom .img_plotting import (\n plot_img, plot_anat, plot_epi, plot_roi, plot_stat_map,\n plot_glass_brain, plot_connectome, plot_connectome_strength,\n plot_markers, plot_prob_atlas, plot_carpet, plot_img_comparison, show)\nfrom .find_cuts import find_xyz_cut_coords, find_cut_slices, \\\n find_parcellation_cut_coords, find_probabilistic_atlas_cut_coords\nfrom .matrix_plotting import (plot_matrix, plot_contrast_matrix,\n plot_design_matrix, plot_event)\nfrom .html_surface import view_surf, view_img_on_surf\nfrom .html_stat_map import view_img\nfrom .html_connectome import view_connectome, view_markers\nfrom .surf_plotting import (plot_surf, plot_surf_stat_map, plot_surf_roi,\n plot_img_on_surf, plot_surf_contours)\n\n__all__ = ['cm', 'plot_img', 'plot_anat', 'plot_epi',\n 'plot_roi', 'plot_stat_map', 'plot_glass_brain',\n 'plot_markers', 'plot_connectome', 'plot_prob_atlas',\n 'find_xyz_cut_coords', 'find_cut_slices',\n 'plot_img_comparison',\n 'show', 'plot_matrix',\n 'plot_design_matrix', 'plot_contrast_matrix', 'plot_event',\n 'view_surf', 'view_img_on_surf',\n 'view_img', 'view_connectome', 'view_markers',\n 'find_parcellation_cut_coords',\n 'find_probabilistic_atlas_cut_coords',\n 'plot_surf', 'plot_surf_stat_map', 'plot_surf_roi',\n 'plot_img_on_surf', 'plot_connectome_strength', 'plot_carpet',\n 'plot_surf_contours']\n", "path": "nilearn/plotting/__init__.py"}], "after_files": [{"content": "\"\"\"\nPlotting code for nilearn\n\"\"\"\n# Original Authors: Chris Filo Gorgolewski, Gael Varoquaux\nimport importlib\nimport warnings\n\n\n###############################################################################\n# Make sure that we don't get DISPLAY problems when running without X on\n# unices\ndef _set_mpl_backend():\n # We are doing local imports here to avoid polluting our namespace\n try:\n import matplotlib\n except ImportError:\n if importlib.util.find_spec(\"pytest\") is not None:\n from .._utils.testing import skip_if_running_tests\n # No need to fail when running tests\n skip_if_running_tests('matplotlib not installed')\n raise\n else:\n from ..version import (_import_module_with_version_check,\n OPTIONAL_MATPLOTLIB_MIN_VERSION)\n # When matplotlib was successfully imported we need to check\n # that the version is greater that the minimum required one\n _import_module_with_version_check('matplotlib',\n OPTIONAL_MATPLOTLIB_MIN_VERSION)\n current_backend = matplotlib.get_backend().lower()\n\n try:\n # Making sure the current backend is usable by matplotlib\n matplotlib.use(current_backend)\n except Exception:\n # If not, switching to default agg backend\n matplotlib.use(\"Agg\")\n new_backend = matplotlib.get_backend().lower()\n\n if new_backend != current_backend:\n # Matplotlib backend has been changed, let's warn the user\n warnings.warn(f\"Backend changed to {new_backend}...\")\n\n\n_set_mpl_backend()\n\n###############################################################################\nfrom . 
import cm\nfrom .img_plotting import (\n plot_img, plot_anat, plot_epi, plot_roi, plot_stat_map,\n plot_glass_brain, plot_connectome, plot_connectome_strength,\n plot_markers, plot_prob_atlas, plot_carpet, plot_img_comparison, show)\nfrom .find_cuts import find_xyz_cut_coords, find_cut_slices, \\\n find_parcellation_cut_coords, find_probabilistic_atlas_cut_coords\nfrom .matrix_plotting import (plot_matrix, plot_contrast_matrix,\n plot_design_matrix, plot_event)\nfrom .html_surface import view_surf, view_img_on_surf\nfrom .html_stat_map import view_img\nfrom .html_connectome import view_connectome, view_markers\nfrom .surf_plotting import (plot_surf, plot_surf_stat_map, plot_surf_roi,\n plot_img_on_surf, plot_surf_contours)\n\n__all__ = ['cm', 'plot_img', 'plot_anat', 'plot_epi',\n 'plot_roi', 'plot_stat_map', 'plot_glass_brain',\n 'plot_markers', 'plot_connectome', 'plot_prob_atlas',\n 'find_xyz_cut_coords', 'find_cut_slices',\n 'plot_img_comparison',\n 'show', 'plot_matrix',\n 'plot_design_matrix', 'plot_contrast_matrix', 'plot_event',\n 'view_surf', 'view_img_on_surf',\n 'view_img', 'view_connectome', 'view_markers',\n 'find_parcellation_cut_coords',\n 'find_probabilistic_atlas_cut_coords',\n 'plot_surf', 'plot_surf_stat_map', 'plot_surf_roi',\n 'plot_img_on_surf', 'plot_connectome_strength', 'plot_carpet',\n 'plot_surf_contours']\n", "path": "nilearn/plotting/__init__.py"}]} | 1,642 | 337 |
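The patched `_set_mpl_backend` in the nilearn record above boils down to a probe-and-fallback pattern. Here is that pattern as a standalone sketch (independent of nilearn, assuming only that matplotlib is installed):

```python
import warnings

import matplotlib

current_backend = matplotlib.get_backend().lower()
try:
    # Probe: keep whatever backend is already active if matplotlib accepts it
    # (e.g. PyCharm's module://backend_interagg).
    matplotlib.use(current_backend)
except Exception:
    # Fall back to the non-interactive default only when the probe fails.
    matplotlib.use("Agg")

new_backend = matplotlib.get_backend().lower()
if new_backend != current_backend:
    # Never switch backends silently.
    warnings.warn(f"Backend changed to {new_backend}...")
```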
gh_patches_debug_130 | rasdani/github-patches | git_diff | svthalia__concrexit-1750 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Event registration member serializer should be read only
### Describe the bug
https://github.com/svthalia/concrexit/blob/4ab37961f50e398cc52422cdc1df66f6ab8ff2ee/website/events/api/v2/serializers/event_registration.py#L34 This serializer should be read-only
### How to reproduce
https://staging.thalia.nu/api/v2/events/150/registrations/ shows that you can POST to update the member profile, which should not be the case
### Expected behaviour
Be read only
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/events/api/v2/serializers/event_registration.py`
Content:
```
1 from rest_framework import serializers
2
3 from events.models import EventRegistration
4 from members.api.v2.serializers.member import MemberSerializer
5
6
7 class EventRegistrationSerializer(serializers.ModelSerializer):
8 """Serializer for event registrations."""
9
10 def __init__(self, *args, **kwargs):
11 # Don't pass the 'fields' arg up to the superclass
12 fields = kwargs.pop("fields", {"pk", "member", "name"})
13
14 # Instantiate the superclass normally
15 super().__init__(*args, **kwargs)
16
17 allowed = set(fields)
18 existing = set(self.fields.keys())
19 for field_name in existing - allowed:
20 self.fields.pop(field_name)
21
22 class Meta:
23 model = EventRegistration
24 fields = (
25 "pk",
26 "present",
27 "queue_position",
28 "date",
29 "payment",
30 "member",
31 "name",
32 )
33
34 member = MemberSerializer(detailed=False)
35
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/website/events/api/v2/serializers/event_registration.py b/website/events/api/v2/serializers/event_registration.py
--- a/website/events/api/v2/serializers/event_registration.py
+++ b/website/events/api/v2/serializers/event_registration.py
@@ -31,4 +31,4 @@
"name",
)
- member = MemberSerializer(detailed=False)
+ member = MemberSerializer(detailed=False, read_only=True)
| {"golden_diff": "diff --git a/website/events/api/v2/serializers/event_registration.py b/website/events/api/v2/serializers/event_registration.py\n--- a/website/events/api/v2/serializers/event_registration.py\n+++ b/website/events/api/v2/serializers/event_registration.py\n@@ -31,4 +31,4 @@\n \"name\",\n )\n \n- member = MemberSerializer(detailed=False)\n+ member = MemberSerializer(detailed=False, read_only=True)\n", "issue": "Event registration member serializer should be read only\n### Describe the bug\r\nhttps://github.com/svthalia/concrexit/blob/4ab37961f50e398cc52422cdc1df66f6ab8ff2ee/website/events/api/v2/serializers/event_registration.py#L34 This serializer should be read-only\r\n\r\n### How to reproduce\r\nhttps://staging.thalia.nu/api/v2/events/150/registrations/ shows that you can POST to update the member profile, that should not be the case\r\n\r\n### Expected behaviour\r\nBe read only\r\n\n", "before_files": [{"content": "from rest_framework import serializers\n\nfrom events.models import EventRegistration\nfrom members.api.v2.serializers.member import MemberSerializer\n\n\nclass EventRegistrationSerializer(serializers.ModelSerializer):\n \"\"\"Serializer for event registrations.\"\"\"\n\n def __init__(self, *args, **kwargs):\n # Don't pass the 'fields' arg up to the superclass\n fields = kwargs.pop(\"fields\", {\"pk\", \"member\", \"name\"})\n\n # Instantiate the superclass normally\n super().__init__(*args, **kwargs)\n\n allowed = set(fields)\n existing = set(self.fields.keys())\n for field_name in existing - allowed:\n self.fields.pop(field_name)\n\n class Meta:\n model = EventRegistration\n fields = (\n \"pk\",\n \"present\",\n \"queue_position\",\n \"date\",\n \"payment\",\n \"member\",\n \"name\",\n )\n\n member = MemberSerializer(detailed=False)\n", "path": "website/events/api/v2/serializers/event_registration.py"}], "after_files": [{"content": "from rest_framework import serializers\n\nfrom events.models import EventRegistration\nfrom members.api.v2.serializers.member import MemberSerializer\n\n\nclass EventRegistrationSerializer(serializers.ModelSerializer):\n \"\"\"Serializer for event registrations.\"\"\"\n\n def __init__(self, *args, **kwargs):\n # Don't pass the 'fields' arg up to the superclass\n fields = kwargs.pop(\"fields\", {\"pk\", \"member\", \"name\"})\n\n # Instantiate the superclass normally\n super().__init__(*args, **kwargs)\n\n allowed = set(fields)\n existing = set(self.fields.keys())\n for field_name in existing - allowed:\n self.fields.pop(field_name)\n\n class Meta:\n model = EventRegistration\n fields = (\n \"pk\",\n \"present\",\n \"queue_position\",\n \"date\",\n \"payment\",\n \"member\",\n \"name\",\n )\n\n member = MemberSerializer(detailed=False, read_only=True)\n", "path": "website/events/api/v2/serializers/event_registration.py"}]} | 653 | 103 |
gh_patches_debug_30897 | rasdani/github-patches | git_diff | encode__starlette-186 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Session middleware is highly insecure
The session middleware does not have any timestamp component to it, which means that if a session happens to be somehow leaked, it can be reused any time in the future.
Can we have a timestamp component added to the session, so that the session middleware can reject such sessions after a reasonable period of time (e.g. 24 hours)?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `starlette/middleware/sessions.py`
Content:
```
1 import functools
2 import json
3 from base64 import b64decode, b64encode
4
5 import itsdangerous
6
7 from starlette.datastructures import MutableHeaders
8 from starlette.requests import Request
9 from starlette.types import ASGIApp, ASGIInstance, Message, Receive, Scope, Send
10
11
12 class SessionMiddleware:
13 def __init__(
14 self, app: ASGIApp, secret_key: str, session_cookie: str = "session"
15 ) -> None:
16 self.app = app
17 self.signer = itsdangerous.Signer(secret_key)
18 self.session_cookie = session_cookie
19
20 def __call__(self, scope: Scope) -> ASGIInstance:
21 if scope["type"] in ("http", "websocket"):
22 request = Request(scope)
23 if self.session_cookie in request.cookies:
24 data = request.cookies[self.session_cookie].encode("utf-8")
25 data = self.signer.unsign(data)
26 scope["session"] = json.loads(b64decode(data))
27 else:
28 scope["session"] = {}
29 return functools.partial(self.asgi, scope=scope)
30 return self.app(scope) # pragma: no cover
31
32 async def asgi(self, receive: Receive, send: Send, scope: Scope) -> None:
33 was_empty_session = not scope["session"]
34 inner = self.app(scope)
35
36 async def sender(message: Message) -> None:
37 if message["type"] == "http.response.start":
38 if scope["session"]:
39 # We have session data to persist.
40 data = b64encode(json.dumps(scope["session"]).encode("utf-8"))
41 data = self.signer.sign(data)
42 headers = MutableHeaders(scope=message)
43 header_value = "%s=%s" % (self.session_cookie, data.decode("utf-8"))
44 headers.append("Set-Cookie", header_value)
45 elif not was_empty_session:
46 # The session has been cleared.
47 headers = MutableHeaders(scope=message)
48 header_value = "%s=%s" % (
49 self.session_cookie,
50 "null; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT",
51 )
52 headers.append("Set-Cookie", header_value)
53 await send(message)
54
55 await inner(receive, sender)
56
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/starlette/middleware/sessions.py b/starlette/middleware/sessions.py
--- a/starlette/middleware/sessions.py
+++ b/starlette/middleware/sessions.py
@@ -3,6 +3,7 @@
from base64 import b64decode, b64encode
import itsdangerous
+from itsdangerous.exc import BadTimeSignature, SignatureExpired
from starlette.datastructures import MutableHeaders
from starlette.requests import Request
@@ -11,19 +12,27 @@
class SessionMiddleware:
def __init__(
- self, app: ASGIApp, secret_key: str, session_cookie: str = "session"
+ self,
+ app: ASGIApp,
+ secret_key: str,
+ session_cookie: str = "session",
+ max_age: int = 14 * 24 * 60 * 60, # 14 days, in seconds
) -> None:
self.app = app
- self.signer = itsdangerous.Signer(secret_key)
+ self.signer = itsdangerous.TimestampSigner(secret_key)
self.session_cookie = session_cookie
+ self.max_age = max_age
def __call__(self, scope: Scope) -> ASGIInstance:
if scope["type"] in ("http", "websocket"):
request = Request(scope)
if self.session_cookie in request.cookies:
data = request.cookies[self.session_cookie].encode("utf-8")
- data = self.signer.unsign(data)
- scope["session"] = json.loads(b64decode(data))
+ try:
+ data = self.signer.unsign(data, max_age=self.max_age)
+ scope["session"] = json.loads(b64decode(data))
+ except (BadTimeSignature, SignatureExpired):
+ scope["session"] = {}
else:
scope["session"] = {}
return functools.partial(self.asgi, scope=scope)
| {"golden_diff": "diff --git a/starlette/middleware/sessions.py b/starlette/middleware/sessions.py\n--- a/starlette/middleware/sessions.py\n+++ b/starlette/middleware/sessions.py\n@@ -3,6 +3,7 @@\n from base64 import b64decode, b64encode\n \n import itsdangerous\n+from itsdangerous.exc import BadTimeSignature, SignatureExpired\n \n from starlette.datastructures import MutableHeaders\n from starlette.requests import Request\n@@ -11,19 +12,27 @@\n \n class SessionMiddleware:\n def __init__(\n- self, app: ASGIApp, secret_key: str, session_cookie: str = \"session\"\n+ self,\n+ app: ASGIApp,\n+ secret_key: str,\n+ session_cookie: str = \"session\",\n+ max_age: int = 14 * 24 * 60 * 60, # 14 days, in seconds\n ) -> None:\n self.app = app\n- self.signer = itsdangerous.Signer(secret_key)\n+ self.signer = itsdangerous.TimestampSigner(secret_key)\n self.session_cookie = session_cookie\n+ self.max_age = max_age\n \n def __call__(self, scope: Scope) -> ASGIInstance:\n if scope[\"type\"] in (\"http\", \"websocket\"):\n request = Request(scope)\n if self.session_cookie in request.cookies:\n data = request.cookies[self.session_cookie].encode(\"utf-8\")\n- data = self.signer.unsign(data)\n- scope[\"session\"] = json.loads(b64decode(data))\n+ try:\n+ data = self.signer.unsign(data, max_age=self.max_age)\n+ scope[\"session\"] = json.loads(b64decode(data))\n+ except (BadTimeSignature, SignatureExpired):\n+ scope[\"session\"] = {}\n else:\n scope[\"session\"] = {}\n return functools.partial(self.asgi, scope=scope)\n", "issue": "Session middleware is highly insecure\nThe session middleware does not have any timestamp component to it, which means that if a session happens to be somehow leaked, it can be reused any time in the future.\r\n\r\nCan we have a timestamp component added to the session, so that the session middleware can reject such sessions after a reasonable period of time (e.g. 
24 hours)?\n", "before_files": [{"content": "import functools\nimport json\nfrom base64 import b64decode, b64encode\n\nimport itsdangerous\n\nfrom starlette.datastructures import MutableHeaders\nfrom starlette.requests import Request\nfrom starlette.types import ASGIApp, ASGIInstance, Message, Receive, Scope, Send\n\n\nclass SessionMiddleware:\n def __init__(\n self, app: ASGIApp, secret_key: str, session_cookie: str = \"session\"\n ) -> None:\n self.app = app\n self.signer = itsdangerous.Signer(secret_key)\n self.session_cookie = session_cookie\n\n def __call__(self, scope: Scope) -> ASGIInstance:\n if scope[\"type\"] in (\"http\", \"websocket\"):\n request = Request(scope)\n if self.session_cookie in request.cookies:\n data = request.cookies[self.session_cookie].encode(\"utf-8\")\n data = self.signer.unsign(data)\n scope[\"session\"] = json.loads(b64decode(data))\n else:\n scope[\"session\"] = {}\n return functools.partial(self.asgi, scope=scope)\n return self.app(scope) # pragma: no cover\n\n async def asgi(self, receive: Receive, send: Send, scope: Scope) -> None:\n was_empty_session = not scope[\"session\"]\n inner = self.app(scope)\n\n async def sender(message: Message) -> None:\n if message[\"type\"] == \"http.response.start\":\n if scope[\"session\"]:\n # We have session data to persist.\n data = b64encode(json.dumps(scope[\"session\"]).encode(\"utf-8\"))\n data = self.signer.sign(data)\n headers = MutableHeaders(scope=message)\n header_value = \"%s=%s\" % (self.session_cookie, data.decode(\"utf-8\"))\n headers.append(\"Set-Cookie\", header_value)\n elif not was_empty_session:\n # The session has been cleared.\n headers = MutableHeaders(scope=message)\n header_value = \"%s=%s\" % (\n self.session_cookie,\n \"null; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT\",\n )\n headers.append(\"Set-Cookie\", header_value)\n await send(message)\n\n await inner(receive, sender)\n", "path": "starlette/middleware/sessions.py"}], "after_files": [{"content": "import functools\nimport json\nfrom base64 import b64decode, b64encode\n\nimport itsdangerous\nfrom itsdangerous.exc import BadTimeSignature, SignatureExpired\n\nfrom starlette.datastructures import MutableHeaders\nfrom starlette.requests import Request\nfrom starlette.types import ASGIApp, ASGIInstance, Message, Receive, Scope, Send\n\n\nclass SessionMiddleware:\n def __init__(\n self,\n app: ASGIApp,\n secret_key: str,\n session_cookie: str = \"session\",\n max_age: int = 14 * 24 * 60 * 60, # 14 days, in seconds\n ) -> None:\n self.app = app\n self.signer = itsdangerous.TimestampSigner(secret_key)\n self.session_cookie = session_cookie\n self.max_age = max_age\n\n def __call__(self, scope: Scope) -> ASGIInstance:\n if scope[\"type\"] in (\"http\", \"websocket\"):\n request = Request(scope)\n if self.session_cookie in request.cookies:\n data = request.cookies[self.session_cookie].encode(\"utf-8\")\n try:\n data = self.signer.unsign(data, max_age=self.max_age)\n scope[\"session\"] = json.loads(b64decode(data))\n except (BadTimeSignature, SignatureExpired):\n scope[\"session\"] = {}\n else:\n scope[\"session\"] = {}\n return functools.partial(self.asgi, scope=scope)\n return self.app(scope) # pragma: no cover\n\n async def asgi(self, receive: Receive, send: Send, scope: Scope) -> None:\n was_empty_session = not scope[\"session\"]\n inner = self.app(scope)\n\n async def sender(message: Message) -> None:\n if message[\"type\"] == \"http.response.start\":\n if scope[\"session\"]:\n # We have session data to persist.\n data = 
b64encode(json.dumps(scope[\"session\"]).encode(\"utf-8\"))\n data = self.signer.sign(data)\n headers = MutableHeaders(scope=message)\n header_value = \"%s=%s\" % (self.session_cookie, data.decode(\"utf-8\"))\n headers.append(\"Set-Cookie\", header_value)\n elif not was_empty_session:\n # The session has been cleared.\n headers = MutableHeaders(scope=message)\n header_value = \"%s=%s\" % (\n self.session_cookie,\n \"null; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT\",\n )\n headers.append(\"Set-Cookie\", header_value)\n await send(message)\n\n await inner(receive, sender)\n", "path": "starlette/middleware/sessions.py"}]} | 933 | 428 |
gh_patches_debug_13477 | rasdani/github-patches | git_diff | dj-stripe__dj-stripe-547 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error: Cannot resolve keyword 'customer' into field.
All,
Cannot get past the step
`python manage.py djstripe_init_customers`
in the installation.
Running Python 3.6.0, Django 1.11, and the latest version of dj-stripe (1.0.0).
What combination of Django version and dj-stripe version are folks successfully using at the moment? Thanks!
Here is the traceback:
```
Traceback (most recent call last):
File "manage.py", line 22, in <module>
execute_from_command_line(sys.argv)
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/core/management/__init__.py", line 363, in execute_from_command_line
utility.execute()
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/core/management/__init__.py", line 355, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/core/management/base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/core/management/base.py", line 330, in execute
output = self.handle(*args, **options)
File "/Users/jdln/temp/dj-stripe/djstripe/management/commands/djstripe_init_customers.py", line 25, in handle
for subscriber in get_subscriber_model().objects.filter(customer__isnull=True):
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/manager.py", line 85, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/query.py", line 781, in filter
return self._filter_or_exclude(False, *args, **kwargs)
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/query.py", line 799, in _filter_or_exclude
clone.query.add_q(Q(*args, **kwargs))
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/sql/query.py", line 1260, in add_q
clause, _ = self._add_q(q_object, self.used_aliases)
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/sql/query.py", line 1286, in _add_q
allow_joins=allow_joins, split_subq=split_subq,
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/sql/query.py", line 1164, in build_filter
lookups, parts, reffed_expression = self.solve_lookup_type(arg)
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/sql/query.py", line 1044, in solve_lookup_type
_, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta())
File "/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/sql/query.py", line 1362, in names_to_path
"Choices are: %s" % (name, ", ".join(available)))
django.core.exceptions.FieldError: Cannot resolve keyword 'customer' into field. Choices are: date_joined, djstripe_customers, email, first_name, groups, id, is_active, is_staff, is_superuser, last_login, last_name, logentry, password, user_permissions, username
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `djstripe/management/commands/djstripe_init_customers.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """
3 .. module:: djstripe.management.commands.djstripe_init_customers.
4
5 :synopsis: dj-stripe - init_customers command.
6
7 .. moduleauthor:: @kavdev, @pydanny
8
9 """
10 from __future__ import unicode_literals
11
12 from django.core.management.base import BaseCommand
13
14 from ...models import Customer
15 from ...settings import get_subscriber_model
16
17
18 class Command(BaseCommand):
19 """Create customer objects for existing subscribers that don't have one."""
20
21 help = "Create customer objects for existing subscribers that don't have one"
22
23 def handle(self, *args, **options):
24 """Create Customer objects for Subscribers without Customer objects associated."""
25 for subscriber in get_subscriber_model().objects.filter(customer__isnull=True):
26 # use get_or_create in case of race conditions on large subscriber bases
27 Customer.get_or_create(subscriber=subscriber)
28 print("Created subscriber for {0}".format(subscriber.email))
29
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/djstripe/management/commands/djstripe_init_customers.py b/djstripe/management/commands/djstripe_init_customers.py
--- a/djstripe/management/commands/djstripe_init_customers.py
+++ b/djstripe/management/commands/djstripe_init_customers.py
@@ -22,7 +22,7 @@
def handle(self, *args, **options):
"""Create Customer objects for Subscribers without Customer objects associated."""
- for subscriber in get_subscriber_model().objects.filter(customer__isnull=True):
+ for subscriber in get_subscriber_model().objects.filter(djstripe_customers=None):
# use get_or_create in case of race conditions on large subscriber bases
Customer.get_or_create(subscriber=subscriber)
print("Created subscriber for {0}".format(subscriber.email))
| {"golden_diff": "diff --git a/djstripe/management/commands/djstripe_init_customers.py b/djstripe/management/commands/djstripe_init_customers.py\n--- a/djstripe/management/commands/djstripe_init_customers.py\n+++ b/djstripe/management/commands/djstripe_init_customers.py\n@@ -22,7 +22,7 @@\n \n def handle(self, *args, **options):\n \"\"\"Create Customer objects for Subscribers without Customer objects associated.\"\"\"\n- for subscriber in get_subscriber_model().objects.filter(customer__isnull=True):\n+ for subscriber in get_subscriber_model().objects.filter(djstripe_customers=None):\n # use get_or_create in case of race conditions on large subscriber bases\n Customer.get_or_create(subscriber=subscriber)\n print(\"Created subscriber for {0}\".format(subscriber.email))\n", "issue": "Error: Cannot resolve keyword 'customer' into field. \nAll,\r\n\r\nCannot get past the step\r\n\r\n`python manage.py djstripe_init_customers`\r\n\r\nin the installation. \r\n\r\nRunning Python 3.6.0, Django 1.11, and the latest version of dj-stripe (1.0.0).\r\n\r\nWhat combination of Django version and dj-stripe version are folks successfully using at the moment? Thanks! \r\n\r\nHere is the traceback:\r\n```\r\nTraceback (most recent call last):\r\n File \"manage.py\", line 22, in <module>\r\n execute_from_command_line(sys.argv)\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/core/management/__init__.py\", line 363, in execute_from_command_line\r\n utility.execute()\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/core/management/__init__.py\", line 355, in execute\r\n self.fetch_command(subcommand).run_from_argv(self.argv)\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/core/management/base.py\", line 283, in run_from_argv\r\n self.execute(*args, **cmd_options)\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/core/management/base.py\", line 330, in execute\r\n output = self.handle(*args, **options)\r\n File \"/Users/jdln/temp/dj-stripe/djstripe/management/commands/djstripe_init_customers.py\", line 25, in handle\r\n for subscriber in get_subscriber_model().objects.filter(customer__isnull=True):\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/manager.py\", line 85, in manager_method\r\n return getattr(self.get_queryset(), name)(*args, **kwargs)\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/query.py\", line 781, in filter\r\n return self._filter_or_exclude(False, *args, **kwargs)\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/query.py\", line 799, in _filter_or_exclude\r\n clone.query.add_q(Q(*args, **kwargs))\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/sql/query.py\", line 1260, in add_q\r\n clause, _ = self._add_q(q_object, self.used_aliases)\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/sql/query.py\", line 1286, in _add_q\r\n allow_joins=allow_joins, split_subq=split_subq,\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/sql/query.py\", line 1164, in build_filter\r\n lookups, parts, reffed_expression = self.solve_lookup_type(arg)\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/sql/query.py\", line 1044, in solve_lookup_type\r\n _, field, _, lookup_parts = 
self.names_to_path(lookup_splitted, self.get_meta())\r\n File \"/Users/jdln/.virtualenvs/djstripe/lib/python3.6/site-packages/django/db/models/sql/query.py\", line 1362, in names_to_path\r\n \"Choices are: %s\" % (name, \", \".join(available)))\r\ndjango.core.exceptions.FieldError: Cannot resolve keyword 'customer' into field. Choices are: date_joined, djstripe_customers, email, first_name, groups, id, is_active, is_staff, is_superuser, last_login, last_name, logentry, password, user_permissions, username\r\n```\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\n.. module:: djstripe.management.commands.djstripe_init_customers.\n\n :synopsis: dj-stripe - init_customers command.\n\n.. moduleauthor:: @kavdev, @pydanny\n\n\"\"\"\nfrom __future__ import unicode_literals\n\nfrom django.core.management.base import BaseCommand\n\nfrom ...models import Customer\nfrom ...settings import get_subscriber_model\n\n\nclass Command(BaseCommand):\n \"\"\"Create customer objects for existing subscribers that don't have one.\"\"\"\n\n help = \"Create customer objects for existing subscribers that don't have one\"\n\n def handle(self, *args, **options):\n \"\"\"Create Customer objects for Subscribers without Customer objects associated.\"\"\"\n for subscriber in get_subscriber_model().objects.filter(customer__isnull=True):\n # use get_or_create in case of race conditions on large subscriber bases\n Customer.get_or_create(subscriber=subscriber)\n print(\"Created subscriber for {0}\".format(subscriber.email))\n", "path": "djstripe/management/commands/djstripe_init_customers.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\n.. module:: djstripe.management.commands.djstripe_init_customers.\n\n :synopsis: dj-stripe - init_customers command.\n\n.. moduleauthor:: @kavdev, @pydanny\n\n\"\"\"\nfrom __future__ import unicode_literals\n\nfrom django.core.management.base import BaseCommand\n\nfrom ...models import Customer\nfrom ...settings import get_subscriber_model\n\n\nclass Command(BaseCommand):\n \"\"\"Create customer objects for existing subscribers that don't have one.\"\"\"\n\n help = \"Create customer objects for existing subscribers that don't have one\"\n\n def handle(self, *args, **options):\n \"\"\"Create Customer objects for Subscribers without Customer objects associated.\"\"\"\n for subscriber in get_subscriber_model().objects.filter(djstripe_customers=None):\n # use get_or_create in case of race conditions on large subscriber bases\n Customer.get_or_create(subscriber=subscriber)\n print(\"Created subscriber for {0}\".format(subscriber.email))\n", "path": "djstripe/management/commands/djstripe_init_customers.py"}]} | 1,415 | 182 |
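As a side note on the dj-stripe fix above: the traceback's field list shows the reverse accessor is named `djstripe_customers`, which is why the corrected query filters on that name instead of `customer`. A hypothetical minimal model illustrating the relationship (field names inferred from the traceback, not taken from dj-stripe's actual source):

```python
from django.conf import settings
from django.db import models


class Customer(models.Model):
    # dj-stripe's Customer points at the subscriber model; the reverse
    # accessor on the user side is "djstripe_customers", so there is no
    # "customer" keyword to filter on -- hence the FieldError.
    subscriber = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null=True,
        on_delete=models.SET_NULL,
        related_name="djstripe_customers",
    )


# Subscribers without any Customer attached (the corrected lookup):
# get_user_model().objects.filter(djstripe_customers=None)
```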
gh_patches_debug_48141 | rasdani/github-patches | git_diff | google__flax-270 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`typing._ClassVar` cannot be accessed in the IPython shell; the dataclasses package seems to misbehave on Python 3.7
### Problem you have encountered:
I just installed flax and tried to import it from the IPython shell, but it raises an `AttributeError`.
```
In [1]: import flax
*snip*
~/.virtualenvs/flax2/lib/python3.7/site-packages/dataclasses.py in _is_classvar(a_type, typing)
548 # This test uses a typing internal class, but it's the best way to
549 # test if this is a ClassVar.
--> 550 return type(a_type) is typing._ClassVar
551
552
AttributeError: module 'typing' has no attribute '_ClassVar'
```
This does not happen in the normal interpreter, where everything goes fine.
### What you expected to happen:
I expected the import to work the same in IPython and the normal Python shell.
### Logs, error messages, etc:
Full traceback in this gist: https://gist.github.com/bayerj/96f096c7fb09a7c9b758dabdbca32671
### Steps to reproduce:
On Mac OS X with Python 3.7.6 (not Anaconda), with virtualenvwrapper installed.
```
❯❯❯ mkvirtualenv flax2
❯❯❯ pip install jaxlib
*snip*
❯❯❯ pip install flax
*snip*
❯❯❯ ipython
*snip*
In [1]: import flax
```
### Workaround
The problem seems to be in the `dataclasses` package from PyPI, not Python's built-in module. If I uninstall it...
```
❯❯❯ pip uninstall dataclasses
Found existing installation: dataclasses 0.6
Uninstalling dataclasses-0.6:
Would remove:
/Users/bayerj/.virtualenvs/debug2/lib/python3.7/site-packages/dataclasses-0.6.dist-info/*
/Users/bayerj/.virtualenvs/debug2/lib/python3.7/site-packages/dataclasses.py
Proceed (y/n)? y
Successfully uninstalled dataclasses-0.6
❯❯❯ ipython
/usr/local/lib/python3.7/site-packages/IPython/core/interactiveshell.py:931: UserWarning: Attempting to work in a virtualenv. If you encounter problems, please install IPython inside the virtualenv.
warn("Attempting to work in a virtualenv. If you encounter problems, please "
Python 3.7.6 (default, Dec 30 2019, 19:38:28)
Type 'copyright', 'credits' or 'license' for more information
IPython 7.9.0 -- An enhanced Interactive Python. Type '?' for help.
In [1]: import flax
```
... this goes fine.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright 2020 The Flax Authors.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """setup.py for Flax."""
16
17 import os
18 from setuptools import find_packages
19 from setuptools import setup
20
21 version = "0.1.0"
22
23 here = os.path.abspath(os.path.dirname(__file__))
24 try:
25 README = open(os.path.join(here, "README.md"), encoding='utf-8').read()
26 except IOError:
27 README = ""
28
29 install_requires = [
30 "numpy>=1.12",
31 "jax>=0.1.59",
32 "matplotlib", # only needed for tensorboard export
33 "dataclasses", # will only install on py3.6
34 "msgpack",
35 ]
36
37 tests_require = [
38 "jaxlib",
39 "pytest",
40 "pytest-cov",
41 "pytest-xdist",
42 "tensorflow",
43 "tensorflow_datasets",
44 ]
45
46 setup(
47 name="flax",
48 version=version,
49 description="Flax: A neural network library for JAX designed for flexibility",
50 long_description="\n\n".join([README]),
51 long_description_content_type='text/markdown',
52 classifiers=[
53 "Development Status :: 3 - Alpha",
54 "Intended Audience :: Developers",
55 "Intended Audience :: Science/Research",
56 "License :: OSI Approved :: MIT License",
57 "Programming Language :: Python :: 3.7",
58 "Topic :: Scientific/Engineering :: Artificial Intelligence",
59 ],
60 keywords="",
61 author="Flax team",
62 author_email="[email protected]",
63 url="https://github.com/google/flax",
64 license="Apache",
65 packages=find_packages(),
66 include_package_data=False,
67 zip_safe=False,
68 install_requires=install_requires,
69 extras_require={
70 "testing": tests_require,
71 },
72 )
73
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,7 @@
"numpy>=1.12",
"jax>=0.1.59",
"matplotlib", # only needed for tensorboard export
- "dataclasses", # will only install on py3.6
+ "dataclasses;python_version<'3.7'", # will only install on py3.6
"msgpack",
]
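For background on the marker syntax used in the fix: PEP 508 environment markers let pip evaluate a condition at install time, so the backport is only pulled in where the standard library lacks `dataclasses`. A minimal fragment showing the pattern (package list abbreviated from the setup.py above):

```python
# setup.py fragment -- pip skips the dataclasses backport on Python >= 3.7,
# where the module ships with the standard library.
install_requires = [
    "numpy>=1.12",
    "jax>=0.1.59",
    "dataclasses;python_version<'3.7'",
    "msgpack",
]
```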
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -30,7 +30,7 @@\n \"numpy>=1.12\",\n \"jax>=0.1.59\",\n \"matplotlib\", # only needed for tensorboard export\n- \"dataclasses\", # will only install on py3.6\n+ \"dataclasses;python_version<'3.7'\", # will only install on py3.6\n \"msgpack\",\n ]\n", "issue": "`typing._ClassVar` cannot be accessed in the iPython shell \u2013 dataclasses package seems to mess up on Python 3.7\n### Problem you have encountered:\r\n\r\nI just installed flax and tried to import it from the iPython shell. But it raises an `AttributeError`.\r\n\r\n```\r\nIn [1]: import flax\r\n\r\n*snip*\r\n\r\n~/.virtualenvs/flax2/lib/python3.7/site-packages/dataclasses.py in _is_classvar(a_type, typing)\r\n 548 # This test uses a typing internal class, but it's the best way to\r\n 549 # test if this is a ClassVar.\r\n--> 550 return type(a_type) is typing._ClassVar\r\n 551\r\n 552\r\n\r\nAttributeError: module 'typing' has no attribute '_ClassVar'\r\n```\r\nThis does not happen in the normal interpreter, where everything goes fine. \r\n\r\n### What you expected to happen:\r\n\r\nI expected the import to work the same in iPython and the normal python shell.\r\n\r\n### Logs, error messages, etc:\r\n\r\nFull traceback in this gist: https://gist.github.com/bayerj/96f096c7fb09a7c9b758dabdbca32671\r\n\r\n### Steps to reproduce:\r\n\r\nOn Mac OS X with Python 3.7.6, not anaconda, virtuelenvwrapper installed.\r\n\r\n```\r\n\u276f\u276f\u276f mkvirtualenv flax2\r\n\u276f\u276f\u276f pip install jaxlib\r\n*snip*\r\n\u276f\u276f\u276f pip install flax\r\n*snip*\r\n\u276f\u276f\u276f ipython\r\n\r\n*snip*\r\n\r\nIn [1]: import flax\r\n```\r\n\r\n### Workaround\r\n\r\nThe problem seems to be in the `dataclasses` package\u2013not python's own one\u2013from PyPI. If I uninstall it...\r\n\r\n```\r\n\u276f\u276f\u276f pip uninstall dataclasses\r\nFound existing installation: dataclasses 0.6\r\nUninstalling dataclasses-0.6:\r\n Would remove:\r\n /Users/bayerj/.virtualenvs/debug2/lib/python3.7/site-packages/dataclasses-0.6.dist-info/*\r\n /Users/bayerj/.virtualenvs/debug2/lib/python3.7/site-packages/dataclasses.py\r\nProceed (y/n)? y\r\n Successfully uninstalled dataclasses-0.6\r\n\u276f\u276f\u276f ipython\r\n/usr/local/lib/python3.7/site-packages/IPython/core/interactiveshell.py:931: UserWarning: Attempting to work in a virtualenv. If you encounter problems, please install IPython inside the virtualenv.\r\n warn(\"Attempting to work in a virtualenv. If you encounter problems, please \"\r\nPython 3.7.6 (default, Dec 30 2019, 19:38:28)\r\nType 'copyright', 'credits' or 'license' for more information\r\nIPython 7.9.0 -- An enhanced Interactive Python. Type '?' for help.\r\n\r\nIn [1]: import flax\r\n```\r\n... 
this goes fine.\r\n\r\n\r\n\r\n\n", "before_files": [{"content": "# Copyright 2020 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"setup.py for Flax.\"\"\"\n\nimport os\nfrom setuptools import find_packages\nfrom setuptools import setup\n\nversion = \"0.1.0\"\n\nhere = os.path.abspath(os.path.dirname(__file__))\ntry:\n README = open(os.path.join(here, \"README.md\"), encoding='utf-8').read()\nexcept IOError:\n README = \"\"\n\ninstall_requires = [\n \"numpy>=1.12\",\n \"jax>=0.1.59\",\n \"matplotlib\", # only needed for tensorboard export\n \"dataclasses\", # will only install on py3.6\n \"msgpack\",\n]\n\ntests_require = [\n \"jaxlib\",\n \"pytest\",\n \"pytest-cov\",\n \"pytest-xdist\",\n \"tensorflow\",\n \"tensorflow_datasets\",\n]\n\nsetup(\n name=\"flax\",\n version=version,\n description=\"Flax: A neural network library for JAX designed for flexibility\",\n long_description=\"\\n\\n\".join([README]),\n long_description_content_type='text/markdown',\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3.7\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n keywords=\"\",\n author=\"Flax team\",\n author_email=\"[email protected]\",\n url=\"https://github.com/google/flax\",\n license=\"Apache\",\n packages=find_packages(),\n include_package_data=False,\n zip_safe=False,\n install_requires=install_requires,\n extras_require={\n \"testing\": tests_require,\n },\n )\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright 2020 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"setup.py for Flax.\"\"\"\n\nimport os\nfrom setuptools import find_packages\nfrom setuptools import setup\n\nversion = \"0.1.0\"\n\nhere = os.path.abspath(os.path.dirname(__file__))\ntry:\n README = open(os.path.join(here, \"README.md\"), encoding='utf-8').read()\nexcept IOError:\n README = \"\"\n\ninstall_requires = [\n \"numpy>=1.12\",\n \"jax>=0.1.59\",\n \"matplotlib\", # only needed for tensorboard export\n \"dataclasses;python_version<'3.7'\", # will only install on py3.6\n \"msgpack\",\n]\n\ntests_require = [\n \"jaxlib\",\n \"pytest\",\n \"pytest-cov\",\n \"pytest-xdist\",\n \"tensorflow\",\n \"tensorflow_datasets\",\n]\n\nsetup(\n name=\"flax\",\n version=version,\n description=\"Flax: A neural network library for JAX designed for flexibility\",\n 
long_description=\"\\n\\n\".join([README]),\n long_description_content_type='text/markdown',\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3.7\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n ],\n keywords=\"\",\n author=\"Flax team\",\n author_email=\"[email protected]\",\n url=\"https://github.com/google/flax\",\n license=\"Apache\",\n packages=find_packages(),\n include_package_data=False,\n zip_safe=False,\n install_requires=install_requires,\n extras_require={\n \"testing\": tests_require,\n },\n )\n", "path": "setup.py"}]} | 1,546 | 111 |
gh_patches_debug_58693 | rasdani/github-patches | git_diff | google-parfait__tensorflow-federated-1334 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Keras model in federated_learning_for_image_classification.ipynb throws a warning
**Describe the bug**
The Keras Sequential model in [federated_learning_for_image_classification.ipynb](https://github.com/tensorflow/federated/blob/master/docs/tutorials/federated_learning_for_image_classification.ipynb) throws a warning.
The model in the notebook is
```python
def create_keras_model():
return tf.keras.models.Sequential([
tf.keras.layers.Input(shape=(784,)),
tf.keras.layers.Dense(10, kernel_initializer='zeros'),
tf.keras.layers.Softmax(),
])
```
Warning thrown:
```python
WARNING:tensorflow:Please add `keras.layers.InputLayer` instead of `keras.Input` to Sequential model. `keras.Input` is intended to be used by Functional model.
<tensorflow.python.keras.engine.sequential.Sequential at 0x7f66178a46d0>
```
This is easily fixed by using the correct layer type:
```python
def create_keras_model():
return tf.keras.models.Sequential([
tf.keras.layers.InputLayer(input_shape=(784,)),
tf.keras.layers.Dense(10, kernel_initializer='zeros'),
tf.keras.layers.Softmax(),
])
```
A [colab](https://colab.research.google.com/drive/1LFgBiu9xUa-k92IW24fiSX_kVp7lb0SB?usp=sharing) notebook that reproduces the bug.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tensorflow_federated/python/examples/remote_execution/remote_executor_example.py`
Content:
```
1 # Copyright 2018, The TensorFlow Federated Authors.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 """Example showing how to run a multi-machine simulation.
15
16 In order to run this example, you must have a running instance of the
17 Executor Service, either locally or on Kubernetes.
18
19 The model trains EMNIST for a small number of rounds, but uses a RemoteExecutor
20 to distribute the work to the ExecutorService.
21 """
22
23 import collections
24 import warnings
25
26 from absl import app
27 from absl import flags
28 import grpc
29 import numpy as np
30 import tensorflow as tf
31 import tensorflow_federated as tff
32
33 FLAGS = flags.FLAGS
34
35 flags.DEFINE_string('host', None, 'The host to connect to.')
36 flags.mark_flag_as_required('host')
37 flags.DEFINE_string('port', '8000', 'The port to connect to.')
38 flags.DEFINE_integer('n_clients', 10, 'Number of clients.')
39 flags.DEFINE_integer('n_rounds', 3, 'Number of rounds.')
40
41
42 def preprocess(dataset):
43
44 def element_fn(element):
45 return collections.OrderedDict([
46 ('x', tf.reshape(element['pixels'], [-1])),
47 ('y', tf.reshape(element['label'], [1])),
48 ])
49
50 return dataset.repeat(NUM_EPOCHS).map(element_fn).batch(BATCH_SIZE)
51
52
53 def make_federated_data(client_data, client_ids):
54 return [
55 preprocess(client_data.create_tf_dataset_for_client(x))
56 for x in client_ids
57 ]
58
59
60 NUM_EPOCHS = 10
61 BATCH_SIZE = 20
62
63
64 def make_remote_executor(inferred_cardinalities):
65 """Make remote executor."""
66
67 def create_worker_stack(ex):
68 ex = tff.framework.ThreadDelegatingExecutor(ex)
69 return tff.framework.ReferenceResolvingExecutor(ex)
70
71 client_ex = []
72 num_clients = inferred_cardinalities.get(tff.CLIENTS, None)
73 if num_clients:
74 print('Inferred that there are {} clients'.format(num_clients))
75 else:
76 print('No CLIENTS placement provided')
77
78 for _ in range(num_clients or 0):
79 channel = grpc.insecure_channel('{}:{}'.format(FLAGS.host, FLAGS.port))
80 remote_ex = tff.framework.RemoteExecutor(channel)
81 worker_stack = create_worker_stack(remote_ex)
82 client_ex.append(worker_stack)
83
84 federating_strategy_factory = tff.framework.FederatedResolvingStrategy.factory(
85 {
86 tff.SERVER: create_worker_stack(tff.framework.EagerTFExecutor()),
87 tff.CLIENTS: client_ex,
88 })
89 unplaced_ex = create_worker_stack(tff.framework.EagerTFExecutor())
90 federating_ex = tff.framework.FederatingExecutor(federating_strategy_factory,
91 unplaced_ex)
92 return tff.framework.ReferenceResolvingExecutor(federating_ex)
93
94
95 def main(argv):
96 if len(argv) > 1:
97 raise app.UsageError('Too many command-line arguments.')
98
99 warnings.simplefilter('ignore')
100
101 np.random.seed(0)
102
103 emnist_train, _ = tff.simulation.datasets.emnist.load_data()
104
105 sample_clients = emnist_train.client_ids[0:FLAGS.n_clients]
106
107 federated_train_data = make_federated_data(emnist_train, sample_clients)
108
109 example_dataset = emnist_train.create_tf_dataset_for_client(
110 emnist_train.client_ids[0])
111
112 preprocessed_example_dataset = preprocess(example_dataset)
113 input_spec = preprocessed_example_dataset.element_spec
114
115 def model_fn():
116 model = tf.keras.models.Sequential([
117 tf.keras.layers.Input(shape=(784,)),
118 tf.keras.layers.Dense(10, kernel_initializer='zeros'),
119 tf.keras.layers.Softmax(),
120 ])
121 return tff.learning.from_keras_model(
122 model,
123 input_spec=input_spec,
124 loss=tf.keras.losses.SparseCategoricalCrossentropy(),
125 metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])
126
127 iterative_process = tff.learning.build_federated_averaging_process(
128 model_fn,
129 client_optimizer_fn=lambda: tf.keras.optimizers.SGD(learning_rate=0.02))
130
131 factory = tff.framework.ResourceManagingExecutorFactory(make_remote_executor)
132 context = tff.framework.ExecutionContext(factory)
133 tff.framework.set_default_context(context)
134
135 state = iterative_process.initialize()
136
137 state, metrics = iterative_process.next(state, federated_train_data)
138 print('round 1, metrics={}'.format(metrics))
139
140 for round_num in range(2, FLAGS.n_rounds + 1):
141 state, metrics = iterative_process.next(state, federated_train_data)
142 print('round {:2d}, metrics={}'.format(round_num, metrics))
143
144
145 if __name__ == '__main__':
146 app.run(main)
147
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/tensorflow_federated/python/examples/remote_execution/remote_executor_example.py b/tensorflow_federated/python/examples/remote_execution/remote_executor_example.py
--- a/tensorflow_federated/python/examples/remote_execution/remote_executor_example.py
+++ b/tensorflow_federated/python/examples/remote_execution/remote_executor_example.py
@@ -114,7 +114,7 @@
def model_fn():
model = tf.keras.models.Sequential([
- tf.keras.layers.Input(shape=(784,)),
+ tf.keras.layers.InputLayer(input_shape=(784,)),
tf.keras.layers.Dense(10, kernel_initializer='zeros'),
tf.keras.layers.Softmax(),
])
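For readers comparing the two layer types in isolation, a minimal standalone sketch (TensorFlow 2.x assumed): `tf.keras.Input` returns a symbolic tensor intended for the functional API, while `InputLayer` is an actual layer that a `Sequential` model can hold without complaint.

```python
import tensorflow as tf

# InputLayer declares the input shape as a real layer, so Sequential
# accepts it silently; passing tf.keras.Input here triggers the warning.
model = tf.keras.models.Sequential([
    tf.keras.layers.InputLayer(input_shape=(784,)),
    tf.keras.layers.Dense(10, kernel_initializer="zeros"),
    tf.keras.layers.Softmax(),
])
model.summary()  # builds cleanly, no InputLayer warning
```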
| {"golden_diff": "diff --git a/tensorflow_federated/python/examples/remote_execution/remote_executor_example.py b/tensorflow_federated/python/examples/remote_execution/remote_executor_example.py\n--- a/tensorflow_federated/python/examples/remote_execution/remote_executor_example.py\n+++ b/tensorflow_federated/python/examples/remote_execution/remote_executor_example.py\n@@ -114,7 +114,7 @@\n \n def model_fn():\n model = tf.keras.models.Sequential([\n- tf.keras.layers.Input(shape=(784,)),\n+ tf.keras.layers.InputLayer(input_shape=(784,)),\n tf.keras.layers.Dense(10, kernel_initializer='zeros'),\n tf.keras.layers.Softmax(),\n ])\n", "issue": "Keras model in federated_learning_for_image_classification.ipynb throws warning\n**Describe the bug**\r\nKeras Sequential Model in [federated_learning_for_image_classification.ipynb](https://github.com/tensorflow/federated/blob/master/docs/tutorials/federated_learning_for_image_classification.ipynb) throws warning.\r\nThe model in the notebook is\r\n```python\r\ndef create_keras_model():\r\n return tf.keras.models.Sequential([\r\n tf.keras.layers.Input(shape=(784,)),\r\n tf.keras.layers.Dense(10, kernel_initializer='zeros'),\r\n tf.keras.layers.Softmax(),\r\n ])\r\n```\r\nWarning thrown:\r\n```python\r\nWARNING:tensorflow:Please add `keras.layers.InputLayer` instead of `keras.Input` to Sequential model. `keras.Input` is intended to be used by Functional model.\r\n<tensorflow.python.keras.engine.sequential.Sequential at 0x7f66178a46d0>\r\n```\r\n\r\nEasily fixed using the correct layer type:\r\n```python\r\ndef create_keras_model():\r\n return tf.keras.models.Sequential([\r\n tf.keras.layers.InputLayer(input_shape=(784,)),\r\n tf.keras.layers.Dense(10, kernel_initializer='zeros'),\r\n tf.keras.layers.Softmax(),\r\n ])\r\n```\r\n\r\n[colab](https://colab.research.google.com/drive/1LFgBiu9xUa-k92IW24fiSX_kVp7lb0SB?usp=sharing) notebook that reproduces the bug.\r\n\n", "before_files": [{"content": "# Copyright 2018, The TensorFlow Federated Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Example showing how to run a multi-machine simulation.\n\nIn order to run this example, you must have a running instance of the\nExecutor Service, either locally or on Kubernetes.\n\nThe model trains EMNIST for a small number of rounds, but uses a RemoteExecutor\nto distribute the work to the ExecutorService.\n\"\"\"\n\nimport collections\nimport warnings\n\nfrom absl import app\nfrom absl import flags\nimport grpc\nimport numpy as np\nimport tensorflow as tf\nimport tensorflow_federated as tff\n\nFLAGS = flags.FLAGS\n\nflags.DEFINE_string('host', None, 'The host to connect to.')\nflags.mark_flag_as_required('host')\nflags.DEFINE_string('port', '8000', 'The port to connect to.')\nflags.DEFINE_integer('n_clients', 10, 'Number of clients.')\nflags.DEFINE_integer('n_rounds', 3, 'Number of rounds.')\n\n\ndef preprocess(dataset):\n\n def element_fn(element):\n return collections.OrderedDict([\n ('x', tf.reshape(element['pixels'], [-1])),\n ('y', 
tf.reshape(element['label'], [1])),\n ])\n\n return dataset.repeat(NUM_EPOCHS).map(element_fn).batch(BATCH_SIZE)\n\n\ndef make_federated_data(client_data, client_ids):\n return [\n preprocess(client_data.create_tf_dataset_for_client(x))\n for x in client_ids\n ]\n\n\nNUM_EPOCHS = 10\nBATCH_SIZE = 20\n\n\ndef make_remote_executor(inferred_cardinalities):\n \"\"\"Make remote executor.\"\"\"\n\n def create_worker_stack(ex):\n ex = tff.framework.ThreadDelegatingExecutor(ex)\n return tff.framework.ReferenceResolvingExecutor(ex)\n\n client_ex = []\n num_clients = inferred_cardinalities.get(tff.CLIENTS, None)\n if num_clients:\n print('Inferred that there are {} clients'.format(num_clients))\n else:\n print('No CLIENTS placement provided')\n\n for _ in range(num_clients or 0):\n channel = grpc.insecure_channel('{}:{}'.format(FLAGS.host, FLAGS.port))\n remote_ex = tff.framework.RemoteExecutor(channel)\n worker_stack = create_worker_stack(remote_ex)\n client_ex.append(worker_stack)\n\n federating_strategy_factory = tff.framework.FederatedResolvingStrategy.factory(\n {\n tff.SERVER: create_worker_stack(tff.framework.EagerTFExecutor()),\n tff.CLIENTS: client_ex,\n })\n unplaced_ex = create_worker_stack(tff.framework.EagerTFExecutor())\n federating_ex = tff.framework.FederatingExecutor(federating_strategy_factory,\n unplaced_ex)\n return tff.framework.ReferenceResolvingExecutor(federating_ex)\n\n\ndef main(argv):\n if len(argv) > 1:\n raise app.UsageError('Too many command-line arguments.')\n\n warnings.simplefilter('ignore')\n\n np.random.seed(0)\n\n emnist_train, _ = tff.simulation.datasets.emnist.load_data()\n\n sample_clients = emnist_train.client_ids[0:FLAGS.n_clients]\n\n federated_train_data = make_federated_data(emnist_train, sample_clients)\n\n example_dataset = emnist_train.create_tf_dataset_for_client(\n emnist_train.client_ids[0])\n\n preprocessed_example_dataset = preprocess(example_dataset)\n input_spec = preprocessed_example_dataset.element_spec\n\n def model_fn():\n model = tf.keras.models.Sequential([\n tf.keras.layers.Input(shape=(784,)),\n tf.keras.layers.Dense(10, kernel_initializer='zeros'),\n tf.keras.layers.Softmax(),\n ])\n return tff.learning.from_keras_model(\n model,\n input_spec=input_spec,\n loss=tf.keras.losses.SparseCategoricalCrossentropy(),\n metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])\n\n iterative_process = tff.learning.build_federated_averaging_process(\n model_fn,\n client_optimizer_fn=lambda: tf.keras.optimizers.SGD(learning_rate=0.02))\n\n factory = tff.framework.ResourceManagingExecutorFactory(make_remote_executor)\n context = tff.framework.ExecutionContext(factory)\n tff.framework.set_default_context(context)\n\n state = iterative_process.initialize()\n\n state, metrics = iterative_process.next(state, federated_train_data)\n print('round 1, metrics={}'.format(metrics))\n\n for round_num in range(2, FLAGS.n_rounds + 1):\n state, metrics = iterative_process.next(state, federated_train_data)\n print('round {:2d}, metrics={}'.format(round_num, metrics))\n\n\nif __name__ == '__main__':\n app.run(main)\n", "path": "tensorflow_federated/python/examples/remote_execution/remote_executor_example.py"}], "after_files": [{"content": "# Copyright 2018, The TensorFlow Federated Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed 
to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Example showing how to run a multi-machine simulation.\n\nIn order to run this example, you must have a running instance of the\nExecutor Service, either locally or on Kubernetes.\n\nThe model trains EMNIST for a small number of rounds, but uses a RemoteExecutor\nto distribute the work to the ExecutorService.\n\"\"\"\n\nimport collections\nimport warnings\n\nfrom absl import app\nfrom absl import flags\nimport grpc\nimport numpy as np\nimport tensorflow as tf\nimport tensorflow_federated as tff\n\nFLAGS = flags.FLAGS\n\nflags.DEFINE_string('host', None, 'The host to connect to.')\nflags.mark_flag_as_required('host')\nflags.DEFINE_string('port', '8000', 'The port to connect to.')\nflags.DEFINE_integer('n_clients', 10, 'Number of clients.')\nflags.DEFINE_integer('n_rounds', 3, 'Number of rounds.')\n\n\ndef preprocess(dataset):\n\n def element_fn(element):\n return collections.OrderedDict([\n ('x', tf.reshape(element['pixels'], [-1])),\n ('y', tf.reshape(element['label'], [1])),\n ])\n\n return dataset.repeat(NUM_EPOCHS).map(element_fn).batch(BATCH_SIZE)\n\n\ndef make_federated_data(client_data, client_ids):\n return [\n preprocess(client_data.create_tf_dataset_for_client(x))\n for x in client_ids\n ]\n\n\nNUM_EPOCHS = 10\nBATCH_SIZE = 20\n\n\ndef make_remote_executor(inferred_cardinalities):\n \"\"\"Make remote executor.\"\"\"\n\n def create_worker_stack(ex):\n ex = tff.framework.ThreadDelegatingExecutor(ex)\n return tff.framework.ReferenceResolvingExecutor(ex)\n\n client_ex = []\n num_clients = inferred_cardinalities.get(tff.CLIENTS, None)\n if num_clients:\n print('Inferred that there are {} clients'.format(num_clients))\n else:\n print('No CLIENTS placement provided')\n\n for _ in range(num_clients or 0):\n channel = grpc.insecure_channel('{}:{}'.format(FLAGS.host, FLAGS.port))\n remote_ex = tff.framework.RemoteExecutor(channel)\n worker_stack = create_worker_stack(remote_ex)\n client_ex.append(worker_stack)\n\n federating_strategy_factory = tff.framework.FederatedResolvingStrategy.factory(\n {\n tff.SERVER: create_worker_stack(tff.framework.EagerTFExecutor()),\n tff.CLIENTS: client_ex,\n })\n unplaced_ex = create_worker_stack(tff.framework.EagerTFExecutor())\n federating_ex = tff.framework.FederatingExecutor(federating_strategy_factory,\n unplaced_ex)\n return tff.framework.ReferenceResolvingExecutor(federating_ex)\n\n\ndef main(argv):\n if len(argv) > 1:\n raise app.UsageError('Too many command-line arguments.')\n\n warnings.simplefilter('ignore')\n\n np.random.seed(0)\n\n emnist_train, _ = tff.simulation.datasets.emnist.load_data()\n\n sample_clients = emnist_train.client_ids[0:FLAGS.n_clients]\n\n federated_train_data = make_federated_data(emnist_train, sample_clients)\n\n example_dataset = emnist_train.create_tf_dataset_for_client(\n emnist_train.client_ids[0])\n\n preprocessed_example_dataset = preprocess(example_dataset)\n input_spec = preprocessed_example_dataset.element_spec\n\n def model_fn():\n model = tf.keras.models.Sequential([\n tf.keras.layers.InputLayer(input_shape=(784,)),\n tf.keras.layers.Dense(10, kernel_initializer='zeros'),\n tf.keras.layers.Softmax(),\n ])\n return tff.learning.from_keras_model(\n model,\n input_spec=input_spec,\n loss=tf.keras.losses.SparseCategoricalCrossentropy(),\n 
metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])\n\n iterative_process = tff.learning.build_federated_averaging_process(\n model_fn,\n client_optimizer_fn=lambda: tf.keras.optimizers.SGD(learning_rate=0.02))\n\n factory = tff.framework.ResourceManagingExecutorFactory(make_remote_executor)\n context = tff.framework.ExecutionContext(factory)\n tff.framework.set_default_context(context)\n\n state = iterative_process.initialize()\n\n state, metrics = iterative_process.next(state, federated_train_data)\n print('round 1, metrics={}'.format(metrics))\n\n for round_num in range(2, FLAGS.n_rounds + 1):\n state, metrics = iterative_process.next(state, federated_train_data)\n print('round {:2d}, metrics={}'.format(round_num, metrics))\n\n\nif __name__ == '__main__':\n app.run(main)\n", "path": "tensorflow_federated/python/examples/remote_execution/remote_executor_example.py"}]} | 2,040 | 156 |
gh_patches_debug_32843 | rasdani/github-patches | git_diff | nextcloud__appstore-693 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Password reset sends outdated reset token
When requesting a password reset link from apps.nextcloud.com, the token in the reset e-mail's link does not work; the website reports:
> Bad API Token
>
> The password reset link was invalid, possibly because it has already been used. Please request a new password reset.
When requesting a new password reset, an e-mail with the very same token is sent; as a consequence, it is impossible to change the password.
## Details
* Firefox 72.0.2
## Steps to reproduce
1. "Forget" your password
2. Request password reset link
3. Receive e-mail and open reset link
4. Observe error, follow suggestion and request new link
5. Receive e-mail with exactly the same token
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nextcloudappstore/user/forms.py`
Content:
```
1 from allauth.account.utils import filter_users_by_email, user_username, \
2 user_pk_to_url_str
3 from django import forms
4 from django.contrib.auth import get_user_model
5 from django.forms import EmailField, CharField, PasswordInput
6 from django.utils.translation import ugettext_lazy as _
7 from snowpenguin.django.recaptcha2.fields import ReCaptchaField
8 from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
9
10
11 class SignupFormRecaptcha(forms.Form):
12 """integrate a recaptcha field."""
13 recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
14 first_name = CharField(max_length=30, label=_('First name'))
15 last_name = CharField(max_length=30, label=_('Last name'))
16
17 def signup(self, request, user):
18 user.first_name = self.cleaned_data['first_name']
19 user.last_name = self.cleaned_data['last_name']
20 user.save()
21
22
23 class DeleteAccountForm(forms.Form):
24 email = EmailField(required=True, label=_('Your e-mail address'))
25
26 def __init__(self, *args, **kwargs):
27 self.user = kwargs.pop('user', None)
28 super().__init__(*args, **kwargs)
29
30 def clean_email(self):
31 email = self.cleaned_data.get('email')
32 if self.user and self.user.email == email:
33 return email
34 else:
35 raise forms.ValidationError(_(
36 'The given e-mail address does not match your e-mail address'))
37
38
39 class AccountForm(forms.ModelForm):
40 passwd = CharField(widget=PasswordInput(), label=_('Confirm password'),
41 help_text=_('Password is required to prevent '
42 'unauthorized users from changing your '
43 'email address and resetting your '
44 'password. This field does not update your '
45 'password!'))
46
47 class Meta:
48 model = get_user_model()
49 fields = ('first_name', 'last_name', 'email')
50
51 def clean_email(self):
52 value = self.cleaned_data['email']
53 users = filter_users_by_email(value)
54 if [u for u in users if u.pk != self.instance.pk]:
55 msg = _(
56 'This e-mail address is already associated with another '
57 'account.')
58 raise forms.ValidationError(msg)
59 return value
60
61 def clean_passwd(self):
62 value = self.cleaned_data['passwd']
63 if self.instance.check_password(value):
64 return value
65 else:
66 raise forms.ValidationError(_('Invalid password'))
67
68
69 class CustomResetPasswordForm(forms.Form):
70 # remove this class once issue #1307 is resolved django-allauth
71 email = forms.EmailField(
72 label=_("E-mail"),
73 required=True,
74 widget=forms.TextInput(attrs={
75 "type": "email",
76 "size": "30",
77 "placeholder": _("E-mail address"),
78 })
79 )
80
81 def clean_email(self):
82 email = self.cleaned_data["email"]
83 from allauth.account.adapter import get_adapter
84 email = get_adapter().clean_email(email)
85 self.users = filter_users_by_email(email)
86
87 return self.cleaned_data["email"]
88
89 def save(self, request, **kwargs):
90 from django.contrib.sites.shortcuts import get_current_site
91 current_site = get_current_site(request)
92 email = self.cleaned_data["email"]
93 from django.contrib.auth.tokens import default_token_generator
94 token_generator = kwargs.get("token_generator",
95 default_token_generator)
96
97 for user in self.users:
98 temp_key = token_generator.make_token(user)
99
100 # save it to the password reset model
101 # password_reset = PasswordReset(user=user, temp_key=temp_key)
102 # password_reset.save()
103
104 # send the password reset email
105 from django.urls import reverse
106 path = reverse("account_reset_password_from_key",
107 kwargs=dict(uidb36=user_pk_to_url_str(user),
108 key=temp_key))
109 from allauth.utils import build_absolute_uri
110 url = build_absolute_uri(
111 request, path)
112
113 context = {"current_site": current_site,
114 "user": user,
115 "password_reset_url": url,
116 "request": request}
117
118 from allauth.account import app_settings
119
120 if app_settings.AUTHENTICATION_METHOD \
121 != app_settings.AuthenticationMethod.EMAIL:
122 context['username'] = user_username(user)
123 from allauth.account.adapter import get_adapter
124 get_adapter(request).send_mail(
125 'account/email/password_reset_key',
126 email,
127 context)
128 return self.cleaned_data["email"]
129
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nextcloudappstore/user/forms.py b/nextcloudappstore/user/forms.py
--- a/nextcloudappstore/user/forms.py
+++ b/nextcloudappstore/user/forms.py
@@ -1,3 +1,4 @@
+from allauth.account.forms import EmailAwarePasswordResetTokenGenerator
from allauth.account.utils import filter_users_by_email, user_username, \
user_pk_to_url_str
from django import forms
@@ -82,7 +83,7 @@
email = self.cleaned_data["email"]
from allauth.account.adapter import get_adapter
email = get_adapter().clean_email(email)
- self.users = filter_users_by_email(email)
+ self.users = filter_users_by_email(email, is_active=True)
return self.cleaned_data["email"]
@@ -90,9 +91,7 @@
from django.contrib.sites.shortcuts import get_current_site
current_site = get_current_site(request)
email = self.cleaned_data["email"]
- from django.contrib.auth.tokens import default_token_generator
- token_generator = kwargs.get("token_generator",
- default_token_generator)
+ token_generator = EmailAwarePasswordResetTokenGenerator()
for user in self.users:
temp_key = token_generator.make_token(user)
@@ -118,7 +117,7 @@
from allauth.account import app_settings
if app_settings.AUTHENTICATION_METHOD \
- != app_settings.AuthenticationMethod.EMAIL:
+ != app_settings.AuthenticationMethod.EMAIL:
context['username'] = user_username(user)
from allauth.account.adapter import get_adapter
get_adapter(request).send_mail(
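To see why the old code produced a perpetual "Bad API Token" error: allauth's reset-from-key view validates tokens with its own generator, and a token minted by Django's `default_token_generator` hashes different state, so it fails the cross-check. A hypothetical illustration of the mismatch (`user` stands in for any active account with an e-mail address on file):

```python
from allauth.account.forms import EmailAwarePasswordResetTokenGenerator
from django.contrib.auth.tokens import default_token_generator

user = ...  # any active user with an e-mail address (hypothetical placeholder)

allauth_generator = EmailAwarePasswordResetTokenGenerator()

# Token produced by the generator the buggy form used:
token = default_token_generator.make_token(user)

# allauth's variant also folds the user's e-mail addresses into the hash,
# so the cross-check fails and the reset page reports "Bad API Token".
allauth_generator.check_token(user, token)                 # typically False
allauth_generator.check_token(user, allauth_generator.make_token(user))  # True
```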
| {"golden_diff": "diff --git a/nextcloudappstore/user/forms.py b/nextcloudappstore/user/forms.py\n--- a/nextcloudappstore/user/forms.py\n+++ b/nextcloudappstore/user/forms.py\n@@ -1,3 +1,4 @@\n+from allauth.account.forms import EmailAwarePasswordResetTokenGenerator\n from allauth.account.utils import filter_users_by_email, user_username, \\\n user_pk_to_url_str\n from django import forms\n@@ -82,7 +83,7 @@\n email = self.cleaned_data[\"email\"]\n from allauth.account.adapter import get_adapter\n email = get_adapter().clean_email(email)\n- self.users = filter_users_by_email(email)\n+ self.users = filter_users_by_email(email, is_active=True)\n \n return self.cleaned_data[\"email\"]\n \n@@ -90,9 +91,7 @@\n from django.contrib.sites.shortcuts import get_current_site\n current_site = get_current_site(request)\n email = self.cleaned_data[\"email\"]\n- from django.contrib.auth.tokens import default_token_generator\n- token_generator = kwargs.get(\"token_generator\",\n- default_token_generator)\n+ token_generator = EmailAwarePasswordResetTokenGenerator()\n \n for user in self.users:\n temp_key = token_generator.make_token(user)\n@@ -118,7 +117,7 @@\n from allauth.account import app_settings\n \n if app_settings.AUTHENTICATION_METHOD \\\n- != app_settings.AuthenticationMethod.EMAIL:\n+ != app_settings.AuthenticationMethod.EMAIL:\n context['username'] = user_username(user)\n from allauth.account.adapter import get_adapter\n get_adapter(request).send_mail(\n", "issue": "Password reset sends outdated reset token\nWhen requesting a password reset link from apps.nextcloud.com, the token in the link of the reset e-mail does not work, the website reports:\r\n\r\n> Bad API Token\r\n> \r\n> The password reset link was invalid, possibly because it has already been used. Please request a new password reset.\r\n\r\nWhen requesting a new password reset, an e-mail with the very same token is sent. In consequence it is impossible to change the password.\r\n\r\n## Details\r\n\r\n* Firefox 72.0.2\r\n\r\n## Steps to reproduce\r\n\r\n1. \"Forget\" your password\r\n2. Request password reset link\r\n3. Receive e-mail and open reset link\r\n4. Observe error, follow suggestion and request new link\r\n5. 
Receive e-mail with exactly the same token\r\n\n", "before_files": [{"content": "from allauth.account.utils import filter_users_by_email, user_username, \\\n user_pk_to_url_str\nfrom django import forms\nfrom django.contrib.auth import get_user_model\nfrom django.forms import EmailField, CharField, PasswordInput\nfrom django.utils.translation import ugettext_lazy as _\nfrom snowpenguin.django.recaptcha2.fields import ReCaptchaField\nfrom snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget\n\n\nclass SignupFormRecaptcha(forms.Form):\n \"\"\"integrate a recaptcha field.\"\"\"\n recaptcha = ReCaptchaField(widget=ReCaptchaWidget())\n first_name = CharField(max_length=30, label=_('First name'))\n last_name = CharField(max_length=30, label=_('Last name'))\n\n def signup(self, request, user):\n user.first_name = self.cleaned_data['first_name']\n user.last_name = self.cleaned_data['last_name']\n user.save()\n\n\nclass DeleteAccountForm(forms.Form):\n email = EmailField(required=True, label=_('Your e-mail address'))\n\n def __init__(self, *args, **kwargs):\n self.user = kwargs.pop('user', None)\n super().__init__(*args, **kwargs)\n\n def clean_email(self):\n email = self.cleaned_data.get('email')\n if self.user and self.user.email == email:\n return email\n else:\n raise forms.ValidationError(_(\n 'The given e-mail address does not match your e-mail address'))\n\n\nclass AccountForm(forms.ModelForm):\n passwd = CharField(widget=PasswordInput(), label=_('Confirm password'),\n help_text=_('Password is required to prevent '\n 'unauthorized users from changing your '\n 'email address and resetting your '\n 'password. This field does not update your '\n 'password!'))\n\n class Meta:\n model = get_user_model()\n fields = ('first_name', 'last_name', 'email')\n\n def clean_email(self):\n value = self.cleaned_data['email']\n users = filter_users_by_email(value)\n if [u for u in users if u.pk != self.instance.pk]:\n msg = _(\n 'This e-mail address is already associated with another '\n 'account.')\n raise forms.ValidationError(msg)\n return value\n\n def clean_passwd(self):\n value = self.cleaned_data['passwd']\n if self.instance.check_password(value):\n return value\n else:\n raise forms.ValidationError(_('Invalid password'))\n\n\nclass CustomResetPasswordForm(forms.Form):\n # remove this class once issue #1307 is resolved django-allauth\n email = forms.EmailField(\n label=_(\"E-mail\"),\n required=True,\n widget=forms.TextInput(attrs={\n \"type\": \"email\",\n \"size\": \"30\",\n \"placeholder\": _(\"E-mail address\"),\n })\n )\n\n def clean_email(self):\n email = self.cleaned_data[\"email\"]\n from allauth.account.adapter import get_adapter\n email = get_adapter().clean_email(email)\n self.users = filter_users_by_email(email)\n\n return self.cleaned_data[\"email\"]\n\n def save(self, request, **kwargs):\n from django.contrib.sites.shortcuts import get_current_site\n current_site = get_current_site(request)\n email = self.cleaned_data[\"email\"]\n from django.contrib.auth.tokens import default_token_generator\n token_generator = kwargs.get(\"token_generator\",\n default_token_generator)\n\n for user in self.users:\n temp_key = token_generator.make_token(user)\n\n # save it to the password reset model\n # password_reset = PasswordReset(user=user, temp_key=temp_key)\n # password_reset.save()\n\n # send the password reset email\n from django.urls import reverse\n path = reverse(\"account_reset_password_from_key\",\n kwargs=dict(uidb36=user_pk_to_url_str(user),\n key=temp_key))\n from allauth.utils 
import build_absolute_uri\n url = build_absolute_uri(\n request, path)\n\n context = {\"current_site\": current_site,\n \"user\": user,\n \"password_reset_url\": url,\n \"request\": request}\n\n from allauth.account import app_settings\n\n if app_settings.AUTHENTICATION_METHOD \\\n != app_settings.AuthenticationMethod.EMAIL:\n context['username'] = user_username(user)\n from allauth.account.adapter import get_adapter\n get_adapter(request).send_mail(\n 'account/email/password_reset_key',\n email,\n context)\n return self.cleaned_data[\"email\"]\n", "path": "nextcloudappstore/user/forms.py"}], "after_files": [{"content": "from allauth.account.forms import EmailAwarePasswordResetTokenGenerator\nfrom allauth.account.utils import filter_users_by_email, user_username, \\\n user_pk_to_url_str\nfrom django import forms\nfrom django.contrib.auth import get_user_model\nfrom django.forms import EmailField, CharField, PasswordInput\nfrom django.utils.translation import ugettext_lazy as _\nfrom snowpenguin.django.recaptcha2.fields import ReCaptchaField\nfrom snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget\n\n\nclass SignupFormRecaptcha(forms.Form):\n \"\"\"integrate a recaptcha field.\"\"\"\n recaptcha = ReCaptchaField(widget=ReCaptchaWidget())\n first_name = CharField(max_length=30, label=_('First name'))\n last_name = CharField(max_length=30, label=_('Last name'))\n\n def signup(self, request, user):\n user.first_name = self.cleaned_data['first_name']\n user.last_name = self.cleaned_data['last_name']\n user.save()\n\n\nclass DeleteAccountForm(forms.Form):\n email = EmailField(required=True, label=_('Your e-mail address'))\n\n def __init__(self, *args, **kwargs):\n self.user = kwargs.pop('user', None)\n super().__init__(*args, **kwargs)\n\n def clean_email(self):\n email = self.cleaned_data.get('email')\n if self.user and self.user.email == email:\n return email\n else:\n raise forms.ValidationError(_(\n 'The given e-mail address does not match your e-mail address'))\n\n\nclass AccountForm(forms.ModelForm):\n passwd = CharField(widget=PasswordInput(), label=_('Confirm password'),\n help_text=_('Password is required to prevent '\n 'unauthorized users from changing your '\n 'email address and resetting your '\n 'password. 
This field does not update your '\n 'password!'))\n\n class Meta:\n model = get_user_model()\n fields = ('first_name', 'last_name', 'email')\n\n def clean_email(self):\n value = self.cleaned_data['email']\n users = filter_users_by_email(value)\n if [u for u in users if u.pk != self.instance.pk]:\n msg = _(\n 'This e-mail address is already associated with another '\n 'account.')\n raise forms.ValidationError(msg)\n return value\n\n def clean_passwd(self):\n value = self.cleaned_data['passwd']\n if self.instance.check_password(value):\n return value\n else:\n raise forms.ValidationError(_('Invalid password'))\n\n\nclass CustomResetPasswordForm(forms.Form):\n # remove this class once issue #1307 is resolved django-allauth\n email = forms.EmailField(\n label=_(\"E-mail\"),\n required=True,\n widget=forms.TextInput(attrs={\n \"type\": \"email\",\n \"size\": \"30\",\n \"placeholder\": _(\"E-mail address\"),\n })\n )\n\n def clean_email(self):\n email = self.cleaned_data[\"email\"]\n from allauth.account.adapter import get_adapter\n email = get_adapter().clean_email(email)\n self.users = filter_users_by_email(email, is_active=True)\n\n return self.cleaned_data[\"email\"]\n\n def save(self, request, **kwargs):\n from django.contrib.sites.shortcuts import get_current_site\n current_site = get_current_site(request)\n email = self.cleaned_data[\"email\"]\n token_generator = EmailAwarePasswordResetTokenGenerator()\n\n for user in self.users:\n temp_key = token_generator.make_token(user)\n\n # save it to the password reset model\n # password_reset = PasswordReset(user=user, temp_key=temp_key)\n # password_reset.save()\n\n # send the password reset email\n from django.urls import reverse\n path = reverse(\"account_reset_password_from_key\",\n kwargs=dict(uidb36=user_pk_to_url_str(user),\n key=temp_key))\n from allauth.utils import build_absolute_uri\n url = build_absolute_uri(\n request, path)\n\n context = {\"current_site\": current_site,\n \"user\": user,\n \"password_reset_url\": url,\n \"request\": request}\n\n from allauth.account import app_settings\n\n if app_settings.AUTHENTICATION_METHOD \\\n != app_settings.AuthenticationMethod.EMAIL:\n context['username'] = user_username(user)\n from allauth.account.adapter import get_adapter\n get_adapter(request).send_mail(\n 'account/email/password_reset_key',\n email,\n context)\n return self.cleaned_data[\"email\"]\n", "path": "nextcloudappstore/user/forms.py"}]} | 1,646 | 350 |
gh_patches_debug_24199 | rasdani/github-patches | git_diff | LibraryOfCongress__concordia-782 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove the Latest page URL and nav bar link, and redirect
Community managers want to consolidate `/latest` into `/about`. Most of the communication and everyday updates happen in History Hub, so there is little need for a dedicated latest-updates page.
Once the URL is removed, the CMs will move the content from `/latest` to `/about`.
Acceptance criteria:
- [x] Remove URL
- [x] Any link to `/latest` to be redirected to `/about`
- [x] Move content (CMs will do this using the static page editor)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `concordia/urls.py`
Content:
```
1 from django.conf import settings
2 from django.conf.urls import url
3 from django.contrib import admin
4 from django.http import Http404, HttpResponseForbidden
5 from django.urls import include, path
6 from django.views.defaults import page_not_found, permission_denied, server_error
7
8 from exporter import views as exporter_views
9
10 from . import views
11
12 tx_urlpatterns = (
13 [
14 path("", views.CampaignListView.as_view(), name="campaign-list"),
15 path(
16 "<slug:slug>/", views.CampaignDetailView.as_view(), name="campaign-detail"
17 ),
18 path(
19 "<slug:campaign_slug>/export/csv/",
20 exporter_views.ExportCampaignToCSV.as_view(),
21 name="campaign-export-csv",
22 ),
23 path(
24 "<slug:campaign_slug>/export/bagit/",
25 exporter_views.ExportCampaignToBagit.as_view(),
26 name="campaign-export-bagit",
27 ),
28 path(
29 "<slug:campaign_slug>/<slug:project_slug>/export/bagit/",
30 exporter_views.ExportProjectToBagIt.as_view(),
31 name="project-export-bagit",
32 ),
33 path(
34 "<slug:campaign_slug>/<slug:project_slug>/<slug:item_id>/export/bagit/",
35 exporter_views.ExportItemToBagIt.as_view(),
36 name="item-export-bagit",
37 ),
38 path(
39 "<slug:campaign_slug>/report/",
40 views.ReportCampaignView.as_view(),
41 name="campaign-report",
42 ),
43 path(
44 "<slug:campaign_slug>/<slug:project_slug>/<slug:item_id>/<slug:slug>/",
45 views.AssetDetailView.as_view(),
46 name="asset-detail",
47 ),
48 # n.b. this must be above project-detail to avoid being seen as a project slug:
49 path(
50 "<slug:campaign_slug>/next-transcribable-asset/",
51 views.redirect_to_next_transcribable_asset,
52 name="redirect-to-next-transcribable-asset",
53 ),
54 path(
55 "<slug:campaign_slug>/<slug:slug>/",
56 views.ProjectDetailView.as_view(),
57 name="project-detail",
58 ),
59 path(
60 "<slug:campaign_slug>/<slug:project_slug>/<slug:item_id>/",
61 views.ItemDetailView.as_view(),
62 name="item-detail",
63 ),
64 ],
65 "transcriptions",
66 )
67
68 urlpatterns = [
69 path("", views.HomeView.as_view(), name="homepage"),
70 path("healthz", views.healthz, name="health-check"),
71 path("about/", views.simple_page, name="about"),
72 path("help-center/", views.simple_page, name="help-center"),
73 path("help-center/welcome-guide/", views.simple_page, name="welcome-guide"),
74 path("help-center/how-to-transcribe/", views.simple_page, name="how-to-transcribe"),
75 path("help-center/how-to-review/", views.simple_page, name="how-to-review"),
76 path("help-center/how-to-tag/", views.simple_page, name="how-to-tag"),
77 path("for-educators/", views.simple_page, name="for-educators"),
78 path("latest/", views.simple_page, name="latest"),
79 path("questions/", views.simple_page, name="questions"),
80 path("contact/", views.ContactUsView.as_view(), name="contact"),
81 path("campaigns/", include(tx_urlpatterns, namespace="transcriptions")),
82 path(
83 "reserve-asset-for-transcription/<int:asset_pk>/",
84 views.reserve_asset_transcription,
85 name="reserve-asset-for-transcription",
86 ),
87 path(
88 "assets/<int:asset_pk>/transcriptions/save/",
89 views.save_transcription,
90 name="save-transcription",
91 ),
92 path(
93 "transcriptions/<int:pk>/submit/",
94 views.submit_transcription,
95 name="submit-transcription",
96 ),
97 path(
98 "transcriptions/<int:pk>/review/",
99 views.review_transcription,
100 name="review-transcription",
101 ),
102 path("assets/<int:asset_pk>/tags/submit/", views.submit_tags, name="submit-tags"),
103 path("account/ajax-status/", views.ajax_session_status, name="ajax-session-status"),
104 path("account/ajax-messages/", views.ajax_messages, name="ajax-messages"),
105 path(
106 "account/register/",
107 views.ConcordiaRegistrationView.as_view(),
108 name="registration_register",
109 ),
110 path(
111 "account/login/", views.ConcordiaLoginView.as_view(), name="registration_login"
112 ),
113 path("account/profile/", views.AccountProfileView.as_view(), name="user-profile"),
114 path("account/", include("django_registration.backends.activation.urls")),
115 path("account/", include("django.contrib.auth.urls")),
116 path("captcha/ajax/", views.ajax_captcha, name="ajax-captcha"),
117 path("captcha/", include("captcha.urls")),
118 path("admin/", admin.site.urls),
119 # Internal support assists:
120 path("maintenance-mode/", include("maintenance_mode.urls")),
121 path("error/500/", server_error),
122 path("error/404/", page_not_found, {"exception": Http404()}),
123 path("error/429/", views.ratelimit_view),
124 path("error/403/", permission_denied, {"exception": HttpResponseForbidden()}),
125 url("", include("django_prometheus_metrics.urls")),
126 path("robots.txt", include("robots.urls")),
127 ]
128
129 if settings.DEBUG:
130 import debug_toolbar
131 from django.conf.urls.static import static
132
133 urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
134
135 urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
136
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/concordia/urls.py b/concordia/urls.py
--- a/concordia/urls.py
+++ b/concordia/urls.py
@@ -4,6 +4,7 @@
from django.http import Http404, HttpResponseForbidden
from django.urls import include, path
from django.views.defaults import page_not_found, permission_denied, server_error
+from django.views.generic import RedirectView
from exporter import views as exporter_views
@@ -75,7 +76,10 @@
path("help-center/how-to-review/", views.simple_page, name="how-to-review"),
path("help-center/how-to-tag/", views.simple_page, name="how-to-tag"),
path("for-educators/", views.simple_page, name="for-educators"),
- path("latest/", views.simple_page, name="latest"),
+ path(
+ "latest/",
+ RedirectView.as_view(pattern_name="about", permanent=True, query_string=True),
+ ),
path("questions/", views.simple_page, name="questions"),
path("contact/", views.ContactUsView.as_view(), name="contact"),
path("campaigns/", include(tx_urlpatterns, namespace="transcriptions")),
| {"golden_diff": "diff --git a/concordia/urls.py b/concordia/urls.py\n--- a/concordia/urls.py\n+++ b/concordia/urls.py\n@@ -4,6 +4,7 @@\n from django.http import Http404, HttpResponseForbidden\n from django.urls import include, path\n from django.views.defaults import page_not_found, permission_denied, server_error\n+from django.views.generic import RedirectView\n \n from exporter import views as exporter_views\n \n@@ -75,7 +76,10 @@\n path(\"help-center/how-to-review/\", views.simple_page, name=\"how-to-review\"),\n path(\"help-center/how-to-tag/\", views.simple_page, name=\"how-to-tag\"),\n path(\"for-educators/\", views.simple_page, name=\"for-educators\"),\n- path(\"latest/\", views.simple_page, name=\"latest\"),\n+ path(\n+ \"latest/\",\n+ RedirectView.as_view(pattern_name=\"about\", permanent=True, query_string=True),\n+ ),\n path(\"questions/\", views.simple_page, name=\"questions\"),\n path(\"contact/\", views.ContactUsView.as_view(), name=\"contact\"),\n path(\"campaigns/\", include(tx_urlpatterns, namespace=\"transcriptions\")),\n", "issue": "Remove Latest page URL, in nav bar, and redirect\nCommunity managers want to consolidate `/latest` in to the `/about`. Most of the communication and everyday updates happen in History Hub, there is less usage for a dedicated latest updates page. \r\n\r\nOnce the URL is removed, the CMs will move the content from `/latest` to `/about`. \r\n\r\nAcceptance criteria: \r\n- [x] Remove URL\r\n- [x] Any link to `/latest` to be redirected to `/about` \r\n- [x] Move content (CMs will do this using the static page editor) \n", "before_files": [{"content": "from django.conf import settings\nfrom django.conf.urls import url\nfrom django.contrib import admin\nfrom django.http import Http404, HttpResponseForbidden\nfrom django.urls import include, path\nfrom django.views.defaults import page_not_found, permission_denied, server_error\n\nfrom exporter import views as exporter_views\n\nfrom . import views\n\ntx_urlpatterns = (\n [\n path(\"\", views.CampaignListView.as_view(), name=\"campaign-list\"),\n path(\n \"<slug:slug>/\", views.CampaignDetailView.as_view(), name=\"campaign-detail\"\n ),\n path(\n \"<slug:campaign_slug>/export/csv/\",\n exporter_views.ExportCampaignToCSV.as_view(),\n name=\"campaign-export-csv\",\n ),\n path(\n \"<slug:campaign_slug>/export/bagit/\",\n exporter_views.ExportCampaignToBagit.as_view(),\n name=\"campaign-export-bagit\",\n ),\n path(\n \"<slug:campaign_slug>/<slug:project_slug>/export/bagit/\",\n exporter_views.ExportProjectToBagIt.as_view(),\n name=\"project-export-bagit\",\n ),\n path(\n \"<slug:campaign_slug>/<slug:project_slug>/<slug:item_id>/export/bagit/\",\n exporter_views.ExportItemToBagIt.as_view(),\n name=\"item-export-bagit\",\n ),\n path(\n \"<slug:campaign_slug>/report/\",\n views.ReportCampaignView.as_view(),\n name=\"campaign-report\",\n ),\n path(\n \"<slug:campaign_slug>/<slug:project_slug>/<slug:item_id>/<slug:slug>/\",\n views.AssetDetailView.as_view(),\n name=\"asset-detail\",\n ),\n # n.b. 
this must be above project-detail to avoid being seen as a project slug:\n path(\n \"<slug:campaign_slug>/next-transcribable-asset/\",\n views.redirect_to_next_transcribable_asset,\n name=\"redirect-to-next-transcribable-asset\",\n ),\n path(\n \"<slug:campaign_slug>/<slug:slug>/\",\n views.ProjectDetailView.as_view(),\n name=\"project-detail\",\n ),\n path(\n \"<slug:campaign_slug>/<slug:project_slug>/<slug:item_id>/\",\n views.ItemDetailView.as_view(),\n name=\"item-detail\",\n ),\n ],\n \"transcriptions\",\n)\n\nurlpatterns = [\n path(\"\", views.HomeView.as_view(), name=\"homepage\"),\n path(\"healthz\", views.healthz, name=\"health-check\"),\n path(\"about/\", views.simple_page, name=\"about\"),\n path(\"help-center/\", views.simple_page, name=\"help-center\"),\n path(\"help-center/welcome-guide/\", views.simple_page, name=\"welcome-guide\"),\n path(\"help-center/how-to-transcribe/\", views.simple_page, name=\"how-to-transcribe\"),\n path(\"help-center/how-to-review/\", views.simple_page, name=\"how-to-review\"),\n path(\"help-center/how-to-tag/\", views.simple_page, name=\"how-to-tag\"),\n path(\"for-educators/\", views.simple_page, name=\"for-educators\"),\n path(\"latest/\", views.simple_page, name=\"latest\"),\n path(\"questions/\", views.simple_page, name=\"questions\"),\n path(\"contact/\", views.ContactUsView.as_view(), name=\"contact\"),\n path(\"campaigns/\", include(tx_urlpatterns, namespace=\"transcriptions\")),\n path(\n \"reserve-asset-for-transcription/<int:asset_pk>/\",\n views.reserve_asset_transcription,\n name=\"reserve-asset-for-transcription\",\n ),\n path(\n \"assets/<int:asset_pk>/transcriptions/save/\",\n views.save_transcription,\n name=\"save-transcription\",\n ),\n path(\n \"transcriptions/<int:pk>/submit/\",\n views.submit_transcription,\n name=\"submit-transcription\",\n ),\n path(\n \"transcriptions/<int:pk>/review/\",\n views.review_transcription,\n name=\"review-transcription\",\n ),\n path(\"assets/<int:asset_pk>/tags/submit/\", views.submit_tags, name=\"submit-tags\"),\n path(\"account/ajax-status/\", views.ajax_session_status, name=\"ajax-session-status\"),\n path(\"account/ajax-messages/\", views.ajax_messages, name=\"ajax-messages\"),\n path(\n \"account/register/\",\n views.ConcordiaRegistrationView.as_view(),\n name=\"registration_register\",\n ),\n path(\n \"account/login/\", views.ConcordiaLoginView.as_view(), name=\"registration_login\"\n ),\n path(\"account/profile/\", views.AccountProfileView.as_view(), name=\"user-profile\"),\n path(\"account/\", include(\"django_registration.backends.activation.urls\")),\n path(\"account/\", include(\"django.contrib.auth.urls\")),\n path(\"captcha/ajax/\", views.ajax_captcha, name=\"ajax-captcha\"),\n path(\"captcha/\", include(\"captcha.urls\")),\n path(\"admin/\", admin.site.urls),\n # Internal support assists:\n path(\"maintenance-mode/\", include(\"maintenance_mode.urls\")),\n path(\"error/500/\", server_error),\n path(\"error/404/\", page_not_found, {\"exception\": Http404()}),\n path(\"error/429/\", views.ratelimit_view),\n path(\"error/403/\", permission_denied, {\"exception\": HttpResponseForbidden()}),\n url(\"\", include(\"django_prometheus_metrics.urls\")),\n path(\"robots.txt\", include(\"robots.urls\")),\n]\n\nif settings.DEBUG:\n import debug_toolbar\n from django.conf.urls.static import static\n\n urlpatterns = [path(\"__debug__/\", include(debug_toolbar.urls))] + urlpatterns\n\n urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n", "path": "concordia/urls.py"}], 
"after_files": [{"content": "from django.conf import settings\nfrom django.conf.urls import url\nfrom django.contrib import admin\nfrom django.http import Http404, HttpResponseForbidden\nfrom django.urls import include, path\nfrom django.views.defaults import page_not_found, permission_denied, server_error\nfrom django.views.generic import RedirectView\n\nfrom exporter import views as exporter_views\n\nfrom . import views\n\ntx_urlpatterns = (\n [\n path(\"\", views.CampaignListView.as_view(), name=\"campaign-list\"),\n path(\n \"<slug:slug>/\", views.CampaignDetailView.as_view(), name=\"campaign-detail\"\n ),\n path(\n \"<slug:campaign_slug>/export/csv/\",\n exporter_views.ExportCampaignToCSV.as_view(),\n name=\"campaign-export-csv\",\n ),\n path(\n \"<slug:campaign_slug>/export/bagit/\",\n exporter_views.ExportCampaignToBagit.as_view(),\n name=\"campaign-export-bagit\",\n ),\n path(\n \"<slug:campaign_slug>/<slug:project_slug>/export/bagit/\",\n exporter_views.ExportProjectToBagIt.as_view(),\n name=\"project-export-bagit\",\n ),\n path(\n \"<slug:campaign_slug>/<slug:project_slug>/<slug:item_id>/export/bagit/\",\n exporter_views.ExportItemToBagIt.as_view(),\n name=\"item-export-bagit\",\n ),\n path(\n \"<slug:campaign_slug>/report/\",\n views.ReportCampaignView.as_view(),\n name=\"campaign-report\",\n ),\n path(\n \"<slug:campaign_slug>/<slug:project_slug>/<slug:item_id>/<slug:slug>/\",\n views.AssetDetailView.as_view(),\n name=\"asset-detail\",\n ),\n # n.b. this must be above project-detail to avoid being seen as a project slug:\n path(\n \"<slug:campaign_slug>/next-transcribable-asset/\",\n views.redirect_to_next_transcribable_asset,\n name=\"redirect-to-next-transcribable-asset\",\n ),\n path(\n \"<slug:campaign_slug>/<slug:slug>/\",\n views.ProjectDetailView.as_view(),\n name=\"project-detail\",\n ),\n path(\n \"<slug:campaign_slug>/<slug:project_slug>/<slug:item_id>/\",\n views.ItemDetailView.as_view(),\n name=\"item-detail\",\n ),\n ],\n \"transcriptions\",\n)\n\nurlpatterns = [\n path(\"\", views.HomeView.as_view(), name=\"homepage\"),\n path(\"healthz\", views.healthz, name=\"health-check\"),\n path(\"about/\", views.simple_page, name=\"about\"),\n path(\"help-center/\", views.simple_page, name=\"help-center\"),\n path(\"help-center/welcome-guide/\", views.simple_page, name=\"welcome-guide\"),\n path(\"help-center/how-to-transcribe/\", views.simple_page, name=\"how-to-transcribe\"),\n path(\"help-center/how-to-review/\", views.simple_page, name=\"how-to-review\"),\n path(\"help-center/how-to-tag/\", views.simple_page, name=\"how-to-tag\"),\n path(\"for-educators/\", views.simple_page, name=\"for-educators\"),\n path(\n \"latest/\",\n RedirectView.as_view(pattern_name=\"about\", permanent=True, query_string=True),\n ),\n path(\"questions/\", views.simple_page, name=\"questions\"),\n path(\"contact/\", views.ContactUsView.as_view(), name=\"contact\"),\n path(\"campaigns/\", include(tx_urlpatterns, namespace=\"transcriptions\")),\n path(\n \"reserve-asset-for-transcription/<int:asset_pk>/\",\n views.reserve_asset_transcription,\n name=\"reserve-asset-for-transcription\",\n ),\n path(\n \"assets/<int:asset_pk>/transcriptions/save/\",\n views.save_transcription,\n name=\"save-transcription\",\n ),\n path(\n \"transcriptions/<int:pk>/submit/\",\n views.submit_transcription,\n name=\"submit-transcription\",\n ),\n path(\n \"transcriptions/<int:pk>/review/\",\n views.review_transcription,\n name=\"review-transcription\",\n ),\n path(\"assets/<int:asset_pk>/tags/submit/\", 
views.submit_tags, name=\"submit-tags\"),\n path(\"account/ajax-status/\", views.ajax_session_status, name=\"ajax-session-status\"),\n path(\"account/ajax-messages/\", views.ajax_messages, name=\"ajax-messages\"),\n path(\n \"account/register/\",\n views.ConcordiaRegistrationView.as_view(),\n name=\"registration_register\",\n ),\n path(\n \"account/login/\", views.ConcordiaLoginView.as_view(), name=\"registration_login\"\n ),\n path(\"account/profile/\", views.AccountProfileView.as_view(), name=\"user-profile\"),\n path(\"account/\", include(\"django_registration.backends.activation.urls\")),\n path(\"account/\", include(\"django.contrib.auth.urls\")),\n path(\"captcha/ajax/\", views.ajax_captcha, name=\"ajax-captcha\"),\n path(\"captcha/\", include(\"captcha.urls\")),\n path(\"admin/\", admin.site.urls),\n # Internal support assists:\n path(\"maintenance-mode/\", include(\"maintenance_mode.urls\")),\n path(\"error/500/\", server_error),\n path(\"error/404/\", page_not_found, {\"exception\": Http404()}),\n path(\"error/429/\", views.ratelimit_view),\n path(\"error/403/\", permission_denied, {\"exception\": HttpResponseForbidden()}),\n url(\"\", include(\"django_prometheus_metrics.urls\")),\n path(\"robots.txt\", include(\"robots.urls\")),\n]\n\nif settings.DEBUG:\n import debug_toolbar\n from django.conf.urls.static import static\n\n urlpatterns = [path(\"__debug__/\", include(debug_toolbar.urls))] + urlpatterns\n\n urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n", "path": "concordia/urls.py"}]} | 1,836 | 258 |
gh_patches_debug_35444 | rasdani/github-patches | git_diff | InstaPy__InstaPy-831 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cannot focus element error
I'm running the script on a Mac and after some time I get this error:
```
Message: unknown error: cannot focus element
  (Session info: chrome=61.0.3163.100)
  (Driver info: chromedriver=2.32.498537 (cb2f855cbc7b82e20387eaf9a43f6b99b6105061),platform=Mac OS X 10.12.3 x86_64)
```
Now I'm trying to update chromedriver along with all of the packages to check whether they are the cause, but does anybody else get this error?
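
For what it's worth, Selenium raises "cannot focus element" when `send_keys()` targets an element the browser cannot give keyboard focus to, typically because it is hidden or replaced until some other control is clicked first. A common workaround, sketched below with hypothetical locators rather than InstaPy's actual ones (and assuming `browser` is an already-created WebDriver), is to click the surrounding control via JavaScript before typing:

```python
from selenium.common.exceptions import WebDriverException

try:
    # A JS click works even when a normal .click() would complain about
    # visibility; afterwards the real input should accept keystrokes.
    button = browser.find_element_by_xpath("//a[@role='button']")
    browser.execute_script("arguments[0].click();", button)
    field = browser.find_element_by_xpath("//textarea")
    field.send_keys("hello")
except WebDriverException as exc:
    print("focus workaround failed:", exc)
```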
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `instapy/comment_util.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """Module which handles the commenting features"""
3 from random import choice
4 from .time_util import sleep
5 import emoji
6
7
8 def comment_image(browser, comments):
9 """Checks if it should comment on the image"""
10 rand_comment = (choice(comments))
11 rand_comment = emoji.demojize(rand_comment)
12 rand_comment = emoji.emojize(rand_comment, use_aliases=True)
13
14 comment_input = browser.find_elements_by_xpath(
15 '//textarea[@placeholder = "Add a comment…"]')
16 if len(comment_input) <= 0:
17 comment_input = browser.find_elements_by_xpath(
18 '//input[@placeholder = "Add a comment…"]')
19
20 if len(comment_input) > 0:
21 browser.execute_script(
22 "arguments[0].value = '" + rand_comment + " ';", comment_input[0])
23 # An extra space is added here and then deleted.
24 # This forces the input box to update the reactJS core
25 comment_input[0].send_keys("\b")
26 comment_input[0].submit()
27 else:
28 print('--> Warning: Comment Action Likely Failed:'
29 ' Comment Element not found')
30
31 print("--> Commented: {}".format(rand_comment.encode('utf-8')))
32 sleep(2)
33
34 return 1
35
```
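
A note on the trick in lines 21–25 above: assigning `.value` through JavaScript bypasses React's synthetic change events, so the framework's internal state never sees the text; sending one real keystroke afterwards forces React to re-read the DOM. A minimal sketch of the same idea (assuming `browser` is a WebDriver and `box` an already-located input element):

```python
# Setting .value via JS skips React's onChange, so pad with a space...
browser.execute_script("arguments[0].value = 'my comment ';", box)
box.send_keys("\b")  # ...then a real keystroke deletes the pad and React syncs
box.submit()
```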
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/instapy/comment_util.py b/instapy/comment_util.py
--- a/instapy/comment_util.py
+++ b/instapy/comment_util.py
@@ -2,8 +2,31 @@
"""Module which handles the commenting features"""
from random import choice
from .time_util import sleep
+from selenium.common.exceptions import WebDriverException
import emoji
+def get_comment_input(browser):
+ comment_input = browser.find_elements_by_xpath(
+ '//textarea[@placeholder = "Add a comment…"]')
+ if len(comment_input) <= 0:
+ comment_input = browser.find_elements_by_xpath(
+ '//input[@placeholder = "Add a comment…"]')
+ return comment_input
+
+def open_comment_section(browser):
+ missing_comment_elem_warning = (
+ '--> Warning: Comment Button Not Found:'
+ ' May cause issues with browser windows of smaller widths')
+ comment_elem = browser.find_elements_by_xpath(
+ "//a[@role='button']/span[text()='Comment']/..")
+ if len(comment_elem) > 0:
+ try:
+ browser.execute_script(
+ "arguments[0].click();", comment_elem[0])
+ except WebDriverException:
+ print(missing_comment_elem_warning)
+ else:
+ print(missing_comment_elem_warning)
def comment_image(browser, comments):
"""Checks if it should comment on the image"""
@@ -11,18 +34,19 @@
rand_comment = emoji.demojize(rand_comment)
rand_comment = emoji.emojize(rand_comment, use_aliases=True)
- comment_input = browser.find_elements_by_xpath(
- '//textarea[@placeholder = "Add a comment…"]')
- if len(comment_input) <= 0:
- comment_input = browser.find_elements_by_xpath(
- '//input[@placeholder = "Add a comment…"]')
+ open_comment_section(browser)
+ comment_input = get_comment_input(browser)
if len(comment_input) > 0:
+ comment_input[0].clear()
+ comment_input = get_comment_input(browser)
+
browser.execute_script(
"arguments[0].value = '" + rand_comment + " ';", comment_input[0])
# An extra space is added here and then deleted.
# This forces the input box to update the reactJS core
comment_input[0].send_keys("\b")
+ comment_input = get_comment_input(browser)
comment_input[0].submit()
else:
print('--> Warning: Comment Action Likely Failed:'
| {"golden_diff": "diff --git a/instapy/comment_util.py b/instapy/comment_util.py\n--- a/instapy/comment_util.py\n+++ b/instapy/comment_util.py\n@@ -2,8 +2,31 @@\n \"\"\"Module which handles the commenting features\"\"\"\n from random import choice\n from .time_util import sleep\n+from selenium.common.exceptions import WebDriverException\n import emoji\n \n+def get_comment_input(browser):\n+ comment_input = browser.find_elements_by_xpath(\n+ '//textarea[@placeholder = \"Add a comment\u2026\"]')\n+ if len(comment_input) <= 0:\n+ comment_input = browser.find_elements_by_xpath(\n+ '//input[@placeholder = \"Add a comment\u2026\"]')\n+ return comment_input\n+\n+def open_comment_section(browser):\n+ missing_comment_elem_warning = (\n+ '--> Warning: Comment Button Not Found:'\n+ ' May cause issues with browser windows of smaller widths')\n+ comment_elem = browser.find_elements_by_xpath(\n+ \"//a[@role='button']/span[text()='Comment']/..\")\n+ if len(comment_elem) > 0:\n+ try:\n+ browser.execute_script(\n+ \"arguments[0].click();\", comment_elem[0])\n+ except WebDriverException:\n+ print(missing_comment_elem_warning)\n+ else:\n+ print(missing_comment_elem_warning)\n \n def comment_image(browser, comments):\n \"\"\"Checks if it should comment on the image\"\"\"\n@@ -11,18 +34,19 @@\n rand_comment = emoji.demojize(rand_comment)\n rand_comment = emoji.emojize(rand_comment, use_aliases=True)\n \n- comment_input = browser.find_elements_by_xpath(\n- '//textarea[@placeholder = \"Add a comment\u2026\"]')\n- if len(comment_input) <= 0:\n- comment_input = browser.find_elements_by_xpath(\n- '//input[@placeholder = \"Add a comment\u2026\"]')\n+ open_comment_section(browser)\n+ comment_input = get_comment_input(browser)\n \n if len(comment_input) > 0:\n+ comment_input[0].clear()\n+ comment_input = get_comment_input(browser)\n+\n browser.execute_script(\n \"arguments[0].value = '\" + rand_comment + \" ';\", comment_input[0])\n # An extra space is added here and then deleted.\n # This forces the input box to update the reactJS core\n comment_input[0].send_keys(\"\\b\")\n+ comment_input = get_comment_input(browser)\n comment_input[0].submit()\n else:\n print('--> Warning: Comment Action Likely Failed:'\n", "issue": "Cannot focus element error\nI'm running the script on mac and after some time i get this error:\r\n\r\nMessage: unknown error: cannot focus element\r\n (Session info: chrome=61.0.3163.100)\r\n (Driver info: chromedriver=2.32.498537 (cb2f855cbc7b82e20387eaf9a43f6b99b6105061),platform=Mac OS X 10.12.3 x86_64)\r\n\r\nNow I'm trying to update chromedriver with all of the packages to check whether they are the reason, but does anybody else get this error?\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"Module which handles the commenting features\"\"\"\nfrom random import choice\nfrom .time_util import sleep\nimport emoji\n\n\ndef comment_image(browser, comments):\n \"\"\"Checks if it should comment on the image\"\"\"\n rand_comment = (choice(comments))\n rand_comment = emoji.demojize(rand_comment)\n rand_comment = emoji.emojize(rand_comment, use_aliases=True)\n\n comment_input = browser.find_elements_by_xpath(\n '//textarea[@placeholder = \"Add a comment\u2026\"]')\n if len(comment_input) <= 0:\n comment_input = browser.find_elements_by_xpath(\n '//input[@placeholder = \"Add a comment\u2026\"]')\n\n if len(comment_input) > 0:\n browser.execute_script(\n \"arguments[0].value = '\" + rand_comment + \" ';\", comment_input[0])\n # An extra space is added here and then deleted.\n # This forces 
the input box to update the reactJS core\n comment_input[0].send_keys(\"\\b\")\n comment_input[0].submit()\n else:\n print('--> Warning: Comment Action Likely Failed:'\n ' Comment Element not found')\n\n print(\"--> Commented: {}\".format(rand_comment.encode('utf-8')))\n sleep(2)\n\n return 1\n", "path": "instapy/comment_util.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"Module which handles the commenting features\"\"\"\nfrom random import choice\nfrom .time_util import sleep\nfrom selenium.common.exceptions import WebDriverException\nimport emoji\n\ndef get_comment_input(browser):\n comment_input = browser.find_elements_by_xpath(\n '//textarea[@placeholder = \"Add a comment\u2026\"]')\n if len(comment_input) <= 0:\n comment_input = browser.find_elements_by_xpath(\n '//input[@placeholder = \"Add a comment\u2026\"]')\n return comment_input\n\ndef open_comment_section(browser):\n missing_comment_elem_warning = (\n '--> Warning: Comment Button Not Found:'\n ' May cause issues with browser windows of smaller widths')\n comment_elem = browser.find_elements_by_xpath(\n \"//a[@role='button']/span[text()='Comment']/..\")\n if len(comment_elem) > 0:\n try:\n browser.execute_script(\n \"arguments[0].click();\", comment_elem[0])\n except WebDriverException:\n print(missing_comment_elem_warning)\n else:\n print(missing_comment_elem_warning)\n\ndef comment_image(browser, comments):\n \"\"\"Checks if it should comment on the image\"\"\"\n rand_comment = (choice(comments))\n rand_comment = emoji.demojize(rand_comment)\n rand_comment = emoji.emojize(rand_comment, use_aliases=True)\n\n open_comment_section(browser)\n comment_input = get_comment_input(browser)\n\n if len(comment_input) > 0:\n comment_input[0].clear()\n comment_input = get_comment_input(browser)\n\n browser.execute_script(\n \"arguments[0].value = '\" + rand_comment + \" ';\", comment_input[0])\n # An extra space is added here and then deleted.\n # This forces the input box to update the reactJS core\n comment_input[0].send_keys(\"\\b\")\n comment_input = get_comment_input(browser)\n comment_input[0].submit()\n else:\n print('--> Warning: Comment Action Likely Failed:'\n ' Comment Element not found')\n\n print(\"--> Commented: {}\".format(rand_comment.encode('utf-8')))\n sleep(2)\n\n return 1\n", "path": "instapy/comment_util.py"}]} | 753 | 549 |
gh_patches_debug_24835 | rasdani/github-patches | git_diff | napari__napari-589 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
novel gene discovery in Napari (a.k.a. imperfect layer name incrementing)
## 🐛 Bug
napari does a smart thing and adds an integer to the name of a layer if the name is already in use in the viewer. It then increments that number when another layer is added that fits the pattern. This is great until you have layer names that end in numbers

which in this case results in different names that are sometimes real genes and sometimes not.
## To Reproduce
Steps to reproduce the behavior:
```
import numpy as np
import napari

v = napari.Viewer()
for i in range(3):
    v.add_points(1000*np.random.rand(10,2), name="no_problem")
for i in range(3):
    v.add_points(1000*np.random.rand(10,2), name="problem:GAD1")
```
## Expected behavior
Consistent use of a space or other character to separate the auto-incremented integer from the original string.
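
One unambiguous convention, sketched here as a hypothetical helper (not napari's actual code), is to fence the counter in brackets so user-supplied digits can never collide with it:

```python
import re

# Counter lives in " [n]" at the end of the name, so trailing digits that
# belong to the user's name (e.g. "problem:GAD1") are left alone.
suffix = re.compile(r' \[(\d+)\]$')

def inc_name(name: str) -> str:
    match = suffix.search(name)
    if match is None:
        return f'{name} [1]'            # first duplicate: append " [1]"
    count = int(match.group(1)) + 1     # already bracketed: bump the counter
    return f'{name[:match.start()]} [{count}]'

assert inc_name('problem:GAD1') == 'problem:GAD1 [1]'
assert inc_name('problem:GAD1 [1]') == 'problem:GAD1 [2]'
```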
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `napari/util/naming.py`
Content:
```
1 """Automatically generate names.
2 """
3 import re
4 from .misc import formatdoc
5
6
7 sep = ' '
8 start = 1
9
10 numbered_patt = re.compile(r'(?<!\d)(?:\d+|)$')
11
12
13 def _inc_name_count_sub(match):
14 count = match.group(0)
15
16 try:
17 count = int(count)
18 except ValueError: # not an int
19 count = f'{sep}{start}'
20 else:
21 count = f'{count + 1}'
22
23 return count
24
25
26 @formatdoc
27 def inc_name_count(name):
28 """Increase a name's count matching `{numbered_patt}` by ``1``.
29
30 If the name is not already numbered, append '{sep}{start}'.
31
32 Parameters
33 ----------
34 name : str
35 Original name.
36
37 Returns
38 -------
39 incremented_name : str
40 Numbered name incremented by ``1``.
41 """
42 return numbered_patt.sub(_inc_name_count_sub, name)
43
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/napari/util/naming.py b/napari/util/naming.py
--- a/napari/util/naming.py
+++ b/napari/util/naming.py
@@ -7,7 +7,9 @@
sep = ' '
start = 1
-numbered_patt = re.compile(r'(?<!\d)(?:\d+|)$')
+# Match integer between square brackets at end of string if after space
+# or at beginning of string or just match end of string
+numbered_patt = re.compile(r'((?<=\A\[)|(?<=\s\[))(?:\d+|)(?=\]$)|$')
def _inc_name_count_sub(match):
@@ -16,7 +18,7 @@
try:
count = int(count)
except ValueError: # not an int
- count = f'{sep}{start}'
+ count = f'{sep}[{start}]'
else:
count = f'{count + 1}'
@@ -27,7 +29,7 @@
def inc_name_count(name):
"""Increase a name's count matching `{numbered_patt}` by ``1``.
- If the name is not already numbered, append '{sep}{start}'.
+ If the name is not already numbered, append '{sep}[{start}]'.
Parameters
----------
@@ -39,4 +41,4 @@
incremented_name : str
Numbered name incremented by ``1``.
"""
- return numbered_patt.sub(_inc_name_count_sub, name)
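
Hand-checked behaviour of the patched pattern, for reference (illustrative snippet, not part of the napari test suite):

```python
import re

patt = re.compile(r'((?<=\A\[)|(?<=\s\[))(?:\d+|)(?=\]$)|$')

# No bracketed suffix: only the end-of-string branch matches, so a fresh
# " [1]" is appended and the trailing "1" of the name is untouched.
assert patt.sub(' [1]', 'problem:GAD1', count=1) == 'problem:GAD1 [1]'

# Bracketed suffix: the digits between " [" and "]" are the match.
assert patt.sub('2', 'problem:GAD1 [1]', count=1) == 'problem:GAD1 [2]'
```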
+ return numbered_patt.sub(_inc_name_count_sub, name, count=1)
| {"golden_diff": "diff --git a/napari/util/naming.py b/napari/util/naming.py\n--- a/napari/util/naming.py\n+++ b/napari/util/naming.py\n@@ -7,7 +7,9 @@\n sep = ' '\n start = 1\n \n-numbered_patt = re.compile(r'(?<!\\d)(?:\\d+|)$')\n+# Match integer between square brackets at end of string if after space\n+# or at beginning of string or just match end of string\n+numbered_patt = re.compile(r'((?<=\\A\\[)|(?<=\\s\\[))(?:\\d+|)(?=\\]$)|$')\n \n \n def _inc_name_count_sub(match):\n@@ -16,7 +18,7 @@\n try:\n count = int(count)\n except ValueError: # not an int\n- count = f'{sep}{start}'\n+ count = f'{sep}[{start}]'\n else:\n count = f'{count + 1}'\n \n@@ -27,7 +29,7 @@\n def inc_name_count(name):\n \"\"\"Increase a name's count matching `{numbered_patt}` by ``1``.\n \n- If the name is not already numbered, append '{sep}{start}'.\n+ If the name is not already numbered, append '{sep}[{start}]'.\n \n Parameters\n ----------\n@@ -39,4 +41,4 @@\n incremented_name : str\n Numbered name incremented by ``1``.\n \"\"\"\n- return numbered_patt.sub(_inc_name_count_sub, name)\n+ return numbered_patt.sub(_inc_name_count_sub, name, count=1)\n", "issue": "novel gene discovery in Napari (a.k.a. imperfect layer name incrementing)\n## \ud83d\udc1b Bug\r\nnapari does a smart thing and adds an integer to the name of a layer if the name is already in use in the viewer. It then increments that number when another layer is added that fits the pattern. This is great until you have layer names that end in numbers \r\n\r\nwhich in this case results in different names that are sometimes real genes and sometimes not.\r\n\r\n## To Reproduce\r\n\r\nSteps to reproduce the behavior:\r\n```\r\nv = napari.Viewer()\r\nfor i in range(3):\r\n v.add_points(1000*np.random.rand(10,2), name = \"no_problem\")\r\nfor i in range(3):\r\n v.add_points(1000*np.random.rand(10,2), name = \"problem:GAD1\")\r\n```\r\n\r\n## Expected behavior\r\nconsistent use of a space or other character to separate the auto-increment integer from the original string\r\n\n", "before_files": [{"content": "\"\"\"Automatically generate names.\n\"\"\"\nimport re\nfrom .misc import formatdoc\n\n\nsep = ' '\nstart = 1\n\nnumbered_patt = re.compile(r'(?<!\\d)(?:\\d+|)$')\n\n\ndef _inc_name_count_sub(match):\n count = match.group(0)\n\n try:\n count = int(count)\n except ValueError: # not an int\n count = f'{sep}{start}'\n else:\n count = f'{count + 1}'\n\n return count\n\n\n@formatdoc\ndef inc_name_count(name):\n \"\"\"Increase a name's count matching `{numbered_patt}` by ``1``.\n\n If the name is not already numbered, append '{sep}{start}'.\n\n Parameters\n ----------\n name : str\n Original name.\n\n Returns\n -------\n incremented_name : str\n Numbered name incremented by ``1``.\n \"\"\"\n return numbered_patt.sub(_inc_name_count_sub, name)\n", "path": "napari/util/naming.py"}], "after_files": [{"content": "\"\"\"Automatically generate names.\n\"\"\"\nimport re\nfrom .misc import formatdoc\n\n\nsep = ' '\nstart = 1\n\n# Match integer between square brackets at end of string if after space\n# or at beginning of string or just match end of string\nnumbered_patt = re.compile(r'((?<=\\A\\[)|(?<=\\s\\[))(?:\\d+|)(?=\\]$)|$')\n\n\ndef _inc_name_count_sub(match):\n count = match.group(0)\n\n try:\n count = int(count)\n except ValueError: # not an int\n count = f'{sep}[{start}]'\n else:\n count = f'{count + 1}'\n\n return count\n\n\n@formatdoc\ndef inc_name_count(name):\n \"\"\"Increase a name's count matching `{numbered_patt}` by ``1``.\n\n If the name is not already 
numbered, append '{sep}[{start}]'.\n\n Parameters\n ----------\n name : str\n Original name.\n\n Returns\n -------\n incremented_name : str\n Numbered name incremented by ``1``.\n \"\"\"\n return numbered_patt.sub(_inc_name_count_sub, name, count=1)\n", "path": "napari/util/naming.py"}]} | 843 | 361 |
gh_patches_debug_13137 | rasdani/github-patches | git_diff | genialis__resolwe-313 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
404 returned when deleting entity with `delete_content` set to `true`
Probably because the sample gets deleted when its last data object is deleted.
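
If that hypothesis is right, the ViewSet's `destroy()` ends up operating on a row that the cascade already removed. One way to make the override robust, sketched against the code below (the existence re-check is the assumption being illustrated; `Entity` and the DRF imports are as in the file):

```python
from rest_framework import status
from rest_framework.response import Response

def destroy(self, request, *args, **kwargs):
    obj = self.get_object()
    # ... delete contained Data objects as before ...
    if not Entity.objects.filter(pk=obj.pk).exists():
        # the cascade already removed the entity; report success, not 404
        return Response(status=status.HTTP_204_NO_CONTENT)
    return super().destroy(request, *args, **kwargs)
```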
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `resolwe/flow/views/entity.py`
Content:
```
1 """Entity viewset."""
2 from __future__ import absolute_import, division, print_function, unicode_literals
3
4 from distutils.util import strtobool # pylint: disable=import-error,no-name-in-module
5
6 from django.db.models import Max
7 from django.db.models.query import Prefetch
8
9 from rest_framework import exceptions, status
10 from rest_framework.decorators import detail_route
11 from rest_framework.response import Response
12
13 from resolwe.flow.filters import EntityFilter
14 from resolwe.flow.models import Collection, Data, Entity
15 from resolwe.flow.serializers import EntitySerializer
16 from resolwe.permissions.utils import remove_permission, update_permission
17
18 from .collection import CollectionViewSet
19
20
21 class EntityViewSet(CollectionViewSet):
22 """API view for entities."""
23
24 filter_class = EntityFilter
25 serializer_class = EntitySerializer
26
27 queryset = Entity.objects.prefetch_related(
28 Prefetch('data', queryset=Data.objects.all().order_by('id')),
29 'descriptor_schema',
30 'contributor'
31 ).annotate(
32 latest_date=Max('data__modified')
33 ).order_by('-latest_date')
34
35 def _check_collection_permissions(self, collection_id, user):
36 """Check that collection exists and user has `add` permission."""
37 collection_query = Collection.objects.filter(pk=collection_id)
38 if not collection_query.exists():
39 raise exceptions.ValidationError('Collection id does not exist')
40
41 collection = collection_query.first()
42 if not user.has_perm('add_collection', obj=collection):
43 if user.is_authenticated():
44 raise exceptions.PermissionDenied()
45 else:
46 raise exceptions.NotFound()
47
48 def set_content_permissions(self, user, obj, payload):
49 """Apply permissions to data objects in ``Entity``."""
50 # Data doesn't have "ADD" permission, so it has to be removed
51 payload = remove_permission(payload, 'add')
52
53 for data in obj.data.all():
54 if user.has_perm('share_data', data):
55 update_permission(data, payload)
56
57 def destroy(self, request, *args, **kwargs):
58 """Destroy a model instance.
59
60 If ``delete_content`` flag is set in query parameters, also all
61 Data objects contained in entity will be deleted.
62 """
63 obj = self.get_object()
64 user = request.user
65
66 if strtobool(request.query_params.get('delete_content', 'false')):
67 for data in obj.data.all():
68 if user.has_perm('edit_data', data):
69 data.delete()
70
71         # NOTE: Collection's ``destroy`` method should be skipped, so we
72         # intentionally call its parent.
73 return super(CollectionViewSet, self).destroy( # pylint: disable=no-member,bad-super-call
74 request, *args, **kwargs
75 )
76
77 @detail_route(methods=[u'post'])
78 def add_to_collection(self, request, pk=None):
79 """Add Entity to a collection."""
80 entity = self.get_object()
81
82 if 'ids' not in request.data:
83 return Response({"error": "`ids` parameter is required"}, status=status.HTTP_400_BAD_REQUEST)
84
85 for collection_id in request.data['ids']:
86 self._check_collection_permissions(collection_id, request.user)
87
88 for collection_id in request.data['ids']:
89 entity.collections.add(collection_id)
90
91 collection = Collection.objects.get(pk=collection_id)
92 for data in entity.data.all():
93 collection.data.add(data)
94
95 return Response()
96
97 @detail_route(methods=[u'post'])
98 def remove_from_collection(self, request, pk=None):
99 """Remove Entity from a collection."""
100 entity = self.get_object()
101
102 if 'ids' not in request.data:
103 return Response({"error": "`ids` parameter is required"}, status=status.HTTP_400_BAD_REQUEST)
104
105 for collection_id in request.data['ids']:
106 self._check_collection_permissions(collection_id, request.user)
107
108 for collection_id in request.data['ids']:
109 entity.collections.remove(collection_id)
110
111 collection = Collection.objects.get(pk=collection_id)
112 for data in entity.data.all():
113 collection.data.remove(data)
114
115 return Response()
116
117 @detail_route(methods=[u'post'])
118 def add_data(self, request, pk=None):
119 """Add data to Entity and it's collection."""
120 # add data to entity
121 resp = super(EntityViewSet, self).add_data(request, pk)
122
123 # add data to collections in which entity is
124 entity = self.get_object()
125 for collection in entity.collections.all():
126 collection.data.add(*request.data['ids'])
127
128 return resp
129
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/resolwe/flow/views/entity.py b/resolwe/flow/views/entity.py
--- a/resolwe/flow/views/entity.py
+++ b/resolwe/flow/views/entity.py
@@ -68,6 +68,11 @@
if user.has_perm('edit_data', data):
data.delete()
+ # If all data objects in an entity are removed, the entity may
+ # have already been removed, so there is no need to call destroy.
+ if not Entity.objects.filter(pk=obj.pk).exists():
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
        # NOTE: Collection's ``destroy`` method should be skipped, so we
        # intentionally call its parent.
return super(CollectionViewSet, self).destroy( # pylint: disable=no-member,bad-super-call
| {"golden_diff": "diff --git a/resolwe/flow/views/entity.py b/resolwe/flow/views/entity.py\n--- a/resolwe/flow/views/entity.py\n+++ b/resolwe/flow/views/entity.py\n@@ -68,6 +68,11 @@\n if user.has_perm('edit_data', data):\n data.delete()\n \n+ # If all data objects in an entity are removed, the entity may\n+ # have already been removed, so there is no need to call destroy.\n+ if not Entity.objects.filter(pk=obj.pk).exists():\n+ return Response(status=status.HTTP_204_NO_CONTENT)\n+\n # NOTE: Collection's ``destroy`` method should be skiped, so we\n # intentionaly call it's parent.\n return super(CollectionViewSet, self).destroy( # pylint: disable=no-member,bad-super-call\n", "issue": "404 returned when deleting entity with `delete_content` set to `true`\nProbably because sample gets deleted when the last data object is deleted.\n", "before_files": [{"content": "\"\"\"Entity viewset.\"\"\"\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom distutils.util import strtobool # pylint: disable=import-error,no-name-in-module\n\nfrom django.db.models import Max\nfrom django.db.models.query import Prefetch\n\nfrom rest_framework import exceptions, status\nfrom rest_framework.decorators import detail_route\nfrom rest_framework.response import Response\n\nfrom resolwe.flow.filters import EntityFilter\nfrom resolwe.flow.models import Collection, Data, Entity\nfrom resolwe.flow.serializers import EntitySerializer\nfrom resolwe.permissions.utils import remove_permission, update_permission\n\nfrom .collection import CollectionViewSet\n\n\nclass EntityViewSet(CollectionViewSet):\n \"\"\"API view for entities.\"\"\"\n\n filter_class = EntityFilter\n serializer_class = EntitySerializer\n\n queryset = Entity.objects.prefetch_related(\n Prefetch('data', queryset=Data.objects.all().order_by('id')),\n 'descriptor_schema',\n 'contributor'\n ).annotate(\n latest_date=Max('data__modified')\n ).order_by('-latest_date')\n\n def _check_collection_permissions(self, collection_id, user):\n \"\"\"Check that collection exists and user has `add` permission.\"\"\"\n collection_query = Collection.objects.filter(pk=collection_id)\n if not collection_query.exists():\n raise exceptions.ValidationError('Collection id does not exist')\n\n collection = collection_query.first()\n if not user.has_perm('add_collection', obj=collection):\n if user.is_authenticated():\n raise exceptions.PermissionDenied()\n else:\n raise exceptions.NotFound()\n\n def set_content_permissions(self, user, obj, payload):\n \"\"\"Apply permissions to data objects in ``Entity``.\"\"\"\n # Data doesn't have \"ADD\" permission, so it has to be removed\n payload = remove_permission(payload, 'add')\n\n for data in obj.data.all():\n if user.has_perm('share_data', data):\n update_permission(data, payload)\n\n def destroy(self, request, *args, **kwargs):\n \"\"\"Destroy a model instance.\n\n If ``delete_content`` flag is set in query parameters, also all\n Data objects contained in entity will be deleted.\n \"\"\"\n obj = self.get_object()\n user = request.user\n\n if strtobool(request.query_params.get('delete_content', 'false')):\n for data in obj.data.all():\n if user.has_perm('edit_data', data):\n data.delete()\n\n # NOTE: Collection's ``destroy`` method should be skiped, so we\n # intentionaly call it's parent.\n return super(CollectionViewSet, self).destroy( # pylint: disable=no-member,bad-super-call\n request, *args, **kwargs\n )\n\n @detail_route(methods=[u'post'])\n def add_to_collection(self, request, pk=None):\n 
\"\"\"Add Entity to a collection.\"\"\"\n entity = self.get_object()\n\n if 'ids' not in request.data:\n return Response({\"error\": \"`ids` parameter is required\"}, status=status.HTTP_400_BAD_REQUEST)\n\n for collection_id in request.data['ids']:\n self._check_collection_permissions(collection_id, request.user)\n\n for collection_id in request.data['ids']:\n entity.collections.add(collection_id)\n\n collection = Collection.objects.get(pk=collection_id)\n for data in entity.data.all():\n collection.data.add(data)\n\n return Response()\n\n @detail_route(methods=[u'post'])\n def remove_from_collection(self, request, pk=None):\n \"\"\"Remove Entity from a collection.\"\"\"\n entity = self.get_object()\n\n if 'ids' not in request.data:\n return Response({\"error\": \"`ids` parameter is required\"}, status=status.HTTP_400_BAD_REQUEST)\n\n for collection_id in request.data['ids']:\n self._check_collection_permissions(collection_id, request.user)\n\n for collection_id in request.data['ids']:\n entity.collections.remove(collection_id)\n\n collection = Collection.objects.get(pk=collection_id)\n for data in entity.data.all():\n collection.data.remove(data)\n\n return Response()\n\n @detail_route(methods=[u'post'])\n def add_data(self, request, pk=None):\n \"\"\"Add data to Entity and it's collection.\"\"\"\n # add data to entity\n resp = super(EntityViewSet, self).add_data(request, pk)\n\n # add data to collections in which entity is\n entity = self.get_object()\n for collection in entity.collections.all():\n collection.data.add(*request.data['ids'])\n\n return resp\n", "path": "resolwe/flow/views/entity.py"}], "after_files": [{"content": "\"\"\"Entity viewset.\"\"\"\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom distutils.util import strtobool # pylint: disable=import-error,no-name-in-module\n\nfrom django.db.models import Max\nfrom django.db.models.query import Prefetch\n\nfrom rest_framework import exceptions, status\nfrom rest_framework.decorators import detail_route\nfrom rest_framework.response import Response\n\nfrom resolwe.flow.filters import EntityFilter\nfrom resolwe.flow.models import Collection, Data, Entity\nfrom resolwe.flow.serializers import EntitySerializer\nfrom resolwe.permissions.utils import remove_permission, update_permission\n\nfrom .collection import CollectionViewSet\n\n\nclass EntityViewSet(CollectionViewSet):\n \"\"\"API view for entities.\"\"\"\n\n filter_class = EntityFilter\n serializer_class = EntitySerializer\n\n queryset = Entity.objects.prefetch_related(\n Prefetch('data', queryset=Data.objects.all().order_by('id')),\n 'descriptor_schema',\n 'contributor'\n ).annotate(\n latest_date=Max('data__modified')\n ).order_by('-latest_date')\n\n def _check_collection_permissions(self, collection_id, user):\n \"\"\"Check that collection exists and user has `add` permission.\"\"\"\n collection_query = Collection.objects.filter(pk=collection_id)\n if not collection_query.exists():\n raise exceptions.ValidationError('Collection id does not exist')\n\n collection = collection_query.first()\n if not user.has_perm('add_collection', obj=collection):\n if user.is_authenticated():\n raise exceptions.PermissionDenied()\n else:\n raise exceptions.NotFound()\n\n def set_content_permissions(self, user, obj, payload):\n \"\"\"Apply permissions to data objects in ``Entity``.\"\"\"\n # Data doesn't have \"ADD\" permission, so it has to be removed\n payload = remove_permission(payload, 'add')\n\n for data in obj.data.all():\n if 
user.has_perm('share_data', data):\n update_permission(data, payload)\n\n def destroy(self, request, *args, **kwargs):\n \"\"\"Destroy a model instance.\n\n If ``delete_content`` flag is set in query parameters, also all\n Data objects contained in entity will be deleted.\n \"\"\"\n obj = self.get_object()\n user = request.user\n\n if strtobool(request.query_params.get('delete_content', 'false')):\n for data in obj.data.all():\n if user.has_perm('edit_data', data):\n data.delete()\n\n # If all data objects in an entity are removed, the entity may\n # have already been removed, so there is no need to call destroy.\n if not Entity.objects.filter(pk=obj.pk).exists():\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n # NOTE: Collection's ``destroy`` method should be skiped, so we\n # intentionaly call it's parent.\n return super(CollectionViewSet, self).destroy( # pylint: disable=no-member,bad-super-call\n request, *args, **kwargs\n )\n\n @detail_route(methods=[u'post'])\n def add_to_collection(self, request, pk=None):\n \"\"\"Add Entity to a collection.\"\"\"\n entity = self.get_object()\n\n if 'ids' not in request.data:\n return Response({\"error\": \"`ids` parameter is required\"}, status=status.HTTP_400_BAD_REQUEST)\n\n for collection_id in request.data['ids']:\n self._check_collection_permissions(collection_id, request.user)\n\n for collection_id in request.data['ids']:\n entity.collections.add(collection_id)\n\n collection = Collection.objects.get(pk=collection_id)\n for data in entity.data.all():\n collection.data.add(data)\n\n return Response()\n\n @detail_route(methods=[u'post'])\n def remove_from_collection(self, request, pk=None):\n \"\"\"Remove Entity from a collection.\"\"\"\n entity = self.get_object()\n\n if 'ids' not in request.data:\n return Response({\"error\": \"`ids` parameter is required\"}, status=status.HTTP_400_BAD_REQUEST)\n\n for collection_id in request.data['ids']:\n self._check_collection_permissions(collection_id, request.user)\n\n for collection_id in request.data['ids']:\n entity.collections.remove(collection_id)\n\n collection = Collection.objects.get(pk=collection_id)\n for data in entity.data.all():\n collection.data.remove(data)\n\n return Response()\n\n @detail_route(methods=[u'post'])\n def add_data(self, request, pk=None):\n \"\"\"Add data to Entity and it's collection.\"\"\"\n # add data to entity\n resp = super(EntityViewSet, self).add_data(request, pk)\n\n # add data to collections in which entity is\n entity = self.get_object()\n for collection in entity.collections.all():\n collection.data.add(*request.data['ids'])\n\n return resp\n", "path": "resolwe/flow/views/entity.py"}]} | 1,531 | 185 |
gh_patches_debug_6684 | rasdani/github-patches | git_diff | netbox-community__netbox-11404 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Scheduling a job in the past raises an exception
### NetBox version
v3.4.2
### Python version
3.10
### Steps to Reproduce
1. Create a script
2. Schedule it in the past
### Expected Behavior
Form validation error message
### Observed Behavior
```
Traceback (most recent call last):
File "/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/core/handlers/exception.py", line 55, in inner
response = get_response(request)
File "/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/core/handlers/base.py", line 197, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/views/generic/base.py", line 103, in view
return self.dispatch(request, *args, **kwargs)
File "/home/main/devel/repos/netbox/netbox/utilities/views.py", line 53, in dispatch
return super().dispatch(request, *args, **kwargs)
File "/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/views/generic/base.py", line 142, in dispatch
return handler(request, *args, **kwargs)
File "/home/main/devel/repos/netbox/netbox/extras/views.py", line 815, in post
elif form.is_valid():
File "/home/main/devel/repos/netbox/netbox/utilities/forms/forms.py", line 69, in is_valid
is_valid = super().is_valid()
File "/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/forms/forms.py", line 205, in is_valid
return self.is_bound and not self.errors
File "/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/forms/forms.py", line 200, in errors
self.full_clean()
File "/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/forms/forms.py", line 437, in full_clean
self._clean_fields()
File "/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/forms/forms.py", line 455, in _clean_fields
self.add_error(name, e)
File "/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/forms/forms.py", line 392, in add_error
raise TypeError(
TypeError: The argument `field` must be `None` when the `error` argument contains errors for multiple fields.
```
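
The traceback is Django's form machinery objecting to the shape of the error, not to the scheduling logic: a `ValidationError` raised from a field-specific `clean_<field>()` hook is attributed to that field automatically, so it must carry a plain message; the dict-keyed form is only legal where errors can span several fields (i.e. in `Form.clean()`). The usual fix looks like this (sketch):

```python
from django import forms
from django.utils import timezone

def clean__schedule_at(self):
    scheduled_time = self.cleaned_data['_schedule_at']
    if scheduled_time and scheduled_time < timezone.now():
        # plain message: Django attaches it to _schedule_at for us
        raise forms.ValidationError('Scheduled time must be in the future.')
    return scheduled_time
```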
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `netbox/extras/forms/scripts.py`
Content:
```
1 from django import forms
2 from django.utils import timezone
3 from django.utils.translation import gettext as _
4
5 from utilities.forms import BootstrapMixin, DateTimePicker, SelectDurationWidget
6
7 __all__ = (
8 'ScriptForm',
9 )
10
11
12 class ScriptForm(BootstrapMixin, forms.Form):
13 _commit = forms.BooleanField(
14 required=False,
15 initial=True,
16 label=_("Commit changes"),
17 help_text=_("Commit changes to the database (uncheck for a dry-run)")
18 )
19 _schedule_at = forms.DateTimeField(
20 required=False,
21 widget=DateTimePicker(),
22 label=_("Schedule at"),
23 help_text=_("Schedule execution of script to a set time"),
24 )
25 _interval = forms.IntegerField(
26 required=False,
27 min_value=1,
28 label=_("Recurs every"),
29 widget=SelectDurationWidget(),
30 help_text=_("Interval at which this script is re-run (in minutes)")
31 )
32
33 def __init__(self, *args, **kwargs):
34 super().__init__(*args, **kwargs)
35
36 # Annotate the current system time for reference
37 now = timezone.now().strftime('%Y-%m-%d %H:%M:%S')
38 self.fields['_schedule_at'].help_text += f' (current time: <strong>{now}</strong>)'
39
40 # Move _commit and _schedule_at to the end of the form
41 schedule_at = self.fields.pop('_schedule_at')
42 interval = self.fields.pop('_interval')
43 commit = self.fields.pop('_commit')
44 self.fields['_schedule_at'] = schedule_at
45 self.fields['_interval'] = interval
46 self.fields['_commit'] = commit
47
48 def clean__schedule_at(self):
49 scheduled_time = self.cleaned_data['_schedule_at']
50 if scheduled_time and scheduled_time < timezone.now():
51 raise forms.ValidationError({
52 '_schedule_at': _('Scheduled time must be in the future.')
53 })
54
55 return scheduled_time
56
57 @property
58 def requires_input(self):
59 """
60 A boolean indicating whether the form requires user input (ignore the built-in fields).
61 """
62 return bool(len(self.fields) > 3)
63
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/netbox/extras/forms/scripts.py b/netbox/extras/forms/scripts.py
--- a/netbox/extras/forms/scripts.py
+++ b/netbox/extras/forms/scripts.py
@@ -48,9 +48,7 @@
def clean__schedule_at(self):
scheduled_time = self.cleaned_data['_schedule_at']
if scheduled_time and scheduled_time < timezone.now():
- raise forms.ValidationError({
- '_schedule_at': _('Scheduled time must be in the future.')
- })
+ raise forms.ValidationError(_('Scheduled time must be in the future.'))
return scheduled_time
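
For contrast, the dict-keyed style the original code attempted remains the documented idiom inside `Form.clean()`, where an error may legitimately target another field. A hypothetical example using the same form's fields:

```python
def clean(self):
    cleaned = super().clean()
    if cleaned.get('_interval') and not cleaned.get('_schedule_at'):
        # dict form is fine here: clean() is not tied to a single field
        raise forms.ValidationError(
            {'_schedule_at': _('Required when an interval is set.')}
        )
    return cleaned
```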
| {"golden_diff": "diff --git a/netbox/extras/forms/scripts.py b/netbox/extras/forms/scripts.py\n--- a/netbox/extras/forms/scripts.py\n+++ b/netbox/extras/forms/scripts.py\n@@ -48,9 +48,7 @@\n def clean__schedule_at(self):\n scheduled_time = self.cleaned_data['_schedule_at']\n if scheduled_time and scheduled_time < timezone.now():\n- raise forms.ValidationError({\n- '_schedule_at': _('Scheduled time must be in the future.')\n- })\n+ raise forms.ValidationError(_('Scheduled time must be in the future.'))\n \n return scheduled_time\n", "issue": "Scheduling a job in the past raises an exception\n### NetBox version\n\nv3.4.2\n\n### Python version\n\n3.10\n\n### Steps to Reproduce\n\n1. Create a script\r\n2. Schedule it in the past\n\n### Expected Behavior\n\nForm validation error message\n\n### Observed Behavior\n\n```\r\nTraceback (most recent call last):\r\n File \"/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/core/handlers/exception.py\", line 55, in inner\r\n response = get_response(request)\r\n File \"/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/core/handlers/base.py\", line 197, in _get_response\r\n response = wrapped_callback(request, *callback_args, **callback_kwargs)\r\n File \"/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/views/generic/base.py\", line 103, in view\r\n return self.dispatch(request, *args, **kwargs)\r\n File \"/home/main/devel/repos/netbox/netbox/utilities/views.py\", line 53, in dispatch\r\n return super().dispatch(request, *args, **kwargs)\r\n File \"/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/views/generic/base.py\", line 142, in dispatch\r\n return handler(request, *args, **kwargs)\r\n File \"/home/main/devel/repos/netbox/netbox/extras/views.py\", line 815, in post\r\n elif form.is_valid():\r\n File \"/home/main/devel/repos/netbox/netbox/utilities/forms/forms.py\", line 69, in is_valid\r\n is_valid = super().is_valid()\r\n File \"/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/forms/forms.py\", line 205, in is_valid\r\n return self.is_bound and not self.errors\r\n File \"/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/forms/forms.py\", line 200, in errors\r\n self.full_clean()\r\n File \"/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/forms/forms.py\", line 437, in full_clean\r\n self._clean_fields()\r\n File \"/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/forms/forms.py\", line 455, in _clean_fields\r\n self.add_error(name, e)\r\n File \"/home/main/devel/repos/netbox/venv/lib/python3.10/site-packages/django/forms/forms.py\", line 392, in add_error\r\n raise TypeError(\r\nTypeError: The argument `field` must be `None` when the `error` argument contains errors for multiple fields.\r\n```\n", "before_files": [{"content": "from django import forms\nfrom django.utils import timezone\nfrom django.utils.translation import gettext as _\n\nfrom utilities.forms import BootstrapMixin, DateTimePicker, SelectDurationWidget\n\n__all__ = (\n 'ScriptForm',\n)\n\n\nclass ScriptForm(BootstrapMixin, forms.Form):\n _commit = forms.BooleanField(\n required=False,\n initial=True,\n label=_(\"Commit changes\"),\n help_text=_(\"Commit changes to the database (uncheck for a dry-run)\")\n )\n _schedule_at = forms.DateTimeField(\n required=False,\n widget=DateTimePicker(),\n label=_(\"Schedule at\"),\n help_text=_(\"Schedule execution of script to a set time\"),\n )\n _interval = 
forms.IntegerField(\n required=False,\n min_value=1,\n label=_(\"Recurs every\"),\n widget=SelectDurationWidget(),\n help_text=_(\"Interval at which this script is re-run (in minutes)\")\n )\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n # Annotate the current system time for reference\n now = timezone.now().strftime('%Y-%m-%d %H:%M:%S')\n self.fields['_schedule_at'].help_text += f' (current time: <strong>{now}</strong>)'\n\n # Move _commit and _schedule_at to the end of the form\n schedule_at = self.fields.pop('_schedule_at')\n interval = self.fields.pop('_interval')\n commit = self.fields.pop('_commit')\n self.fields['_schedule_at'] = schedule_at\n self.fields['_interval'] = interval\n self.fields['_commit'] = commit\n\n def clean__schedule_at(self):\n scheduled_time = self.cleaned_data['_schedule_at']\n if scheduled_time and scheduled_time < timezone.now():\n raise forms.ValidationError({\n '_schedule_at': _('Scheduled time must be in the future.')\n })\n\n return scheduled_time\n\n @property\n def requires_input(self):\n \"\"\"\n A boolean indicating whether the form requires user input (ignore the built-in fields).\n \"\"\"\n return bool(len(self.fields) > 3)\n", "path": "netbox/extras/forms/scripts.py"}], "after_files": [{"content": "from django import forms\nfrom django.utils import timezone\nfrom django.utils.translation import gettext as _\n\nfrom utilities.forms import BootstrapMixin, DateTimePicker, SelectDurationWidget\n\n__all__ = (\n 'ScriptForm',\n)\n\n\nclass ScriptForm(BootstrapMixin, forms.Form):\n _commit = forms.BooleanField(\n required=False,\n initial=True,\n label=_(\"Commit changes\"),\n help_text=_(\"Commit changes to the database (uncheck for a dry-run)\")\n )\n _schedule_at = forms.DateTimeField(\n required=False,\n widget=DateTimePicker(),\n label=_(\"Schedule at\"),\n help_text=_(\"Schedule execution of script to a set time\"),\n )\n _interval = forms.IntegerField(\n required=False,\n min_value=1,\n label=_(\"Recurs every\"),\n widget=SelectDurationWidget(),\n help_text=_(\"Interval at which this script is re-run (in minutes)\")\n )\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n # Annotate the current system time for reference\n now = timezone.now().strftime('%Y-%m-%d %H:%M:%S')\n self.fields['_schedule_at'].help_text += f' (current time: <strong>{now}</strong>)'\n\n # Move _commit and _schedule_at to the end of the form\n schedule_at = self.fields.pop('_schedule_at')\n interval = self.fields.pop('_interval')\n commit = self.fields.pop('_commit')\n self.fields['_schedule_at'] = schedule_at\n self.fields['_interval'] = interval\n self.fields['_commit'] = commit\n\n def clean__schedule_at(self):\n scheduled_time = self.cleaned_data['_schedule_at']\n if scheduled_time and scheduled_time < timezone.now():\n raise forms.ValidationError(_('Scheduled time must be in the future.'))\n\n return scheduled_time\n\n @property\n def requires_input(self):\n \"\"\"\n A boolean indicating whether the form requires user input (ignore the built-in fields).\n \"\"\"\n return bool(len(self.fields) > 3)\n", "path": "netbox/extras/forms/scripts.py"}]} | 1,448 | 127 |
gh_patches_debug_42729 | rasdani/github-patches | git_diff | microsoft__botbuilder-python-1227 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[PORT] Replace UseState() with UseBotState()
> Port this change from botbuilder-dotnet/master branch:
https://github.com/microsoft/botbuilder-dotnet/pull/3862
Fixes #3859
and use untyped params so that order and type are not fixed.
Tweak RegisterMiddlewareClass so you can provide the key for the turnstate.
# Changed projects
* Microsoft.Bot.Builder.Dialogs.Adaptive.Testing
* Microsoft.Bot.Builder
* Microsoft.Bot.Builder.AI.QnA.Tests
* Microsoft.Bot.Builder.Dialogs.Adaptive.Templates.Tests
* Microsoft.Bot.Builder.Dialogs.Adaptive.Tests
* Microsoft.Bot.Builder.Dialogs.Declarative.Tests
* Microsoft.Bot.Builder.Dialogs.Tests
* Microsoft.Bot.Builder.TestBot.Json
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `libraries/botbuilder-core/botbuilder/core/register_class_middleware.py`
Content:
```
1 # Copyright (c) Microsoft Corporation. All rights reserved.
2 # Licensed under the MIT License.
3 from typing import Callable, Awaitable
4
5 from botbuilder.core import Middleware, TurnContext
6
7
8 class RegisterClassMiddleware(Middleware):
9 """
10 Middleware for adding an object to or registering a service with the current turn context.
11 """
12
13 def __init__(self, service):
14 self.service = service
15
16 async def on_turn(
17 self, context: TurnContext, logic: Callable[[TurnContext], Awaitable]
18 ):
19 # C# has TurnStateCollection with has overrides for adding items
20 # to TurnState. Python does not. In C#'s case, there is an 'Add'
21 # to handle adding object, and that uses the fully qualified class name.
22 context.turn_state[self.fullname(self.service)] = self.service
23 await logic()
24
25 @staticmethod
26 def fullname(obj):
27 module = obj.__class__.__module__
28 if module is None or module == str.__class__.__module__:
29 return obj.__class__.__name__ # Avoid reporting __builtin__
30 return module + "." + obj.__class__.__name__
31
```
Path: `libraries/botbuilder-core/botbuilder/core/adapter_extensions.py`
Content:
```
1 # Copyright (c) Microsoft Corporation. All rights reserved.
2 # Licensed under the MIT License.
3 from botbuilder.core import (
4 BotAdapter,
5 Storage,
6 RegisterClassMiddleware,
7 UserState,
8 ConversationState,
9 AutoSaveStateMiddleware,
10 )
11
12
13 class AdapterExtensions:
14 @staticmethod
15 def use_storage(adapter: BotAdapter, storage: Storage) -> BotAdapter:
16 """
17 Registers a storage layer with the adapter. The storage object will be available via the turn context's
18 `turn_state` property.
19
20 :param adapter: The BotAdapter on which to register the storage object.
21 :param storage: The Storage object to register.
22 :return: The BotAdapter
23 """
24 return adapter.use(RegisterClassMiddleware(storage))
25
26 @staticmethod
27 def use_state(
28 adapter: BotAdapter,
29 user_state: UserState,
30 conversation_state: ConversationState,
31 auto: bool = True,
32 ) -> BotAdapter:
33 """
34 Registers user and conversation state objects with the adapter. These objects will be available via
35 the turn context's `turn_state` property.
36
37 :param adapter: The BotAdapter on which to register the state objects.
38 :param user_state: The UserState object to register.
39 :param conversation_state: The ConversationState object to register.
40 :param auto: True to automatically persist state each turn.
41 :return: The BotAdapter
42 """
43 if not adapter:
44 raise TypeError("BotAdapter is required")
45
46 if not user_state:
47 raise TypeError("UserState is required")
48
49 if not conversation_state:
50 raise TypeError("ConversationState is required")
51
52 adapter.use(RegisterClassMiddleware(user_state))
53 adapter.use(RegisterClassMiddleware(conversation_state))
54
55 if auto:
56 adapter.use(AutoSaveStateMiddleware([user_state, conversation_state]))
57
58 return adapter
59
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
| diff --git a/libraries/botbuilder-core/botbuilder/core/adapter_extensions.py b/libraries/botbuilder-core/botbuilder/core/adapter_extensions.py
--- a/libraries/botbuilder-core/botbuilder/core/adapter_extensions.py
+++ b/libraries/botbuilder-core/botbuilder/core/adapter_extensions.py
@@ -1,7 +1,10 @@
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License.
+from warnings import warn
+
 from botbuilder.core import (
     BotAdapter,
+    BotState,
     Storage,
     RegisterClassMiddleware,
     UserState,
@@ -23,6 +26,39 @@
         """
         return adapter.use(RegisterClassMiddleware(storage))
 
+    @staticmethod
+    def use_bot_state(
+        bot_adapter: BotAdapter, *bot_states: BotState, auto: bool = True
+    ) -> BotAdapter:
+        """
+        Registers bot state object into the TurnContext. The botstate will be available via the turn context.
+
+        :param bot_adapter: The BotAdapter on which to register the state objects.
+        :param bot_states: One or more BotState objects to register.
+        :return: The updated adapter.
+        """
+        if not bot_states:
+            raise TypeError("At least one BotAdapter is required")
+
+        for bot_state in bot_states:
+            bot_adapter.use(
+                RegisterClassMiddleware(
+                    bot_state, AdapterExtensions.fullname(bot_state)
+                )
+            )
+
+        if auto:
+            bot_adapter.use(AutoSaveStateMiddleware(bot_states))
+
+        return bot_adapter
+
+    @staticmethod
+    def fullname(obj):
+        module = obj.__class__.__module__
+        if module is None or module == str.__class__.__module__:
+            return obj.__class__.__name__  # Avoid reporting __builtin__
+        return module + "." + obj.__class__.__name__
+
     @staticmethod
     def use_state(
         adapter: BotAdapter,
@@ -31,7 +67,7 @@
         auto: bool = True,
     ) -> BotAdapter:
         """
-        Registers user and conversation state objects with the adapter. These objects will be available via
+        [DEPRECATED] Registers user and conversation state objects with the adapter. These objects will be available via
         the turn context's `turn_state` property.
 
         :param adapter: The BotAdapter on which to register the state objects.
@@ -40,6 +76,11 @@
         :param auto: True to automatically persist state each turn.
         :return: The BotAdapter
         """
+        warn(
+            "This method is deprecated in 4.9. You should use the method .use_bot_state() instead.",
+            DeprecationWarning,
+        )
+
         if not adapter:
             raise TypeError("BotAdapter is required")
 
diff --git a/libraries/botbuilder-core/botbuilder/core/register_class_middleware.py b/libraries/botbuilder-core/botbuilder/core/register_class_middleware.py
--- a/libraries/botbuilder-core/botbuilder/core/register_class_middleware.py
+++ b/libraries/botbuilder-core/botbuilder/core/register_class_middleware.py
@@ -10,8 +10,9 @@
     Middleware for adding an object to or registering a service with the current turn context.
     """
 
-    def __init__(self, service):
+    def __init__(self, service, key: str = None):
         self.service = service
+        self._key = key
 
     async def on_turn(
         self, context: TurnContext, logic: Callable[[TurnContext], Awaitable]
@@ -19,7 +20,8 @@
         # C# has TurnStateCollection with has overrides for adding items
         # to TurnState. Python does not. In C#'s case, there is an 'Add'
         # to handle adding object, and that uses the fully qualified class name.
-        context.turn_state[self.fullname(self.service)] = self.service
+        key = self._key or self.fullname(self.service)
+        context.turn_state[key] = self.service
         await logic()
 
     @staticmethod
| {"golden_diff": "diff --git a/libraries/botbuilder-core/botbuilder/core/adapter_extensions.py b/libraries/botbuilder-core/botbuilder/core/adapter_extensions.py\n--- a/libraries/botbuilder-core/botbuilder/core/adapter_extensions.py\n+++ b/libraries/botbuilder-core/botbuilder/core/adapter_extensions.py\n@@ -1,7 +1,10 @@\n # Copyright (c) Microsoft Corporation. All rights reserved.\n # Licensed under the MIT License.\n+from warnings import warn\n+\n from botbuilder.core import (\n BotAdapter,\n+ BotState,\n Storage,\n RegisterClassMiddleware,\n UserState,\n@@ -23,6 +26,39 @@\n \"\"\"\n return adapter.use(RegisterClassMiddleware(storage))\n \n+ @staticmethod\n+ def use_bot_state(\n+ bot_adapter: BotAdapter, *bot_states: BotState, auto: bool = True\n+ ) -> BotAdapter:\n+ \"\"\"\n+ Registers bot state object into the TurnContext. The botstate will be available via the turn context.\n+\n+ :param bot_adapter: The BotAdapter on which to register the state objects.\n+ :param bot_states: One or more BotState objects to register.\n+ :return: The updated adapter.\n+ \"\"\"\n+ if not bot_states:\n+ raise TypeError(\"At least one BotAdapter is required\")\n+\n+ for bot_state in bot_states:\n+ bot_adapter.use(\n+ RegisterClassMiddleware(\n+ bot_state, AdapterExtensions.fullname(bot_state)\n+ )\n+ )\n+\n+ if auto:\n+ bot_adapter.use(AutoSaveStateMiddleware(bot_states))\n+\n+ return bot_adapter\n+\n+ @staticmethod\n+ def fullname(obj):\n+ module = obj.__class__.__module__\n+ if module is None or module == str.__class__.__module__:\n+ return obj.__class__.__name__ # Avoid reporting __builtin__\n+ return module + \".\" + obj.__class__.__name__\n+\n @staticmethod\n def use_state(\n adapter: BotAdapter,\n@@ -31,7 +67,7 @@\n auto: bool = True,\n ) -> BotAdapter:\n \"\"\"\n- Registers user and conversation state objects with the adapter. These objects will be available via\n+ [DEPRECATED] Registers user and conversation state objects with the adapter. These objects will be available via\n the turn context's `turn_state` property.\n \n :param adapter: The BotAdapter on which to register the state objects.\n@@ -40,6 +76,11 @@\n :param auto: True to automatically persist state each turn.\n :return: The BotAdapter\n \"\"\"\n+ warn(\n+ \"This method is deprecated in 4.9. You should use the method .use_bot_state() instead.\",\n+ DeprecationWarning,\n+ )\n+\n if not adapter:\n raise TypeError(\"BotAdapter is required\")\n \ndiff --git a/libraries/botbuilder-core/botbuilder/core/register_class_middleware.py b/libraries/botbuilder-core/botbuilder/core/register_class_middleware.py\n--- a/libraries/botbuilder-core/botbuilder/core/register_class_middleware.py\n+++ b/libraries/botbuilder-core/botbuilder/core/register_class_middleware.py\n@@ -10,8 +10,9 @@\n Middleware for adding an object to or registering a service with the current turn context.\n \"\"\"\n \n- def __init__(self, service):\n+ def __init__(self, service, key: str = None):\n self.service = service\n+ self._key = key\n \n async def on_turn(\n self, context: TurnContext, logic: Callable[[TurnContext], Awaitable]\n@@ -19,7 +20,8 @@\n # C# has TurnStateCollection with has overrides for adding items\n # to TurnState. Python does not. 
In C#'s case, there is an 'Add'\n # to handle adding object, and that uses the fully qualified class name.\n- context.turn_state[self.fullname(self.service)] = self.service\n+ key = self._key or self.fullname(self.service)\n+ context.turn_state[key] = self.service\n await logic()\n \n @staticmethod\n", "issue": "[PORT] Replace UseState() with UseBotState() \n> Port this change from botbuilder-dotnet/master branch:\nhttps://github.com/microsoft/botbuilder-dotnet/pull/3862\n\nFixes #3859 \r\nand use untyped params so that order and type are not fixed.\r\nTweak RegisterMiddlewareClass so you can provide the key for the turnstate.\n\n\r\n# Changed projects\r\n* Microsoft.Bot.Builder.Dialogs.Adaptive.Testing\r\n* Microsoft.Bot.Builder\r\n* Microsoft.Bot.Builder.AI.QnA.Tests\r\n* Microsoft.Bot.Builder.Dialogs.Adaptive.Templates.Tests\r\n* Microsoft.Bot.Builder.Dialogs.Adaptive.Tests\r\n* Microsoft.Bot.Builder.Dialogs.Declarative.Tests\r\n* Microsoft.Bot.Builder.Dialogs.Tests\r\n* Microsoft.Bot.Builder.TestBot.Json\r\n* \r\n\r\n\r\n\n\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\nfrom typing import Callable, Awaitable\n\nfrom botbuilder.core import Middleware, TurnContext\n\n\nclass RegisterClassMiddleware(Middleware):\n \"\"\"\n Middleware for adding an object to or registering a service with the current turn context.\n \"\"\"\n\n def __init__(self, service):\n self.service = service\n\n async def on_turn(\n self, context: TurnContext, logic: Callable[[TurnContext], Awaitable]\n ):\n # C# has TurnStateCollection with has overrides for adding items\n # to TurnState. Python does not. In C#'s case, there is an 'Add'\n # to handle adding object, and that uses the fully qualified class name.\n context.turn_state[self.fullname(self.service)] = self.service\n await logic()\n\n @staticmethod\n def fullname(obj):\n module = obj.__class__.__module__\n if module is None or module == str.__class__.__module__:\n return obj.__class__.__name__ # Avoid reporting __builtin__\n return module + \".\" + obj.__class__.__name__\n", "path": "libraries/botbuilder-core/botbuilder/core/register_class_middleware.py"}, {"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\nfrom botbuilder.core import (\n BotAdapter,\n Storage,\n RegisterClassMiddleware,\n UserState,\n ConversationState,\n AutoSaveStateMiddleware,\n)\n\n\nclass AdapterExtensions:\n @staticmethod\n def use_storage(adapter: BotAdapter, storage: Storage) -> BotAdapter:\n \"\"\"\n Registers a storage layer with the adapter. The storage object will be available via the turn context's\n `turn_state` property.\n\n :param adapter: The BotAdapter on which to register the storage object.\n :param storage: The Storage object to register.\n :return: The BotAdapter\n \"\"\"\n return adapter.use(RegisterClassMiddleware(storage))\n\n @staticmethod\n def use_state(\n adapter: BotAdapter,\n user_state: UserState,\n conversation_state: ConversationState,\n auto: bool = True,\n ) -> BotAdapter:\n \"\"\"\n Registers user and conversation state objects with the adapter. 
These objects will be available via\n the turn context's `turn_state` property.\n\n :param adapter: The BotAdapter on which to register the state objects.\n :param user_state: The UserState object to register.\n :param conversation_state: The ConversationState object to register.\n :param auto: True to automatically persist state each turn.\n :return: The BotAdapter\n \"\"\"\n if not adapter:\n raise TypeError(\"BotAdapter is required\")\n\n if not user_state:\n raise TypeError(\"UserState is required\")\n\n if not conversation_state:\n raise TypeError(\"ConversationState is required\")\n\n adapter.use(RegisterClassMiddleware(user_state))\n adapter.use(RegisterClassMiddleware(conversation_state))\n\n if auto:\n adapter.use(AutoSaveStateMiddleware([user_state, conversation_state]))\n\n return adapter\n", "path": "libraries/botbuilder-core/botbuilder/core/adapter_extensions.py"}], "after_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\nfrom typing import Callable, Awaitable\n\nfrom botbuilder.core import Middleware, TurnContext\n\n\nclass RegisterClassMiddleware(Middleware):\n \"\"\"\n Middleware for adding an object to or registering a service with the current turn context.\n \"\"\"\n\n def __init__(self, service, key: str = None):\n self.service = service\n self._key = key\n\n async def on_turn(\n self, context: TurnContext, logic: Callable[[TurnContext], Awaitable]\n ):\n # C# has TurnStateCollection with has overrides for adding items\n # to TurnState. Python does not. In C#'s case, there is an 'Add'\n # to handle adding object, and that uses the fully qualified class name.\n key = self._key or self.fullname(self.service)\n context.turn_state[key] = self.service\n await logic()\n\n @staticmethod\n def fullname(obj):\n module = obj.__class__.__module__\n if module is None or module == str.__class__.__module__:\n return obj.__class__.__name__ # Avoid reporting __builtin__\n return module + \".\" + obj.__class__.__name__\n", "path": "libraries/botbuilder-core/botbuilder/core/register_class_middleware.py"}, {"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\nfrom warnings import warn\n\nfrom botbuilder.core import (\n BotAdapter,\n BotState,\n Storage,\n RegisterClassMiddleware,\n UserState,\n ConversationState,\n AutoSaveStateMiddleware,\n)\n\n\nclass AdapterExtensions:\n @staticmethod\n def use_storage(adapter: BotAdapter, storage: Storage) -> BotAdapter:\n \"\"\"\n Registers a storage layer with the adapter. The storage object will be available via the turn context's\n `turn_state` property.\n\n :param adapter: The BotAdapter on which to register the storage object.\n :param storage: The Storage object to register.\n :return: The BotAdapter\n \"\"\"\n return adapter.use(RegisterClassMiddleware(storage))\n\n @staticmethod\n def use_bot_state(\n bot_adapter: BotAdapter, *bot_states: BotState, auto: bool = True\n ) -> BotAdapter:\n \"\"\"\n Registers bot state object into the TurnContext. 
The botstate will be available via the turn context.\n\n :param bot_adapter: The BotAdapter on which to register the state objects.\n :param bot_states: One or more BotState objects to register.\n :return: The updated adapter.\n \"\"\"\n if not bot_states:\n raise TypeError(\"At least one BotAdapter is required\")\n\n for bot_state in bot_states:\n bot_adapter.use(\n RegisterClassMiddleware(\n bot_state, AdapterExtensions.fullname(bot_state)\n )\n )\n\n if auto:\n bot_adapter.use(AutoSaveStateMiddleware(bot_states))\n\n return bot_adapter\n\n @staticmethod\n def fullname(obj):\n module = obj.__class__.__module__\n if module is None or module == str.__class__.__module__:\n return obj.__class__.__name__ # Avoid reporting __builtin__\n return module + \".\" + obj.__class__.__name__\n\n @staticmethod\n def use_state(\n adapter: BotAdapter,\n user_state: UserState,\n conversation_state: ConversationState,\n auto: bool = True,\n ) -> BotAdapter:\n \"\"\"\n [DEPRECATED] Registers user and conversation state objects with the adapter. These objects will be available via\n the turn context's `turn_state` property.\n\n :param adapter: The BotAdapter on which to register the state objects.\n :param user_state: The UserState object to register.\n :param conversation_state: The ConversationState object to register.\n :param auto: True to automatically persist state each turn.\n :return: The BotAdapter\n \"\"\"\n warn(\n \"This method is deprecated in 4.9. You should use the method .use_bot_state() instead.\",\n DeprecationWarning,\n )\n\n if not adapter:\n raise TypeError(\"BotAdapter is required\")\n\n if not user_state:\n raise TypeError(\"UserState is required\")\n\n if not conversation_state:\n raise TypeError(\"ConversationState is required\")\n\n adapter.use(RegisterClassMiddleware(user_state))\n adapter.use(RegisterClassMiddleware(conversation_state))\n\n if auto:\n adapter.use(AutoSaveStateMiddleware([user_state, conversation_state]))\n\n return adapter\n", "path": "libraries/botbuilder-core/botbuilder/core/adapter_extensions.py"}]} | 1,252 | 910 |
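A usage sketch of the ported helper, assuming the signatures introduced in the diff above (`use_bot_state(bot_adapter, *bot_states, auto=True)` and the keyed `RegisterClassMiddleware`); the empty adapter credentials are placeholders:

```python
from botbuilder.core import (
    BotFrameworkAdapter,
    BotFrameworkAdapterSettings,
    ConversationState,
    MemoryStorage,
    UserState,
)
from botbuilder.core.adapter_extensions import AdapterExtensions

storage = MemoryStorage()
user_state = UserState(storage)
conversation_state = ConversationState(storage)

adapter = BotFrameworkAdapter(BotFrameworkAdapterSettings("", ""))

# Any number of BotState objects, in any order. Each one is registered in
# context.turn_state under its fully qualified class name, and because
# auto=True an AutoSaveStateMiddleware persists them at the end of the turn.
AdapterExtensions.use_bot_state(adapter, user_state, conversation_state, auto=True)
```

The optional `key` argument on `RegisterClassMiddleware` is what lets callers control the `turn_state` key instead of always falling back to the middleware's own name computation.
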
gh_patches_debug_2897 | rasdani/github-patches | git_diff | dbt-labs__dbt-core-8922 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[CT-3210] [Bug] Error using `dbt list --select` when there is a cross-project model that is `version=0` in the parent project
### Is this a new bug in dbt-core?
- [X] I believe this is a new bug in dbt-core
- [X] I have searched the existing issues, and I could not find an existing issue for this bug
### Current Behavior
When you attempt to reference a model version 0, you get a stack trace error.
### Expected Behavior
We should allow you to set model version to be 0.
### Steps To Reproduce
1. On parent/hub project, add a versioned model with `v: 0`
2. On the child/spoke project, attempt to reference that versioned model in a model:
`select * from {{ ref('example_hub', 'my_second_dbt_model', v=0) }}`
3. run `dbt list --select anything`
Outstanding question - is this only affecting cross-project refs? Or all refs to a model with `v: 0`?
### Relevant log output
_No response_
### Environment
```markdown
- OS:
- Python:
- dbt:
```
### Which database adapter are you using with dbt?
_No response_
### Additional Context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/dbt/contracts/graph/node_args.py`
Content:
```
1 from dataclasses import dataclass, field
2 from datetime import datetime
3 from typing import Optional, List
4
5 from dbt.contracts.graph.unparsed import NodeVersion
6 from dbt.node_types import NodeType, AccessType
7
8
9 @dataclass
10 class ModelNodeArgs:
11 name: str
12 package_name: str
13 identifier: str
14 schema: str
15 database: Optional[str] = None
16 relation_name: Optional[str] = None
17 version: Optional[NodeVersion] = None
18 latest_version: Optional[NodeVersion] = None
19 deprecation_date: Optional[datetime] = None
20 access: Optional[str] = AccessType.Protected.value
21 generated_at: datetime = field(default_factory=datetime.utcnow)
22 depends_on_nodes: List[str] = field(default_factory=list)
23 enabled: bool = True
24
25 @property
26 def unique_id(self) -> str:
27 unique_id = f"{NodeType.Model}.{self.package_name}.{self.name}"
28 if self.version:
29 unique_id = f"{unique_id}.v{self.version}"
30
31 return unique_id
32
33 @property
34 def fqn(self) -> List[str]:
35 fqn = [self.package_name, self.name]
36 if self.version:
37 fqn.append(f"v{self.version}")
38
39 return fqn
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
| diff --git a/core/dbt/contracts/graph/node_args.py b/core/dbt/contracts/graph/node_args.py
--- a/core/dbt/contracts/graph/node_args.py
+++ b/core/dbt/contracts/graph/node_args.py
@@ -33,7 +33,8 @@
     @property
    def fqn(self) -> List[str]:
         fqn = [self.package_name, self.name]
-        if self.version:
+        # Test for None explicitly because version can be 0
+        if self.version is not None:
             fqn.append(f"v{self.version}")
 
         return fqn
enabled: bool = True\n\n @property\n def unique_id(self) -> str:\n unique_id = f\"{NodeType.Model}.{self.package_name}.{self.name}\"\n if self.version:\n unique_id = f\"{unique_id}.v{self.version}\"\n\n return unique_id\n\n @property\n def fqn(self) -> List[str]:\n fqn = [self.package_name, self.name]\n # Test for None explicitly because version can be 0\n if self.version is not None:\n fqn.append(f\"v{self.version}\")\n\n return fqn\n", "path": "core/dbt/contracts/graph/node_args.py"}]} | 902 | 132 |
gh_patches_debug_2955 | rasdani/github-patches | git_diff | facebookresearch__hydra-2729 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CI failing: `./tools/configen/configen/utils.py:4:1: F401 'typing.Tuple' imported but unused`
```
./tools/configen/configen/utils.py:4:1: F401 'typing.Tuple' imported but unused
nox > [2023-07-24 22:16:52,631] Command flake8 --config .flake8 failed with exit code 1
nox > [2023-07-24 22:16:52,632] Session lint-3.10 failed.
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tools/configen/configen/utils.py`
Content:
```
1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 import sys
3 from enum import Enum
4 from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
5
6 from omegaconf._utils import (
7 _resolve_optional,
8 get_dict_key_value_types,
9 get_list_element_type,
10 is_dict_annotation,
11 is_list_annotation,
12 is_primitive_type_annotation,
13 )
14
15
16 # borrowed from OmegaConf
17 def type_str(t: Any) -> str:
18 is_optional, t = _resolve_optional(t)
19 if t is None:
20 return type(t).__name__
21 if t is Any:
22 return "Any"
23 if t is ...:
24 return "..."
25
26 if sys.version_info < (3, 7, 0): # pragma: no cover
27 # Python 3.6
28 if hasattr(t, "__name__"):
29 name = str(t.__name__)
30 else:
31 if t.__origin__ is not None:
32 name = type_str(t.__origin__)
33 else:
34 name = str(t)
35 if name.startswith("typing."):
36 name = name[len("typing.") :]
37 else: # pragma: no cover
38 # Python >= 3.7
39 if hasattr(t, "__name__"):
40 name = str(t.__name__)
41 else:
42 if t._name is None:
43 if t.__origin__ is not None:
44 name = type_str(t.__origin__)
45 else:
46 name = str(t._name)
47
48 args = getattr(t, "__args__", None)
49 if args is not None:
50 args = ", ".join(type_str(t) for t in t.__args__)
51 ret = f"{name}[{args}]"
52 else:
53 ret = name
54 if is_optional:
55 return f"Optional[{ret}]"
56 else:
57 return ret
58
59
60 def is_tuple_annotation(type_: Any) -> bool:
61 origin = getattr(type_, "__origin__", None)
62 return origin is tuple
63
64
65 def convert_imports(imports: Set[Any], string_imports: Iterable[str]) -> List[str]:
66 tmp = set()
67 for imp in string_imports:
68 tmp.add(imp)
69 for t in imports:
70 s = None
71 origin = getattr(t, "__origin__", None)
72 if t is Any:
73 classname = "Any"
74 elif t is Optional:
75 classname = "Optional"
76 else:
77 if origin is list:
78 classname = "List"
79 elif origin is tuple:
80 classname = "Tuple"
81 elif origin is dict:
82 classname = "Dict"
83 else:
84 classname = t.__name__
85
86 if not is_primitive_type_annotation(t) or issubclass(t, Enum):
87 s = f"from {t.__module__} import {classname}"
88
89 if s is not None:
90 tmp.add(s)
91 return sorted(list(tmp))
92
93
94 def collect_imports(imports: Set[Any], type_: Any) -> None:
95 if is_list_annotation(type_):
96 collect_imports(imports, get_list_element_type(type_))
97 type_ = List
98 elif is_dict_annotation(type_):
99 kvt = get_dict_key_value_types(type_)
100 collect_imports(imports, kvt[0])
101 collect_imports(imports, kvt[1])
102 type_ = Dict
103 else:
104 is_optional = _resolve_optional(type_)[0]
105 if is_optional and type_ is not Any:
106 type_ = Optional
107 imports.add(type_)
108
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
| diff --git a/tools/configen/configen/utils.py b/tools/configen/configen/utils.py
--- a/tools/configen/configen/utils.py
+++ b/tools/configen/configen/utils.py
@@ -1,7 +1,7 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
 import sys
 from enum import Enum
-from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
+from typing import Any, Dict, Iterable, List, Optional, Set
 
 from omegaconf._utils import (
     _resolve_optional,
| {"golden_diff": "diff --git a/tools/configen/configen/utils.py b/tools/configen/configen/utils.py\n--- a/tools/configen/configen/utils.py\n+++ b/tools/configen/configen/utils.py\n@@ -1,7 +1,7 @@\n # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n import sys\n from enum import Enum\n-from typing import Any, Dict, Iterable, List, Optional, Set, Tuple\n+from typing import Any, Dict, Iterable, List, Optional, Set\n \n from omegaconf._utils import (\n _resolve_optional,\n", "issue": "CI failing: `./tools/configen/configen/utils.py:4:1: F401 'typing.Tuple' imported but unused`\n```\r\n./tools/configen/configen/utils.py:4:1: F401 'typing.Tuple' imported but unused\r\nnox > [2023-07-24 22:16:52,631] Command flake8 --config .flake8 failed with exit code 1\r\nnox > [2023-07-24 22:16:52,632] Session lint-3.10 failed.\r\n```\n", "before_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport sys\nfrom enum import Enum\nfrom typing import Any, Dict, Iterable, List, Optional, Set, Tuple\n\nfrom omegaconf._utils import (\n _resolve_optional,\n get_dict_key_value_types,\n get_list_element_type,\n is_dict_annotation,\n is_list_annotation,\n is_primitive_type_annotation,\n)\n\n\n# borrowed from OmegaConf\ndef type_str(t: Any) -> str:\n is_optional, t = _resolve_optional(t)\n if t is None:\n return type(t).__name__\n if t is Any:\n return \"Any\"\n if t is ...:\n return \"...\"\n\n if sys.version_info < (3, 7, 0): # pragma: no cover\n # Python 3.6\n if hasattr(t, \"__name__\"):\n name = str(t.__name__)\n else:\n if t.__origin__ is not None:\n name = type_str(t.__origin__)\n else:\n name = str(t)\n if name.startswith(\"typing.\"):\n name = name[len(\"typing.\") :]\n else: # pragma: no cover\n # Python >= 3.7\n if hasattr(t, \"__name__\"):\n name = str(t.__name__)\n else:\n if t._name is None:\n if t.__origin__ is not None:\n name = type_str(t.__origin__)\n else:\n name = str(t._name)\n\n args = getattr(t, \"__args__\", None)\n if args is not None:\n args = \", \".join(type_str(t) for t in t.__args__)\n ret = f\"{name}[{args}]\"\n else:\n ret = name\n if is_optional:\n return f\"Optional[{ret}]\"\n else:\n return ret\n\n\ndef is_tuple_annotation(type_: Any) -> bool:\n origin = getattr(type_, \"__origin__\", None)\n return origin is tuple\n\n\ndef convert_imports(imports: Set[Any], string_imports: Iterable[str]) -> List[str]:\n tmp = set()\n for imp in string_imports:\n tmp.add(imp)\n for t in imports:\n s = None\n origin = getattr(t, \"__origin__\", None)\n if t is Any:\n classname = \"Any\"\n elif t is Optional:\n classname = \"Optional\"\n else:\n if origin is list:\n classname = \"List\"\n elif origin is tuple:\n classname = \"Tuple\"\n elif origin is dict:\n classname = \"Dict\"\n else:\n classname = t.__name__\n\n if not is_primitive_type_annotation(t) or issubclass(t, Enum):\n s = f\"from {t.__module__} import {classname}\"\n\n if s is not None:\n tmp.add(s)\n return sorted(list(tmp))\n\n\ndef collect_imports(imports: Set[Any], type_: Any) -> None:\n if is_list_annotation(type_):\n collect_imports(imports, get_list_element_type(type_))\n type_ = List\n elif is_dict_annotation(type_):\n kvt = get_dict_key_value_types(type_)\n collect_imports(imports, kvt[0])\n collect_imports(imports, kvt[1])\n type_ = Dict\n else:\n is_optional = _resolve_optional(type_)[0]\n if is_optional and type_ is not Any:\n type_ = Optional\n imports.add(type_)\n", "path": "tools/configen/configen/utils.py"}], "after_files": [{"content": "# Copyright (c) 
Facebook, Inc. and its affiliates. All Rights Reserved\nimport sys\nfrom enum import Enum\nfrom typing import Any, Dict, Iterable, List, Optional, Set\n\nfrom omegaconf._utils import (\n _resolve_optional,\n get_dict_key_value_types,\n get_list_element_type,\n is_dict_annotation,\n is_list_annotation,\n is_primitive_type_annotation,\n)\n\n\n# borrowed from OmegaConf\ndef type_str(t: Any) -> str:\n is_optional, t = _resolve_optional(t)\n if t is None:\n return type(t).__name__\n if t is Any:\n return \"Any\"\n if t is ...:\n return \"...\"\n\n if sys.version_info < (3, 7, 0): # pragma: no cover\n # Python 3.6\n if hasattr(t, \"__name__\"):\n name = str(t.__name__)\n else:\n if t.__origin__ is not None:\n name = type_str(t.__origin__)\n else:\n name = str(t)\n if name.startswith(\"typing.\"):\n name = name[len(\"typing.\") :]\n else: # pragma: no cover\n # Python >= 3.7\n if hasattr(t, \"__name__\"):\n name = str(t.__name__)\n else:\n if t._name is None:\n if t.__origin__ is not None:\n name = type_str(t.__origin__)\n else:\n name = str(t._name)\n\n args = getattr(t, \"__args__\", None)\n if args is not None:\n args = \", \".join(type_str(t) for t in t.__args__)\n ret = f\"{name}[{args}]\"\n else:\n ret = name\n if is_optional:\n return f\"Optional[{ret}]\"\n else:\n return ret\n\n\ndef is_tuple_annotation(type_: Any) -> bool:\n origin = getattr(type_, \"__origin__\", None)\n return origin is tuple\n\n\ndef convert_imports(imports: Set[Any], string_imports: Iterable[str]) -> List[str]:\n tmp = set()\n for imp in string_imports:\n tmp.add(imp)\n for t in imports:\n s = None\n origin = getattr(t, \"__origin__\", None)\n if t is Any:\n classname = \"Any\"\n elif t is Optional:\n classname = \"Optional\"\n else:\n if origin is list:\n classname = \"List\"\n elif origin is tuple:\n classname = \"Tuple\"\n elif origin is dict:\n classname = \"Dict\"\n else:\n classname = t.__name__\n\n if not is_primitive_type_annotation(t) or issubclass(t, Enum):\n s = f\"from {t.__module__} import {classname}\"\n\n if s is not None:\n tmp.add(s)\n return sorted(list(tmp))\n\n\ndef collect_imports(imports: Set[Any], type_: Any) -> None:\n if is_list_annotation(type_):\n collect_imports(imports, get_list_element_type(type_))\n type_ = List\n elif is_dict_annotation(type_):\n kvt = get_dict_key_value_types(type_)\n collect_imports(imports, kvt[0])\n collect_imports(imports, kvt[1])\n type_ = Dict\n else:\n is_optional = _resolve_optional(type_)[0]\n if is_optional and type_ is not Any:\n type_ = Optional\n imports.add(type_)\n", "path": "tools/configen/configen/utils.py"}]} | 1,368 | 120 |
gh_patches_debug_2926 | rasdani/github-patches | git_diff | Mailu__Mailu-2116 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error 404 not found when opening admin after upgrade 1.8 to master
## Before you open your issue
- [X] Check if no issue or pull-request for this already exists.
- [X] Check [documentation](https://mailu.io/master/) and [FAQ](https://mailu.io/master/faq.html). (Tip, use the search function on the documentation page)
- [X] You understand `Mailu` is made by volunteers in their **free time** — be concise, civil and accept that delays can occur.
- [X] The title of the issue should be short and simple. It should contain specific terms related to the actual issue. Be specific while writing the title.
## Environment & Versions
### Environment
- [X] docker-compose
### Versions
Before upgrade: Docker 1.8 images.
After upgrade: Docker master images (pulled 30 December 2021).
## Description
**Mailu 1.8** image redirects `/admin` to `/admin/ui`.
**Mailu master** image no longer serves `/admin/ui`, as the `ui` part of the URL has been removed according to [towncrier 1929.enhancement](https://github.com/Mailu/Mailu/blob/master/towncrier/newsfragments/1929.enhancement):
> Removed the /admin/ prefix to reduce complexity of routing with Mailu. Admin is accessible directly via /admin instead of /admin/ui
After the upgrade from `1.8` to `master` and visiting the admin page, the browser still uses the cached URL `/admin/ui` and results in 404 not found.
## Replication Steps
1. Create 1.8 production environment on AMD64 platform using `mailu 1.8 Docker images`.
2. Make sure the Admin page works.
3. Remove docker containers (`docker-compose down`).
4. Recreate **all** containers at the same time using `mailu master Docker images`.
5. Open root mail domain. The browser uses the cached URL `admin/ui` and shows Error 404 not found.
Note: Tested with `TLS_FLAVOR=letsencrypt`, admin and roundcube and Firefox.
## Expected behaviour
Backwards compatibility after Mailu 1.8 upgrade without the need of removing browser caches.
## Front log
```
front_1 | <IP> - - [30/Dec/2021:10:14:35 +0000] "GET /admin/ui/ HTTP/2.0" 404 198 "https://mail.mydomain.nl/sso/login" "Mozilla/5.0 (X11; Linux x86_64; rv:95.0) Gecko/20100101 Firefox/95.0"
```
## Bugfix
Proposal is to redirect `/admin/ui` always to `/admin` to prevent browser caching problems after the upgrade.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/admin/mailu/ui/views/base.py`
Content:
```
1 from mailu import models, utils
2 from mailu.ui import ui, forms, access
3
4 from flask import current_app as app
5 import flask
6 import flask_login
7
8
9 @ui.route('/', methods=["GET"])
10 @access.authenticated
11 def index():
12 return flask.redirect(flask.url_for('.user_settings'))
13
14 @ui.route('/announcement', methods=['GET', 'POST'])
15 @access.global_admin
16 def announcement():
17 form = forms.AnnouncementForm()
18 if form.validate_on_submit():
19 for user in models.User.query.all():
20 user.sendmail(form.announcement_subject.data,
21 form.announcement_body.data)
22 # Force-empty the form
23 form.announcement_subject.data = ''
24 form.announcement_body.data = ''
25 flask.flash('Your announcement was sent', 'success')
26 return flask.render_template('announcement.html', form=form)
27
28 @ui.route('/webmail', methods=['GET'])
29 def webmail():
30 return flask.redirect(app.config['WEB_WEBMAIL'])
31
32 @ui.route('/client', methods=['GET'])
33 def client():
34 return flask.render_template('client.html')
35
36 @ui.route('/webui_antispam', methods=['GET'])
37 def antispam():
38 return flask.render_template('antispam.html')
39
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
| diff --git a/core/admin/mailu/ui/views/base.py b/core/admin/mailu/ui/views/base.py
--- a/core/admin/mailu/ui/views/base.py
+++ b/core/admin/mailu/ui/views/base.py
@@ -11,6 +11,10 @@
 def index():
     return flask.redirect(flask.url_for('.user_settings'))
 
[email protected]('/ui/')
+def redirect_old_path():
+    return flask.redirect(flask.url_for('.index'), code=301)
+
 @ui.route('/announcement', methods=['GET', 'POST'])
 @access.global_admin
 def announcement():
| {"golden_diff": "diff --git a/core/admin/mailu/ui/views/base.py b/core/admin/mailu/ui/views/base.py\n--- a/core/admin/mailu/ui/views/base.py\n+++ b/core/admin/mailu/ui/views/base.py\n@@ -11,6 +11,10 @@\n def index():\n return flask.redirect(flask.url_for('.user_settings'))\n \[email protected]('/ui/')\n+def redirect_old_path():\n+ return flask.redirect(flask.url_for('.index'), code=301)\n+\n @ui.route('/announcement', methods=['GET', 'POST'])\n @access.global_admin\n def announcement():\n", "issue": "Error 404 not found when opening admin after upgrade 1.8 to master\n## Before you open your issue\r\n- [X] Check if no issue or pull-request for this already exists.\r\n- [X] Check [documentation](https://mailu.io/master/) and [FAQ](https://mailu.io/master/faq.html). (Tip, use the search function on the documentation page)\r\n- [X] You understand `Mailu` is made by volunteers in their **free time** \u2014 be conscise, civil and accept that delays can occur.\r\n- [X] The title of the issue should be short and simple. It should contain specific terms related to the actual issue. Be specific while writing the title.\r\n\r\n## Environment & Versions\r\n### Environment\r\n - [X] docker-compose\r\n\r\n### Versions\r\n\r\nBefore upgrade: Docker 1.8 images.\r\nAfter upgrade: Docker master images (pulled 30 December 2021).\r\n\r\n## Description\r\n\r\n**Mailu 1.8** image redirects `/admin` to `/admin/ui`.\r\n\r\n**Mailu master** image no longer redirects `/admin/ui` as the `ui` part in the URL has been removed according to [Tomcat 1929.enhacement](https://github.com/Mailu/Mailu/blob/master/towncrier/newsfragments/1929.enhancement):\r\n\r\n> Removed the /admin/ prefix to reduce complexity of routing with Mailu. Admin is accessible directly via /admin instead of /admin/ui\r\n\r\nAfter the upgrade from `1.8` to `master` and visiting the admin page, the browser still uses the cached URL `/admin/ui` and results in 404 not found.\r\n\r\n\r\n## Replication Steps\r\n\r\n1. Create 1.8 production environment on AMD64 platform using `mailu 1.8 Docker images`.\r\n2. Make sure the Admin page works.\r\n3. Remove docker containers (`docker-compose down`).\r\n4. Recreate **all** containers at the same time using `mailu master Docker images`.\r\n5. Open root mail domain. 
The browser uses the cached URL `admin/ui` and shows Error 404 not found.\r\n\r\nNote: Tested with `TLS_FLAVOR=letsencrypt`, admin and roundcube and Firefox.\r\n\r\n\r\n## Expected behaviour\r\n\r\nBackwards compatibility after Mailu 1.8 upgrade without the need of removing browser caches.\r\n\r\n## Front log\r\n\r\n```\r\nfront_1 | <IP> - - [30/Dec/2021:10:14:35 +0000] \"GET /admin/ui/ HTTP/2.0\" 404 198 \"https://mail.mydomain.nl/sso/login\" \"Mozilla/5.0 (X11; Linux x86_64; rv:95.0) Gecko/20100101 Firefox/95.0\"\r\n```\r\n\r\n## Bugfix\r\n\r\nProposal is to redirect `/admin/ui` always to `/admin` to prevent browser caching problems after the upgrade.\n", "before_files": [{"content": "from mailu import models, utils\nfrom mailu.ui import ui, forms, access\n\nfrom flask import current_app as app\nimport flask\nimport flask_login\n\n\[email protected]('/', methods=[\"GET\"])\[email protected]\ndef index():\n return flask.redirect(flask.url_for('.user_settings'))\n\[email protected]('/announcement', methods=['GET', 'POST'])\[email protected]_admin\ndef announcement():\n form = forms.AnnouncementForm()\n if form.validate_on_submit():\n for user in models.User.query.all():\n user.sendmail(form.announcement_subject.data,\n form.announcement_body.data)\n # Force-empty the form\n form.announcement_subject.data = ''\n form.announcement_body.data = ''\n flask.flash('Your announcement was sent', 'success')\n return flask.render_template('announcement.html', form=form)\n\[email protected]('/webmail', methods=['GET'])\ndef webmail():\n return flask.redirect(app.config['WEB_WEBMAIL'])\n\[email protected]('/client', methods=['GET'])\ndef client():\n return flask.render_template('client.html')\n\[email protected]('/webui_antispam', methods=['GET'])\ndef antispam():\n return flask.render_template('antispam.html')\n", "path": "core/admin/mailu/ui/views/base.py"}], "after_files": [{"content": "from mailu import models, utils\nfrom mailu.ui import ui, forms, access\n\nfrom flask import current_app as app\nimport flask\nimport flask_login\n\n\[email protected]('/', methods=[\"GET\"])\[email protected]\ndef index():\n return flask.redirect(flask.url_for('.user_settings'))\n\[email protected]('/ui/')\ndef redirect_old_path():\n return flask.redirect(flask.url_for('.index'), code=301)\n\[email protected]('/announcement', methods=['GET', 'POST'])\[email protected]_admin\ndef announcement():\n form = forms.AnnouncementForm()\n if form.validate_on_submit():\n for user in models.User.query.all():\n user.sendmail(form.announcement_subject.data,\n form.announcement_body.data)\n # Force-empty the form\n form.announcement_subject.data = ''\n form.announcement_body.data = ''\n flask.flash('Your announcement was sent', 'success')\n return flask.render_template('announcement.html', form=form)\n\[email protected]('/webmail', methods=['GET'])\ndef webmail():\n return flask.redirect(app.config['WEB_WEBMAIL'])\n\[email protected]('/client', methods=['GET'])\ndef client():\n return flask.render_template('client.html')\n\[email protected]('/webui_antispam', methods=['GET'])\ndef antispam():\n return flask.render_template('antispam.html')\n", "path": "core/admin/mailu/ui/views/base.py"}]} | 1,230 | 126 |
gh_patches_debug_8696 | rasdani/github-patches | git_diff | easybuilders__easybuild-framework-757 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
sanity_check_commands doesn't work for ipython
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `easybuild/framework/extensioneasyblock.py`
Content:
```
1 ##
2 # Copyright 2013 Ghent University
3 #
4 # This file is part of EasyBuild,
5 # originally created by the HPC team of the University of Ghent (http://ugent.be/hpc).
6 #
7 # http://github.com/hpcugent/easybuild
8 #
9 # EasyBuild is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation v2.
12 #
13 # EasyBuild is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the GNU General Public License
19 # along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
20 ##
21 """
22 EasyBuild support for building and installing extensions as actual extensions or as stand-alone modules,
23 implemented as an easyblock
24
25 @author: Kenneth Hoste (Ghent University)
26 """
27 import copy
28 import os
29
30 from easybuild.framework.easyblock import EasyBlock
31 from easybuild.framework.easyconfig import CUSTOM
32 from easybuild.framework.extension import Extension
33 from easybuild.tools.filetools import apply_patch, extract_file
34 from easybuild.tools.utilities import remove_unwanted_chars
35
36
37 class ExtensionEasyBlock(EasyBlock, Extension):
38 """
39 Install an extension as a separate module, or as an extension.
40
41 Deriving classes should implement the following functions:
42 * required EasyBlock functions:
43 - configure_step
44 - build_step
45 - install_step
46 * required Extension functions
47 - run
48 """
49
50 @staticmethod
51 def extra_options(extra_vars=None):
52 """Extra easyconfig parameters specific to ExtensionEasyBlock."""
53
54 # using [] as default value is a bad idea, so we handle it this way
55 if extra_vars is None:
56 extra_vars = []
57
58 extra_vars.extend([
59 ('options', [{}, "Dictionary with extension options.", CUSTOM]),
60 ])
61 return EasyBlock.extra_options(extra_vars)
62
63 def __init__(self, *args, **kwargs):
64 """Initialize either as EasyBlock or as Extension."""
65
66 self.is_extension = False
67
68 if isinstance(args[0], EasyBlock):
69 Extension.__init__(self, *args, **kwargs)
70 # name and version properties of EasyBlock are used, so make sure name and version are correct
71 self.cfg['name'] = self.ext.get('name', None)
72 self.cfg['version'] = self.ext.get('version', None)
73 self.builddir = self.master.builddir
74 self.installdir = self.master.installdir
75 self.is_extension = True
76 self.unpack_options = None
77 else:
78 EasyBlock.__init__(self, *args, **kwargs)
79 self.options = copy.deepcopy(self.cfg.get('options', {})) # we need this for Extension.sanity_check_step
80
81 self.ext_dir = None # dir where extension source was unpacked
82
83 def run(self, unpack_src=False):
84 """Common operations for extensions: unpacking sources, patching, ..."""
85
86 # unpack file if desired
87 if unpack_src:
88 targetdir = os.path.join(self.master.builddir, remove_unwanted_chars(self.name))
89 self.ext_dir = extract_file("%s" % self.src, targetdir, extra_options=self.unpack_options)
90
91 # patch if needed
92 if self.patches:
93 for patchfile in self.patches:
94 if not apply_patch(patchfile, self.ext_dir):
95 self.log.error("Applying patch %s failed" % patchfile)
96
97 def sanity_check_step(self, exts_filter=None, custom_paths=None, custom_commands=None):
98 """
99 Custom sanity check for extensions, whether installed as stand-alone module or not
100 """
101 if not self.cfg['exts_filter']:
102 self.cfg['exts_filter'] = exts_filter
103 self.log.debug("starting sanity check for extension with filter %s", self.cfg['exts_filter'])
104
105 if not self.is_extension:
106 # load fake module
107 fake_mod_data = self.load_fake_module(purge=True)
108
109 # perform sanity check
110 sanity_check_ok = Extension.sanity_check_step(self)
111
112 if not self.is_extension:
113 # unload fake module and clean up
114 self.clean_up_fake_module(fake_mod_data)
115
116 if custom_paths or custom_commands:
117 EasyBlock.sanity_check_step(self, custom_paths=custom_paths, custom_commands=custom_commands,
118 extension=self.is_extension)
119
120 # pass or fail sanity check
121 if not sanity_check_ok:
122 msg = "Sanity check for %s failed: %s" % (self.name, '; '.join(self.sanity_check_fail_msgs))
123 if self.is_extension:
124 self.log.warning(msg)
125 else:
126 self.log.error(msg)
127 return False
128 else:
129 self.log.info("Sanity check for %s successful!" % self.name)
130 return True
131
132 def make_module_extra(self, extra=None):
133 """Add custom entries to module."""
134
135 txt = EasyBlock.make_module_extra(self)
136 if not extra is None:
137 txt += extra
138 return txt
139
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/easybuild/framework/extensioneasyblock.py b/easybuild/framework/extensioneasyblock.py
--- a/easybuild/framework/extensioneasyblock.py
+++ b/easybuild/framework/extensioneasyblock.py
@@ -113,7 +113,7 @@
# unload fake module and clean up
self.clean_up_fake_module(fake_mod_data)
- if custom_paths or custom_commands:
+ if custom_paths or self.cfg['sanity_check_paths'] or custom_commands or self.cfg['sanity_check_commands']:
EasyBlock.sanity_check_step(self, custom_paths=custom_paths, custom_commands=custom_commands,
extension=self.is_extension)
| {"golden_diff": "diff --git a/easybuild/framework/extensioneasyblock.py b/easybuild/framework/extensioneasyblock.py\n--- a/easybuild/framework/extensioneasyblock.py\n+++ b/easybuild/framework/extensioneasyblock.py\n@@ -113,7 +113,7 @@\n # unload fake module and clean up\n self.clean_up_fake_module(fake_mod_data)\n \n- if custom_paths or custom_commands:\n+ if custom_paths or self.cfg['sanity_check_paths'] or custom_commands or self.cfg['sanity_check_commands']:\n EasyBlock.sanity_check_step(self, custom_paths=custom_paths, custom_commands=custom_commands,\n extension=self.is_extension)\n", "issue": "santiy_check_commands doesn't work for ipython\n\n", "before_files": [{"content": "##\n# Copyright 2013 Ghent University\n#\n# This file is part of EasyBuild,\n# originally created by the HPC team of the University of Ghent (http://ugent.be/hpc).\n#\n# http://github.com/hpcugent/easybuild\n#\n# EasyBuild is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation v2.\n#\n# EasyBuild is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.\n##\n\"\"\"\nEasyBuild support for building and installing extensions as actual extensions or as stand-alone modules,\nimplemented as an easyblock\n\n@author: Kenneth Hoste (Ghent University)\n\"\"\"\nimport copy\nimport os\n\nfrom easybuild.framework.easyblock import EasyBlock\nfrom easybuild.framework.easyconfig import CUSTOM\nfrom easybuild.framework.extension import Extension\nfrom easybuild.tools.filetools import apply_patch, extract_file\nfrom easybuild.tools.utilities import remove_unwanted_chars\n\n\nclass ExtensionEasyBlock(EasyBlock, Extension):\n \"\"\"\n Install an extension as a separate module, or as an extension.\n\n Deriving classes should implement the following functions:\n * required EasyBlock functions:\n - configure_step\n - build_step\n - install_step\n * required Extension functions\n - run\n \"\"\"\n\n @staticmethod\n def extra_options(extra_vars=None):\n \"\"\"Extra easyconfig parameters specific to ExtensionEasyBlock.\"\"\"\n\n # using [] as default value is a bad idea, so we handle it this way\n if extra_vars is None:\n extra_vars = []\n\n extra_vars.extend([\n ('options', [{}, \"Dictionary with extension options.\", CUSTOM]),\n ])\n return EasyBlock.extra_options(extra_vars)\n\n def __init__(self, *args, **kwargs):\n \"\"\"Initialize either as EasyBlock or as Extension.\"\"\"\n\n self.is_extension = False\n\n if isinstance(args[0], EasyBlock):\n Extension.__init__(self, *args, **kwargs)\n # name and version properties of EasyBlock are used, so make sure name and version are correct\n self.cfg['name'] = self.ext.get('name', None)\n self.cfg['version'] = self.ext.get('version', None)\n self.builddir = self.master.builddir\n self.installdir = self.master.installdir\n self.is_extension = True\n self.unpack_options = None\n else:\n EasyBlock.__init__(self, *args, **kwargs)\n self.options = copy.deepcopy(self.cfg.get('options', {})) # we need this for Extension.sanity_check_step\n\n self.ext_dir = None # dir where extension source was unpacked\n\n def run(self, unpack_src=False):\n \"\"\"Common operations for extensions: unpacking 
sources, patching, ...\"\"\"\n\n # unpack file if desired\n if unpack_src:\n targetdir = os.path.join(self.master.builddir, remove_unwanted_chars(self.name))\n self.ext_dir = extract_file(\"%s\" % self.src, targetdir, extra_options=self.unpack_options)\n\n # patch if needed\n if self.patches:\n for patchfile in self.patches:\n if not apply_patch(patchfile, self.ext_dir):\n self.log.error(\"Applying patch %s failed\" % patchfile)\n\n def sanity_check_step(self, exts_filter=None, custom_paths=None, custom_commands=None):\n \"\"\"\n Custom sanity check for extensions, whether installed as stand-alone module or not\n \"\"\"\n if not self.cfg['exts_filter']:\n self.cfg['exts_filter'] = exts_filter\n self.log.debug(\"starting sanity check for extension with filter %s\", self.cfg['exts_filter'])\n\n if not self.is_extension:\n # load fake module\n fake_mod_data = self.load_fake_module(purge=True)\n\n # perform sanity check\n sanity_check_ok = Extension.sanity_check_step(self)\n\n if not self.is_extension:\n # unload fake module and clean up\n self.clean_up_fake_module(fake_mod_data)\n\n if custom_paths or custom_commands:\n EasyBlock.sanity_check_step(self, custom_paths=custom_paths, custom_commands=custom_commands,\n extension=self.is_extension)\n\n # pass or fail sanity check\n if not sanity_check_ok:\n msg = \"Sanity check for %s failed: %s\" % (self.name, '; '.join(self.sanity_check_fail_msgs))\n if self.is_extension:\n self.log.warning(msg)\n else:\n self.log.error(msg)\n return False\n else:\n self.log.info(\"Sanity check for %s successful!\" % self.name)\n return True\n\n def make_module_extra(self, extra=None):\n \"\"\"Add custom entries to module.\"\"\"\n\n txt = EasyBlock.make_module_extra(self)\n if not extra is None:\n txt += extra\n return txt\n", "path": "easybuild/framework/extensioneasyblock.py"}], "after_files": [{"content": "##\n# Copyright 2013 Ghent University\n#\n# This file is part of EasyBuild,\n# originally created by the HPC team of the University of Ghent (http://ugent.be/hpc).\n#\n# http://github.com/hpcugent/easybuild\n#\n# EasyBuild is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation v2.\n#\n# EasyBuild is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with EasyBuild. 
If not, see <http://www.gnu.org/licenses/>.\n##\n\"\"\"\nEasyBuild support for building and installing extensions as actual extensions or as stand-alone modules,\nimplemented as an easyblock\n\n@author: Kenneth Hoste (Ghent University)\n\"\"\"\nimport copy\nimport os\n\nfrom easybuild.framework.easyblock import EasyBlock\nfrom easybuild.framework.easyconfig import CUSTOM\nfrom easybuild.framework.extension import Extension\nfrom easybuild.tools.filetools import apply_patch, extract_file\nfrom easybuild.tools.utilities import remove_unwanted_chars\n\n\nclass ExtensionEasyBlock(EasyBlock, Extension):\n \"\"\"\n Install an extension as a separate module, or as an extension.\n\n Deriving classes should implement the following functions:\n * required EasyBlock functions:\n - configure_step\n - build_step\n - install_step\n * required Extension functions\n - run\n \"\"\"\n\n @staticmethod\n def extra_options(extra_vars=None):\n \"\"\"Extra easyconfig parameters specific to ExtensionEasyBlock.\"\"\"\n\n # using [] as default value is a bad idea, so we handle it this way\n if extra_vars is None:\n extra_vars = []\n\n extra_vars.extend([\n ('options', [{}, \"Dictionary with extension options.\", CUSTOM]),\n ])\n return EasyBlock.extra_options(extra_vars)\n\n def __init__(self, *args, **kwargs):\n \"\"\"Initialize either as EasyBlock or as Extension.\"\"\"\n\n self.is_extension = False\n\n if isinstance(args[0], EasyBlock):\n Extension.__init__(self, *args, **kwargs)\n # name and version properties of EasyBlock are used, so make sure name and version are correct\n self.cfg['name'] = self.ext.get('name', None)\n self.cfg['version'] = self.ext.get('version', None)\n self.builddir = self.master.builddir\n self.installdir = self.master.installdir\n self.is_extension = True\n self.unpack_options = None\n else:\n EasyBlock.__init__(self, *args, **kwargs)\n self.options = copy.deepcopy(self.cfg.get('options', {})) # we need this for Extension.sanity_check_step\n\n self.ext_dir = None # dir where extension source was unpacked\n\n def run(self, unpack_src=False):\n \"\"\"Common operations for extensions: unpacking sources, patching, ...\"\"\"\n\n # unpack file if desired\n if unpack_src:\n targetdir = os.path.join(self.master.builddir, remove_unwanted_chars(self.name))\n self.ext_dir = extract_file(\"%s\" % self.src, targetdir, extra_options=self.unpack_options)\n\n # patch if needed\n if self.patches:\n for patchfile in self.patches:\n if not apply_patch(patchfile, self.ext_dir):\n self.log.error(\"Applying patch %s failed\" % patchfile)\n\n def sanity_check_step(self, exts_filter=None, custom_paths=None, custom_commands=None):\n \"\"\"\n Custom sanity check for extensions, whether installed as stand-alone module or not\n \"\"\"\n if not self.cfg['exts_filter']:\n self.cfg['exts_filter'] = exts_filter\n self.log.debug(\"starting sanity check for extension with filter %s\", self.cfg['exts_filter'])\n\n if not self.is_extension:\n # load fake module\n fake_mod_data = self.load_fake_module(purge=True)\n\n # perform sanity check\n sanity_check_ok = Extension.sanity_check_step(self)\n\n if not self.is_extension:\n # unload fake module and clean up\n self.clean_up_fake_module(fake_mod_data)\n\n if custom_paths or self.cfg['sanity_check_paths'] or custom_commands or self.cfg['sanity_check_commands']:\n EasyBlock.sanity_check_step(self, custom_paths=custom_paths, custom_commands=custom_commands,\n extension=self.is_extension)\n\n # pass or fail sanity check\n if not sanity_check_ok:\n msg = \"Sanity check for %s 
failed: %s\" % (self.name, '; '.join(self.sanity_check_fail_msgs))\n if self.is_extension:\n self.log.warning(msg)\n else:\n self.log.error(msg)\n return False\n else:\n self.log.info(\"Sanity check for %s successful!\" % self.name)\n return True\n\n def make_module_extra(self, extra=None):\n \"\"\"Add custom entries to module.\"\"\"\n\n txt = EasyBlock.make_module_extra(self)\n if not extra is None:\n txt += extra\n return txt\n", "path": "easybuild/framework/extensioneasyblock.py"}]} | 1,702 | 144 |
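The behavioral change in the easybuild patch above boils down to a four-clause predicate. A minimal, self-contained sketch of it follows; `cfg` is a plain dict standing in for easybuild's EasyConfig object (which supports `cfg['key']` lookups), and the function name is invented for illustration.

```python
# Minimal sketch of the patched condition; a plain dict stands in for the
# EasyConfig object, and should_run_block_sanity_check is an invented name.
def should_run_block_sanity_check(cfg, custom_paths=None, custom_commands=None):
    # After the fix, easyconfig-level sanity_check_paths/sanity_check_commands
    # (e.g. a command list supplied for IPython) also trigger the
    # EasyBlock-level sanity check, not just easyblock-supplied values.
    return bool(
        custom_paths
        or cfg.get('sanity_check_paths')
        or custom_commands
        or cfg.get('sanity_check_commands')
    )

# Before the fix the last two clauses were missing, so a config like this
# was silently ignored:
assert should_run_block_sanity_check({'sanity_check_commands': [('ipython', '-h')]})
assert not should_run_block_sanity_check({})
```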
gh_patches_debug_3275 | rasdani/github-patches | git_diff | apache__tvm-6502 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[TOPI] Typo in operator key
https://github.com/apache/incubator-tvm/blob/bdfefbb03f5aab96ee677ee28a166dd6ab5dbf3f/python/tvm/topi/bifrost/dense.py#L26
"biforst" should be "bifrost". This bug makes the op totally unavailable in Relay.
I can fix this bug if desired, but I don't know how to add a proper test.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `python/tvm/topi/bifrost/dense.py`
Content:
```
1 # Licensed to the Apache Software Foundation (ASF) under one
2 # or more contributor license agreements. See the NOTICE file
3 # distributed with this work for additional information
4 # regarding copyright ownership. The ASF licenses this file
5 # to you under the Apache License, Version 2.0 (the
6 # "License"); you may not use this file except in compliance
7 # with the License. You may obtain a copy of the License at
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
11 # Unless required by applicable law or agreed to in writing,
12 # software distributed under the License is distributed on an
13 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 # KIND, either express or implied. See the License for the
15 # specific language governing permissions and limitations
16 # under the License.
17 # pylint: disable=invalid-name,unused-variable
18 """dense schedule on ARM Mali Biforst GPU"""
19 from tvm import te
20 from tvm import autotvm
21
22 from .. import nn
23 from ..util import traverse_inline
24
25
26 @autotvm.register_topi_compute("dense.biforst")
27 def dense(_, data, weight, bias=None, out_dtype=None):
28 """Dense operator on Biforst"""
29 return nn.dense(data, weight, bias, out_dtype)
30
31
32 @autotvm.register_topi_schedule("dense.bifrost")
33 def schedule_dense(cfg, outs):
34 """Schedule for dense operator.
35
36 Parameters
37 ----------
38 cfg: ConfigEntity
39 The config entity for this template
40 outs: Array of Tensor
41 The computation graph description of dense
42 in the format of an array of tensors.
43
44 Returns
45 -------
46 s: Schedule
47 The computation schedule for dense.
48 """
49 outs = [outs] if isinstance(outs, te.tensor.Tensor) else outs
50 s = te.create_schedule([x.op for x in outs])
51
52 def _callback(op):
53 if op.tag == "dense":
54 vec_size = [1, 2, 4, 8, 16]
55 max_unroll = 32
56
57 dense_out = op.output(0)
58 output = outs[0]
59
60 y, x = s[output].op.axis
61 c = s[dense_out].op.reduce_axis[0]
62
63 ##### space definition begin #####
64 cfg.define_split("tile_y", y, num_outputs=3)
65 cfg.define_split("tile_x", x, num_outputs=3)
66 cfg.define_split("c_unroll", c, num_outputs=2, max_factor=64)
67
68 # fallback support
69 if cfg.is_fallback:
70 ref_log = autotvm.tophub.load_reference_log("mali", "rk3399", "dense.bifrost")
71 cfg.fallback_with_reference_log(ref_log)
72 ##### space definition end #####
73
74 if dense_out.op in s.outputs:
75 dense_out = s.cache_write(output, "local")
76
77 by, ty, yi = cfg["tile_y"].apply(s, output, y)
78 bx, tx, xi = cfg["tile_x"].apply(s, output, x)
79
80 s[output].bind(by, te.thread_axis("blockIdx.y"))
81 s[output].bind(bx, te.thread_axis("blockIdx.x"))
82 s[output].bind(ty, te.thread_axis("threadIdx.y"))
83 s[output].bind(tx, te.thread_axis("threadIdx.x"))
84
85 if cfg["tile_y"].size[-1] < max_unroll:
86 s[output].unroll(yi)
87 if cfg["tile_x"].size[-1] in vec_size:
88 s[output].vectorize(xi)
89 s[dense_out].compute_at(s[output], tx)
90
91 k = s[dense_out].op.reduce_axis[0]
92 y, x = s[dense_out].op.axis
93 k, k_unroll = cfg["c_unroll"].apply(s, dense_out, k)
94 s[dense_out].reorder(k, k_unroll, y, x)
95 s[dense_out].unroll(k_unroll)
96 if cfg["tile_y"].size[-1] < max_unroll:
97 s[dense_out].unroll(y)
98 if cfg["tile_x"].size[-1] in vec_size:
99 s[dense_out].vectorize(x)
100
101 traverse_inline(s, outs[0].op, _callback)
102 return s
103
104
105 def fuse_and_bind(s, tensor, axis=None, num_thread=None):
106 """ fuse all the axis and bind to GPU threads """
107 axis = axis or s[tensor].op.axis
108 fused = s[tensor].fuse(*axis)
109 bx, tx = s[tensor].split(fused, num_thread)
110 s[tensor].bind(bx, te.thread_axis("blockIdx.x"))
111 s[tensor].bind(tx, te.thread_axis("threadIdx.x"))
112 return bx, tx
113
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/python/tvm/topi/bifrost/dense.py b/python/tvm/topi/bifrost/dense.py
--- a/python/tvm/topi/bifrost/dense.py
+++ b/python/tvm/topi/bifrost/dense.py
@@ -23,7 +23,7 @@
from ..util import traverse_inline
[email protected]_topi_compute("dense.biforst")
[email protected]_topi_compute("dense.bifrost")
def dense(_, data, weight, bias=None, out_dtype=None):
"""Dense operator on Biforst"""
return nn.dense(data, weight, bias, out_dtype)
| {"golden_diff": "diff --git a/python/tvm/topi/bifrost/dense.py b/python/tvm/topi/bifrost/dense.py\n--- a/python/tvm/topi/bifrost/dense.py\n+++ b/python/tvm/topi/bifrost/dense.py\n@@ -23,7 +23,7 @@\n from ..util import traverse_inline\n \n \[email protected]_topi_compute(\"dense.biforst\")\[email protected]_topi_compute(\"dense.bifrost\")\n def dense(_, data, weight, bias=None, out_dtype=None):\n \"\"\"Dense operator on Biforst\"\"\"\n return nn.dense(data, weight, bias, out_dtype)\n", "issue": "[TOPI] Typo in operator key\nhttps://github.com/apache/incubator-tvm/blob/bdfefbb03f5aab96ee677ee28a166dd6ab5dbf3f/python/tvm/topi/bifrost/dense.py#L26\r\n\r\n\"biforst\" should be \"bifrost\". This bug makes the op totally unavailable in Relay.\r\n\r\nI can fix this bug if expected, but I don't know how to add a proper test.\n", "before_files": [{"content": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n# pylint: disable=invalid-name,unused-variable\n\"\"\"dense schedule on ARM Mali Biforst GPU\"\"\"\nfrom tvm import te\nfrom tvm import autotvm\n\nfrom .. 
import nn\nfrom ..util import traverse_inline\n\n\[email protected]_topi_compute(\"dense.biforst\")\ndef dense(_, data, weight, bias=None, out_dtype=None):\n \"\"\"Dense operator on Biforst\"\"\"\n return nn.dense(data, weight, bias, out_dtype)\n\n\[email protected]_topi_schedule(\"dense.bifrost\")\ndef schedule_dense(cfg, outs):\n \"\"\"Schedule for dense operator.\n\n Parameters\n ----------\n cfg: ConfigEntity\n The config entity for this template\n outs: Array of Tensor\n The computation graph description of dense\n in the format of an array of tensors.\n\n Returns\n -------\n s: Schedule\n The computation schedule for dense.\n \"\"\"\n outs = [outs] if isinstance(outs, te.tensor.Tensor) else outs\n s = te.create_schedule([x.op for x in outs])\n\n def _callback(op):\n if op.tag == \"dense\":\n vec_size = [1, 2, 4, 8, 16]\n max_unroll = 32\n\n dense_out = op.output(0)\n output = outs[0]\n\n y, x = s[output].op.axis\n c = s[dense_out].op.reduce_axis[0]\n\n ##### space definition begin #####\n cfg.define_split(\"tile_y\", y, num_outputs=3)\n cfg.define_split(\"tile_x\", x, num_outputs=3)\n cfg.define_split(\"c_unroll\", c, num_outputs=2, max_factor=64)\n\n # fallback support\n if cfg.is_fallback:\n ref_log = autotvm.tophub.load_reference_log(\"mali\", \"rk3399\", \"dense.bifrost\")\n cfg.fallback_with_reference_log(ref_log)\n ##### space definition end #####\n\n if dense_out.op in s.outputs:\n dense_out = s.cache_write(output, \"local\")\n\n by, ty, yi = cfg[\"tile_y\"].apply(s, output, y)\n bx, tx, xi = cfg[\"tile_x\"].apply(s, output, x)\n\n s[output].bind(by, te.thread_axis(\"blockIdx.y\"))\n s[output].bind(bx, te.thread_axis(\"blockIdx.x\"))\n s[output].bind(ty, te.thread_axis(\"threadIdx.y\"))\n s[output].bind(tx, te.thread_axis(\"threadIdx.x\"))\n\n if cfg[\"tile_y\"].size[-1] < max_unroll:\n s[output].unroll(yi)\n if cfg[\"tile_x\"].size[-1] in vec_size:\n s[output].vectorize(xi)\n s[dense_out].compute_at(s[output], tx)\n\n k = s[dense_out].op.reduce_axis[0]\n y, x = s[dense_out].op.axis\n k, k_unroll = cfg[\"c_unroll\"].apply(s, dense_out, k)\n s[dense_out].reorder(k, k_unroll, y, x)\n s[dense_out].unroll(k_unroll)\n if cfg[\"tile_y\"].size[-1] < max_unroll:\n s[dense_out].unroll(y)\n if cfg[\"tile_x\"].size[-1] in vec_size:\n s[dense_out].vectorize(x)\n\n traverse_inline(s, outs[0].op, _callback)\n return s\n\n\ndef fuse_and_bind(s, tensor, axis=None, num_thread=None):\n \"\"\" fuse all the axis and bind to GPU threads \"\"\"\n axis = axis or s[tensor].op.axis\n fused = s[tensor].fuse(*axis)\n bx, tx = s[tensor].split(fused, num_thread)\n s[tensor].bind(bx, te.thread_axis(\"blockIdx.x\"))\n s[tensor].bind(tx, te.thread_axis(\"threadIdx.x\"))\n return bx, tx\n", "path": "python/tvm/topi/bifrost/dense.py"}], "after_files": [{"content": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. 
See the License for the\n# specific language governing permissions and limitations\n# under the License.\n# pylint: disable=invalid-name,unused-variable\n\"\"\"dense schedule on ARM Mali Biforst GPU\"\"\"\nfrom tvm import te\nfrom tvm import autotvm\n\nfrom .. import nn\nfrom ..util import traverse_inline\n\n\[email protected]_topi_compute(\"dense.bifrost\")\ndef dense(_, data, weight, bias=None, out_dtype=None):\n \"\"\"Dense operator on Biforst\"\"\"\n return nn.dense(data, weight, bias, out_dtype)\n\n\[email protected]_topi_schedule(\"dense.bifrost\")\ndef schedule_dense(cfg, outs):\n \"\"\"Schedule for dense operator.\n\n Parameters\n ----------\n cfg: ConfigEntity\n The config entity for this template\n outs: Array of Tensor\n The computation graph description of dense\n in the format of an array of tensors.\n\n Returns\n -------\n s: Schedule\n The computation schedule for dense.\n \"\"\"\n outs = [outs] if isinstance(outs, te.tensor.Tensor) else outs\n s = te.create_schedule([x.op for x in outs])\n\n def _callback(op):\n if op.tag == \"dense\":\n vec_size = [1, 2, 4, 8, 16]\n max_unroll = 32\n\n dense_out = op.output(0)\n output = outs[0]\n\n y, x = s[output].op.axis\n c = s[dense_out].op.reduce_axis[0]\n\n ##### space definition begin #####\n cfg.define_split(\"tile_y\", y, num_outputs=3)\n cfg.define_split(\"tile_x\", x, num_outputs=3)\n cfg.define_split(\"c_unroll\", c, num_outputs=2, max_factor=64)\n\n # fallback support\n if cfg.is_fallback:\n ref_log = autotvm.tophub.load_reference_log(\"mali\", \"rk3399\", \"dense.bifrost\")\n cfg.fallback_with_reference_log(ref_log)\n ##### space definition end #####\n\n if dense_out.op in s.outputs:\n dense_out = s.cache_write(output, \"local\")\n\n by, ty, yi = cfg[\"tile_y\"].apply(s, output, y)\n bx, tx, xi = cfg[\"tile_x\"].apply(s, output, x)\n\n s[output].bind(by, te.thread_axis(\"blockIdx.y\"))\n s[output].bind(bx, te.thread_axis(\"blockIdx.x\"))\n s[output].bind(ty, te.thread_axis(\"threadIdx.y\"))\n s[output].bind(tx, te.thread_axis(\"threadIdx.x\"))\n\n if cfg[\"tile_y\"].size[-1] < max_unroll:\n s[output].unroll(yi)\n if cfg[\"tile_x\"].size[-1] in vec_size:\n s[output].vectorize(xi)\n s[dense_out].compute_at(s[output], tx)\n\n k = s[dense_out].op.reduce_axis[0]\n y, x = s[dense_out].op.axis\n k, k_unroll = cfg[\"c_unroll\"].apply(s, dense_out, k)\n s[dense_out].reorder(k, k_unroll, y, x)\n s[dense_out].unroll(k_unroll)\n if cfg[\"tile_y\"].size[-1] < max_unroll:\n s[dense_out].unroll(y)\n if cfg[\"tile_x\"].size[-1] in vec_size:\n s[dense_out].vectorize(x)\n\n traverse_inline(s, outs[0].op, _callback)\n return s\n\n\ndef fuse_and_bind(s, tensor, axis=None, num_thread=None):\n \"\"\" fuse all the axis and bind to GPU threads \"\"\"\n axis = axis or s[tensor].op.axis\n fused = s[tensor].fuse(*axis)\n bx, tx = s[tensor].split(fused, num_thread)\n s[tensor].bind(bx, te.thread_axis(\"blockIdx.x\"))\n s[tensor].bind(tx, te.thread_axis(\"threadIdx.x\"))\n return bx, tx\n", "path": "python/tvm/topi/bifrost/dense.py"}]} | 1,665 | 143 |
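The reporter's open question — how to add a proper test — can be answered with a registry-level check. The sketch below assumes autotvm exposes its task registry as `TASK_TABLE` under `autotvm.task.task` (true for TVM of this era, though the exact path may vary across versions) and that importing the module executes the registration decorator.

```python
# Hypothetical regression test for the operator-key typo; the registry
# location may differ between TVM versions.
from tvm import autotvm
import tvm.topi.bifrost.dense  # noqa: F401 -- importing runs the registration


def test_bifrost_dense_key():
    registered = autotvm.task.task.TASK_TABLE
    # The compute must be registered under the correct key...
    assert "dense.bifrost" in registered
    # ...and the misspelled key must be gone.
    assert "dense.biforst" not in registered
```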
gh_patches_debug_20922 | rasdani/github-patches | git_diff | pystiche__pystiche-228 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
MD5 hash error
Hi, I get this error when running the script given in the beginner example.
`FileExistsError: bird1.jpg with a different MD5 hash already exists in /root/.cache/pystiche. If you want to overwrite it, set overwrite=True.`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pystiche/demo.py`
Content:
```
1 import logging
2 import sys
3
4 from pystiche.data import (
5 DownloadableImage,
6 DownloadableImageCollection,
7 PixabayLicense,
8 PublicDomainLicense,
9 )
10 from pystiche.optim import OptimLogger
11
12 __all__ = ["demo_images", "demo_logger"]
13
14
15 def demo_images():
16 return DownloadableImageCollection(
17 {
18 "dancing": DownloadableImage(
19 "https://pytorch.org/tutorials/_static/img/neural-style/dancing.jpg",
20 md5="0a2df538901452d639170a2ed89815a4",
21 ),
22 "picasso": DownloadableImage(
23 "https://pytorch.org/tutorials/_static/img/neural-style/picasso.jpg",
24 md5="d1d60fc3f9d0b22d2d826c47934a37ea",
25 ),
26 "bird1": DownloadableImage(
27 "https://cdn.pixabay.com/photo/2016/01/14/11/26/bird-1139734_960_720.jpg",
28 file="bird1.jpg",
29 author="gholmz0",
30 date="09.03.2013",
31 license=PixabayLicense(),
32 md5="d42444d3cd0afa47f07066cd083d6cea",
33 ),
34 "paint": DownloadableImage(
35 "https://cdn.pixabay.com/photo/2017/07/03/20/17/abstract-2468874_960_720.jpg",
36 file="paint.jpg",
37 author="garageband",
38 date="03.07.2017",
39 license=PixabayLicense(),
40 md5="a991e222806ef49d34b172a67cf97d91",
41 ),
42 "bird2": DownloadableImage(
43 "https://cdn.pixabay.com/photo/2013/03/12/17/53/bird-92956_960_720.jpg",
44 file="bird2.jpg",
45 author="12019",
46 date="09.04.2012",
47 license=PixabayLicense(),
48 md5="dda3e1d0f93f783de823b4f91129d44e",
49 ),
50 "mosaic": DownloadableImage(
51 "https://upload.wikimedia.org/wikipedia/commons/2/23/Mosaic_ducks_Massimo.jpg",
52 file="mosaic.jpg",
53 author="Marie-Lan Nguyen",
54 date="2006",
55 license=PublicDomainLicense(),
56 md5="5b60cd1724395f7a0c21dc6dd006f8ae",
57 ),
58 }
59 )
60
61
62 def demo_logger():
63 logger = logging.getLogger("demo_logger")
64 logger.setLevel(logging.INFO)
65
66 sh = logging.StreamHandler(sys.stdout)
67 sh.setLevel(logging.INFO)
68 logger.addHandler(sh)
69
70 return OptimLogger(logger)
71
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pystiche/demo.py b/pystiche/demo.py
--- a/pystiche/demo.py
+++ b/pystiche/demo.py
@@ -29,7 +29,7 @@
author="gholmz0",
date="09.03.2013",
license=PixabayLicense(),
- md5="d42444d3cd0afa47f07066cd083d6cea",
+ md5="36e5fef725943a5d1d22b5048095da86",
),
"paint": DownloadableImage(
"https://cdn.pixabay.com/photo/2017/07/03/20/17/abstract-2468874_960_720.jpg",
@@ -45,7 +45,7 @@
author="12019",
date="09.04.2012",
license=PixabayLicense(),
- md5="dda3e1d0f93f783de823b4f91129d44e",
+ md5="8c5b608bd579d931e2cfe7229840fe9b",
),
"mosaic": DownloadableImage(
"https://upload.wikimedia.org/wikipedia/commons/2/23/Mosaic_ducks_Massimo.jpg",
| {"golden_diff": "diff --git a/pystiche/demo.py b/pystiche/demo.py\n--- a/pystiche/demo.py\n+++ b/pystiche/demo.py\n@@ -29,7 +29,7 @@\n author=\"gholmz0\",\n date=\"09.03.2013\",\n license=PixabayLicense(),\n- md5=\"d42444d3cd0afa47f07066cd083d6cea\",\n+ md5=\"36e5fef725943a5d1d22b5048095da86\",\n ),\n \"paint\": DownloadableImage(\n \"https://cdn.pixabay.com/photo/2017/07/03/20/17/abstract-2468874_960_720.jpg\",\n@@ -45,7 +45,7 @@\n author=\"12019\",\n date=\"09.04.2012\",\n license=PixabayLicense(),\n- md5=\"dda3e1d0f93f783de823b4f91129d44e\",\n+ md5=\"8c5b608bd579d931e2cfe7229840fe9b\",\n ),\n \"mosaic\": DownloadableImage(\n \"https://upload.wikimedia.org/wikipedia/commons/2/23/Mosaic_ducks_Massimo.jpg\",\n", "issue": "MD5 hash error\nHi, I get this error when running the script given in the example for beginner.\r\n\r\n`FileExistsError: bird1.jpg with a different MD5 hash already exists in /root/.cache/pystiche. If you want to overwrite it, set overwrite=True.`\n", "before_files": [{"content": "import logging\nimport sys\n\nfrom pystiche.data import (\n DownloadableImage,\n DownloadableImageCollection,\n PixabayLicense,\n PublicDomainLicense,\n)\nfrom pystiche.optim import OptimLogger\n\n__all__ = [\"demo_images\", \"demo_logger\"]\n\n\ndef demo_images():\n return DownloadableImageCollection(\n {\n \"dancing\": DownloadableImage(\n \"https://pytorch.org/tutorials/_static/img/neural-style/dancing.jpg\",\n md5=\"0a2df538901452d639170a2ed89815a4\",\n ),\n \"picasso\": DownloadableImage(\n \"https://pytorch.org/tutorials/_static/img/neural-style/picasso.jpg\",\n md5=\"d1d60fc3f9d0b22d2d826c47934a37ea\",\n ),\n \"bird1\": DownloadableImage(\n \"https://cdn.pixabay.com/photo/2016/01/14/11/26/bird-1139734_960_720.jpg\",\n file=\"bird1.jpg\",\n author=\"gholmz0\",\n date=\"09.03.2013\",\n license=PixabayLicense(),\n md5=\"d42444d3cd0afa47f07066cd083d6cea\",\n ),\n \"paint\": DownloadableImage(\n \"https://cdn.pixabay.com/photo/2017/07/03/20/17/abstract-2468874_960_720.jpg\",\n file=\"paint.jpg\",\n author=\"garageband\",\n date=\"03.07.2017\",\n license=PixabayLicense(),\n md5=\"a991e222806ef49d34b172a67cf97d91\",\n ),\n \"bird2\": DownloadableImage(\n \"https://cdn.pixabay.com/photo/2013/03/12/17/53/bird-92956_960_720.jpg\",\n file=\"bird2.jpg\",\n author=\"12019\",\n date=\"09.04.2012\",\n license=PixabayLicense(),\n md5=\"dda3e1d0f93f783de823b4f91129d44e\",\n ),\n \"mosaic\": DownloadableImage(\n \"https://upload.wikimedia.org/wikipedia/commons/2/23/Mosaic_ducks_Massimo.jpg\",\n file=\"mosaic.jpg\",\n author=\"Marie-Lan Nguyen\",\n date=\"2006\",\n license=PublicDomainLicense(),\n md5=\"5b60cd1724395f7a0c21dc6dd006f8ae\",\n ),\n }\n )\n\n\ndef demo_logger():\n logger = logging.getLogger(\"demo_logger\")\n logger.setLevel(logging.INFO)\n\n sh = logging.StreamHandler(sys.stdout)\n sh.setLevel(logging.INFO)\n logger.addHandler(sh)\n\n return OptimLogger(logger)\n", "path": "pystiche/demo.py"}], "after_files": [{"content": "import logging\nimport sys\n\nfrom pystiche.data import (\n DownloadableImage,\n DownloadableImageCollection,\n PixabayLicense,\n PublicDomainLicense,\n)\nfrom pystiche.optim import OptimLogger\n\n__all__ = [\"demo_images\", \"demo_logger\"]\n\n\ndef demo_images():\n return DownloadableImageCollection(\n {\n \"dancing\": DownloadableImage(\n \"https://pytorch.org/tutorials/_static/img/neural-style/dancing.jpg\",\n md5=\"0a2df538901452d639170a2ed89815a4\",\n ),\n \"picasso\": DownloadableImage(\n \"https://pytorch.org/tutorials/_static/img/neural-style/picasso.jpg\",\n 
md5=\"d1d60fc3f9d0b22d2d826c47934a37ea\",\n ),\n \"bird1\": DownloadableImage(\n \"https://cdn.pixabay.com/photo/2016/01/14/11/26/bird-1139734_960_720.jpg\",\n file=\"bird1.jpg\",\n author=\"gholmz0\",\n date=\"09.03.2013\",\n license=PixabayLicense(),\n md5=\"36e5fef725943a5d1d22b5048095da86\",\n ),\n \"paint\": DownloadableImage(\n \"https://cdn.pixabay.com/photo/2017/07/03/20/17/abstract-2468874_960_720.jpg\",\n file=\"paint.jpg\",\n author=\"garageband\",\n date=\"03.07.2017\",\n license=PixabayLicense(),\n md5=\"a991e222806ef49d34b172a67cf97d91\",\n ),\n \"bird2\": DownloadableImage(\n \"https://cdn.pixabay.com/photo/2013/03/12/17/53/bird-92956_960_720.jpg\",\n file=\"bird2.jpg\",\n author=\"12019\",\n date=\"09.04.2012\",\n license=PixabayLicense(),\n md5=\"8c5b608bd579d931e2cfe7229840fe9b\",\n ),\n \"mosaic\": DownloadableImage(\n \"https://upload.wikimedia.org/wikipedia/commons/2/23/Mosaic_ducks_Massimo.jpg\",\n file=\"mosaic.jpg\",\n author=\"Marie-Lan Nguyen\",\n date=\"2006\",\n license=PublicDomainLicense(),\n md5=\"5b60cd1724395f7a0c21dc6dd006f8ae\",\n ),\n }\n )\n\n\ndef demo_logger():\n logger = logging.getLogger(\"demo_logger\")\n logger.setLevel(logging.INFO)\n\n sh = logging.StreamHandler(sys.stdout)\n sh.setLevel(logging.INFO)\n logger.addHandler(sh)\n\n return OptimLogger(logger)\n", "path": "pystiche/demo.py"}]} | 1,184 | 345 |
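The stale checksums most likely reflect upstream changes to the hosted files. When that happens again, the reference values can be recomputed with the standard library alone; the helper name and chunk size below are arbitrary choices for this sketch.

```python
# Recompute a reference MD5 for a remote image.
import hashlib
import urllib.request


def md5_of_url(url, chunk_size=1 << 16):
    digest = hashlib.md5()
    with urllib.request.urlopen(url) as response:
        # Stream the body in chunks so large files never sit in memory whole.
        for chunk in iter(lambda: response.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()


# e.g. md5_of_url("https://cdn.pixabay.com/photo/2016/01/14/11/26/bird-1139734_960_720.jpg")
```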
gh_patches_debug_28865 | rasdani/github-patches | git_diff | bokeh__bokeh-2790 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Create example of using Hover tool to display custom images
It would be nice to show how someone can use the hover tool to display custom images via a URL/URI upon hovering over a region of interest. It would allow users to embed an additional dimension into plots.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sphinx/source/docs/user_guide/source_examples/tools_hover_custom_tooltip.py`
Content:
```
1 from bokeh.plotting import figure, output_file, show, ColumnDataSource
2 from bokeh.models import HoverTool
3
4 output_file("toolbar.html")
5
6 source = ColumnDataSource(
7 data=dict(
8 x=[1, 2, 3, 4, 5],
9 y=[2, 5, 8, 2, 7],
10 desc=['A', 'b', 'C', 'd', 'E'],
11 )
12 )
13
14 hover = HoverTool(
15 tooltips="""
16 <div>
17 <span style="font-size: 17px; font-weight: bold;">@desc</span>
18 <span style="font-size: 15px; color: #966;">[$index]</span>
19 </div>
20 <div>
21 <span style="font-size: 15px;">Location</span>
22 <span style="font-size: 10px; color: #696;">($x, $y)</span>
23 </div>
24 """
25 )
26
27 p = figure(plot_width=400, plot_height=400, tools=[hover],
28 title="Mouse over the dots")
29
30 p.circle('x', 'y', size=20, source=source)
31
32 show(p)
33
34
35
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sphinx/source/docs/user_guide/source_examples/tools_hover_custom_tooltip.py b/sphinx/source/docs/user_guide/source_examples/tools_hover_custom_tooltip.py
--- a/sphinx/source/docs/user_guide/source_examples/tools_hover_custom_tooltip.py
+++ b/sphinx/source/docs/user_guide/source_examples/tools_hover_custom_tooltip.py
@@ -8,18 +8,34 @@
x=[1, 2, 3, 4, 5],
y=[2, 5, 8, 2, 7],
desc=['A', 'b', 'C', 'd', 'E'],
+ imgs = [
+ 'http://bokeh.pydata.org/static/snake.jpg',
+ 'http://bokeh.pydata.org/static/snake2.png',
+ 'http://bokeh.pydata.org/static/snake3D.png',
+ 'http://bokeh.pydata.org/static/snake4_TheRevenge.png',
+ 'http://bokeh.pydata.org/static/snakebite.jpg'
+ ]
)
)
hover = HoverTool(
tooltips="""
<div>
- <span style="font-size: 17px; font-weight: bold;">@desc</span>
- <span style="font-size: 15px; color: #966;">[$index]</span>
- </div>
- <div>
- <span style="font-size: 15px;">Location</span>
- <span style="font-size: 10px; color: #696;">($x, $y)</span>
+ <div>
+ <img
+ src="@imgs" height="42" alt="@imgs" width="42"
+ style="float: left; margin: 0px 15px 15px 0px;"
+ border="2"
+ ></img>
+ </div>
+ <div>
+ <span style="font-size: 17px; font-weight: bold;">@desc</span>
+ <span style="font-size: 15px; color: #966;">[$index]</span>
+ </div>
+ <div>
+ <span style="font-size: 15px;">Location</span>
+ <span style="font-size: 10px; color: #696;">($x, $y)</span>
+ </div>
</div>
"""
)
@@ -30,5 +46,3 @@
p.circle('x', 'y', size=20, source=source)
show(p)
-
-
| {"golden_diff": "diff --git a/sphinx/source/docs/user_guide/source_examples/tools_hover_custom_tooltip.py b/sphinx/source/docs/user_guide/source_examples/tools_hover_custom_tooltip.py\n--- a/sphinx/source/docs/user_guide/source_examples/tools_hover_custom_tooltip.py\n+++ b/sphinx/source/docs/user_guide/source_examples/tools_hover_custom_tooltip.py\n@@ -8,18 +8,34 @@\n x=[1, 2, 3, 4, 5],\n y=[2, 5, 8, 2, 7],\n desc=['A', 'b', 'C', 'd', 'E'],\n+ imgs = [\n+ 'http://bokeh.pydata.org/static/snake.jpg',\n+ 'http://bokeh.pydata.org/static/snake2.png',\n+ 'http://bokeh.pydata.org/static/snake3D.png',\n+ 'http://bokeh.pydata.org/static/snake4_TheRevenge.png',\n+ 'http://bokeh.pydata.org/static/snakebite.jpg'\n+ ]\n )\n )\n \n hover = HoverTool(\n tooltips=\"\"\"\n <div>\n- <span style=\"font-size: 17px; font-weight: bold;\">@desc</span>\n- <span style=\"font-size: 15px; color: #966;\">[$index]</span>\n- </div>\n- <div>\n- <span style=\"font-size: 15px;\">Location</span>\n- <span style=\"font-size: 10px; color: #696;\">($x, $y)</span>\n+ <div>\n+ <img\n+ src=\"@imgs\" height=\"42\" alt=\"@imgs\" width=\"42\"\n+ style=\"float: left; margin: 0px 15px 15px 0px;\"\n+ border=\"2\"\n+ ></img>\n+ </div>\n+ <div>\n+ <span style=\"font-size: 17px; font-weight: bold;\">@desc</span>\n+ <span style=\"font-size: 15px; color: #966;\">[$index]</span>\n+ </div>\n+ <div>\n+ <span style=\"font-size: 15px;\">Location</span>\n+ <span style=\"font-size: 10px; color: #696;\">($x, $y)</span>\n+ </div>\n </div>\n \"\"\"\n )\n@@ -30,5 +46,3 @@\n p.circle('x', 'y', size=20, source=source)\n \n show(p)\n-\n-\n", "issue": "Create example of using Hover tool to display custom images\nIt would be nice to show how someone can use the hovertool to display custom images using URL/URI upon hovering over a region of interest. 
It would allow users to embed an additional dimension into plots.\n\n", "before_files": [{"content": "from bokeh.plotting import figure, output_file, show, ColumnDataSource\nfrom bokeh.models import HoverTool\n\noutput_file(\"toolbar.html\")\n\nsource = ColumnDataSource(\n data=dict(\n x=[1, 2, 3, 4, 5],\n y=[2, 5, 8, 2, 7],\n desc=['A', 'b', 'C', 'd', 'E'],\n )\n )\n\nhover = HoverTool(\n tooltips=\"\"\"\n <div>\n <span style=\"font-size: 17px; font-weight: bold;\">@desc</span>\n <span style=\"font-size: 15px; color: #966;\">[$index]</span>\n </div>\n <div>\n <span style=\"font-size: 15px;\">Location</span>\n <span style=\"font-size: 10px; color: #696;\">($x, $y)</span>\n </div>\n \"\"\"\n )\n\np = figure(plot_width=400, plot_height=400, tools=[hover],\n title=\"Mouse over the dots\")\n\np.circle('x', 'y', size=20, source=source)\n\nshow(p)\n\n \n", "path": "sphinx/source/docs/user_guide/source_examples/tools_hover_custom_tooltip.py"}], "after_files": [{"content": "from bokeh.plotting import figure, output_file, show, ColumnDataSource\nfrom bokeh.models import HoverTool\n\noutput_file(\"toolbar.html\")\n\nsource = ColumnDataSource(\n data=dict(\n x=[1, 2, 3, 4, 5],\n y=[2, 5, 8, 2, 7],\n desc=['A', 'b', 'C', 'd', 'E'],\n imgs = [\n 'http://bokeh.pydata.org/static/snake.jpg',\n 'http://bokeh.pydata.org/static/snake2.png',\n 'http://bokeh.pydata.org/static/snake3D.png',\n 'http://bokeh.pydata.org/static/snake4_TheRevenge.png',\n 'http://bokeh.pydata.org/static/snakebite.jpg'\n ]\n )\n )\n\nhover = HoverTool(\n tooltips=\"\"\"\n <div>\n <div>\n <img\n src=\"@imgs\" height=\"42\" alt=\"@imgs\" width=\"42\"\n style=\"float: left; margin: 0px 15px 15px 0px;\"\n border=\"2\"\n ></img>\n </div>\n <div>\n <span style=\"font-size: 17px; font-weight: bold;\">@desc</span>\n <span style=\"font-size: 15px; color: #966;\">[$index]</span>\n </div>\n <div>\n <span style=\"font-size: 15px;\">Location</span>\n <span style=\"font-size: 10px; color: #696;\">($x, $y)</span>\n </div>\n </div>\n \"\"\"\n )\n\np = figure(plot_width=400, plot_height=400, tools=[hover],\n title=\"Mouse over the dots\")\n\np.circle('x', 'y', size=20, source=source)\n\nshow(p)\n", "path": "sphinx/source/docs/user_guide/source_examples/tools_hover_custom_tooltip.py"}]} | 654 | 575 |
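Stripped of styling, the pattern the patched example demonstrates is an `<img>` tag inside the tooltip template whose `src` is fed from a data-source column; the image URLs below are placeholders.

```python
# Core of the image-tooltip pattern, minus the styling.
from bokeh.models import ColumnDataSource, HoverTool
from bokeh.plotting import figure, show

source = ColumnDataSource(data=dict(
    x=[1, 2],
    y=[3, 4],
    imgs=["https://example.com/a.png", "https://example.com/b.png"],
))

hover = HoverTool(tooltips="""
    <div>
        <img src="@imgs" height="42" width="42"></img>
        <span>($x, $y)</span>
    </div>
""")

p = figure(plot_width=400, plot_height=400, tools=[hover])
p.circle("x", "y", size=20, source=source)
show(p)
```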
gh_patches_debug_11629 | rasdani/github-patches | git_diff | beeware__toga-193 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ProgressBar doesn't appear in a Box [Core] [Cocoa]
Example code: https://gist.github.com/Dayof/528f9dc38f4178dbc25db6bab553e19a
When a progress bar is added inside a box (below the label is the progress bar):

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cocoa/toga_cocoa/widgets/progressbar.py`
Content:
```
1 from toga.interface import ProgressBar as ProgressBarInterface
2
3 from ..libs import *
4 from .base import WidgetMixin
5
6
7 class ProgressBar(ProgressBarInterface, WidgetMixin):
8 def __init__(self, id=None, style=None, max=None, value=None):
9 super().__init__(id=id, style=style, max=max, value=value)
10 self._create()
11
12 def create(self):
13 self._impl = NSProgressIndicator.new()
14 self._impl.setStyle_(NSProgressIndicatorBarStyle)
15 self._impl.setDisplayedWhenStopped_(True)
16
17 # Add the layout constraints
18 self._add_constraints()
19
20 def _set_value(self, value):
21 if value is not None:
22 self._impl.setDoubleValue_(value)
23
24 def start(self):
25 if self._impl and not self._running:
26 self._impl.startAnimation_(self._impl)
27 self._running = True
28
29 def stop(self):
30 if self._impl and self._running:
31 self._impl.stopAnimation_(self._impl)
32 self._running = False
33
34 def _set_max(self, value):
35 if value:
36 self._impl.setIndeterminate_(False)
37 self._impl.setMaxValue_(value)
38 else:
39 self._impl.setIndeterminate_(True)
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/cocoa/toga_cocoa/widgets/progressbar.py b/src/cocoa/toga_cocoa/widgets/progressbar.py
--- a/src/cocoa/toga_cocoa/widgets/progressbar.py
+++ b/src/cocoa/toga_cocoa/widgets/progressbar.py
@@ -16,6 +16,7 @@
# Add the layout constraints
self._add_constraints()
+ self.rehint()
def _set_value(self, value):
if value is not None:
@@ -37,3 +38,9 @@
self._impl.setMaxValue_(value)
else:
self._impl.setIndeterminate_(True)
+
+ def rehint(self):
+ self.style.hint(
+ height=self._impl.fittingSize().height,
+ width=self._impl.fittingSize().width
+ )
| {"golden_diff": "diff --git a/src/cocoa/toga_cocoa/widgets/progressbar.py b/src/cocoa/toga_cocoa/widgets/progressbar.py\n--- a/src/cocoa/toga_cocoa/widgets/progressbar.py\n+++ b/src/cocoa/toga_cocoa/widgets/progressbar.py\n@@ -16,6 +16,7 @@\n \n # Add the layout constraints\n self._add_constraints()\n+ self.rehint()\n \n def _set_value(self, value):\n if value is not None:\n@@ -37,3 +38,9 @@\n self._impl.setMaxValue_(value)\n else:\n self._impl.setIndeterminate_(True)\n+\n+ def rehint(self):\n+ self.style.hint(\n+ height=self._impl.fittingSize().height,\n+ width=self._impl.fittingSize().width\n+ )\n", "issue": "ProgressBar doesn't appears in a Box [Core] [Cocoa]\nExample code: https://gist.github.com/Dayof/528f9dc38f4178dbc25db6bab553e19a\r\n\r\nWhen a progress bar is add inside of a box (bellow the label is the progress bar):\r\n\r\n\r\n\n", "before_files": [{"content": "from toga.interface import ProgressBar as ProgressBarInterface\n\nfrom ..libs import *\nfrom .base import WidgetMixin\n\n\nclass ProgressBar(ProgressBarInterface, WidgetMixin):\n def __init__(self, id=None, style=None, max=None, value=None):\n super().__init__(id=id, style=style, max=max, value=value)\n self._create()\n\n def create(self):\n self._impl = NSProgressIndicator.new()\n self._impl.setStyle_(NSProgressIndicatorBarStyle)\n self._impl.setDisplayedWhenStopped_(True)\n\n # Add the layout constraints\n self._add_constraints()\n\n def _set_value(self, value):\n if value is not None:\n self._impl.setDoubleValue_(value)\n\n def start(self):\n if self._impl and not self._running:\n self._impl.startAnimation_(self._impl)\n self._running = True\n\n def stop(self):\n if self._impl and self._running:\n self._impl.stopAnimation_(self._impl)\n self._running = False\n\n def _set_max(self, value):\n if value:\n self._impl.setIndeterminate_(False)\n self._impl.setMaxValue_(value)\n else:\n self._impl.setIndeterminate_(True)\n", "path": "src/cocoa/toga_cocoa/widgets/progressbar.py"}], "after_files": [{"content": "from toga.interface import ProgressBar as ProgressBarInterface\n\nfrom ..libs import *\nfrom .base import WidgetMixin\n\n\nclass ProgressBar(ProgressBarInterface, WidgetMixin):\n def __init__(self, id=None, style=None, max=None, value=None):\n super().__init__(id=id, style=style, max=max, value=value)\n self._create()\n\n def create(self):\n self._impl = NSProgressIndicator.new()\n self._impl.setStyle_(NSProgressIndicatorBarStyle)\n self._impl.setDisplayedWhenStopped_(True)\n\n # Add the layout constraints\n self._add_constraints()\n self.rehint()\n\n def _set_value(self, value):\n if value is not None:\n self._impl.setDoubleValue_(value)\n\n def start(self):\n if self._impl and not self._running:\n self._impl.startAnimation_(self._impl)\n self._running = True\n\n def stop(self):\n if self._impl and self._running:\n self._impl.stopAnimation_(self._impl)\n self._running = False\n\n def _set_max(self, value):\n if value:\n self._impl.setIndeterminate_(False)\n self._impl.setMaxValue_(value)\n else:\n self._impl.setIndeterminate_(True)\n\n def rehint(self):\n self.style.hint(\n height=self._impl.fittingSize().height,\n width=self._impl.fittingSize().width\n )\n", "path": "src/cocoa/toga_cocoa/widgets/progressbar.py"}]} | 768 | 181 |
gh_patches_debug_1094 | rasdani/github-patches | git_diff | ESMCI__cime-4035 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
cheyenne needs a module load python
Now that we require Python 3.5+, we need to do a `module load python` on cheyenne.
The lack of this module load is responsible for a failure in `J_TestCreateNewcase.test_f_createnewcase_with_user_compset` if you run the whole `J_TestCreateNewcase` suite, and may cause other problems as well.
I'll get a fix in shortly.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/Tools/standard_script_setup.py`
Content:
```
1 """
2 Encapsulate the importing of python utils and logging setup, things
3 that every script should do.
4 """
5 # pylint: disable=unused-import
6
7 import sys, os
8 import __main__ as main
9 _CIMEROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..","..")
10 _LIB_DIR = os.path.join(_CIMEROOT, "scripts", "lib")
11 sys.path.append(_LIB_DIR)
12
13 # Important: Allows external tools to link up with CIME
14 os.environ["CIMEROOT"] = _CIMEROOT
15
16 import CIME.utils
17 CIME.utils.check_minimum_python_version(2, 7)
18 CIME.utils.stop_buffering_output()
19 import logging, argparse
20
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scripts/Tools/standard_script_setup.py b/scripts/Tools/standard_script_setup.py
--- a/scripts/Tools/standard_script_setup.py
+++ b/scripts/Tools/standard_script_setup.py
@@ -14,6 +14,6 @@
os.environ["CIMEROOT"] = _CIMEROOT
import CIME.utils
-CIME.utils.check_minimum_python_version(2, 7)
+CIME.utils.check_minimum_python_version(3, 6)
CIME.utils.stop_buffering_output()
import logging, argparse
| {"golden_diff": "diff --git a/scripts/Tools/standard_script_setup.py b/scripts/Tools/standard_script_setup.py\n--- a/scripts/Tools/standard_script_setup.py\n+++ b/scripts/Tools/standard_script_setup.py\n@@ -14,6 +14,6 @@\n os.environ[\"CIMEROOT\"] = _CIMEROOT\n \n import CIME.utils\n-CIME.utils.check_minimum_python_version(2, 7)\n+CIME.utils.check_minimum_python_version(3, 6)\n CIME.utils.stop_buffering_output()\n import logging, argparse\n", "issue": "cheyenne needs a module load python\nNow that we require python 3.5+, we need to do a module load python on cheyenne.\r\n\r\nThe lack of this module load is responsible for a failure in `J_TestCreateNewcase.test_f_createnewcase_with_user_compset` if you run the whole `J_TestCreateNewcase` suite, and may cause other problems as well.\r\n\r\nI'll get a fix in shortly.\n", "before_files": [{"content": "\"\"\"\nEncapsulate the importing of python utils and logging setup, things\nthat every script should do.\n\"\"\"\n# pylint: disable=unused-import\n\nimport sys, os\nimport __main__ as main\n_CIMEROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"..\",\"..\")\n_LIB_DIR = os.path.join(_CIMEROOT, \"scripts\", \"lib\")\nsys.path.append(_LIB_DIR)\n\n# Important: Allows external tools to link up with CIME\nos.environ[\"CIMEROOT\"] = _CIMEROOT\n\nimport CIME.utils\nCIME.utils.check_minimum_python_version(2, 7)\nCIME.utils.stop_buffering_output()\nimport logging, argparse\n", "path": "scripts/Tools/standard_script_setup.py"}], "after_files": [{"content": "\"\"\"\nEncapsulate the importing of python utils and logging setup, things\nthat every script should do.\n\"\"\"\n# pylint: disable=unused-import\n\nimport sys, os\nimport __main__ as main\n_CIMEROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"..\",\"..\")\n_LIB_DIR = os.path.join(_CIMEROOT, \"scripts\", \"lib\")\nsys.path.append(_LIB_DIR)\n\n# Important: Allows external tools to link up with CIME\nos.environ[\"CIMEROOT\"] = _CIMEROOT\n\nimport CIME.utils\nCIME.utils.check_minimum_python_version(3, 6)\nCIME.utils.stop_buffering_output()\nimport logging, argparse\n", "path": "scripts/Tools/standard_script_setup.py"}]} | 538 | 115 |
gh_patches_debug_33119 | rasdani/github-patches | git_diff | nilearn__nilearn-4334 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Globbing should be advertised in doc/examples.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/00_tutorials/plot_nilearn_101.py`
Content:
```
1 """
2 Basic nilearn example: manipulating and looking at data
3 =======================================================
4
5 A simple example showing how to load an existing Nifti file and use
6 basic nilearn functionalities.
7 """
8
9 # Let us use a Nifti file that is shipped with nilearn
10 from nilearn.datasets import MNI152_FILE_PATH
11
12 # Note that the variable MNI152_FILE_PATH is just a path to a Nifti file
13 print(f"Path to MNI152 template: {MNI152_FILE_PATH!r}")
14
15 # %%
16 # A first step: looking at our data
17 # ----------------------------------
18 #
19 # Let's quickly plot this file:
20 from nilearn import plotting
21
22 plotting.plot_img(MNI152_FILE_PATH)
23
24 # %%
25 # This is not a very pretty plot. We just used the simplest possible
26 # code. There is a whole :ref:`section of the documentation <plotting>`
27 # on making prettier code.
28 #
29 # **Exercise**: Try plotting one of your own files. In the above,
30 # MNI152_FILE_PATH is nothing more than a string with a path pointing to
31 # a nifti image. You can replace it with a string pointing to a file on
32 # your disk. Note that it should be a 3D volume, and not a 4D volume.
33
34 # %%
35 # Simple image manipulation: smoothing
36 # ------------------------------------
37 #
38 # Let's use an image-smoothing function from nilearn:
39 # :func:`nilearn.image.smooth_img`
40 #
41 # Functions containing 'img' can take either a filename or an image as input.
42 #
43 # Here we give as inputs the image filename and the smoothing value in mm
44 from nilearn import image
45
46 smooth_anat_img = image.smooth_img(MNI152_FILE_PATH, fwhm=3)
47
48 # While we are giving a file name as input, the function returns
49 # an in-memory object:
50 smooth_anat_img
51
52 # %%
53 # This is an in-memory object. We can pass it to nilearn function, for
54 # instance to look at it
55 plotting.plot_img(smooth_anat_img)
56
57 # %%
58 # We could also pass it to the smoothing function
59 more_smooth_anat_img = image.smooth_img(smooth_anat_img, fwhm=3)
60 plotting.plot_img(more_smooth_anat_img)
61
62 # %%
63 # Saving results to a file
64 # -------------------------
65 #
66 # We can save any in-memory object as follows:
67 from pathlib import Path
68
69 output_dir = Path.cwd() / "results" / "plot_nilearn_101"
70 output_dir.mkdir(exist_ok=True, parents=True)
71 print(f"Output will be saved to: {output_dir}")
72 more_smooth_anat_img.to_filename(output_dir / "more_smooth_anat_img.nii.gz")
73
74 # %%
75 # Finally, calling plotting.show() is necessary to display the figure
76 # when running as a script outside IPython
77 plotting.show()
78
79 # %%
80 # |
81 #
82 # ______
83 #
84 # To recap, all the nilearn tools can take data as filenames or in-memory
85 # objects, and return brain volumes as in-memory objects. These can be
86 # passed on to other nilearn tools, or saved to disk.
87
88 # sphinx_gallery_dummy_images=1
89
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/00_tutorials/plot_nilearn_101.py b/examples/00_tutorials/plot_nilearn_101.py
--- a/examples/00_tutorials/plot_nilearn_101.py
+++ b/examples/00_tutorials/plot_nilearn_101.py
@@ -59,17 +59,49 @@
more_smooth_anat_img = image.smooth_img(smooth_anat_img, fwhm=3)
plotting.plot_img(more_smooth_anat_img)
+
+# %%
+# Globbing over multiple 3D volumes
+# ---------------------------------
+# Nilearn also supports reading multiple volumes at once,
+# using glob-style patterns.
+# For instance, we can smooth volumes from many subjects
+# at once and get a 4D image as output.
+
+# %%
+# First let's fetch Haxby dataset for subject 1 and 2
+from nilearn import datasets
+
+haxby = datasets.fetch_haxby(subjects=[1, 2])
+
+# %%
+# Now we can find the anatomical images from both
+# subjects using the `*` wildcard
+from pathlib import Path
+
+anats_all_subjects = (
+ Path(datasets.get_data_dirs()[0]) / "haxby2001" / "subj*" / "anat*"
+)
+
+# %%
+# Now we can smooth all the anatomical images at once
+anats_all_subjects_smooth = image.smooth_img(anats_all_subjects, fwhm=5)
+
+# %%
+# This is a 4D image containing one volume per subject
+print(anats_all_subjects_smooth.shape)
+
# %%
# Saving results to a file
# -------------------------
#
# We can save any in-memory object as follows:
-from pathlib import Path
-
output_dir = Path.cwd() / "results" / "plot_nilearn_101"
output_dir.mkdir(exist_ok=True, parents=True)
print(f"Output will be saved to: {output_dir}")
-more_smooth_anat_img.to_filename(output_dir / "more_smooth_anat_img.nii.gz")
+anats_all_subjects_smooth.to_filename(
+ output_dir / "anats_all_subjects_smooth.nii.gz"
+)
# %%
# Finally, calling plotting.show() is necessary to display the figure
@@ -81,8 +113,9 @@
#
# ______
#
-# To recap, all the nilearn tools can take data as filenames or in-memory
-# objects, and return brain volumes as in-memory objects. These can be
+# To recap, all the nilearn tools can take data as filenames or
+# glob-style patterns or in-memory objects, and return brain
+# volumes as in-memory objects. These can be
# passed on to other nilearn tools, or saved to disk.
# sphinx_gallery_dummy_images=1
| {"golden_diff": "diff --git a/examples/00_tutorials/plot_nilearn_101.py b/examples/00_tutorials/plot_nilearn_101.py\n--- a/examples/00_tutorials/plot_nilearn_101.py\n+++ b/examples/00_tutorials/plot_nilearn_101.py\n@@ -59,17 +59,49 @@\n more_smooth_anat_img = image.smooth_img(smooth_anat_img, fwhm=3)\n plotting.plot_img(more_smooth_anat_img)\n \n+\n+# %%\n+# Globbing over multiple 3D volumes\n+# ---------------------------------\n+# Nilearn also supports reading multiple volumes at once,\n+# using glob-style patterns.\n+# For instance, we can smooth volumes from many subjects\n+# at once and get a 4D image as output.\n+\n+# %%\n+# First let's fetch Haxby dataset for subject 1 and 2\n+from nilearn import datasets\n+\n+haxby = datasets.fetch_haxby(subjects=[1, 2])\n+\n+# %%\n+# Now we can find the anatomical images from both\n+# subjects using the `*` wildcard\n+from pathlib import Path\n+\n+anats_all_subjects = (\n+ Path(datasets.get_data_dirs()[0]) / \"haxby2001\" / \"subj*\" / \"anat*\"\n+)\n+\n+# %%\n+# Now we can smooth all the anatomical images at once\n+anats_all_subjects_smooth = image.smooth_img(anats_all_subjects, fwhm=5)\n+\n+# %%\n+# This is a 4D image containing one volume per subject\n+print(anats_all_subjects_smooth.shape)\n+\n # %%\n # Saving results to a file\n # -------------------------\n #\n # We can save any in-memory object as follows:\n-from pathlib import Path\n-\n output_dir = Path.cwd() / \"results\" / \"plot_nilearn_101\"\n output_dir.mkdir(exist_ok=True, parents=True)\n print(f\"Output will be saved to: {output_dir}\")\n-more_smooth_anat_img.to_filename(output_dir / \"more_smooth_anat_img.nii.gz\")\n+anats_all_subjects_smooth.to_filename(\n+ output_dir / \"anats_all_subjects_smooth.nii.gz\"\n+)\n \n # %%\n # Finally, calling plotting.show() is necessary to display the figure\n@@ -81,8 +113,9 @@\n #\n # ______\n #\n-# To recap, all the nilearn tools can take data as filenames or in-memory\n-# objects, and return brain volumes as in-memory objects. These can be\n+# To recap, all the nilearn tools can take data as filenames or\n+# glob-style patterns or in-memory objects, and return brain\n+# volumes as in-memory objects. These can be\n # passed on to other nilearn tools, or saved to disk.\n \n # sphinx_gallery_dummy_images=1\n", "issue": "Globbing should be advertised in doc/examples.\n\n", "before_files": [{"content": "\"\"\"\nBasic nilearn example: manipulating and looking at data\n=======================================================\n\nA simple example showing how to load an existing Nifti file and use\nbasic nilearn functionalities.\n\"\"\"\n\n# Let us use a Nifti file that is shipped with nilearn\nfrom nilearn.datasets import MNI152_FILE_PATH\n\n# Note that the variable MNI152_FILE_PATH is just a path to a Nifti file\nprint(f\"Path to MNI152 template: {MNI152_FILE_PATH!r}\")\n\n# %%\n# A first step: looking at our data\n# ----------------------------------\n#\n# Let's quickly plot this file:\nfrom nilearn import plotting\n\nplotting.plot_img(MNI152_FILE_PATH)\n\n# %%\n# This is not a very pretty plot. We just used the simplest possible\n# code. There is a whole :ref:`section of the documentation <plotting>`\n# on making prettier code.\n#\n# **Exercise**: Try plotting one of your own files. In the above,\n# MNI152_FILE_PATH is nothing more than a string with a path pointing to\n# a nifti image. You can replace it with a string pointing to a file on\n# your disk. 
Note that it should be a 3D volume, and not a 4D volume.\n\n# %%\n# Simple image manipulation: smoothing\n# ------------------------------------\n#\n# Let's use an image-smoothing function from nilearn:\n# :func:`nilearn.image.smooth_img`\n#\n# Functions containing 'img' can take either a filename or an image as input.\n#\n# Here we give as inputs the image filename and the smoothing value in mm\nfrom nilearn import image\n\nsmooth_anat_img = image.smooth_img(MNI152_FILE_PATH, fwhm=3)\n\n# While we are giving a file name as input, the function returns\n# an in-memory object:\nsmooth_anat_img\n\n# %%\n# This is an in-memory object. We can pass it to nilearn function, for\n# instance to look at it\nplotting.plot_img(smooth_anat_img)\n\n# %%\n# We could also pass it to the smoothing function\nmore_smooth_anat_img = image.smooth_img(smooth_anat_img, fwhm=3)\nplotting.plot_img(more_smooth_anat_img)\n\n# %%\n# Saving results to a file\n# -------------------------\n#\n# We can save any in-memory object as follows:\nfrom pathlib import Path\n\noutput_dir = Path.cwd() / \"results\" / \"plot_nilearn_101\"\noutput_dir.mkdir(exist_ok=True, parents=True)\nprint(f\"Output will be saved to: {output_dir}\")\nmore_smooth_anat_img.to_filename(output_dir / \"more_smooth_anat_img.nii.gz\")\n\n# %%\n# Finally, calling plotting.show() is necessary to display the figure\n# when running as a script outside IPython\nplotting.show()\n\n# %%\n# |\n#\n# ______\n#\n# To recap, all the nilearn tools can take data as filenames or in-memory\n# objects, and return brain volumes as in-memory objects. These can be\n# passed on to other nilearn tools, or saved to disk.\n\n# sphinx_gallery_dummy_images=1\n", "path": "examples/00_tutorials/plot_nilearn_101.py"}], "after_files": [{"content": "\"\"\"\nBasic nilearn example: manipulating and looking at data\n=======================================================\n\nA simple example showing how to load an existing Nifti file and use\nbasic nilearn functionalities.\n\"\"\"\n\n# Let us use a Nifti file that is shipped with nilearn\nfrom nilearn.datasets import MNI152_FILE_PATH\n\n# Note that the variable MNI152_FILE_PATH is just a path to a Nifti file\nprint(f\"Path to MNI152 template: {MNI152_FILE_PATH!r}\")\n\n# %%\n# A first step: looking at our data\n# ----------------------------------\n#\n# Let's quickly plot this file:\nfrom nilearn import plotting\n\nplotting.plot_img(MNI152_FILE_PATH)\n\n# %%\n# This is not a very pretty plot. We just used the simplest possible\n# code. There is a whole :ref:`section of the documentation <plotting>`\n# on making prettier code.\n#\n# **Exercise**: Try plotting one of your own files. In the above,\n# MNI152_FILE_PATH is nothing more than a string with a path pointing to\n# a nifti image. You can replace it with a string pointing to a file on\n# your disk. Note that it should be a 3D volume, and not a 4D volume.\n\n# %%\n# Simple image manipulation: smoothing\n# ------------------------------------\n#\n# Let's use an image-smoothing function from nilearn:\n# :func:`nilearn.image.smooth_img`\n#\n# Functions containing 'img' can take either a filename or an image as input.\n#\n# Here we give as inputs the image filename and the smoothing value in mm\nfrom nilearn import image\n\nsmooth_anat_img = image.smooth_img(MNI152_FILE_PATH, fwhm=3)\n\n# While we are giving a file name as input, the function returns\n# an in-memory object:\nsmooth_anat_img\n\n# %%\n# This is an in-memory object. 
We can pass it to nilearn function, for\n# instance to look at it\nplotting.plot_img(smooth_anat_img)\n\n# %%\n# We could also pass it to the smoothing function\nmore_smooth_anat_img = image.smooth_img(smooth_anat_img, fwhm=3)\nplotting.plot_img(more_smooth_anat_img)\n\n\n# %%\n# Globbing over multiple 3D volumes\n# ---------------------------------\n# Nilearn also supports reading multiple volumes at once,\n# using glob-style patterns.\n# For instance, we can smooth volumes from many subjects\n# at once and get a 4D image as output.\n\n# %%\n# First let's fetch Haxby dataset for subject 1 and 2\nfrom nilearn import datasets\n\nhaxby = datasets.fetch_haxby(subjects=[1, 2])\n\n# %%\n# Now we can find the anatomical images from both\n# subjects using the `*` wildcard\nfrom pathlib import Path\n\nanats_all_subjects = (\n Path(datasets.get_data_dirs()[0]) / \"haxby2001\" / \"subj*\" / \"anat*\"\n)\n\n# %%\n# Now we can smooth all the anatomical images at once\nanats_all_subjects_smooth = image.smooth_img(anats_all_subjects, fwhm=5)\n\n# %%\n# This is a 4D image containing one volume per subject\nprint(anats_all_subjects_smooth.shape)\n\n# %%\n# Saving results to a file\n# -------------------------\n#\n# We can save any in-memory object as follows:\noutput_dir = Path.cwd() / \"results\" / \"plot_nilearn_101\"\noutput_dir.mkdir(exist_ok=True, parents=True)\nprint(f\"Output will be saved to: {output_dir}\")\nanats_all_subjects_smooth.to_filename(\n output_dir / \"anats_all_subjects_smooth.nii.gz\"\n)\n\n# %%\n# Finally, calling plotting.show() is necessary to display the figure\n# when running as a script outside IPython\nplotting.show()\n\n# %%\n# |\n#\n# ______\n#\n# To recap, all the nilearn tools can take data as filenames or\n# glob-style patterns or in-memory objects, and return brain\n# volumes as in-memory objects. These can be\n# passed on to other nilearn tools, or saved to disk.\n\n# sphinx_gallery_dummy_images=1\n", "path": "examples/00_tutorials/plot_nilearn_101.py"}]} | 1,148 | 626 |
gh_patches_debug_16643 | rasdani/github-patches | git_diff | sktime__sktime-5330 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG] `temporal_train_test_split` does not work on panel datatypes with unequal length series.
**Describe the bug**
<!--
A clear and concise description of what the bug is.
-->
Also relates to #4968
`temporal_train_test_split` wrongly splits panel datatypes (splitting per unique unequal-length time series). It could be that the split function does not support this type yet; if so, it should throw an error message stating that it does not currently support these datatypes.
**To Reproduce**
<!--
Add a Minimal, Complete, and Verifiable example (for more details, see e.g. https://stackoverflow.com/help/mcve
If the code is too long, feel free to put it in a public gist and link it in the issue: https://gist.github.com
-->
```python
from sktime.forecasting.model_selection import temporal_train_test_split
from sktime.utils._testing.panel import _make_panel
y = _make_panel(n_instances=2, n_timepoints=6)
# make first instance series shorter than the second
y.iloc[4:6] =None
y.dropna(inplace=True)
train_size, test_size = temporal_train_test_split(y, test_size=2)
# show shapes
print(
f"""
{y.shape=}
{train_size.shape=}, {test_size.shape=} #train size should be (6,1) 2+4=6
"""
)
# has also the same issue as #4968 below is the minimal example
train_fh, test_fh = temporal_train_test_split(y, fh=[1,2])
# show shapes
print(
f"""
{y.shape=}
{train_fh.shape=}, {test_fh.shape=} #train size should be (6,1) and test (4,1)
"""
)
```
output
```
y.shape=(10, 1)
train_size.shape=(4, 1), test_size.shape=(4, 1) #train size should be 6 (2+4)
y.shape=(10, 1)
train_fh.shape=(8, 1), test_fh.shape=(2, 1) #train size should be (6,1) and test (4,1)
```
**Versions**
<details>
<!--
Please run the following code snippet and paste the output here:
from sktime import show_versions; show_versions()
-->
main at [3cf69ed](https://github.com/sktime/sktime/commit/3cf69eddba315d6130b661ca5fe8e132e236aa47)
</details>
<!-- Thanks for contributing! -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sktime/split/__init__.py`
Content:
```
1 """Module for splitters."""
2
3 __all__ = [
4 "CutoffSplitter",
5 "ExpandingGreedySplitter",
6 "ExpandingWindowSplitter",
7 "SameLocSplitter",
8 "SingleWindowSplitter",
9 "SlidingWindowSplitter",
10 "TestPlusTrainSplitter",
11 "temporal_train_test_split",
12 ]
13
14 from sktime.split.cutoff import CutoffSplitter
15 from sktime.split.expandinggreedy import ExpandingGreedySplitter
16 from sktime.split.expandingwindow import ExpandingWindowSplitter
17 from sktime.split.sameloc import SameLocSplitter
18 from sktime.split.singlewindow import SingleWindowSplitter
19 from sktime.split.slidingwindow import SlidingWindowSplitter
20 from sktime.split.temporal_train_test_split import temporal_train_test_split
21 from sktime.split.testplustrain import TestPlusTrainSplitter
22
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sktime/split/__init__.py b/sktime/split/__init__.py
--- a/sktime/split/__init__.py
+++ b/sktime/split/__init__.py
@@ -7,6 +7,7 @@
"SameLocSplitter",
"SingleWindowSplitter",
"SlidingWindowSplitter",
+ "TemporalTrainTestSplitter",
"TestPlusTrainSplitter",
"temporal_train_test_split",
]
@@ -17,5 +18,8 @@
from sktime.split.sameloc import SameLocSplitter
from sktime.split.singlewindow import SingleWindowSplitter
from sktime.split.slidingwindow import SlidingWindowSplitter
-from sktime.split.temporal_train_test_split import temporal_train_test_split
+from sktime.split.temporal_train_test_split import (
+ TemporalTrainTestSplitter,
+ temporal_train_test_split,
+)
from sktime.split.testplustrain import TestPlusTrainSplitter
| {"golden_diff": "diff --git a/sktime/split/__init__.py b/sktime/split/__init__.py\n--- a/sktime/split/__init__.py\n+++ b/sktime/split/__init__.py\n@@ -7,6 +7,7 @@\n \"SameLocSplitter\",\n \"SingleWindowSplitter\",\n \"SlidingWindowSplitter\",\n+ \"TemporalTrainTestSplitter\",\n \"TestPlusTrainSplitter\",\n \"temporal_train_test_split\",\n ]\n@@ -17,5 +18,8 @@\n from sktime.split.sameloc import SameLocSplitter\n from sktime.split.singlewindow import SingleWindowSplitter\n from sktime.split.slidingwindow import SlidingWindowSplitter\n-from sktime.split.temporal_train_test_split import temporal_train_test_split\n+from sktime.split.temporal_train_test_split import (\n+ TemporalTrainTestSplitter,\n+ temporal_train_test_split,\n+)\n from sktime.split.testplustrain import TestPlusTrainSplitter\n", "issue": "[BUG] `temporal_train_test_split` does not work on panel datatypes with unequal length series.\n**Describe the bug**\r\n<!--\r\nA clear and concise description of what the bug is.\r\n-->\r\nAlso relates to #4968 \r\n\r\n`temporal_train_test_split` wrongly split panel datatypes (splitting per unique unequal time series). It could be that the split function does not support this type yet, If so, it should throw an error msg telling that it does not currently supported the datatypes.\r\n\r\n**To Reproduce**\r\n<!--\r\nAdd a Minimal, Complete, and Verifiable example (for more details, see e.g. https://stackoverflow.com/help/mcve\r\n\r\nIf the code is too long, feel free to put it in a public gist and link it in the issue: https://gist.github.com\r\n-->\r\n\r\n```python\r\nfrom sktime.forecasting.model_selection import temporal_train_test_split\r\nfrom sktime.utils._testing.panel import _make_panel\r\n\r\ny = _make_panel(n_instances=2, n_timepoints=6)\r\n# make first instance series shorter than the second\r\ny.iloc[4:6] =None\r\ny.dropna(inplace=True)\r\n\r\ntrain_size, test_size = temporal_train_test_split(y, test_size=2) \r\n# show shapes\r\nprint(\r\n f\"\"\"\r\n {y.shape=} \r\n {train_size.shape=}, {test_size.shape=} #train size should be (6,1) 2+4=6\r\n \"\"\"\r\n)\r\n\r\n# has also the same issue as #4968 below is the minimal example\r\ntrain_fh, test_fh = temporal_train_test_split(y, fh=[1,2]) \r\n# show shapes\r\nprint(\r\n f\"\"\"\r\n {y.shape=} \r\n {train_fh.shape=}, {test_fh.shape=} #train size should be (6,1) and test (4,1)\r\n \"\"\"\r\n)\r\n```\r\n\r\noutput\r\n```\r\ny.shape=(10, 1) \r\ntrain_size.shape=(4, 1), test_size.shape=(4, 1) #train size should be 6 (2+4)\r\n\r\ny.shape=(10, 1) \r\ntrain_fh.shape=(8, 1), test_fh.shape=(2, 1) #train size should be (6,1) and test (4,1)\r\n```\r\n\r\n**Versions**\r\n<details>\r\n\r\n<!--\r\nPlease run the following code snippet and paste the output here:\r\n\r\nfrom sktime import show_versions; show_versions()\r\n-->\r\n main at [3cf69ed](https://github.com/sktime/sktime/commit/3cf69eddba315d6130b661ca5fe8e132e236aa47)\r\n\r\n</details>\r\n\r\n<!-- Thanks for contributing! 
-->\r\n\n", "before_files": [{"content": "\"\"\"Module for splitters.\"\"\"\n\n__all__ = [\n \"CutoffSplitter\",\n \"ExpandingGreedySplitter\",\n \"ExpandingWindowSplitter\",\n \"SameLocSplitter\",\n \"SingleWindowSplitter\",\n \"SlidingWindowSplitter\",\n \"TestPlusTrainSplitter\",\n \"temporal_train_test_split\",\n]\n\nfrom sktime.split.cutoff import CutoffSplitter\nfrom sktime.split.expandinggreedy import ExpandingGreedySplitter\nfrom sktime.split.expandingwindow import ExpandingWindowSplitter\nfrom sktime.split.sameloc import SameLocSplitter\nfrom sktime.split.singlewindow import SingleWindowSplitter\nfrom sktime.split.slidingwindow import SlidingWindowSplitter\nfrom sktime.split.temporal_train_test_split import temporal_train_test_split\nfrom sktime.split.testplustrain import TestPlusTrainSplitter\n", "path": "sktime/split/__init__.py"}], "after_files": [{"content": "\"\"\"Module for splitters.\"\"\"\n\n__all__ = [\n \"CutoffSplitter\",\n \"ExpandingGreedySplitter\",\n \"ExpandingWindowSplitter\",\n \"SameLocSplitter\",\n \"SingleWindowSplitter\",\n \"SlidingWindowSplitter\",\n \"TemporalTrainTestSplitter\",\n \"TestPlusTrainSplitter\",\n \"temporal_train_test_split\",\n]\n\nfrom sktime.split.cutoff import CutoffSplitter\nfrom sktime.split.expandinggreedy import ExpandingGreedySplitter\nfrom sktime.split.expandingwindow import ExpandingWindowSplitter\nfrom sktime.split.sameloc import SameLocSplitter\nfrom sktime.split.singlewindow import SingleWindowSplitter\nfrom sktime.split.slidingwindow import SlidingWindowSplitter\nfrom sktime.split.temporal_train_test_split import (\n TemporalTrainTestSplitter,\n temporal_train_test_split,\n)\nfrom sktime.split.testplustrain import TestPlusTrainSplitter\n", "path": "sktime/split/__init__.py"}]} | 1,068 | 217 |
gh_patches_debug_47400 | rasdani/github-patches | git_diff | python-telegram-bot__python-telegram-bot-2132 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add support for Py 3.9
With Py 3.9 out, we should add it to the tests at some point. Maybe that can wait until 3.9.x, x>0, is out, though.
We need to check whether all the job-related functionality works, as APS doesn't support py3.9 yet and there has been a [report](https://t.me/pythontelegrambotgroup/382731) that it doesn't work (with PTB).
On a related note: APS seems to be [preparing for v4.0](https://github.com/agronholm/apscheduler/issues/465), which will break some stuff, but also supports py3.9 and even uses the new ZoneInfo (also backporting to py3.6+), lifting the restriction to use `pytz` timezones. I already subscribed to releases. I guess updating APS in PTB should be done only when 4.x, x>0 is out and we're doing breaking things anyway …
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 """The setup and build script for the python-telegram-bot library."""
3
4 import codecs
5 import os
6 import sys
7
8 from setuptools import setup, find_packages
9
10
11 def requirements():
12 """Build the requirements list for this project"""
13 requirements_list = []
14
15 with open('requirements.txt') as requirements:
16 for install in requirements:
17 requirements_list.append(install.strip())
18
19 return requirements_list
20
21
22 packages = find_packages(exclude=['tests*'])
23 requirements = requirements()
24
25 # Allow for a package install to not use the vendored urllib3
26 UPSTREAM_URLLIB3_FLAG = '--with-upstream-urllib3'
27 if UPSTREAM_URLLIB3_FLAG in sys.argv:
28 sys.argv.remove(UPSTREAM_URLLIB3_FLAG)
29 requirements.append('urllib3 >= 1.19.1')
30 packages = [x for x in packages if not x.startswith('telegram.vendor.ptb_urllib3')]
31
32 with codecs.open('README.rst', 'r', 'utf-8') as fd:
33 fn = os.path.join('telegram', 'version.py')
34 with open(fn) as fh:
35 code = compile(fh.read(), fn, 'exec')
36 exec(code)
37
38 setup(name='python-telegram-bot',
39 version=__version__,
40 author='Leandro Toledo',
41 author_email='[email protected]',
42 license='LGPLv3',
43 url='https://python-telegram-bot.org/',
44 keywords='python telegram bot api wrapper',
45 description="We have made you a wrapper you can't refuse",
46 long_description=fd.read(),
47 packages=packages,
48 install_requires=requirements,
49 extras_require={
50 'json': 'ujson',
51 'socks': 'PySocks'
52 },
53 include_package_data=True,
54 classifiers=[
55 'Development Status :: 5 - Production/Stable',
56 'Intended Audience :: Developers',
57 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
58 'Operating System :: OS Independent',
59 'Topic :: Software Development :: Libraries :: Python Modules',
60 'Topic :: Communications :: Chat',
61 'Topic :: Internet',
62 'Programming Language :: Python',
63 'Programming Language :: Python :: 3',
64 'Programming Language :: Python :: 3.6',
65 'Programming Language :: Python :: 3.7',
66 'Programming Language :: Python :: 3.8',
67 ],)
68
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -64,4 +64,5 @@
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
],)
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -64,4 +64,5 @@\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n+ 'Programming Language :: Python :: 3.9',\n ],)\n", "issue": "Add support for Py 3.9\nWith Py 3.9 out, we should add it to the tests at some point. Maybe that can wait, until 3.9.x, x>0 is out, though\r\n\r\nNeed to check, if all the job thingies work out, as APS doesn't support py3.9 yet and there has been a [report](https://t.me/pythontelegrambotgroup/382731) that it doesn't work (with PTB).\r\n\r\nOn a related note: APS seems to be [preparing for v4.0](https://github.com/agronholm/apscheduler/issues/465), which will break some stuff, but also supports py3.9 and even uses the new ZoneInfo (also backporting to py3.6+), lifting the restriction to use `pytz` timezones. I already subscribed to releases. I guess updating APS in PTB should be done only when 4.x, x>0 is out and we're doing breaking things anyway \u2026\n", "before_files": [{"content": "#!/usr/bin/env python\n\"\"\"The setup and build script for the python-telegram-bot library.\"\"\"\n\nimport codecs\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\ndef requirements():\n \"\"\"Build the requirements list for this project\"\"\"\n requirements_list = []\n\n with open('requirements.txt') as requirements:\n for install in requirements:\n requirements_list.append(install.strip())\n\n return requirements_list\n\n\npackages = find_packages(exclude=['tests*'])\nrequirements = requirements()\n\n# Allow for a package install to not use the vendored urllib3\nUPSTREAM_URLLIB3_FLAG = '--with-upstream-urllib3'\nif UPSTREAM_URLLIB3_FLAG in sys.argv:\n sys.argv.remove(UPSTREAM_URLLIB3_FLAG)\n requirements.append('urllib3 >= 1.19.1')\n packages = [x for x in packages if not x.startswith('telegram.vendor.ptb_urllib3')]\n\nwith codecs.open('README.rst', 'r', 'utf-8') as fd:\n fn = os.path.join('telegram', 'version.py')\n with open(fn) as fh:\n code = compile(fh.read(), fn, 'exec')\n exec(code)\n\n setup(name='python-telegram-bot',\n version=__version__,\n author='Leandro Toledo',\n author_email='[email protected]',\n license='LGPLv3',\n url='https://python-telegram-bot.org/',\n keywords='python telegram bot api wrapper',\n description=\"We have made you a wrapper you can't refuse\",\n long_description=fd.read(),\n packages=packages,\n install_requires=requirements,\n extras_require={\n 'json': 'ujson',\n 'socks': 'PySocks'\n },\n include_package_data=True,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',\n 'Operating System :: OS Independent',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Communications :: Chat',\n 'Topic :: Internet',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n ],)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\"\"\"The setup and build script for the python-telegram-bot library.\"\"\"\n\nimport codecs\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\ndef requirements():\n \"\"\"Build the requirements list for this project\"\"\"\n requirements_list = []\n\n with open('requirements.txt') as 
requirements:\n for install in requirements:\n requirements_list.append(install.strip())\n\n return requirements_list\n\n\npackages = find_packages(exclude=['tests*'])\nrequirements = requirements()\n\n# Allow for a package install to not use the vendored urllib3\nUPSTREAM_URLLIB3_FLAG = '--with-upstream-urllib3'\nif UPSTREAM_URLLIB3_FLAG in sys.argv:\n sys.argv.remove(UPSTREAM_URLLIB3_FLAG)\n requirements.append('urllib3 >= 1.19.1')\n packages = [x for x in packages if not x.startswith('telegram.vendor.ptb_urllib3')]\n\nwith codecs.open('README.rst', 'r', 'utf-8') as fd:\n fn = os.path.join('telegram', 'version.py')\n with open(fn) as fh:\n code = compile(fh.read(), fn, 'exec')\n exec(code)\n\n setup(name='python-telegram-bot',\n version=__version__,\n author='Leandro Toledo',\n author_email='[email protected]',\n license='LGPLv3',\n url='https://python-telegram-bot.org/',\n keywords='python telegram bot api wrapper',\n description=\"We have made you a wrapper you can't refuse\",\n long_description=fd.read(),\n packages=packages,\n install_requires=requirements,\n extras_require={\n 'json': 'ujson',\n 'socks': 'PySocks'\n },\n include_package_data=True,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',\n 'Operating System :: OS Independent',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Communications :: Chat',\n 'Topic :: Internet',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n ],)\n", "path": "setup.py"}]} | 1,120 | 84 |
gh_patches_debug_20456 | rasdani/github-patches | git_diff | translate__pootle-5699 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Lost timeSince data for suggestions
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pootle/apps/pootle_misc/templatetags/locale.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) Pootle contributors.
4 #
5 # This file is a part of the Pootle project. It is distributed under the GPL3
6 # or later license. See the LICENSE file for a copy of the license and the
7 # AUTHORS file for copyright and authorship information.
8
9 from django import template
10 from django.utils.formats import get_format
11 from django.utils.translation import trans_real
12
13 from pootle.core.utils import dateformat
14
15
16 register = template.Library()
17
18
19 @register.simple_tag
20 def locale_dir():
21 """Returns current locale's direction."""
22 return trans_real.get_language_bidi() and "rtl" or "ltr"
23
24
25 @register.filter(name='dateformat')
26 def do_dateformat(value, use_format='c'):
27 """Formats a `value` date using `format`.
28
29 :param value: a datetime object.
30 :param use_format: a format string accepted by
31 :func:`django.utils.formats.get_format` or
32 :func:`django.utils.dateformat.format`. If none is set, the current
33 locale's default format will be used.
34 """
35 try:
36 use_format = get_format(use_format)
37 except AttributeError:
38 pass
39
40 return dateformat.format(value, use_format)
41
42
43 @register.simple_tag
44 def locale_align():
45 """Returns current locale's default alignment."""
46 return trans_real.get_language_bidi() and "right" or "left"
47
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pootle/apps/pootle_misc/templatetags/locale.py b/pootle/apps/pootle_misc/templatetags/locale.py
--- a/pootle/apps/pootle_misc/templatetags/locale.py
+++ b/pootle/apps/pootle_misc/templatetags/locale.py
@@ -6,11 +6,14 @@
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
+import calendar
+
from django import template
from django.utils.formats import get_format
from django.utils.translation import trans_real
from pootle.core.utils import dateformat
+from pootle.local.dates import timesince
register = template.Library()
@@ -40,6 +43,11 @@
return dateformat.format(value, use_format)
[email protected](name='relative_datetime_format')
+def do_relative_datetime_format(value):
+ return timesince(calendar.timegm(value.timetuple()))
+
+
@register.simple_tag
def locale_align():
"""Returns current locale's default alignment."""
| {"golden_diff": "diff --git a/pootle/apps/pootle_misc/templatetags/locale.py b/pootle/apps/pootle_misc/templatetags/locale.py\n--- a/pootle/apps/pootle_misc/templatetags/locale.py\n+++ b/pootle/apps/pootle_misc/templatetags/locale.py\n@@ -6,11 +6,14 @@\n # or later license. See the LICENSE file for a copy of the license and the\n # AUTHORS file for copyright and authorship information.\n \n+import calendar\n+\n from django import template\n from django.utils.formats import get_format\n from django.utils.translation import trans_real\n \n from pootle.core.utils import dateformat\n+from pootle.local.dates import timesince\n \n \n register = template.Library()\n@@ -40,6 +43,11 @@\n return dateformat.format(value, use_format)\n \n \[email protected](name='relative_datetime_format')\n+def do_relative_datetime_format(value):\n+ return timesince(calendar.timegm(value.timetuple()))\n+\n+\n @register.simple_tag\n def locale_align():\n \"\"\"Returns current locale's default alignment.\"\"\"\n", "issue": "Lost timeSince data for suggestions\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django import template\nfrom django.utils.formats import get_format\nfrom django.utils.translation import trans_real\n\nfrom pootle.core.utils import dateformat\n\n\nregister = template.Library()\n\n\[email protected]_tag\ndef locale_dir():\n \"\"\"Returns current locale's direction.\"\"\"\n return trans_real.get_language_bidi() and \"rtl\" or \"ltr\"\n\n\[email protected](name='dateformat')\ndef do_dateformat(value, use_format='c'):\n \"\"\"Formats a `value` date using `format`.\n\n :param value: a datetime object.\n :param use_format: a format string accepted by\n :func:`django.utils.formats.get_format` or\n :func:`django.utils.dateformat.format`. If none is set, the current\n locale's default format will be used.\n \"\"\"\n try:\n use_format = get_format(use_format)\n except AttributeError:\n pass\n\n return dateformat.format(value, use_format)\n\n\[email protected]_tag\ndef locale_align():\n \"\"\"Returns current locale's default alignment.\"\"\"\n return trans_real.get_language_bidi() and \"right\" or \"left\"\n", "path": "pootle/apps/pootle_misc/templatetags/locale.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport calendar\n\nfrom django import template\nfrom django.utils.formats import get_format\nfrom django.utils.translation import trans_real\n\nfrom pootle.core.utils import dateformat\nfrom pootle.local.dates import timesince\n\n\nregister = template.Library()\n\n\[email protected]_tag\ndef locale_dir():\n \"\"\"Returns current locale's direction.\"\"\"\n return trans_real.get_language_bidi() and \"rtl\" or \"ltr\"\n\n\[email protected](name='dateformat')\ndef do_dateformat(value, use_format='c'):\n \"\"\"Formats a `value` date using `format`.\n\n :param value: a datetime object.\n :param use_format: a format string accepted by\n :func:`django.utils.formats.get_format` or\n :func:`django.utils.dateformat.format`. 
If none is set, the current\n locale's default format will be used.\n \"\"\"\n try:\n use_format = get_format(use_format)\n except AttributeError:\n pass\n\n return dateformat.format(value, use_format)\n\n\[email protected](name='relative_datetime_format')\ndef do_relative_datetime_format(value):\n return timesince(calendar.timegm(value.timetuple()))\n\n\[email protected]_tag\ndef locale_align():\n \"\"\"Returns current locale's default alignment.\"\"\"\n return trans_real.get_language_bidi() and \"right\" or \"left\"\n", "path": "pootle/apps/pootle_misc/templatetags/locale.py"}]} | 671 | 243 |
gh_patches_debug_1597 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-915 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fix a few issues in Django example
The Django example has a few issues; fix them.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/examples/django/pages/views.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 from django.http import HttpResponse
15
16 from opentelemetry import trace
17 from opentelemetry.sdk.trace import TracerProvider
18 from opentelemetry.sdk.trace.export import (
19 ConsoleSpanExporter,
20 SimpleExportSpanProcessor,
21 )
22
23 trace.set_tracer_provider(TracerProvider())
24 tracer = trace.get_tracer_provider().get_tracer(__name__)
25
26 trace.get_tracer_provider().add_span_processor(
27 SimpleExportSpanProcessor(ConsoleSpanExporter())
28 )
29
30
31 def home_page_view(request):
32 return HttpResponse("Hello, world")
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docs/examples/django/pages/views.py b/docs/examples/django/pages/views.py
--- a/docs/examples/django/pages/views.py
+++ b/docs/examples/django/pages/views.py
@@ -21,7 +21,6 @@
)
trace.set_tracer_provider(TracerProvider())
-tracer = trace.get_tracer_provider().get_tracer(__name__)
trace.get_tracer_provider().add_span_processor(
SimpleExportSpanProcessor(ConsoleSpanExporter())
| {"golden_diff": "diff --git a/docs/examples/django/pages/views.py b/docs/examples/django/pages/views.py\n--- a/docs/examples/django/pages/views.py\n+++ b/docs/examples/django/pages/views.py\n@@ -21,7 +21,6 @@\n )\n \n trace.set_tracer_provider(TracerProvider())\n-tracer = trace.get_tracer_provider().get_tracer(__name__)\n \n trace.get_tracer_provider().add_span_processor(\n SimpleExportSpanProcessor(ConsoleSpanExporter())\n", "issue": "Fix a few issues in Django example\nThe Django example has a few issues, fix them.\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom django.http import HttpResponse\n\nfrom opentelemetry import trace\nfrom opentelemetry.sdk.trace import TracerProvider\nfrom opentelemetry.sdk.trace.export import (\n ConsoleSpanExporter,\n SimpleExportSpanProcessor,\n)\n\ntrace.set_tracer_provider(TracerProvider())\ntracer = trace.get_tracer_provider().get_tracer(__name__)\n\ntrace.get_tracer_provider().add_span_processor(\n SimpleExportSpanProcessor(ConsoleSpanExporter())\n)\n\n\ndef home_page_view(request):\n return HttpResponse(\"Hello, world\")\n", "path": "docs/examples/django/pages/views.py"}], "after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom django.http import HttpResponse\n\nfrom opentelemetry import trace\nfrom opentelemetry.sdk.trace import TracerProvider\nfrom opentelemetry.sdk.trace.export import (\n ConsoleSpanExporter,\n SimpleExportSpanProcessor,\n)\n\ntrace.set_tracer_provider(TracerProvider())\n\ntrace.get_tracer_provider().add_span_processor(\n SimpleExportSpanProcessor(ConsoleSpanExporter())\n)\n\n\ndef home_page_view(request):\n return HttpResponse(\"Hello, world\")\n", "path": "docs/examples/django/pages/views.py"}]} | 573 | 99 |
gh_patches_debug_21112 | rasdani/github-patches | git_diff | bridgecrewio__checkov-5171 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Checkov v2.3.261 fails with CKV_AWS_356 for KMS actions which must specify 'all resources'
**Describe the issue**
Checkov v2.3.261's CKV_AWS_356 check highlights IAM policies that are overly permissive, but it incorrectly flags actions for KMS policies that must target all resources, potentially scoped with conditional access per https://docs.aws.amazon.com/kms/latest/developerguide/key-policy-services.html
This is similar to https://github.com/bridgecrewio/checkov/issues/5134, where certain actions like 'list' require all resources.
**Examples**
```
data "aws_iam_policy_document" "myKmsKey" {
actions = [
"kms:GenerateDataKey",
"kms:Decrypt"
]
resources = [
"*"
]
condition {
test = "ArnEquals"
variable = "aws:SourceArn"
values = [
<SOME OTHER RESOURCE>.arn
]
}
}
}
```
**Version (please complete the following information):**
- Checkov Version 2.3.261
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `checkov/terraform/checks/utils/iam_terraform_document_to_policy_converter.py`
Content:
```
1 from typing import Dict, List, Any
2
3 from checkov.common.util.data_structures_utils import pickle_deepcopy
4
5
6 def convert_terraform_conf_to_iam_policy(conf: Dict[str, List[Dict[str, Any]]]) -> Dict[str, List[Dict[str, Any]]]:
7 """
8 converts terraform parsed configuration to iam policy document
9 """
10 result = pickle_deepcopy(conf)
11 if "statement" in result.keys():
12 result["Statement"] = result.pop("statement")
13 for statement in result["Statement"]:
14 if "actions" in statement:
15 statement["Action"] = statement.pop("actions")[0]
16 if "resources" in statement:
17 statement["Resource"] = statement.pop("resources")[0]
18 if "not_actions" in statement:
19 statement["NotAction"] = statement.pop("not_actions")[0]
20 if "not_resources" in statement:
21 statement["NotResource"] = statement.pop("not_resources")[0]
22 if "effect" in statement:
23 statement["Effect"] = statement.pop("effect")[0]
24 if "effect" not in statement and "Effect" not in statement:
25 statement["Effect"] = "Allow"
26 return result
27
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/checkov/terraform/checks/utils/iam_terraform_document_to_policy_converter.py b/checkov/terraform/checks/utils/iam_terraform_document_to_policy_converter.py
--- a/checkov/terraform/checks/utils/iam_terraform_document_to_policy_converter.py
+++ b/checkov/terraform/checks/utils/iam_terraform_document_to_policy_converter.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from typing import Dict, List, Any
from checkov.common.util.data_structures_utils import pickle_deepcopy
@@ -23,4 +25,13 @@
statement["Effect"] = statement.pop("effect")[0]
if "effect" not in statement and "Effect" not in statement:
statement["Effect"] = "Allow"
+ if "condition" in statement:
+ conditions = statement.pop("condition")
+ if conditions and isinstance(conditions, list):
+ statement["Condition"] = {}
+ for condition in conditions:
+ cond_operator = condition["test"][0]
+ cond_key = condition["variable"][0]
+ cond_value = condition["values"][0]
+ statement["Condition"].setdefault(cond_operator, {})[cond_key] = cond_value
return result
| {"golden_diff": "diff --git a/checkov/terraform/checks/utils/iam_terraform_document_to_policy_converter.py b/checkov/terraform/checks/utils/iam_terraform_document_to_policy_converter.py\n--- a/checkov/terraform/checks/utils/iam_terraform_document_to_policy_converter.py\n+++ b/checkov/terraform/checks/utils/iam_terraform_document_to_policy_converter.py\n@@ -1,3 +1,5 @@\n+from __future__ import annotations\n+\n from typing import Dict, List, Any\n \n from checkov.common.util.data_structures_utils import pickle_deepcopy\n@@ -23,4 +25,13 @@\n statement[\"Effect\"] = statement.pop(\"effect\")[0]\n if \"effect\" not in statement and \"Effect\" not in statement:\n statement[\"Effect\"] = \"Allow\"\n+ if \"condition\" in statement:\n+ conditions = statement.pop(\"condition\")\n+ if conditions and isinstance(conditions, list):\n+ statement[\"Condition\"] = {}\n+ for condition in conditions:\n+ cond_operator = condition[\"test\"][0]\n+ cond_key = condition[\"variable\"][0]\n+ cond_value = condition[\"values\"][0]\n+ statement[\"Condition\"].setdefault(cond_operator, {})[cond_key] = cond_value\n return result\n", "issue": "Checkov v2.3.261 fails with CKV_AWS_356 for KMS actions which must specify 'all resources'\n**Describe the issue**\r\nCheckov v2.3.261 fails with CKV_AWS_356 highlights IAM policies which are overly permissive but is incorrectly identifying actions for KMS policies which need to be for all resources potentially scoped with conditional access per https://docs.aws.amazon.com/kms/latest/developerguide/key-policy-services.html\r\n\r\nSimilar issue for https://github.com/bridgecrewio/checkov/issues/5134 where certain actions like 'list' require all resources.\r\n\r\n**Examples**\r\n```\r\ndata \"aws_iam_policy_document\" \"myKmsKey\" {\r\n actions = [\r\n \"kms:GenerateDataKey\",\r\n \"kms:Decrypt\"\r\n ]\r\n resources = [\r\n \"*\"\r\n ]\r\n\r\n condition {\r\n test = \"ArnEquals\"\r\n variable = \"aws:SourceArn\"\r\n values = [\r\n <SOME OTHER RESOURCE>.arn\r\n ]\r\n }\r\n }\r\n}\r\n```\r\n**Version (please complete the following information):**\r\n - Checkov Version 2.3.261\r\n\n", "before_files": [{"content": "from typing import Dict, List, Any\n\nfrom checkov.common.util.data_structures_utils import pickle_deepcopy\n\n\ndef convert_terraform_conf_to_iam_policy(conf: Dict[str, List[Dict[str, Any]]]) -> Dict[str, List[Dict[str, Any]]]:\n \"\"\"\n converts terraform parsed configuration to iam policy document\n \"\"\"\n result = pickle_deepcopy(conf)\n if \"statement\" in result.keys():\n result[\"Statement\"] = result.pop(\"statement\")\n for statement in result[\"Statement\"]:\n if \"actions\" in statement:\n statement[\"Action\"] = statement.pop(\"actions\")[0]\n if \"resources\" in statement:\n statement[\"Resource\"] = statement.pop(\"resources\")[0]\n if \"not_actions\" in statement:\n statement[\"NotAction\"] = statement.pop(\"not_actions\")[0]\n if \"not_resources\" in statement:\n statement[\"NotResource\"] = statement.pop(\"not_resources\")[0]\n if \"effect\" in statement:\n statement[\"Effect\"] = statement.pop(\"effect\")[0]\n if \"effect\" not in statement and \"Effect\" not in statement:\n statement[\"Effect\"] = \"Allow\"\n return result\n", "path": "checkov/terraform/checks/utils/iam_terraform_document_to_policy_converter.py"}], "after_files": [{"content": "from __future__ import annotations\n\nfrom typing import Dict, List, Any\n\nfrom checkov.common.util.data_structures_utils import pickle_deepcopy\n\n\ndef convert_terraform_conf_to_iam_policy(conf: Dict[str, 
List[Dict[str, Any]]]) -> Dict[str, List[Dict[str, Any]]]:\n \"\"\"\n converts terraform parsed configuration to iam policy document\n \"\"\"\n result = pickle_deepcopy(conf)\n if \"statement\" in result.keys():\n result[\"Statement\"] = result.pop(\"statement\")\n for statement in result[\"Statement\"]:\n if \"actions\" in statement:\n statement[\"Action\"] = statement.pop(\"actions\")[0]\n if \"resources\" in statement:\n statement[\"Resource\"] = statement.pop(\"resources\")[0]\n if \"not_actions\" in statement:\n statement[\"NotAction\"] = statement.pop(\"not_actions\")[0]\n if \"not_resources\" in statement:\n statement[\"NotResource\"] = statement.pop(\"not_resources\")[0]\n if \"effect\" in statement:\n statement[\"Effect\"] = statement.pop(\"effect\")[0]\n if \"effect\" not in statement and \"Effect\" not in statement:\n statement[\"Effect\"] = \"Allow\"\n if \"condition\" in statement:\n conditions = statement.pop(\"condition\")\n if conditions and isinstance(conditions, list):\n statement[\"Condition\"] = {}\n for condition in conditions:\n cond_operator = condition[\"test\"][0]\n cond_key = condition[\"variable\"][0]\n cond_value = condition[\"values\"][0]\n statement[\"Condition\"].setdefault(cond_operator, {})[cond_key] = cond_value\n return result\n", "path": "checkov/terraform/checks/utils/iam_terraform_document_to_policy_converter.py"}]} | 818 | 270 |
gh_patches_debug_8908 | rasdani/github-patches | git_diff | facebookresearch__ParlAI-371 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Slow loading of image features during training
For VQA tasks, it takes a massive amount of time to load the image features for training. The overhead is so large that, for the same model, it increases the training time by a factor of 50.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `parlai/core/image_featurizers.py`
Content:
```
1 # All rights reserved.
2 # This source code is licensed under the BSD-style license found in the
3 # LICENSE file in the root directory of this source tree. An additional grant
4 # of patent rights can be found in the PATENTS file in the same directory.
5
6 import parlai.core.build_data as build_data
7
8 import os
9 import copy
10 import numpy as np
11 from PIL import Image
12
13 _greyscale = ' .,:;crsA23hHG#98&@'
14
15 class ImageLoader():
16 """Extract image feature using pretrained CNN network.
17 """
18 def __init__(self, opt):
19 self.opt = copy.deepcopy(opt)
20 self.netCNN = None
21
22 def init_cnn(self):
23 """Lazy initialization of preprocessor model in case we don't need any image preprocessing."""
24 try:
25 import torch
26 except ModuleNotFoundError:
27 raise ModuleNotFoundError('Need to install Pytorch: go to pytorch.org')
28 from torch.autograd import Variable
29 import torchvision
30 import torchvision.transforms as transforms
31 import torch.nn as nn
32
33 opt = self.opt
34 self.image_size = opt['image_size']
35 self.crop_size = opt['image_cropsize']
36 self.datatype = opt['datatype']
37 self.image_mode = opt['image_mode']
38
39 opt['cuda'] = not opt['no_cuda'] and torch.cuda.is_available()
40 self.use_cuda = opt['cuda']
41
42 if self.use_cuda:
43 print('[ Using CUDA ]')
44 torch.cuda.set_device(opt['gpu'])
45
46 cnn_type, layer_num = self.image_mode_switcher()
47
48 # initialize the pretrained CNN using pytorch.
49 CNN = getattr(torchvision.models, cnn_type)
50
51 # cut off the additional layer.
52 self.netCNN = nn.Sequential(*list(CNN(pretrained=True).children())[:layer_num])
53
54 # initialize the transform function using torch vision.
55 self.transform = transforms.Compose([
56 transforms.Scale(self.image_size),
57 transforms.CenterCrop(self.crop_size),
58 transforms.ToTensor(),
59 transforms.Normalize(mean=[0.485, 0.456, 0.406],
60 std=[0.229, 0.224, 0.225])
61 ])
62
63 # container for single image
64 self.xs = torch.FloatTensor(1, 3, self.crop_size, self.crop_size).fill_(0)
65
66 if self.use_cuda:
67 self.cuda()
68 self.xs = self.xs.cuda()
69
70 # make self.xs variable.
71 self.xs = Variable(self.xs)
72
73 def cuda(self):
74 self.netCNN.cuda()
75
76 def save(self, feature, path):
77 np.save(path, feature)
78
79 def image_mode_switcher(self):
80 switcher = {
81 'resnet152': ['resnet152', -1],
82 'resnet101': ['resnet101', -1],
83 'resnet50': ['resnet50', -1],
84 'resnet34': ['resnet34', -1],
85 'resnet18': ['resnet18', -1],
86 'resnet152_spatial': ['resnet152', -2],
87 'resnet101_spatial': ['resnet101', -2],
88 'resnet50_spatial': ['resnet50', -2],
89 'resnet34_spatial': ['resnet34', -2],
90 'resnet18_spatial': ['resnet18', -2],
91 }
92
93 if self.image_mode not in switcher:
94 raise NotImplementedError('image preprocessing mode' +
95 '{} not supported yet'.format(self.image_mode))
96
97 return switcher.get(self.image_mode)
98
99 def extract(self, image, path):
100 # check whether initlize CNN network.
101 if not self.netCNN:
102 self.init_cnn()
103
104 self.xs.data.copy_(self.transform(image))
105 # extract the image feature
106 feature = self.netCNN(self.xs)
107 feature = feature.cpu().data.numpy()
108 # save the feature
109 self.save(feature, path)
110 return feature
111
112 def img_to_ascii(self, path):
113 im = Image.open(path)
114 im.thumbnail((60, 40), Image.BICUBIC)
115 im = im.convert('L')
116 asc = []
117 for y in range(0, im.size[1]):
118 for x in range(0, im.size[0]):
119 lum = 255 - im.getpixel((x, y))
120 asc.append(_greyscale[lum * len(_greyscale) // 256])
121 asc.append('\n')
122 return ''.join(asc)
123
124 def load(self, path):
125 opt = self.opt
126 mode = opt.get('image_mode', 'raw')
127 if mode is None or mode == 'none':
128 # don't need to load images
129 return None
130 elif mode == 'raw':
131 # raw just returns RGB values
132 return Image.open(path).convert('RGB')
133 elif mode == 'ascii':
134 # convert images to ascii ¯\_(ツ)_/¯
135 return self.img_to_ascii(path)
136 else:
137 # otherwise, looks for preprocessed version under 'mode' directory
138 prepath, imagefn = os.path.split(path)
139
140 dpath = os.path.join(prepath, mode)
141
142 if not os.path.exists(dpath):
143 build_data.make_dir(dpath)
144
145 imagefn = imagefn.split('.')[0]
146 imagefn = imagefn + '.npy'
147 new_path = os.path.join(prepath, mode, imagefn)
148
149 if not os.path.isfile(new_path):
150 return self.extract(Image.open(path).convert('RGB'), new_path)
151 else:
152 return np.load(new_path)
153
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/parlai/core/image_featurizers.py b/parlai/core/image_featurizers.py
--- a/parlai/core/image_featurizers.py
+++ b/parlai/core/image_featurizers.py
@@ -9,6 +9,7 @@
import copy
import numpy as np
from PIL import Image
+from functools import lru_cache
_greyscale = ' .,:;crsA23hHG#98&@'
@@ -121,6 +122,7 @@
asc.append('\n')
return ''.join(asc)
+ @lru_cache(maxsize=None)
def load(self, path):
opt = self.opt
mode = opt.get('image_mode', 'raw')
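
The patch memoizes `ImageLoader.load` with `functools.lru_cache`, so repeated requests for the same image path skip the disk read and CNN re-extraction that made training slow. A minimal sketch of the caching behaviour, using a hypothetical `expensive_load` function as a stand-in for `ImageLoader.load`:

```python
# Sketch only: `expensive_load` stands in for ImageLoader.load.
from functools import lru_cache

@lru_cache(maxsize=None)
def expensive_load(path):
    print(f'loading {path}')   # executes only on the first call per path
    return path.upper()        # placeholder for the real feature array

expensive_load('img1.jpg')     # prints 'loading img1.jpg'
expensive_load('img1.jpg')     # answered from the cache, no print
```

One caveat of this design: applied to a method, `lru_cache` includes `self` in the cache key, so the cache also pins the `ImageLoader` instance and every cached feature array in memory for the life of the process.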
| {"golden_diff": "diff --git a/parlai/core/image_featurizers.py b/parlai/core/image_featurizers.py\n--- a/parlai/core/image_featurizers.py\n+++ b/parlai/core/image_featurizers.py\n@@ -9,6 +9,7 @@\n import copy\n import numpy as np\n from PIL import Image\n+from functools import lru_cache\n \n _greyscale = ' .,:;crsA23hHG#98&@'\n \n@@ -121,6 +122,7 @@\n asc.append('\\n')\n return ''.join(asc)\n \n+ @lru_cache(maxsize=None)\n def load(self, path):\n opt = self.opt\n mode = opt.get('image_mode', 'raw')\n", "issue": "Slow loading of image features during training\nFor VQA tasks, it takes massive amount of time to load the image features for training. The overhead is so much that for the same model it increases the training time by a factor of 50.\n", "before_files": [{"content": "# All rights reserved.\n# This source code is licensed under the BSD-style license found in the\n# LICENSE file in the root directory of this source tree. An additional grant\n# of patent rights can be found in the PATENTS file in the same directory.\n\nimport parlai.core.build_data as build_data\n\nimport os\nimport copy\nimport numpy as np\nfrom PIL import Image\n\n_greyscale = ' .,:;crsA23hHG#98&@'\n\nclass ImageLoader():\n \"\"\"Extract image feature using pretrained CNN network.\n \"\"\"\n def __init__(self, opt):\n self.opt = copy.deepcopy(opt)\n self.netCNN = None\n\n def init_cnn(self):\n \"\"\"Lazy initialization of preprocessor model in case we don't need any image preprocessing.\"\"\"\n try:\n import torch\n except ModuleNotFoundError:\n raise ModuleNotFoundError('Need to install Pytorch: go to pytorch.org')\n from torch.autograd import Variable\n import torchvision\n import torchvision.transforms as transforms\n import torch.nn as nn\n\n opt = self.opt\n self.image_size = opt['image_size']\n self.crop_size = opt['image_cropsize']\n self.datatype = opt['datatype']\n self.image_mode = opt['image_mode']\n\n opt['cuda'] = not opt['no_cuda'] and torch.cuda.is_available()\n self.use_cuda = opt['cuda']\n\n if self.use_cuda:\n print('[ Using CUDA ]')\n torch.cuda.set_device(opt['gpu'])\n\n cnn_type, layer_num = self.image_mode_switcher()\n\n # initialize the pretrained CNN using pytorch.\n CNN = getattr(torchvision.models, cnn_type)\n\n # cut off the additional layer.\n self.netCNN = nn.Sequential(*list(CNN(pretrained=True).children())[:layer_num])\n\n # initialize the transform function using torch vision.\n self.transform = transforms.Compose([\n transforms.Scale(self.image_size),\n transforms.CenterCrop(self.crop_size),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n ])\n\n # container for single image\n self.xs = torch.FloatTensor(1, 3, self.crop_size, self.crop_size).fill_(0)\n\n if self.use_cuda:\n self.cuda()\n self.xs = self.xs.cuda()\n\n # make self.xs variable.\n self.xs = Variable(self.xs)\n\n def cuda(self):\n self.netCNN.cuda()\n\n def save(self, feature, path):\n np.save(path, feature)\n\n def image_mode_switcher(self):\n switcher = {\n 'resnet152': ['resnet152', -1],\n 'resnet101': ['resnet101', -1],\n 'resnet50': ['resnet50', -1],\n 'resnet34': ['resnet34', -1],\n 'resnet18': ['resnet18', -1],\n 'resnet152_spatial': ['resnet152', -2],\n 'resnet101_spatial': ['resnet101', -2],\n 'resnet50_spatial': ['resnet50', -2],\n 'resnet34_spatial': ['resnet34', -2],\n 'resnet18_spatial': ['resnet18', -2],\n }\n\n if self.image_mode not in switcher:\n raise NotImplementedError('image preprocessing mode' +\n '{} not supported 
yet'.format(self.image_mode))\n\n return switcher.get(self.image_mode)\n\n def extract(self, image, path):\n # check whether initlize CNN network.\n if not self.netCNN:\n self.init_cnn()\n\n self.xs.data.copy_(self.transform(image))\n # extract the image feature\n feature = self.netCNN(self.xs)\n feature = feature.cpu().data.numpy()\n # save the feature\n self.save(feature, path)\n return feature\n\n def img_to_ascii(self, path):\n im = Image.open(path)\n im.thumbnail((60, 40), Image.BICUBIC)\n im = im.convert('L')\n asc = []\n for y in range(0, im.size[1]):\n for x in range(0, im.size[0]):\n lum = 255 - im.getpixel((x, y))\n asc.append(_greyscale[lum * len(_greyscale) // 256])\n asc.append('\\n')\n return ''.join(asc)\n\n def load(self, path):\n opt = self.opt\n mode = opt.get('image_mode', 'raw')\n if mode is None or mode == 'none':\n # don't need to load images\n return None\n elif mode == 'raw':\n # raw just returns RGB values\n return Image.open(path).convert('RGB')\n elif mode == 'ascii':\n # convert images to ascii \u00af\\_(\u30c4)_/\u00af\n return self.img_to_ascii(path)\n else:\n # otherwise, looks for preprocessed version under 'mode' directory\n prepath, imagefn = os.path.split(path)\n\n dpath = os.path.join(prepath, mode)\n\n if not os.path.exists(dpath):\n build_data.make_dir(dpath)\n\n imagefn = imagefn.split('.')[0]\n imagefn = imagefn + '.npy'\n new_path = os.path.join(prepath, mode, imagefn)\n\n if not os.path.isfile(new_path):\n return self.extract(Image.open(path).convert('RGB'), new_path)\n else:\n return np.load(new_path)\n", "path": "parlai/core/image_featurizers.py"}], "after_files": [{"content": "# All rights reserved.\n# This source code is licensed under the BSD-style license found in the\n# LICENSE file in the root directory of this source tree. 
An additional grant\n# of patent rights can be found in the PATENTS file in the same directory.\n\nimport parlai.core.build_data as build_data\n\nimport os\nimport copy\nimport numpy as np\nfrom PIL import Image\nfrom functools import lru_cache\n\n_greyscale = ' .,:;crsA23hHG#98&@'\n\nclass ImageLoader():\n \"\"\"Extract image feature using pretrained CNN network.\n \"\"\"\n def __init__(self, opt):\n self.opt = copy.deepcopy(opt)\n self.netCNN = None\n\n def init_cnn(self):\n \"\"\"Lazy initialization of preprocessor model in case we don't need any image preprocessing.\"\"\"\n try:\n import torch\n except ModuleNotFoundError:\n raise ModuleNotFoundError('Need to install Pytorch: go to pytorch.org')\n from torch.autograd import Variable\n import torchvision\n import torchvision.transforms as transforms\n import torch.nn as nn\n\n opt = self.opt\n self.image_size = opt['image_size']\n self.crop_size = opt['image_cropsize']\n self.datatype = opt['datatype']\n self.image_mode = opt['image_mode']\n\n opt['cuda'] = not opt['no_cuda'] and torch.cuda.is_available()\n self.use_cuda = opt['cuda']\n\n if self.use_cuda:\n print('[ Using CUDA ]')\n torch.cuda.set_device(opt['gpu'])\n\n cnn_type, layer_num = self.image_mode_switcher()\n\n # initialize the pretrained CNN using pytorch.\n CNN = getattr(torchvision.models, cnn_type)\n\n # cut off the additional layer.\n self.netCNN = nn.Sequential(*list(CNN(pretrained=True).children())[:layer_num])\n\n # initialize the transform function using torch vision.\n self.transform = transforms.Compose([\n transforms.Scale(self.image_size),\n transforms.CenterCrop(self.crop_size),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n ])\n\n # container for single image\n self.xs = torch.FloatTensor(1, 3, self.crop_size, self.crop_size).fill_(0)\n\n if self.use_cuda:\n self.cuda()\n self.xs = self.xs.cuda()\n\n # make self.xs variable.\n self.xs = Variable(self.xs)\n\n def cuda(self):\n self.netCNN.cuda()\n\n def save(self, feature, path):\n np.save(path, feature)\n\n def image_mode_switcher(self):\n switcher = {\n 'resnet152': ['resnet152', -1],\n 'resnet101': ['resnet101', -1],\n 'resnet50': ['resnet50', -1],\n 'resnet34': ['resnet34', -1],\n 'resnet18': ['resnet18', -1],\n 'resnet152_spatial': ['resnet152', -2],\n 'resnet101_spatial': ['resnet101', -2],\n 'resnet50_spatial': ['resnet50', -2],\n 'resnet34_spatial': ['resnet34', -2],\n 'resnet18_spatial': ['resnet18', -2],\n }\n\n if self.image_mode not in switcher:\n raise NotImplementedError('image preprocessing mode' +\n '{} not supported yet'.format(self.image_mode))\n\n return switcher.get(self.image_mode)\n\n def extract(self, image, path):\n # check whether initlize CNN network.\n if not self.netCNN:\n self.init_cnn()\n\n self.xs.data.copy_(self.transform(image))\n # extract the image feature\n feature = self.netCNN(self.xs)\n feature = feature.cpu().data.numpy()\n # save the feature\n self.save(feature, path)\n return feature\n\n def img_to_ascii(self, path):\n im = Image.open(path)\n im.thumbnail((60, 40), Image.BICUBIC)\n im = im.convert('L')\n asc = []\n for y in range(0, im.size[1]):\n for x in range(0, im.size[0]):\n lum = 255 - im.getpixel((x, y))\n asc.append(_greyscale[lum * len(_greyscale) // 256])\n asc.append('\\n')\n return ''.join(asc)\n\n @lru_cache(maxsize=None)\n def load(self, path):\n opt = self.opt\n mode = opt.get('image_mode', 'raw')\n if mode is None or mode == 'none':\n # don't need to load images\n return None\n elif mode == 
'raw':\n # raw just returns RGB values\n return Image.open(path).convert('RGB')\n elif mode == 'ascii':\n # convert images to ascii \u00af\\_(\u30c4)_/\u00af\n return self.img_to_ascii(path)\n else:\n # otherwise, looks for preprocessed version under 'mode' directory\n prepath, imagefn = os.path.split(path)\n\n dpath = os.path.join(prepath, mode)\n\n if not os.path.exists(dpath):\n build_data.make_dir(dpath)\n\n imagefn = imagefn.split('.')[0]\n imagefn = imagefn + '.npy'\n new_path = os.path.join(prepath, mode, imagefn)\n\n if not os.path.isfile(new_path):\n return self.extract(Image.open(path).convert('RGB'), new_path)\n else:\n return np.load(new_path)\n", "path": "parlai/core/image_featurizers.py"}]} | 1,926 | 169 |
gh_patches_debug_35693 | rasdani/github-patches | git_diff | falconry__falcon-1987 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Update asgi look examples to use aioredis 2
A new major version of aioredis was released, and it has some API changes.
This is the changelog https://github.com/aio-libs/aioredis-py/blob/master/CHANGELOG.md
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/asgilook/asgilook/config.py`
Content:
```
1 import os
2 import pathlib
3 import uuid
4
5 import aioredis
6
7
8 class Config:
9 DEFAULT_CONFIG_PATH = '/tmp/asgilook'
10 DEFAULT_MIN_THUMB_SIZE = 64
11 DEFAULT_REDIS_HOST = 'redis://localhost'
12 DEFAULT_REDIS_POOL = aioredis.create_redis_pool
13 DEFAULT_UUID_GENERATOR = uuid.uuid4
14
15 def __init__(self):
16 self.storage_path = pathlib.Path(
17 os.environ.get('ASGI_LOOK_STORAGE_PATH', self.DEFAULT_CONFIG_PATH)
18 )
19 self.storage_path.mkdir(parents=True, exist_ok=True)
20
21 self.create_redis_pool = Config.DEFAULT_REDIS_POOL
22 self.min_thumb_size = self.DEFAULT_MIN_THUMB_SIZE
23 self.redis_host = self.DEFAULT_REDIS_HOST
24 self.uuid_generator = Config.DEFAULT_UUID_GENERATOR
25
```
Path: `examples/asgilook/asgilook/cache.py`
Content:
```
1 import msgpack
2
3
4 class RedisCache:
5 PREFIX = 'asgilook:'
6 INVALIDATE_ON = frozenset({'DELETE', 'POST', 'PUT'})
7 CACHE_HEADER = 'X-ASGILook-Cache'
8 TTL = 3600
9
10 def __init__(self, config):
11 self._config = config
12
13 # NOTE(vytas): To be initialized upon application startup (see the
14 # method below).
15 self._redis = None
16
17 async def _serialize_response(self, resp):
18 data = await resp.render_body()
19 return msgpack.packb([resp.content_type, data], use_bin_type=True)
20
21 def _deserialize_response(self, resp, data):
22 resp.content_type, resp.data = msgpack.unpackb(data, raw=False)
23 resp.complete = True
24 resp.context.cached = True
25
26 async def process_startup(self, scope, event):
27 if self._redis is None:
28 self._redis = await self._config.create_redis_pool(self._config.redis_host)
29
30 async def process_request(self, req, resp):
31 resp.context.cached = False
32
33 if req.method in self.INVALIDATE_ON:
34 return
35
36 key = f'{self.PREFIX}/{req.path}'
37 data = await self._redis.get(key)
38 if data is not None:
39 self._deserialize_response(resp, data)
40 resp.set_header(self.CACHE_HEADER, 'Hit')
41 else:
42 resp.set_header(self.CACHE_HEADER, 'Miss')
43
44 async def process_response(self, req, resp, resource, req_succeeded):
45 if not req_succeeded:
46 return
47
48 key = f'{self.PREFIX}/{req.path}'
49
50 if req.method in self.INVALIDATE_ON:
51 await self._redis.delete(key)
52 elif not resp.context.cached:
53 data = await self._serialize_response(resp)
54 await self._redis.set(key, data, expire=self.TTL)
55
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/asgilook/asgilook/cache.py b/examples/asgilook/asgilook/cache.py
--- a/examples/asgilook/asgilook/cache.py
+++ b/examples/asgilook/asgilook/cache.py
@@ -9,10 +9,7 @@
def __init__(self, config):
self._config = config
-
- # NOTE(vytas): To be initialized upon application startup (see the
- # method below).
- self._redis = None
+ self._redis = self._config.redis_from_url(self._config.redis_host)
async def _serialize_response(self, resp):
data = await resp.render_body()
@@ -24,8 +21,10 @@
resp.context.cached = True
async def process_startup(self, scope, event):
- if self._redis is None:
- self._redis = await self._config.create_redis_pool(self._config.redis_host)
+ await self._redis.ping()
+
+ async def process_shutdown(self, scope, event):
+ await self._redis.close()
async def process_request(self, req, resp):
resp.context.cached = False
@@ -51,4 +50,4 @@
await self._redis.delete(key)
elif not resp.context.cached:
data = await self._serialize_response(resp)
- await self._redis.set(key, data, expire=self.TTL)
+ await self._redis.set(key, data, ex=self.TTL)
diff --git a/examples/asgilook/asgilook/config.py b/examples/asgilook/asgilook/config.py
--- a/examples/asgilook/asgilook/config.py
+++ b/examples/asgilook/asgilook/config.py
@@ -9,7 +9,7 @@
DEFAULT_CONFIG_PATH = '/tmp/asgilook'
DEFAULT_MIN_THUMB_SIZE = 64
DEFAULT_REDIS_HOST = 'redis://localhost'
- DEFAULT_REDIS_POOL = aioredis.create_redis_pool
+ DEFAULT_REDIS_FROM_URL = aioredis.from_url
DEFAULT_UUID_GENERATOR = uuid.uuid4
def __init__(self):
@@ -18,7 +18,7 @@
)
self.storage_path.mkdir(parents=True, exist_ok=True)
- self.create_redis_pool = Config.DEFAULT_REDIS_POOL
+ self.redis_from_url = Config.DEFAULT_REDIS_FROM_URL
self.min_thumb_size = self.DEFAULT_MIN_THUMB_SIZE
self.redis_host = self.DEFAULT_REDIS_HOST
self.uuid_generator = Config.DEFAULT_UUID_GENERATOR
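
The diff tracks the aioredis 2.x API renames called out in the issue: `aioredis.create_redis_pool` becomes `aioredis.from_url`, the `expire=` keyword of `set()` becomes `ex=`, and connections are closed explicitly. A minimal sketch of the 2.x client surface the patched example relies on, assuming a Redis server reachable at `redis://localhost`:

```python
# Sketch of the aioredis 2.x calls used by the patch; assumes a local Redis.
import asyncio

import aioredis

async def main():
    redis = aioredis.from_url('redis://localhost')   # replaces create_redis_pool
    await redis.ping()                               # startup health check
    await redis.set('greeting', b'hello', ex=3600)   # ex= replaces expire=
    print(await redis.get('greeting'))
    await redis.close()                              # explicit shutdown

asyncio.run(main())
```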
| {"golden_diff": "diff --git a/examples/asgilook/asgilook/cache.py b/examples/asgilook/asgilook/cache.py\n--- a/examples/asgilook/asgilook/cache.py\n+++ b/examples/asgilook/asgilook/cache.py\n@@ -9,10 +9,7 @@\n \n def __init__(self, config):\n self._config = config\n-\n- # NOTE(vytas): To be initialized upon application startup (see the\n- # method below).\n- self._redis = None\n+ self._redis = self._config.redis_from_url(self._config.redis_host)\n \n async def _serialize_response(self, resp):\n data = await resp.render_body()\n@@ -24,8 +21,10 @@\n resp.context.cached = True\n \n async def process_startup(self, scope, event):\n- if self._redis is None:\n- self._redis = await self._config.create_redis_pool(self._config.redis_host)\n+ await self._redis.ping()\n+\n+ async def process_shutdown(self, scope, event):\n+ await self._redis.close()\n \n async def process_request(self, req, resp):\n resp.context.cached = False\n@@ -51,4 +50,4 @@\n await self._redis.delete(key)\n elif not resp.context.cached:\n data = await self._serialize_response(resp)\n- await self._redis.set(key, data, expire=self.TTL)\n+ await self._redis.set(key, data, ex=self.TTL)\ndiff --git a/examples/asgilook/asgilook/config.py b/examples/asgilook/asgilook/config.py\n--- a/examples/asgilook/asgilook/config.py\n+++ b/examples/asgilook/asgilook/config.py\n@@ -9,7 +9,7 @@\n DEFAULT_CONFIG_PATH = '/tmp/asgilook'\n DEFAULT_MIN_THUMB_SIZE = 64\n DEFAULT_REDIS_HOST = 'redis://localhost'\n- DEFAULT_REDIS_POOL = aioredis.create_redis_pool\n+ DEFAULT_REDIS_FROM_URL = aioredis.from_url\n DEFAULT_UUID_GENERATOR = uuid.uuid4\n \n def __init__(self):\n@@ -18,7 +18,7 @@\n )\n self.storage_path.mkdir(parents=True, exist_ok=True)\n \n- self.create_redis_pool = Config.DEFAULT_REDIS_POOL\n+ self.redis_from_url = Config.DEFAULT_REDIS_FROM_URL\n self.min_thumb_size = self.DEFAULT_MIN_THUMB_SIZE\n self.redis_host = self.DEFAULT_REDIS_HOST\n self.uuid_generator = Config.DEFAULT_UUID_GENERATOR\n", "issue": "Update asgi look examples to use aioredis 2\nA new major vesion of aioredis was released, and it has some api changes.\r\nThis is the changelog https://github.com/aio-libs/aioredis-py/blob/master/CHANGELOG.md\n", "before_files": [{"content": "import os\nimport pathlib\nimport uuid\n\nimport aioredis\n\n\nclass Config:\n DEFAULT_CONFIG_PATH = '/tmp/asgilook'\n DEFAULT_MIN_THUMB_SIZE = 64\n DEFAULT_REDIS_HOST = 'redis://localhost'\n DEFAULT_REDIS_POOL = aioredis.create_redis_pool\n DEFAULT_UUID_GENERATOR = uuid.uuid4\n\n def __init__(self):\n self.storage_path = pathlib.Path(\n os.environ.get('ASGI_LOOK_STORAGE_PATH', self.DEFAULT_CONFIG_PATH)\n )\n self.storage_path.mkdir(parents=True, exist_ok=True)\n\n self.create_redis_pool = Config.DEFAULT_REDIS_POOL\n self.min_thumb_size = self.DEFAULT_MIN_THUMB_SIZE\n self.redis_host = self.DEFAULT_REDIS_HOST\n self.uuid_generator = Config.DEFAULT_UUID_GENERATOR\n", "path": "examples/asgilook/asgilook/config.py"}, {"content": "import msgpack\n\n\nclass RedisCache:\n PREFIX = 'asgilook:'\n INVALIDATE_ON = frozenset({'DELETE', 'POST', 'PUT'})\n CACHE_HEADER = 'X-ASGILook-Cache'\n TTL = 3600\n\n def __init__(self, config):\n self._config = config\n\n # NOTE(vytas): To be initialized upon application startup (see the\n # method below).\n self._redis = None\n\n async def _serialize_response(self, resp):\n data = await resp.render_body()\n return msgpack.packb([resp.content_type, data], use_bin_type=True)\n\n def _deserialize_response(self, resp, data):\n resp.content_type, resp.data = msgpack.unpackb(data, 
raw=False)\n resp.complete = True\n resp.context.cached = True\n\n async def process_startup(self, scope, event):\n if self._redis is None:\n self._redis = await self._config.create_redis_pool(self._config.redis_host)\n\n async def process_request(self, req, resp):\n resp.context.cached = False\n\n if req.method in self.INVALIDATE_ON:\n return\n\n key = f'{self.PREFIX}/{req.path}'\n data = await self._redis.get(key)\n if data is not None:\n self._deserialize_response(resp, data)\n resp.set_header(self.CACHE_HEADER, 'Hit')\n else:\n resp.set_header(self.CACHE_HEADER, 'Miss')\n\n async def process_response(self, req, resp, resource, req_succeeded):\n if not req_succeeded:\n return\n\n key = f'{self.PREFIX}/{req.path}'\n\n if req.method in self.INVALIDATE_ON:\n await self._redis.delete(key)\n elif not resp.context.cached:\n data = await self._serialize_response(resp)\n await self._redis.set(key, data, expire=self.TTL)\n", "path": "examples/asgilook/asgilook/cache.py"}], "after_files": [{"content": "import os\nimport pathlib\nimport uuid\n\nimport aioredis\n\n\nclass Config:\n DEFAULT_CONFIG_PATH = '/tmp/asgilook'\n DEFAULT_MIN_THUMB_SIZE = 64\n DEFAULT_REDIS_HOST = 'redis://localhost'\n DEFAULT_REDIS_FROM_URL = aioredis.from_url\n DEFAULT_UUID_GENERATOR = uuid.uuid4\n\n def __init__(self):\n self.storage_path = pathlib.Path(\n os.environ.get('ASGI_LOOK_STORAGE_PATH', self.DEFAULT_CONFIG_PATH)\n )\n self.storage_path.mkdir(parents=True, exist_ok=True)\n\n self.redis_from_url = Config.DEFAULT_REDIS_FROM_URL\n self.min_thumb_size = self.DEFAULT_MIN_THUMB_SIZE\n self.redis_host = self.DEFAULT_REDIS_HOST\n self.uuid_generator = Config.DEFAULT_UUID_GENERATOR\n", "path": "examples/asgilook/asgilook/config.py"}, {"content": "import msgpack\n\n\nclass RedisCache:\n PREFIX = 'asgilook:'\n INVALIDATE_ON = frozenset({'DELETE', 'POST', 'PUT'})\n CACHE_HEADER = 'X-ASGILook-Cache'\n TTL = 3600\n\n def __init__(self, config):\n self._config = config\n self._redis = self._config.redis_from_url(self._config.redis_host)\n\n async def _serialize_response(self, resp):\n data = await resp.render_body()\n return msgpack.packb([resp.content_type, data], use_bin_type=True)\n\n def _deserialize_response(self, resp, data):\n resp.content_type, resp.data = msgpack.unpackb(data, raw=False)\n resp.complete = True\n resp.context.cached = True\n\n async def process_startup(self, scope, event):\n await self._redis.ping()\n\n async def process_shutdown(self, scope, event):\n await self._redis.close()\n\n async def process_request(self, req, resp):\n resp.context.cached = False\n\n if req.method in self.INVALIDATE_ON:\n return\n\n key = f'{self.PREFIX}/{req.path}'\n data = await self._redis.get(key)\n if data is not None:\n self._deserialize_response(resp, data)\n resp.set_header(self.CACHE_HEADER, 'Hit')\n else:\n resp.set_header(self.CACHE_HEADER, 'Miss')\n\n async def process_response(self, req, resp, resource, req_succeeded):\n if not req_succeeded:\n return\n\n key = f'{self.PREFIX}/{req.path}'\n\n if req.method in self.INVALIDATE_ON:\n await self._redis.delete(key)\n elif not resp.context.cached:\n data = await self._serialize_response(resp)\n await self._redis.set(key, data, ex=self.TTL)\n", "path": "examples/asgilook/asgilook/cache.py"}]} | 1,071 | 552 |
gh_patches_debug_8028 | rasdani/github-patches | git_diff | e-valuation__EvaP-848 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Username case sensitivity
Usernames are case-sensitive. The importer makes all usernames lowercase, but accounts created automatically when logging in with Kerberos authentication can contain uppercase letters.
This can lead to two users having the same username, and the system then crashes on login.
Automatically created accounts should also get lowercase usernames, even if the user enters the name differently.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `evap/evaluation/forms.py`
Content:
```
1 from django import forms
2 from django.contrib.auth import authenticate
3 from django.utils.translation import ugettext_lazy as _
4 from django.views.decorators.debug import sensitive_variables
5
6 from evap.evaluation.models import UserProfile
7
8
9 class LoginUsernameForm(forms.Form):
10 """Form encapsulating the login with username and password, for example from an Active Directory.
11 """
12
13 username = forms.CharField(label=_("Username"), max_length=254)
14 password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
15
16 def __init__(self, request=None, *args, **kwargs):
17 """
18 If request is passed in, the form will validate that cookies are
19 enabled. Note that the request (a HttpRequest object) must have set a
20 cookie with the key TEST_COOKIE_NAME and value TEST_COOKIE_VALUE before
21 running this validation.
22 """
23 self.request = request
24 self.user_cache = None
25 super().__init__(*args, **kwargs)
26
27 @sensitive_variables('password')
28 def clean_password(self):
29 username = self.cleaned_data.get('username')
30 password = self.cleaned_data.get('password')
31
32 if username and password:
33 self.user_cache = authenticate(username=username, password=password)
34 if self.user_cache is None:
35 raise forms.ValidationError(_("Please enter a correct username and password."))
36 self.check_for_test_cookie()
37 return password
38
39 def check_for_test_cookie(self):
40 if self.request and not self.request.session.test_cookie_worked():
41 raise forms.ValidationError(_("Your Web browser doesn't appear to have cookies enabled. Cookies are required for logging in."))
42
43 def get_user_id(self):
44 if self.user_cache:
45 return self.user_cache.id
46 return None
47
48 def get_user(self):
49 return self.user_cache
50
51
52 class NewKeyForm(forms.Form):
53 email = forms.EmailField(label=_("Email address"))
54
55 def __init__(self, *args, **kwargs):
56 self.user_cache = None
57
58 super().__init__(*args, **kwargs)
59
60 def clean_email(self):
61 email = self.cleaned_data.get('email')
62
63 if not UserProfile.email_needs_login_key(email):
64 raise forms.ValidationError(_("HPI users cannot request login keys. Please login using your domain credentials."))
65
66 try:
67 user = UserProfile.objects.get(email__iexact=email)
68 self.user_cache = user
69 except UserProfile.DoesNotExist:
70 raise forms.ValidationError(_("No user with this email address was found. Please make sure to enter the email address already known to the university office."))
71
72 return email
73
74 def get_user(self):
75 return self.user_cache
76
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/evap/evaluation/forms.py b/evap/evaluation/forms.py
--- a/evap/evaluation/forms.py
+++ b/evap/evaluation/forms.py
@@ -29,6 +29,9 @@
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
+ # django-auth-kerberos might create a new userprofile. make sure it gets a lowercase username.
+ username = username.lower()
+
if username and password:
self.user_cache = authenticate(username=username, password=password)
if self.user_cache is None:
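
Normalizing the username before `authenticate()` is what keeps Kerberos-backed auto-creation from minting a second, differently-cased account for the same person. A tiny sketch of the invariant the patch enforces (the helper name is illustrative, not part of EvaP):

```python
# Illustrative helper: every spelling of a username maps to one account key.
def normalize_username(username: str) -> str:
    return username.lower()

assert normalize_username('JDoe') == normalize_username('jdoe') == 'jdoe'
```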
| {"golden_diff": "diff --git a/evap/evaluation/forms.py b/evap/evaluation/forms.py\n--- a/evap/evaluation/forms.py\n+++ b/evap/evaluation/forms.py\n@@ -29,6 +29,9 @@\n username = self.cleaned_data.get('username')\n password = self.cleaned_data.get('password')\n \n+ # django-auth-kerberos might create a new userprofile. make sure it gets a lowercase username.\n+ username = username.lower()\n+\n if username and password:\n self.user_cache = authenticate(username=username, password=password)\n if self.user_cache is None:\n", "issue": "Username case sensitivity\nUsernames are case sensitive. The importer makes all usernames lowercase, but automatically created accounts when logging in with Kerberos authentification can have uppercase letters.\nThis can lead to two users having the same username and then the system crashed on login.\n\nAutomatically created accounts should also get lowercase usernames, even if the user enters the name differently.\n\n", "before_files": [{"content": "from django import forms\nfrom django.contrib.auth import authenticate\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.views.decorators.debug import sensitive_variables\n\nfrom evap.evaluation.models import UserProfile\n\n\nclass LoginUsernameForm(forms.Form):\n \"\"\"Form encapsulating the login with username and password, for example from an Active Directory.\n \"\"\"\n\n username = forms.CharField(label=_(\"Username\"), max_length=254)\n password = forms.CharField(label=_(\"Password\"), widget=forms.PasswordInput)\n\n def __init__(self, request=None, *args, **kwargs):\n \"\"\"\n If request is passed in, the form will validate that cookies are\n enabled. Note that the request (a HttpRequest object) must have set a\n cookie with the key TEST_COOKIE_NAME and value TEST_COOKIE_VALUE before\n running this validation.\n \"\"\"\n self.request = request\n self.user_cache = None\n super().__init__(*args, **kwargs)\n\n @sensitive_variables('password')\n def clean_password(self):\n username = self.cleaned_data.get('username')\n password = self.cleaned_data.get('password')\n\n if username and password:\n self.user_cache = authenticate(username=username, password=password)\n if self.user_cache is None:\n raise forms.ValidationError(_(\"Please enter a correct username and password.\"))\n self.check_for_test_cookie()\n return password\n\n def check_for_test_cookie(self):\n if self.request and not self.request.session.test_cookie_worked():\n raise forms.ValidationError(_(\"Your Web browser doesn't appear to have cookies enabled. Cookies are required for logging in.\"))\n\n def get_user_id(self):\n if self.user_cache:\n return self.user_cache.id\n return None\n\n def get_user(self):\n return self.user_cache\n\n\nclass NewKeyForm(forms.Form):\n email = forms.EmailField(label=_(\"Email address\"))\n\n def __init__(self, *args, **kwargs):\n self.user_cache = None\n\n super().__init__(*args, **kwargs)\n\n def clean_email(self):\n email = self.cleaned_data.get('email')\n\n if not UserProfile.email_needs_login_key(email):\n raise forms.ValidationError(_(\"HPI users cannot request login keys. Please login using your domain credentials.\"))\n\n try:\n user = UserProfile.objects.get(email__iexact=email)\n self.user_cache = user\n except UserProfile.DoesNotExist:\n raise forms.ValidationError(_(\"No user with this email address was found. 
Please make sure to enter the email address already known to the university office.\"))\n\n return email\n\n def get_user(self):\n return self.user_cache\n", "path": "evap/evaluation/forms.py"}], "after_files": [{"content": "from django import forms\nfrom django.contrib.auth import authenticate\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.views.decorators.debug import sensitive_variables\n\nfrom evap.evaluation.models import UserProfile\n\n\nclass LoginUsernameForm(forms.Form):\n \"\"\"Form encapsulating the login with username and password, for example from an Active Directory.\n \"\"\"\n\n username = forms.CharField(label=_(\"Username\"), max_length=254)\n password = forms.CharField(label=_(\"Password\"), widget=forms.PasswordInput)\n\n def __init__(self, request=None, *args, **kwargs):\n \"\"\"\n If request is passed in, the form will validate that cookies are\n enabled. Note that the request (a HttpRequest object) must have set a\n cookie with the key TEST_COOKIE_NAME and value TEST_COOKIE_VALUE before\n running this validation.\n \"\"\"\n self.request = request\n self.user_cache = None\n super().__init__(*args, **kwargs)\n\n @sensitive_variables('password')\n def clean_password(self):\n username = self.cleaned_data.get('username')\n password = self.cleaned_data.get('password')\n\n # django-auth-kerberos might create a new userprofile. make sure it gets a lowercase username.\n username = username.lower()\n\n if username and password:\n self.user_cache = authenticate(username=username, password=password)\n if self.user_cache is None:\n raise forms.ValidationError(_(\"Please enter a correct username and password.\"))\n self.check_for_test_cookie()\n return password\n\n def check_for_test_cookie(self):\n if self.request and not self.request.session.test_cookie_worked():\n raise forms.ValidationError(_(\"Your Web browser doesn't appear to have cookies enabled. Cookies are required for logging in.\"))\n\n def get_user_id(self):\n if self.user_cache:\n return self.user_cache.id\n return None\n\n def get_user(self):\n return self.user_cache\n\n\nclass NewKeyForm(forms.Form):\n email = forms.EmailField(label=_(\"Email address\"))\n\n def __init__(self, *args, **kwargs):\n self.user_cache = None\n\n super().__init__(*args, **kwargs)\n\n def clean_email(self):\n email = self.cleaned_data.get('email')\n\n if not UserProfile.email_needs_login_key(email):\n raise forms.ValidationError(_(\"HPI users cannot request login keys. Please login using your domain credentials.\"))\n\n try:\n user = UserProfile.objects.get(email__iexact=email)\n self.user_cache = user\n except UserProfile.DoesNotExist:\n raise forms.ValidationError(_(\"No user with this email address was found. Please make sure to enter the email address already known to the university office.\"))\n\n return email\n\n def get_user(self):\n return self.user_cache\n", "path": "evap/evaluation/forms.py"}]} | 1,023 | 130 |
gh_patches_debug_6806 | rasdani/github-patches | git_diff | Pylons__pyramid-2674 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
reify docstring doesn't render properly
The testsetup section is omitted from the rendered docs at http://pyramid.readthedocs.io/en/latest/api/decorator.html#pyramid.decorator.reify
Unfortunately this section is essential for understanding the example.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyramid/decorator.py`
Content:
```
1 from functools import update_wrapper
2
3
4 class reify(object):
5 """ Use as a class method decorator. It operates almost exactly like the
6 Python ``@property`` decorator, but it puts the result of the method it
7 decorates into the instance dict after the first call, effectively
8 replacing the function it decorates with an instance variable. It is, in
9 Python parlance, a non-data descriptor. An example:
10
11 .. testsetup::
12
13 from pyramid.decorator import reify
14
15 class Foo(object):
16 @reify
17 def jammy(self):
18 print('jammy called')
19 return 1
20
21 And usage of Foo:
22
23 .. doctest::
24
25 >>> f = Foo()
26 >>> v = f.jammy
27 jammy called
28 >>> print(v)
29 1
30 >>> f.jammy
31 1
32 >>> # jammy func not called the second time; it replaced itself with 1
33 >>> # Note: reassignment is possible
34 >>> f.jammy = 2
35 >>> f.jammy
36 2
37 """
38 def __init__(self, wrapped):
39 self.wrapped = wrapped
40 update_wrapper(self, wrapped)
41
42 def __get__(self, inst, objtype=None):
43 if inst is None:
44 return self
45 val = self.wrapped(inst)
46 setattr(inst, self.wrapped.__name__, val)
47 return val
48
49
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pyramid/decorator.py b/pyramid/decorator.py
--- a/pyramid/decorator.py
+++ b/pyramid/decorator.py
@@ -8,6 +8,16 @@
replacing the function it decorates with an instance variable. It is, in
Python parlance, a non-data descriptor. An example:
+ .. code-block:: python
+
+ from pyramid.decorator import reify
+
+ class Foo(object):
+ @reify
+ def jammy(self):
+ print('jammy called')
+ return 1
+
.. testsetup::
from pyramid.decorator import reify
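
The fix works because Sphinx's `testsetup` directive is executed by `sphinx.ext.doctest` but never rendered, so the patch duplicates the `Foo` definition in a visible `code-block` while keeping the hidden `testsetup` for the doctest runner. The docstring's behaviour is also easy to check outside Sphinx; a self-contained sketch condensing the `reify` class shown above:

```python
from functools import update_wrapper

class reify:  # condensed copy of the pyramid implementation above
    def __init__(self, wrapped):
        self.wrapped = wrapped
        update_wrapper(self, wrapped)

    def __get__(self, inst, objtype=None):
        if inst is None:
            return self
        val = self.wrapped(inst)
        setattr(inst, self.wrapped.__name__, val)  # instance attr now shadows the descriptor
        return val

class Foo:
    @reify
    def jammy(self):
        print('jammy called')
        return 1

f = Foo()
print(f.jammy)  # prints 'jammy called', then 1
print(f.jammy)  # prints 1 only; the cached instance attribute is read
```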
| {"golden_diff": "diff --git a/pyramid/decorator.py b/pyramid/decorator.py\n--- a/pyramid/decorator.py\n+++ b/pyramid/decorator.py\n@@ -8,6 +8,16 @@\n replacing the function it decorates with an instance variable. It is, in\n Python parlance, a non-data descriptor. An example:\n \n+ .. code-block:: python\n+\n+ from pyramid.decorator import reify\n+\n+ class Foo(object):\n+ @reify\n+ def jammy(self):\n+ print('jammy called')\n+ return 1\n+\n .. testsetup::\n \n from pyramid.decorator import reify\n", "issue": "reify docstring doesn't render properly\nThe testsetup section is omitted from the rendered docs at http://pyramid.readthedocs.io/en/latest/api/decorator.html#pyramid.decorator.reify\n\nUnfortunately this section is essential for understanding the example.\n\n", "before_files": [{"content": "from functools import update_wrapper\n\n\nclass reify(object):\n \"\"\" Use as a class method decorator. It operates almost exactly like the\n Python ``@property`` decorator, but it puts the result of the method it\n decorates into the instance dict after the first call, effectively\n replacing the function it decorates with an instance variable. It is, in\n Python parlance, a non-data descriptor. An example:\n\n .. testsetup::\n\n from pyramid.decorator import reify\n\n class Foo(object):\n @reify\n def jammy(self):\n print('jammy called')\n return 1\n\n And usage of Foo:\n\n .. doctest::\n\n >>> f = Foo()\n >>> v = f.jammy\n jammy called\n >>> print(v)\n 1\n >>> f.jammy\n 1\n >>> # jammy func not called the second time; it replaced itself with 1\n >>> # Note: reassignment is possible\n >>> f.jammy = 2\n >>> f.jammy\n 2\n \"\"\"\n def __init__(self, wrapped):\n self.wrapped = wrapped\n update_wrapper(self, wrapped)\n\n def __get__(self, inst, objtype=None):\n if inst is None:\n return self\n val = self.wrapped(inst)\n setattr(inst, self.wrapped.__name__, val)\n return val\n\n", "path": "pyramid/decorator.py"}], "after_files": [{"content": "from functools import update_wrapper\n\n\nclass reify(object):\n \"\"\" Use as a class method decorator. It operates almost exactly like the\n Python ``@property`` decorator, but it puts the result of the method it\n decorates into the instance dict after the first call, effectively\n replacing the function it decorates with an instance variable. It is, in\n Python parlance, a non-data descriptor. An example:\n\n .. code-block:: python\n\n from pyramid.decorator import reify\n\n class Foo(object):\n @reify\n def jammy(self):\n print('jammy called')\n return 1\n\n .. testsetup::\n\n from pyramid.decorator import reify\n\n class Foo(object):\n @reify\n def jammy(self):\n print('jammy called')\n return 1\n\n And usage of Foo:\n\n .. doctest::\n\n >>> f = Foo()\n >>> v = f.jammy\n jammy called\n >>> print(v)\n 1\n >>> f.jammy\n 1\n >>> # jammy func not called the second time; it replaced itself with 1\n >>> # Note: reassignment is possible\n >>> f.jammy = 2\n >>> f.jammy\n 2\n \"\"\"\n def __init__(self, wrapped):\n self.wrapped = wrapped\n update_wrapper(self, wrapped)\n\n def __get__(self, inst, objtype=None):\n if inst is None:\n return self\n val = self.wrapped(inst)\n setattr(inst, self.wrapped.__name__, val)\n return val\n\n", "path": "pyramid/decorator.py"}]} | 719 | 149 |
gh_patches_debug_27882 | rasdani/github-patches | git_diff | cornellius-gp__gpytorch-644 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Where is `fast_pred_var` moving?
I received the following warning when using `fast_pred_var`:
```
/cluster/nhunt/anaconda/envs/bayes_opt/lib/python3.7/site-packages/gpytorch/beta_features.py:17:
DeprecationWarning: `gpytorch.settings.fast_pred_var` has moved to `gpytorch.settings.fast_pred_var`.
```
It seems that I'm being warned that `fast_pred_var` has moved to its current location. Was there a typo in the warning about how we should be using this setting now?
```bash
$ pip list | grep gpytorch
gpytorch 0.2.1
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `gpytorch/__init__.py`
Content:
```
1 #!/usr/bin/env python3
2 from .module import Module
3 from . import (
4 beta_features,
5 distributions,
6 kernels,
7 lazy,
8 likelihoods,
9 means,
10 mlls,
11 models,
12 priors,
13 settings,
14 utils,
15 variational,
16 )
17 from .functions import (
18 add_diag,
19 add_jitter,
20 dsmm,
21 inv_matmul,
22 inv_quad,
23 inv_quad_logdet,
24 logdet,
25 log_normal_cdf,
26 matmul,
27 normal_cdf,
28 root_decomposition,
29 root_inv_decomposition,
30 # Deprecated
31 inv_quad_log_det,
32 log_det,
33 )
34 from .mlls import ExactMarginalLogLikelihood, VariationalMarginalLogLikelihood
35 from .lazy import lazify, delazify
36
37
38 __version__ = "0.2.1"
39
40 # Old deprecated stuff
41 fast_pred_var = beta_features._moved_beta_feature(settings.fast_pred_var, "gpytorch.settings.fast_pred_var")
42
43 __all__ = [
44 # Submodules
45 "distributions",
46 "kernels",
47 "lazy",
48 "likelihoods",
49 "means",
50 "mlls",
51 "models",
52 "priors",
53 "utils",
54 "variational",
55 # Classes
56 "Module",
57 "ExactMarginalLogLikelihood",
58 "VariationalMarginalLogLikelihood",
59 # Functions
60 "add_diag",
61 "add_jitter",
62 "delazify",
63 "dsmm",
64 "inv_matmul",
65 "inv_quad",
66 "inv_quad_logdet",
67 "lazify",
68 "logdet",
69 "log_normal_cdf",
70 "matmul",
71 "normal_cdf",
72 "root_decomposition",
73 "root_inv_decomposition",
74 # Context managers
75 "beta_features",
76 "settings",
77 # Other
78 "__version__",
79 # Deprecated
80 "fast_pred_var",
81 "inv_quad_log_det",
82 "log_det",
83 ]
84
```
Path: `gpytorch/beta_features.py`
Content:
```
1 #!/usr/bin/env python3
2
3 import warnings
4 from .settings import _feature_flag, _value_context
5 from .settings import fast_pred_var as _fast_pred_var
6 from .settings import fast_pred_samples as _fast_pred_samples
7
8
9 class _moved_beta_feature(object):
10 def __init__(self, new_cls, orig_name=None):
11 self.new_cls = new_cls
12 self.orig_name = orig_name if orig_name is not None else "gpytorch.settings.{}".format(new_cls.__name__)
13
14 def __call__(self, *args, **kwargs):
15 warnings.warn(
16 "`{}` has moved to `gpytorch.settings.{}`.".format(self.orig_name, self.new_cls.__name__),
17 DeprecationWarning
18 )
19 return self.new_cls(*args, **kwargs)
20
21 def __getattr__(self, name):
22 return getattr(self.new_cls, name)
23
24
25 fast_pred_var = _moved_beta_feature(_fast_pred_var)
26 fast_pred_samples = _moved_beta_feature(_fast_pred_samples)
27
28
29 class checkpoint_kernel(_value_context):
30 """
31 Should the kernel be computed in chunks with checkpointing or not? (Default, no)
32
33 If `split_size = 0`:
34 The kernel is computed explicitly. During training, the kernel matrix is
35 kept in memory for the backward pass. This is the fastest option but the
36 most memory intensive.
37 If `split_size > 0`:
38 The kernel is never fully computed or stored. Instead, the kernel is only
39 accessed through matrix multiplication. The matrix multiplication is
40 computed in `segments` chunks. This is slower, but requires significantly less memory.
41
42 Default: 0
43 """
44
45 _global_value = 0
46
47
48 class diagonal_correction(_feature_flag):
49 """
50 Add a diagonal correction to scalable inducing point methods
51 """
52
53 _state = True
54
55
56 class default_preconditioner(_feature_flag):
57 """
58 Add a diagonal correction to scalable inducing point methods
59 """
60
61 pass
62
63
64 __all__ = ["fast_pred_var", "fast_pred_samples", "diagonal_correction", "default_preconditioner"]
65
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/gpytorch/__init__.py b/gpytorch/__init__.py
--- a/gpytorch/__init__.py
+++ b/gpytorch/__init__.py
@@ -37,9 +37,6 @@
__version__ = "0.2.1"
-# Old deprecated stuff
-fast_pred_var = beta_features._moved_beta_feature(settings.fast_pred_var, "gpytorch.settings.fast_pred_var")
-
__all__ = [
# Submodules
"distributions",
@@ -77,7 +74,6 @@
# Other
"__version__",
# Deprecated
- "fast_pred_var",
"inv_quad_log_det",
"log_det",
]
diff --git a/gpytorch/beta_features.py b/gpytorch/beta_features.py
--- a/gpytorch/beta_features.py
+++ b/gpytorch/beta_features.py
@@ -2,8 +2,6 @@
import warnings
from .settings import _feature_flag, _value_context
-from .settings import fast_pred_var as _fast_pred_var
-from .settings import fast_pred_samples as _fast_pred_samples
class _moved_beta_feature(object):
@@ -22,10 +20,6 @@
return getattr(self.new_cls, name)
-fast_pred_var = _moved_beta_feature(_fast_pred_var)
-fast_pred_samples = _moved_beta_feature(_fast_pred_samples)
-
-
class checkpoint_kernel(_value_context):
"""
Should the kernel be computed in chunks with checkpointing or not? (Default, no)
@@ -61,4 +55,4 @@
pass
-__all__ = ["fast_pred_var", "fast_pred_samples", "diagonal_correction", "default_preconditioner"]
+__all__ = ["checkpoint_kernel", "diagonal_correction", "default_preconditioner"]
| {"golden_diff": "diff --git a/gpytorch/__init__.py b/gpytorch/__init__.py\n--- a/gpytorch/__init__.py\n+++ b/gpytorch/__init__.py\n@@ -37,9 +37,6 @@\n \n __version__ = \"0.2.1\"\n \n-# Old deprecated stuff\n-fast_pred_var = beta_features._moved_beta_feature(settings.fast_pred_var, \"gpytorch.settings.fast_pred_var\")\n-\n __all__ = [\n # Submodules\n \"distributions\",\n@@ -77,7 +74,6 @@\n # Other\n \"__version__\",\n # Deprecated\n- \"fast_pred_var\",\n \"inv_quad_log_det\",\n \"log_det\",\n ]\ndiff --git a/gpytorch/beta_features.py b/gpytorch/beta_features.py\n--- a/gpytorch/beta_features.py\n+++ b/gpytorch/beta_features.py\n@@ -2,8 +2,6 @@\n \n import warnings\n from .settings import _feature_flag, _value_context\n-from .settings import fast_pred_var as _fast_pred_var\n-from .settings import fast_pred_samples as _fast_pred_samples\n \n \n class _moved_beta_feature(object):\n@@ -22,10 +20,6 @@\n return getattr(self.new_cls, name)\n \n \n-fast_pred_var = _moved_beta_feature(_fast_pred_var)\n-fast_pred_samples = _moved_beta_feature(_fast_pred_samples)\n-\n-\n class checkpoint_kernel(_value_context):\n \"\"\"\n Should the kernel be computed in chunks with checkpointing or not? (Default, no)\n@@ -61,4 +55,4 @@\n pass\n \n \n-__all__ = [\"fast_pred_var\", \"fast_pred_samples\", \"diagonal_correction\", \"default_preconditioner\"]\n+__all__ = [\"checkpoint_kernel\", \"diagonal_correction\", \"default_preconditioner\"]\n", "issue": "Where is `fast_pred_var` moving?\nI received the following warning when using `fast_pred_var`:\r\n\r\n```\r\n/cluster/nhunt/anaconda/envs/bayes_opt/lib/python3.7/site-packages/gpytorch/beta_features.py:17:\r\nDeprecationWarning: `gpytorch.settings.fast_pred_var` has moved to `gpytorch.settings.fast_pred_var`.\r\n```\r\n\r\nIt seems that I'm being warned that `fast_pred_var` has moved to its current location. Was there a typo in the warning about how we should be using this setting now?\r\n\r\n```bash\r\n$ pip list | grep gpytorch\r\ngpytorch 0.2.1\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python3\nfrom .module import Module\nfrom . 
import (\n beta_features,\n distributions,\n kernels,\n lazy,\n likelihoods,\n means,\n mlls,\n models,\n priors,\n settings,\n utils,\n variational,\n)\nfrom .functions import (\n add_diag,\n add_jitter,\n dsmm,\n inv_matmul,\n inv_quad,\n inv_quad_logdet,\n logdet,\n log_normal_cdf,\n matmul,\n normal_cdf,\n root_decomposition,\n root_inv_decomposition,\n # Deprecated\n inv_quad_log_det,\n log_det,\n)\nfrom .mlls import ExactMarginalLogLikelihood, VariationalMarginalLogLikelihood\nfrom .lazy import lazify, delazify\n\n\n__version__ = \"0.2.1\"\n\n# Old deprecated stuff\nfast_pred_var = beta_features._moved_beta_feature(settings.fast_pred_var, \"gpytorch.settings.fast_pred_var\")\n\n__all__ = [\n # Submodules\n \"distributions\",\n \"kernels\",\n \"lazy\",\n \"likelihoods\",\n \"means\",\n \"mlls\",\n \"models\",\n \"priors\",\n \"utils\",\n \"variational\",\n # Classes\n \"Module\",\n \"ExactMarginalLogLikelihood\",\n \"VariationalMarginalLogLikelihood\",\n # Functions\n \"add_diag\",\n \"add_jitter\",\n \"delazify\",\n \"dsmm\",\n \"inv_matmul\",\n \"inv_quad\",\n \"inv_quad_logdet\",\n \"lazify\",\n \"logdet\",\n \"log_normal_cdf\",\n \"matmul\",\n \"normal_cdf\",\n \"root_decomposition\",\n \"root_inv_decomposition\",\n # Context managers\n \"beta_features\",\n \"settings\",\n # Other\n \"__version__\",\n # Deprecated\n \"fast_pred_var\",\n \"inv_quad_log_det\",\n \"log_det\",\n]\n", "path": "gpytorch/__init__.py"}, {"content": "#!/usr/bin/env python3\n\nimport warnings\nfrom .settings import _feature_flag, _value_context\nfrom .settings import fast_pred_var as _fast_pred_var\nfrom .settings import fast_pred_samples as _fast_pred_samples\n\n\nclass _moved_beta_feature(object):\n def __init__(self, new_cls, orig_name=None):\n self.new_cls = new_cls\n self.orig_name = orig_name if orig_name is not None else \"gpytorch.settings.{}\".format(new_cls.__name__)\n\n def __call__(self, *args, **kwargs):\n warnings.warn(\n \"`{}` has moved to `gpytorch.settings.{}`.\".format(self.orig_name, self.new_cls.__name__),\n DeprecationWarning\n )\n return self.new_cls(*args, **kwargs)\n\n def __getattr__(self, name):\n return getattr(self.new_cls, name)\n\n\nfast_pred_var = _moved_beta_feature(_fast_pred_var)\nfast_pred_samples = _moved_beta_feature(_fast_pred_samples)\n\n\nclass checkpoint_kernel(_value_context):\n \"\"\"\n Should the kernel be computed in chunks with checkpointing or not? (Default, no)\n\n If `split_size = 0`:\n The kernel is computed explicitly. During training, the kernel matrix is\n kept in memory for the backward pass. This is the fastest option but the\n most memory intensive.\n If `split_size > 0`:\n The kernel is never fully computed or stored. Instead, the kernel is only\n accessed through matrix multiplication. The matrix multiplication is\n computed in `segments` chunks. This is slower, but requires significantly less memory.\n\n Default: 0\n \"\"\"\n\n _global_value = 0\n\n\nclass diagonal_correction(_feature_flag):\n \"\"\"\n Add a diagonal correction to scalable inducing point methods\n \"\"\"\n\n _state = True\n\n\nclass default_preconditioner(_feature_flag):\n \"\"\"\n Add a diagonal correction to scalable inducing point methods\n \"\"\"\n\n pass\n\n\n__all__ = [\"fast_pred_var\", \"fast_pred_samples\", \"diagonal_correction\", \"default_preconditioner\"]\n", "path": "gpytorch/beta_features.py"}], "after_files": [{"content": "#!/usr/bin/env python3\nfrom .module import Module\nfrom . 
import (\n beta_features,\n distributions,\n kernels,\n lazy,\n likelihoods,\n means,\n mlls,\n models,\n priors,\n settings,\n utils,\n variational,\n)\nfrom .functions import (\n add_diag,\n add_jitter,\n dsmm,\n inv_matmul,\n inv_quad,\n inv_quad_logdet,\n logdet,\n log_normal_cdf,\n matmul,\n normal_cdf,\n root_decomposition,\n root_inv_decomposition,\n # Deprecated\n inv_quad_log_det,\n log_det,\n)\nfrom .mlls import ExactMarginalLogLikelihood, VariationalMarginalLogLikelihood\nfrom .lazy import lazify, delazify\n\n\n__version__ = \"0.2.1\"\n\n__all__ = [\n # Submodules\n \"distributions\",\n \"kernels\",\n \"lazy\",\n \"likelihoods\",\n \"means\",\n \"mlls\",\n \"models\",\n \"priors\",\n \"utils\",\n \"variational\",\n # Classes\n \"Module\",\n \"ExactMarginalLogLikelihood\",\n \"VariationalMarginalLogLikelihood\",\n # Functions\n \"add_diag\",\n \"add_jitter\",\n \"delazify\",\n \"dsmm\",\n \"inv_matmul\",\n \"inv_quad\",\n \"inv_quad_logdet\",\n \"lazify\",\n \"logdet\",\n \"log_normal_cdf\",\n \"matmul\",\n \"normal_cdf\",\n \"root_decomposition\",\n \"root_inv_decomposition\",\n # Context managers\n \"beta_features\",\n \"settings\",\n # Other\n \"__version__\",\n # Deprecated\n \"inv_quad_log_det\",\n \"log_det\",\n]\n", "path": "gpytorch/__init__.py"}, {"content": "#!/usr/bin/env python3\n\nimport warnings\nfrom .settings import _feature_flag, _value_context\n\n\nclass _moved_beta_feature(object):\n def __init__(self, new_cls, orig_name=None):\n self.new_cls = new_cls\n self.orig_name = orig_name if orig_name is not None else \"gpytorch.settings.{}\".format(new_cls.__name__)\n\n def __call__(self, *args, **kwargs):\n warnings.warn(\n \"`{}` has moved to `gpytorch.settings.{}`.\".format(self.orig_name, self.new_cls.__name__),\n DeprecationWarning\n )\n return self.new_cls(*args, **kwargs)\n\n def __getattr__(self, name):\n return getattr(self.new_cls, name)\n\n\nclass checkpoint_kernel(_value_context):\n \"\"\"\n Should the kernel be computed in chunks with checkpointing or not? (Default, no)\n\n If `split_size = 0`:\n The kernel is computed explicitly. During training, the kernel matrix is\n kept in memory for the backward pass. This is the fastest option but the\n most memory intensive.\n If `split_size > 0`:\n The kernel is never fully computed or stored. Instead, the kernel is only\n accessed through matrix multiplication. The matrix multiplication is\n computed in `segments` chunks. This is slower, but requires significantly less memory.\n\n Default: 0\n \"\"\"\n\n _global_value = 0\n\n\nclass diagonal_correction(_feature_flag):\n \"\"\"\n Add a diagonal correction to scalable inducing point methods\n \"\"\"\n\n _state = True\n\n\nclass default_preconditioner(_feature_flag):\n \"\"\"\n Add a diagonal correction to scalable inducing point methods\n \"\"\"\n\n pass\n\n\n__all__ = [\"checkpoint_kernel\", \"diagonal_correction\", \"default_preconditioner\"]\n", "path": "gpytorch/beta_features.py"}]} | 1,598 | 397 |
gh_patches_debug_652 | rasdani/github-patches | git_diff | pex-tool__pex-2086 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.127
On the docket:
+ [x] Pex fails to subset a "foo @ file:///bar" URL lock. #2083
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.126"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.126"
+__version__ = "2.1.127"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.126\"\n+__version__ = \"2.1.127\"\n", "issue": "Release 2.1.127\nOn the docket:\r\n+ [x] Pex fails to subset a \"foo @ file:///bar\" URL lock. #2083\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.126\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.127\"\n", "path": "pex/version.py"}]} | 349 | 98 |
gh_patches_debug_27127 | rasdani/github-patches | git_diff | mindsdb__mindsdb-1011 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Endpoint to disable telemetry
Fairly self-explanatory: add an endpoint to the HTTP API that can be used to disable/enable telemetry.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mindsdb/api/http/namespaces/util.py`
Content:
```
1 from flask import request
2 from flask_restx import Resource, abort
3 from flask import current_app as ca
4
5 from mindsdb.api.http.namespaces.configs.util import ns_conf
6 from mindsdb import __about__
7
8 @ns_conf.route('/ping')
9 class Ping(Resource):
10 @ns_conf.doc('get_ping')
11 def get(self):
12 '''Checks server avaliable'''
13 return {'status': 'ok'}
14
15 @ns_conf.route('/report_uuid')
16 class ReportUUID(Resource):
17 @ns_conf.doc('get_report_uuid')
18 def get(self):
19 metamodel_name = '___monitroing_metamodel___'
20 predictor = ca.mindsdb_native.create(metamodel_name)
21 return {
22 'report_uuid': predictor.report_uuid
23 }
24
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mindsdb/api/http/namespaces/util.py b/mindsdb/api/http/namespaces/util.py
--- a/mindsdb/api/http/namespaces/util.py
+++ b/mindsdb/api/http/namespaces/util.py
@@ -1,3 +1,4 @@
+import os
from flask import request
from flask_restx import Resource, abort
from flask import current_app as ca
@@ -5,6 +6,8 @@
from mindsdb.api.http.namespaces.configs.util import ns_conf
from mindsdb import __about__
+TELEMETRY_FILE = 'telemetry.lock'
+
@ns_conf.route('/ping')
class Ping(Resource):
@ns_conf.doc('get_ping')
@@ -21,3 +24,34 @@
return {
'report_uuid': predictor.report_uuid
}
+
+@ns_conf.route('/telemetry')
+class Telemetry(Resource):
+ @ns_conf.doc('get_telemetry_status')
+ def get(self):
+ status = "enabled" if is_telemetry_active() else "disabled"
+ return {"status": status}
+
+ @ns_conf.doc('set_telemetry')
+ def post(self):
+ data = request.json
+ action = data['action']
+ if str(action).lower() in ["true", "enable", "on"]:
+ enable_telemetry()
+ else:
+ disable_telemetry()
+
+
+def enable_telemetry():
+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)
+ if os.path.exists(path):
+ os.remove(path)
+
+def disable_telemetry():
+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)
+ with open(path, 'w') as _:
+ pass
+
+def is_telemetry_active():
+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)
+ return not os.path.exists(path)
| {"golden_diff": "diff --git a/mindsdb/api/http/namespaces/util.py b/mindsdb/api/http/namespaces/util.py\n--- a/mindsdb/api/http/namespaces/util.py\n+++ b/mindsdb/api/http/namespaces/util.py\n@@ -1,3 +1,4 @@\n+import os\n from flask import request\n from flask_restx import Resource, abort\n from flask import current_app as ca\n@@ -5,6 +6,8 @@\n from mindsdb.api.http.namespaces.configs.util import ns_conf\n from mindsdb import __about__\n \n+TELEMETRY_FILE = 'telemetry.lock'\n+\n @ns_conf.route('/ping')\n class Ping(Resource):\n @ns_conf.doc('get_ping')\n@@ -21,3 +24,34 @@\n return {\n 'report_uuid': predictor.report_uuid\n }\n+\n+@ns_conf.route('/telemetry')\n+class Telemetry(Resource):\n+ @ns_conf.doc('get_telemetry_status')\n+ def get(self):\n+ status = \"enabled\" if is_telemetry_active() else \"disabled\"\n+ return {\"status\": status}\n+\n+ @ns_conf.doc('set_telemetry')\n+ def post(self):\n+ data = request.json\n+ action = data['action']\n+ if str(action).lower() in [\"true\", \"enable\", \"on\"]:\n+ enable_telemetry()\n+ else:\n+ disable_telemetry()\n+\n+\n+def enable_telemetry():\n+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)\n+ if os.path.exists(path):\n+ os.remove(path)\n+\n+def disable_telemetry():\n+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)\n+ with open(path, 'w') as _:\n+ pass\n+\n+def is_telemetry_active():\n+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)\n+ return not os.path.exists(path)\n", "issue": "Endpoint to disable telemtry\nfairly self explainatory, add and an endpoint to the HTTP API that can be used to disable/enable the telemtry.\r\n\nEndpoint to disable telemtry\nfairly self explainatory, add and an endpoint to the HTTP API that can be used to disable/enable the telemtry.\r\n\n", "before_files": [{"content": "from flask import request\nfrom flask_restx import Resource, abort\nfrom flask import current_app as ca\n\nfrom mindsdb.api.http.namespaces.configs.util import ns_conf\nfrom mindsdb import __about__\n\n@ns_conf.route('/ping')\nclass Ping(Resource):\n @ns_conf.doc('get_ping')\n def get(self):\n '''Checks server avaliable'''\n return {'status': 'ok'}\n\n@ns_conf.route('/report_uuid')\nclass ReportUUID(Resource):\n @ns_conf.doc('get_report_uuid')\n def get(self):\n metamodel_name = '___monitroing_metamodel___'\n predictor = ca.mindsdb_native.create(metamodel_name)\n return {\n 'report_uuid': predictor.report_uuid\n }\n", "path": "mindsdb/api/http/namespaces/util.py"}], "after_files": [{"content": "import os\nfrom flask import request\nfrom flask_restx import Resource, abort\nfrom flask import current_app as ca\n\nfrom mindsdb.api.http.namespaces.configs.util import ns_conf\nfrom mindsdb import __about__\n\nTELEMETRY_FILE = 'telemetry.lock'\n\n@ns_conf.route('/ping')\nclass Ping(Resource):\n @ns_conf.doc('get_ping')\n def get(self):\n '''Checks server avaliable'''\n return {'status': 'ok'}\n\n@ns_conf.route('/report_uuid')\nclass ReportUUID(Resource):\n @ns_conf.doc('get_report_uuid')\n def get(self):\n metamodel_name = '___monitroing_metamodel___'\n predictor = ca.mindsdb_native.create(metamodel_name)\n return {\n 'report_uuid': predictor.report_uuid\n }\n\n@ns_conf.route('/telemetry')\nclass Telemetry(Resource):\n @ns_conf.doc('get_telemetry_status')\n def get(self):\n status = \"enabled\" if is_telemetry_active() else \"disabled\"\n return {\"status\": status}\n\n @ns_conf.doc('set_telemetry')\n def post(self):\n data = request.json\n action = data['action']\n if str(action).lower() in 
[\"true\", \"enable\", \"on\"]:\n enable_telemetry()\n else:\n disable_telemetry()\n\n\ndef enable_telemetry():\n path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)\n if os.path.exists(path):\n os.remove(path)\n\ndef disable_telemetry():\n path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)\n with open(path, 'w') as _:\n pass\n\ndef is_telemetry_active():\n path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)\n return not os.path.exists(path)\n", "path": "mindsdb/api/http/namespaces/util.py"}]} | 531 | 433 |
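For context, a minimal client sketch against the patched endpoint. The `/api/util` prefix, host, and port are assumptions here; only the `/telemetry` path, the GET response shape, and the POST `{"action": ...}` payload come from the diff above.

```python
import requests  # assumes the third-party `requests` package and a running server

BASE = "http://127.0.0.1:47334/api/util"  # host/port/prefix are assumptions

# GET reports the current state: {"status": "enabled"} or {"status": "disabled"}.
print(requests.get(f"{BASE}/telemetry").json())

# POST toggles it; "true"/"enable"/"on" (case-insensitive) enable, anything else disables.
requests.post(f"{BASE}/telemetry", json={"action": "disable"})
print(requests.get(f"{BASE}/telemetry").json())  # {'status': 'disabled'}
```

Worth noting about the design: the patch persists the choice as a lock file in `storage_dir` (telemetry counts as active exactly when `telemetry.lock` is absent), so the setting survives restarts without any config-file writes.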
gh_patches_debug_67113 | rasdani/github-patches | git_diff | zestedesavoir__zds-site-5120 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
403 error after a search in the library
> Hello,
>
> I am hitting a 403 error ("Vous n'avez pas les droits suffisants pour accéder à cette page.", i.e. "You do not have sufficient rights to access this page.") after searching the courses in the library.
Here is how it came up:
>
> - Arrived on the site logged out
> - Went to "Développement web" from the banner at the top of the site
> - From there I logged into my account (same tab, regular button), which brought me back to the page
> - Then I searched for "PHP" in the search bar, which took me to [this link](https://zestedesavoir.com/rechercher/?q=PHP&models=content&from_library=on&category=informatique&subcategory=site-web)
>
> The 403 error occurs when I tick 1 to 3 of the checkboxes (below the search bar) to filter the results and click the "search" button in the bar again.
>
> Here is [an example link](https://zestedesavoir.com/rechercher/?q=PHP&category=informatique&subcategory=site-web&from_library=on&models=) that triggers a 403 on my end.
>
> Bye
Thread: https://zestedesavoir.com/forums/sujet/11609/erreur-403-apres-recherche-dans-la-bibliotheque/
*Sent from Zeste de Savoir*
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `zds/searchv2/forms.py`
Content:
```
1 import os
2 import random
3
4 from django import forms
5 from django.conf import settings
6 from django.utils.translation import ugettext_lazy as _
7
8 from crispy_forms.bootstrap import StrictButton
9 from crispy_forms.helper import FormHelper
10 from crispy_forms.layout import Layout, Field
11 from django.core.urlresolvers import reverse
12
13
14 class SearchForm(forms.Form):
15 q = forms.CharField(
16 label=_('Recherche'),
17 max_length=150,
18 required=False,
19 widget=forms.TextInput(
20 attrs={
21 'type': 'search',
22 'required': 'required'
23 }
24 )
25 )
26
27 choices = sorted(
28 [(k, v[0]) for k, v in settings.ZDS_APP['search']['search_groups'].items()],
29 key=lambda pair: pair[1]
30 )
31
32 models = forms.MultipleChoiceField(
33 label='',
34 widget=forms.CheckboxSelectMultiple,
35 required=False,
36 choices=choices
37 )
38
39 category = forms.CharField(widget=forms.HiddenInput, required=False)
40 subcategory = forms.CharField(widget=forms.HiddenInput, required=False)
41 from_library = forms.CharField(widget=forms.HiddenInput, required=False)
42
43 def __init__(self, *args, **kwargs):
44
45 super(SearchForm, self).__init__(*args, **kwargs)
46
47 self.helper = FormHelper()
48 self.helper.form_id = 'search-form'
49 self.helper.form_class = 'clearfix'
50 self.helper.form_method = 'get'
51 self.helper.form_action = reverse('search:query')
52
53 try:
54 with open(os.path.join(settings.BASE_DIR, 'suggestions.txt'), 'r') as suggestions_file:
55 suggestions = ', '.join(random.sample(suggestions_file.readlines(), 5)) + '…'
56 except OSError:
57 suggestions = _('Mathématiques, Droit, UDK, Langues, Python…')
58
59 self.fields['q'].widget.attrs['placeholder'] = suggestions
60
61 self.helper.layout = Layout(
62 Field('q'),
63 StrictButton('', type='submit', css_class='ico-after ico-search', title=_('Rechercher')),
64 Field('category'),
65 Field('subcategory'),
66 Field('from_library')
67 )
68
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/zds/searchv2/forms.py b/zds/searchv2/forms.py
--- a/zds/searchv2/forms.py
+++ b/zds/searchv2/forms.py
@@ -31,7 +31,12 @@
models = forms.MultipleChoiceField(
label='',
- widget=forms.CheckboxSelectMultiple,
+ widget=forms.CheckboxSelectMultiple(
+ attrs={
+ 'class': 'search-filters',
+ 'form': 'search-form'
+ }
+ ),
required=False,
choices=choices
)
| {"golden_diff": "diff --git a/zds/searchv2/forms.py b/zds/searchv2/forms.py\n--- a/zds/searchv2/forms.py\n+++ b/zds/searchv2/forms.py\n@@ -31,7 +31,12 @@\n \n models = forms.MultipleChoiceField(\n label='',\n- widget=forms.CheckboxSelectMultiple,\n+ widget=forms.CheckboxSelectMultiple(\n+ attrs={\n+ 'class': 'search-filters',\n+ 'form': 'search-form'\n+ }\n+ ),\n required=False,\n choices=choices\n )\n", "issue": "Erreur 403 apr\u00e8s recherche dans la biblioth\u00e8que\n> Bonjour,\r\n> \r\n> Je tombe sur une erreur 403 \"Vous n\u2019avez pas les droits suffisants pour acc\u00e9der \u00e0 cette page.\" apr\u00e8s une recherche dans les cours pr\u00e9sents dans la biblioth\u00e8que.\r\nVoil\u00e0 comment elle est apparue :\r\n> \r\n> - Arriv\u00e9 sur le site d\u00e9connect\u00e9\r\n> - Je suis all\u00e9 dans \"D\u00e9veloppement web\" depuis le bandeau en haut du site\r\n> - A partir de l\u00e0 je me suis connect\u00e9 \u00e0 mon compte (m\u00eame onglet, bouton classique), ce qui m'a ramen\u00e9 sur la page\r\n> - Puis j'ai fait une recherche \"PHP\" dans la barre de rechercher, ce qui m'a amen\u00e9 sur [ce lien](https://zestedesavoir.com/rechercher/?q=PHP&models=content&from_library=on&category=informatique&subcategory=site-web)\r\n> \r\n> L'erreur 403 se produit lorsque je coche 1 \u00e0 3 cases (sous la barre de recherche), pour filtrer les r\u00e9sultats, et que je clique \u00e0 nouveau sur le bouton \"rechercher\" dans la barre.\r\n> \r\n> Voil\u00e0 [un exemple de lien](https://zestedesavoir.com/rechercher/?q=PHP&category=informatique&subcategory=site-web&from_library=on&models=) provoquant une erreur 403 de mon c\u00f4t\u00e9.\r\n> \r\n> Bye\r\n\r\nSujet : https://zestedesavoir.com/forums/sujet/11609/erreur-403-apres-recherche-dans-la-bibliotheque/\r\n*Envoy\u00e9 depuis Zeste de Savoir*\n", "before_files": [{"content": "import os\nimport random\n\nfrom django import forms\nfrom django.conf import settings\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom crispy_forms.bootstrap import StrictButton\nfrom crispy_forms.helper import FormHelper\nfrom crispy_forms.layout import Layout, Field\nfrom django.core.urlresolvers import reverse\n\n\nclass SearchForm(forms.Form):\n q = forms.CharField(\n label=_('Recherche'),\n max_length=150,\n required=False,\n widget=forms.TextInput(\n attrs={\n 'type': 'search',\n 'required': 'required'\n }\n )\n )\n\n choices = sorted(\n [(k, v[0]) for k, v in settings.ZDS_APP['search']['search_groups'].items()],\n key=lambda pair: pair[1]\n )\n\n models = forms.MultipleChoiceField(\n label='',\n widget=forms.CheckboxSelectMultiple,\n required=False,\n choices=choices\n )\n\n category = forms.CharField(widget=forms.HiddenInput, required=False)\n subcategory = forms.CharField(widget=forms.HiddenInput, required=False)\n from_library = forms.CharField(widget=forms.HiddenInput, required=False)\n\n def __init__(self, *args, **kwargs):\n\n super(SearchForm, self).__init__(*args, **kwargs)\n\n self.helper = FormHelper()\n self.helper.form_id = 'search-form'\n self.helper.form_class = 'clearfix'\n self.helper.form_method = 'get'\n self.helper.form_action = reverse('search:query')\n\n try:\n with open(os.path.join(settings.BASE_DIR, 'suggestions.txt'), 'r') as suggestions_file:\n suggestions = ', '.join(random.sample(suggestions_file.readlines(), 5)) + '\u2026'\n except OSError:\n suggestions = _('Math\u00e9matiques, Droit, UDK, Langues, Python\u2026')\n\n self.fields['q'].widget.attrs['placeholder'] = suggestions\n\n self.helper.layout = 
Layout(\n Field('q'),\n StrictButton('', type='submit', css_class='ico-after ico-search', title=_('Rechercher')),\n Field('category'),\n Field('subcategory'),\n Field('from_library')\n )\n", "path": "zds/searchv2/forms.py"}], "after_files": [{"content": "import os\nimport random\n\nfrom django import forms\nfrom django.conf import settings\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom crispy_forms.bootstrap import StrictButton\nfrom crispy_forms.helper import FormHelper\nfrom crispy_forms.layout import Layout, Field\nfrom django.core.urlresolvers import reverse\n\n\nclass SearchForm(forms.Form):\n q = forms.CharField(\n label=_('Recherche'),\n max_length=150,\n required=False,\n widget=forms.TextInput(\n attrs={\n 'type': 'search',\n 'required': 'required'\n }\n )\n )\n\n choices = sorted(\n [(k, v[0]) for k, v in settings.ZDS_APP['search']['search_groups'].items()],\n key=lambda pair: pair[1]\n )\n\n models = forms.MultipleChoiceField(\n label='',\n widget=forms.CheckboxSelectMultiple(\n attrs={\n 'class': 'search-filters',\n 'form': 'search-form'\n }\n ),\n required=False,\n choices=choices\n )\n\n category = forms.CharField(widget=forms.HiddenInput, required=False)\n subcategory = forms.CharField(widget=forms.HiddenInput, required=False)\n from_library = forms.CharField(widget=forms.HiddenInput, required=False)\n\n def __init__(self, *args, **kwargs):\n\n super(SearchForm, self).__init__(*args, **kwargs)\n\n self.helper = FormHelper()\n self.helper.form_id = 'search-form'\n self.helper.form_class = 'clearfix'\n self.helper.form_method = 'get'\n self.helper.form_action = reverse('search:query')\n\n try:\n with open(os.path.join(settings.BASE_DIR, 'suggestions.txt'), 'r') as suggestions_file:\n suggestions = ', '.join(random.sample(suggestions_file.readlines(), 5)) + '\u2026'\n except OSError:\n suggestions = _('Math\u00e9matiques, Droit, UDK, Langues, Python\u2026')\n\n self.fields['q'].widget.attrs['placeholder'] = suggestions\n\n self.helper.layout = Layout(\n Field('q'),\n StrictButton('', type='submit', css_class='ico-after ico-search', title=_('Rechercher')),\n Field('category'),\n Field('subcategory'),\n Field('from_library')\n )\n", "path": "zds/searchv2/forms.py"}]} | 1,229 | 124 |
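The fix leans on the HTML5 `form` attribute: an `<input>` rendered outside a `<form>` element is still submitted with it when the attribute names that form's `id`. A minimal rendering of the patched widget in isolation (assumes Django is installed; the choice value below is made up for the demo):

```python
import django
from django.conf import settings

settings.configure()  # bare settings suffice for standalone form rendering
django.setup()

from django import forms

class SearchForm(forms.Form):
    models = forms.MultipleChoiceField(
        label='',
        required=False,
        choices=[('content', 'Contents')],  # hypothetical choice for the demo
        widget=forms.CheckboxSelectMultiple(
            attrs={'class': 'search-filters', 'form': 'search-form'}
        ),
    )

# Every rendered <input type="checkbox"> now carries form="search-form", so
# the browser includes the ticked values when #search-form submits, even
# though the checkboxes sit outside the <form> tag in the page layout.
print(SearchForm()['models'])
```

This lines up with the failing URL in the report, whose query string ends in an empty `models=`: before the patch, the checkboxes lived outside the form and their values never made it into the submission.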
gh_patches_debug_2761 | rasdani/github-patches | git_diff | napari__napari-1088 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ListModel.append does not check type
## 🐛 Bug
in working on layer groups, I found a strange lack of type checking when appending to a `ListModel` (which inherits from `TypedList`). [`ListModel.append`](https://github.com/napari/napari/blob/59ed366e9d492a2389c451468fd8b9f96508b4e2/napari/utils/list/_model.py#L59) jumps right over `TypedList.append`
https://github.com/napari/napari/blob/59ed366e9d492a2389c451468fd8b9f96508b4e2/napari/utils/list/_model.py#L58-L60
... and if you try to append something that is not a `Layer` to a `LayerList`, it works fine up until it throws an error (unrelated to typing) in `components.layerlist._add`. Is that supposed to be `TypedList.append(self, obj)`, or was that intentional?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `napari/utils/list/_model.py`
Content:
```
1 from ...utils.event import EmitterGroup
2
3 from ._multi import MultiIndexList
4 from ._typed import TypedList
5
6
7 class ListModel(MultiIndexList, TypedList):
8 """List with events, tuple-indexing, typing, and filtering.
9
10 Parameters
11 ----------
12 basetype : type
13 Type of the elements in the list.
14 iterable : iterable, optional
15 Elements to initialize the list with.
16 lookup : dict of type : function(object, ``basetype``) -> bool
17 Functions that determine if an object is a reference to an
18 element of the list.
19
20 Attributes
21 ----------
22 events : vispy.util.event.EmitterGroup
23 Group of events for adding, removing, and reordering elements
24 within the list.
25 """
26
27 def __init__(self, basetype, iterable=(), lookup=None):
28 super().__init__(basetype, iterable, lookup)
29 self.events = EmitterGroup(
30 source=self,
31 auto_connect=True,
32 added=None,
33 removed=None,
34 reordered=None,
35 changed=None,
36 )
37 self.events.added.connect(self.events.changed)
38 self.events.removed.connect(self.events.changed)
39 self.events.reordered.connect(self.events.changed)
40
41 def __setitem__(self, query, values):
42 indices = tuple(self.__prsitem__(query))
43 new_indices = tuple(values)
44
45 if sorted(indices) != sorted(self.index(v) for v in new_indices):
46 raise TypeError(
47 'must be a reordering of indices; '
48 'setting of list items not allowed'
49 )
50
51 super().__setitem__(indices, new_indices)
52 self.events.reordered()
53
54 def insert(self, index, obj):
55 super().insert(index, obj)
56 self.events.added(item=obj, index=self.__locitem__(index))
57
58 def append(self, obj):
59 super(TypedList, self).append(obj)
60 self.events.added(item=obj, index=len(self) - 1)
61
62 def pop(self, key):
63 obj = super().pop(key)
64 self.events.removed(item=obj, index=key)
65 return obj
66
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/napari/utils/list/_model.py b/napari/utils/list/_model.py
--- a/napari/utils/list/_model.py
+++ b/napari/utils/list/_model.py
@@ -56,7 +56,7 @@
self.events.added(item=obj, index=self.__locitem__(index))
def append(self, obj):
- super(TypedList, self).append(obj)
+ TypedList.append(self, obj)
self.events.added(item=obj, index=len(self) - 1)
def pop(self, key):
| {"golden_diff": "diff --git a/napari/utils/list/_model.py b/napari/utils/list/_model.py\n--- a/napari/utils/list/_model.py\n+++ b/napari/utils/list/_model.py\n@@ -56,7 +56,7 @@\n self.events.added(item=obj, index=self.__locitem__(index))\n \n def append(self, obj):\n- super(TypedList, self).append(obj)\n+ TypedList.append(self, obj)\n self.events.added(item=obj, index=len(self) - 1)\n \n def pop(self, key):\n", "issue": "ListModel.append does not check type\n## \ud83d\udc1b Bug\r\nin working on layer groups, I found a strange lack of type checking when appending to a `ListModel` (which inherits from `TypedList`). [`ListModel.append`](https://github.com/napari/napari/blob/59ed366e9d492a2389c451468fd8b9f96508b4e2/napari/utils/list/_model.py#L59) jumps right over `TypedList.append`\r\nhttps://github.com/napari/napari/blob/59ed366e9d492a2389c451468fd8b9f96508b4e2/napari/utils/list/_model.py#L58-L60\r\n\r\n... and if you try to something that is not a `Layer` to a `LayerList`, it works fine up until throwing an error (unrelated to typing) in `components.layerlist._add`. Is that supposed to be `TypedList.append(self, obj)`? or was that intentional?\n", "before_files": [{"content": "from ...utils.event import EmitterGroup\n\nfrom ._multi import MultiIndexList\nfrom ._typed import TypedList\n\n\nclass ListModel(MultiIndexList, TypedList):\n \"\"\"List with events, tuple-indexing, typing, and filtering.\n\n Parameters\n ----------\n basetype : type\n Type of the elements in the list.\n iterable : iterable, optional\n Elements to initialize the list with.\n lookup : dict of type : function(object, ``basetype``) -> bool\n Functions that determine if an object is a reference to an\n element of the list.\n\n Attributes\n ----------\n events : vispy.util.event.EmitterGroup\n Group of events for adding, removing, and reordering elements\n within the list.\n \"\"\"\n\n def __init__(self, basetype, iterable=(), lookup=None):\n super().__init__(basetype, iterable, lookup)\n self.events = EmitterGroup(\n source=self,\n auto_connect=True,\n added=None,\n removed=None,\n reordered=None,\n changed=None,\n )\n self.events.added.connect(self.events.changed)\n self.events.removed.connect(self.events.changed)\n self.events.reordered.connect(self.events.changed)\n\n def __setitem__(self, query, values):\n indices = tuple(self.__prsitem__(query))\n new_indices = tuple(values)\n\n if sorted(indices) != sorted(self.index(v) for v in new_indices):\n raise TypeError(\n 'must be a reordering of indices; '\n 'setting of list items not allowed'\n )\n\n super().__setitem__(indices, new_indices)\n self.events.reordered()\n\n def insert(self, index, obj):\n super().insert(index, obj)\n self.events.added(item=obj, index=self.__locitem__(index))\n\n def append(self, obj):\n super(TypedList, self).append(obj)\n self.events.added(item=obj, index=len(self) - 1)\n\n def pop(self, key):\n obj = super().pop(key)\n self.events.removed(item=obj, index=key)\n return obj\n", "path": "napari/utils/list/_model.py"}], "after_files": [{"content": "from ...utils.event import EmitterGroup\n\nfrom ._multi import MultiIndexList\nfrom ._typed import TypedList\n\n\nclass ListModel(MultiIndexList, TypedList):\n \"\"\"List with events, tuple-indexing, typing, and filtering.\n\n Parameters\n ----------\n basetype : type\n Type of the elements in the list.\n iterable : iterable, optional\n Elements to initialize the list with.\n lookup : dict of type : function(object, ``basetype``) -> bool\n Functions that determine if an object is a reference to 
an\n element of the list.\n\n Attributes\n ----------\n events : vispy.util.event.EmitterGroup\n Group of events for adding, removing, and reordering elements\n within the list.\n \"\"\"\n\n def __init__(self, basetype, iterable=(), lookup=None):\n super().__init__(basetype, iterable, lookup)\n self.events = EmitterGroup(\n source=self,\n auto_connect=True,\n added=None,\n removed=None,\n reordered=None,\n changed=None,\n )\n self.events.added.connect(self.events.changed)\n self.events.removed.connect(self.events.changed)\n self.events.reordered.connect(self.events.changed)\n\n def __setitem__(self, query, values):\n indices = tuple(self.__prsitem__(query))\n new_indices = tuple(values)\n\n if sorted(indices) != sorted(self.index(v) for v in new_indices):\n raise TypeError(\n 'must be a reordering of indices; '\n 'setting of list items not allowed'\n )\n\n super().__setitem__(indices, new_indices)\n self.events.reordered()\n\n def insert(self, index, obj):\n super().insert(index, obj)\n self.events.added(item=obj, index=self.__locitem__(index))\n\n def append(self, obj):\n TypedList.append(self, obj)\n self.events.added(item=obj, index=len(self) - 1)\n\n def pop(self, key):\n obj = super().pop(key)\n self.events.removed(item=obj, index=key)\n return obj\n", "path": "napari/utils/list/_model.py"}]} | 1,085 | 125 |
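The one-character question here is which class `super()` is anchored on: `super(TypedList, self)` starts the MRO lookup *after* `TypedList`, so `ListModel.append` resolved straight to the built-in `list.append` and skipped the type check. A stripped-down sketch with toy classes (not napari's real ones):

```python
class TypedList(list):
    """Toy stand-in: only accepts ints."""
    def append(self, obj):
        if not isinstance(obj, int):
            raise TypeError(f"expected int, got {type(obj).__name__}")
        super().append(obj)

class ListModel(TypedList):
    def append(self, obj):
        # Bug: super(TypedList, self).append resolves to list.append,
        # bypassing TypedList.append and its isinstance check.
        super(TypedList, self).append(obj)

m = ListModel()
m.append("not an int")   # silently accepted -- the reported behaviour
print(m)                 # ['not an int']

class FixedListModel(TypedList):
    def append(self, obj):
        TypedList.append(self, obj)  # the patched call: the check runs again

try:
    FixedListModel().append("not an int")
except TypeError as err:
    print("fixed:", err)  # fixed: expected int, got str
```

A plain `super().append(obj)` would also pass through `TypedList` here, but the real `ListModel` mixes in `MultiIndexList` as well, so the patch pins the call to `TypedList.append` explicitly, presumably to keep the multiple-inheritance chain predictable.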
gh_patches_debug_17308 | rasdani/github-patches | git_diff | pre-commit__pre-commit-315 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
:args seems to break with {} in list.
I am working on a repo with some hooks for my company: https://github.com/marick/pre-commit-hooks
There is a hook that works fine with this `.pre-commit-config.yaml`:
``` yaml
- repo: /Users/marick/src/pre-commit-hooks
sha: d6dee96f56bf9290f7ebb852c4252c50b8f6215d
stages: [commit, push]
hooks:
- id: prohibit-suspicious-patterns
args: ["AKIA[[:alnum]]", --]
```
However, if I change the first arg by adding `{1}`:
``` yaml
args: ["AKIA[[:alnum]]{1}", --]
```
... I get this:
```
prohibit suspicious patterns..................................................................
An unexpected error has occurred: IndexError: tuple index out of range
Check the log at ~/.pre-commit/pre-commit.log
```
The contents of `pre-commit.log`:
```
An unexpected error has occurred: IndexError: tuple index out of range
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/pre_commit/error_handler.py", line 36, in error_handler
yield
File "/usr/local/lib/python2.7/site-packages/pre_commit/main.py", line 150, in main
return run(runner, args)
File "/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py", line 212, in run
return _run_hooks(repo_hooks, args, write, environ)
File "/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py", line 136, in _run_hooks
retval |= _run_single_hook(hook, repo, args, write, skips)
File "/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py", line 89, in _run_single_hook
retcode, stdout, stderr = repo.run_hook(hook, filenames)
File "/usr/local/lib/python2.7/site-packages/pre_commit/repository.py", line 145, in run_hook
self.cmd_runner, hook, file_args,
File "/usr/local/lib/python2.7/site-packages/pre_commit/languages/script.py", line 23, in run_hook
encoding=None,
File "/usr/local/lib/python2.7/site-packages/pre_commit/prefixed_command_runner.py", line 40, in run
replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)
File "/usr/local/lib/python2.7/site-packages/pre_commit/prefixed_command_runner.py", line 11, in _replace_cmd
return [part.format(**kwargs) for part in cmd]
IndexError: tuple index out of range
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pre_commit/prefixed_command_runner.py`
Content:
```
1 from __future__ import unicode_literals
2
3 import os
4 import os.path
5 import subprocess
6
7 from pre_commit.util import cmd_output
8
9
10 def _replace_cmd(cmd, **kwargs):
11 return [part.format(**kwargs) for part in cmd]
12
13
14 class PrefixedCommandRunner(object):
15 """A PrefixedCommandRunner allows you to run subprocess commands with
16 comand substitution.
17
18 For instance:
19 PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
20
21 will run ['/tmp/foo/foo.sh', 'bar', 'baz']
22 """
23
24 def __init__(
25 self,
26 prefix_dir,
27 popen=subprocess.Popen,
28 makedirs=os.makedirs
29 ):
30 self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
31 self.__popen = popen
32 self.__makedirs = makedirs
33
34 def _create_path_if_not_exists(self):
35 if not os.path.exists(self.prefix_dir):
36 self.__makedirs(self.prefix_dir)
37
38 def run(self, cmd, **kwargs):
39 self._create_path_if_not_exists()
40 replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)
41 return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
42
43 def path(self, *parts):
44 path = os.path.join(self.prefix_dir, *parts)
45 return os.path.normpath(path)
46
47 def exists(self, *parts):
48 return os.path.exists(self.path(*parts))
49
50 @classmethod
51 def from_command_runner(cls, command_runner, path_end):
52 """Constructs a new command runner from an existing one by appending
53 `path_end` to the command runner's prefix directory.
54 """
55 return cls(
56 command_runner.path(path_end),
57 popen=command_runner.__popen,
58 makedirs=command_runner.__makedirs,
59 )
60
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pre_commit/prefixed_command_runner.py b/pre_commit/prefixed_command_runner.py
--- a/pre_commit/prefixed_command_runner.py
+++ b/pre_commit/prefixed_command_runner.py
@@ -7,10 +7,6 @@
from pre_commit.util import cmd_output
-def _replace_cmd(cmd, **kwargs):
- return [part.format(**kwargs) for part in cmd]
-
-
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
comand substitution.
@@ -37,7 +33,9 @@
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
- replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)
+ replaced_cmd = [
+ part.replace('{prefix}', self.prefix_dir) for part in cmd
+ ]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
| {"golden_diff": "diff --git a/pre_commit/prefixed_command_runner.py b/pre_commit/prefixed_command_runner.py\n--- a/pre_commit/prefixed_command_runner.py\n+++ b/pre_commit/prefixed_command_runner.py\n@@ -7,10 +7,6 @@\n from pre_commit.util import cmd_output\n \n \n-def _replace_cmd(cmd, **kwargs):\n- return [part.format(**kwargs) for part in cmd]\n-\n-\n class PrefixedCommandRunner(object):\n \"\"\"A PrefixedCommandRunner allows you to run subprocess commands with\n comand substitution.\n@@ -37,7 +33,9 @@\n \n def run(self, cmd, **kwargs):\n self._create_path_if_not_exists()\n- replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)\n+ replaced_cmd = [\n+ part.replace('{prefix}', self.prefix_dir) for part in cmd\n+ ]\n return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)\n \n def path(self, *parts):\n", "issue": ":args seems to break with {} in list.\nI am working on a repo with some hooks for my company: https://github.com/marick/pre-commit-hooks\n\nThere is a hook that works fine with this `.pre-commit-config.yaml`:\n\n``` yaml\n- repo: /Users/marick/src/pre-commit-hooks\n sha: d6dee96f56bf9290f7ebb852c4252c50b8f6215d\n stages: [commit, push]\n hooks:\n - id: prohibit-suspicious-patterns\n args: [\"AKIA[[:alnum]]\", --]\n```\n\nHowever, it I change the first arg by adding `{1}`:\n\n``` yaml\n args: [\"AKIA[[:alnum]]{1}\", --]\n```\n\n... I get this:\n\n```\nprohibit suspicious patterns..................................................................\nAn unexpected error has occurred: IndexError: tuple index out of range\nCheck the log at ~/.pre-commit/pre-commit.log\n```\n\nThe contents of `pre-commit.log`:\n\n```\nAn unexpected error has occurred: IndexError: tuple index out of range\nTraceback (most recent call last):\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/error_handler.py\", line 36, in error_handler\n yield\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/main.py\", line 150, in main\n return run(runner, args)\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py\", line 212, in run\n return _run_hooks(repo_hooks, args, write, environ)\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py\", line 136, in _run_hooks\n retval |= _run_single_hook(hook, repo, args, write, skips)\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py\", line 89, in _run_single_hook\n retcode, stdout, stderr = repo.run_hook(hook, filenames)\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/repository.py\", line 145, in run_hook\n self.cmd_runner, hook, file_args,\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/languages/script.py\", line 23, in run_hook\n encoding=None,\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/prefixed_command_runner.py\", line 40, in run\n replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/prefixed_command_runner.py\", line 11, in _replace_cmd\n return [part.format(**kwargs) for part in cmd]\nIndexError: tuple index out of range\n\n```\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nimport os\nimport os.path\nimport subprocess\n\nfrom pre_commit.util import cmd_output\n\n\ndef _replace_cmd(cmd, **kwargs):\n return [part.format(**kwargs) for part in cmd]\n\n\nclass PrefixedCommandRunner(object):\n \"\"\"A PrefixedCommandRunner allows you to run subprocess commands with\n comand substitution.\n\n For instance:\n 
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])\n\n will run ['/tmp/foo/foo.sh', 'bar', 'baz']\n \"\"\"\n\n def __init__(\n self,\n prefix_dir,\n popen=subprocess.Popen,\n makedirs=os.makedirs\n ):\n self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep\n self.__popen = popen\n self.__makedirs = makedirs\n\n def _create_path_if_not_exists(self):\n if not os.path.exists(self.prefix_dir):\n self.__makedirs(self.prefix_dir)\n\n def run(self, cmd, **kwargs):\n self._create_path_if_not_exists()\n replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)\n return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)\n\n def path(self, *parts):\n path = os.path.join(self.prefix_dir, *parts)\n return os.path.normpath(path)\n\n def exists(self, *parts):\n return os.path.exists(self.path(*parts))\n\n @classmethod\n def from_command_runner(cls, command_runner, path_end):\n \"\"\"Constructs a new command runner from an existing one by appending\n `path_end` to the command runner's prefix directory.\n \"\"\"\n return cls(\n command_runner.path(path_end),\n popen=command_runner.__popen,\n makedirs=command_runner.__makedirs,\n )\n", "path": "pre_commit/prefixed_command_runner.py"}], "after_files": [{"content": "from __future__ import unicode_literals\n\nimport os\nimport os.path\nimport subprocess\n\nfrom pre_commit.util import cmd_output\n\n\nclass PrefixedCommandRunner(object):\n \"\"\"A PrefixedCommandRunner allows you to run subprocess commands with\n comand substitution.\n\n For instance:\n PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])\n\n will run ['/tmp/foo/foo.sh', 'bar', 'baz']\n \"\"\"\n\n def __init__(\n self,\n prefix_dir,\n popen=subprocess.Popen,\n makedirs=os.makedirs\n ):\n self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep\n self.__popen = popen\n self.__makedirs = makedirs\n\n def _create_path_if_not_exists(self):\n if not os.path.exists(self.prefix_dir):\n self.__makedirs(self.prefix_dir)\n\n def run(self, cmd, **kwargs):\n self._create_path_if_not_exists()\n replaced_cmd = [\n part.replace('{prefix}', self.prefix_dir) for part in cmd\n ]\n return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)\n\n def path(self, *parts):\n path = os.path.join(self.prefix_dir, *parts)\n return os.path.normpath(path)\n\n def exists(self, *parts):\n return os.path.exists(self.path(*parts))\n\n @classmethod\n def from_command_runner(cls, command_runner, path_end):\n \"\"\"Constructs a new command runner from an existing one by appending\n `path_end` to the command runner's prefix directory.\n \"\"\"\n return cls(\n command_runner.path(path_end),\n popen=command_runner.__popen,\n makedirs=command_runner.__makedirs,\n )\n", "path": "pre_commit/prefixed_command_runner.py"}]} | 1,392 | 217 |
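The crash is plain `str.format` behaviour rather than anything pre-commit-specific: every `{...}` in a hook argument is parsed as a replacement field, so a regex quantifier like `{1}` is read as positional argument 1, which was never supplied. The patch sidesteps the format mini-language entirely by substituting only the literal `{prefix}` token. A self-contained sketch (the paths are made up):

```python
cmd = ["{prefix}hooks/run.sh", "AKIA[[:alnum]]{1}", "--"]

# Old code path: str.format scans *every* argument for replacement fields,
# so the quantifier {1} becomes positional argument 1 -> IndexError.
try:
    [part.format(prefix="/tmp/repo/") for part in cmd]
except IndexError as err:
    print("format() fails:", err)

# Patched code path: plain substring replacement touches only the
# {prefix} placeholder and leaves user-supplied braces alone.
replaced = [part.replace("{prefix}", "/tmp/repo/") for part in cmd]
print(replaced)  # ['/tmp/repo/hooks/run.sh', 'AKIA[[:alnum]]{1}', '--']
```

The same parsing would also choke on an unpaired `{` in an argument (with a `ValueError` instead), so the replace-based fix hardens more cases than the one reported.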
gh_patches_debug_31061 | rasdani/github-patches | git_diff | conan-io__conan-center-index-2686 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[package] libunwind/1.3.1 library dependency order is wrong
### Package and Environment Details (include every applicable attribute)
* Package Name/Version: **libunwind/1.3.1**
* Operating System+version: **Linux Ubuntu 18.04**
* Compiler+version: **GCC 7**
* Conan version: **conan 1.21.1**
* Python version: **Python 2.7.17, 3.6.9**
### Conan profile (output of `conan profile show default` or `conan profile show <profile>` if custom profile is in use)
```
[settings]
os=Linux
os_build=Linux
arch=x86_64
arch_build=x86_64
compiler=gcc
compiler.version=7
compiler.libcxx=libstdc++11
[options]
[build_requires]
[env]
```
But an **error occurs when linking statically**, that is, with shared=False.
### Steps to reproduce (Include if Applicable)
I will write an example to reproduce this soon.
But there is [an example about libunwind library dependency](https://github.com/daniel-thompson/libunwind-examples/blob/master/Makefile)
* current order : `unwind` `unwind-generic` `unwind-ptrace`
* right order : `unwind-ptrace` `unwind-generic` `unwind`
### Logs (Include/Attach if Applicable)
<details><summary>Click to expand log</summary>
```
Put your log output here
```
</details>
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `recipes/libunwind/all/conanfile.py`
Content:
```
1 from conans import ConanFile, AutoToolsBuildEnvironment, tools
2 from conans.errors import ConanInvalidConfiguration
3 import os
4 import glob
5
6
7 class LiunwindConan(ConanFile):
8 name = "libunwind"
9 description = "Manipulate the preserved state of each call-frame and resume the execution at any point."
10 topics = ("conan", "libunwind", "unwind", "debuggers", "exception-handling", "introspection", "setjmp")
11 url = "https://github.com/conan-io/conan-center-index"
12 homepage = "https://github.com/libunwind/libunwind"
13 license = "MIT"
14 settings = "os", "arch", "compiler", "build_type"
15 options = {"shared": [True, False], "fPIC": [True, False], "coredump": [True, False], "ptrace": [True, False], "setjmp": [True, False]}
16 default_options = {"shared": False, "fPIC": True, "coredump": True, "ptrace": True, "setjmp": True}
17 requires = "xz_utils/5.2.4"
18 _autotools = None
19
20 @property
21 def _source_subfolder(self):
22 return "source_subfolder"
23
24 def configure(self):
25 if self.settings.os not in ["Linux", "FreeBSD"]:
26 raise ConanInvalidConfiguration("libunwind is only supported on Linux and FreeBSD")
27 del self.settings.compiler.libcxx
28 del self.settings.compiler.cppstd
29
30 def source(self):
31 tools.get(**self.conan_data["sources"][self.version])
32 extracted_dir = self.name + "-" + self.version
33 os.rename(extracted_dir, self._source_subfolder)
34
35 def _configure_autotools(self):
36 if not self._autotools:
37 self._autotools = AutoToolsBuildEnvironment(self, win_bash=tools.os_info.is_windows)
38 args = [
39 "--enable-shared={}".format("yes" if self.options.shared else "no"),
40 "--enable-static={}".format("no" if self.options.shared else "yes"),
41 "--enable-coredump={}".format("yes" if self.options.coredump else "no"),
42 "--enable-ptrace={}".format("yes" if self.options.ptrace else "no"),
43 "--enable-setjmp={}".format("yes" if self.options.setjmp else "no"),
44 "--disable-tests",
45 "--disable-documentation"
46 ]
47 self._autotools.configure(configure_dir=self._source_subfolder, args=args)
48 return self._autotools
49
50 def build(self):
51 autotools = self._configure_autotools()
52 autotools.make()
53
54 def package(self):
55 self.copy(pattern="COPYING", dst="licenses", src=self._source_subfolder)
56 autotools = self._configure_autotools()
57 autotools.install()
58 tools.rmdir(os.path.join(self.package_folder, 'lib', 'pkgconfig'))
59 with tools.chdir(os.path.join(self.package_folder, "lib")):
60 for filename in glob.glob("*.la"):
61 os.unlink(filename)
62
63 def package_info(self):
64 self.cpp_info.libs = tools.collect_libs(self)
65 if self.settings.os == "Linux":
66 self.cpp_info.system_libs.append("pthread")
67
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/recipes/libunwind/all/conanfile.py b/recipes/libunwind/all/conanfile.py
--- a/recipes/libunwind/all/conanfile.py
+++ b/recipes/libunwind/all/conanfile.py
@@ -24,6 +24,8 @@
def configure(self):
if self.settings.os not in ["Linux", "FreeBSD"]:
raise ConanInvalidConfiguration("libunwind is only supported on Linux and FreeBSD")
+ if self.options.shared:
+ del self.options.fPIC
del self.settings.compiler.libcxx
del self.settings.compiler.cppstd
@@ -61,6 +63,23 @@
os.unlink(filename)
def package_info(self):
- self.cpp_info.libs = tools.collect_libs(self)
+ self.cpp_info.components["unwind"].names["pkg_config"] = "libunwind"
+ self.cpp_info.components["unwind"].libs = ["unwind"]
+ self.cpp_info.components["unwind"].requires = ["xz_utils::xz_utils"]
if self.settings.os == "Linux":
- self.cpp_info.system_libs.append("pthread")
+ self.cpp_info.components["unwind"].system_libs.append("pthread")
+ self.cpp_info.components["generic"].names["pkg_config"] = "libunwind-generic"
+ self.cpp_info.components["generic"].libs = ["unwind-generic"]
+ self.cpp_info.components["generic"].requires = ["unwind"]
+ if self.options.ptrace:
+ self.cpp_info.components["ptrace"].names["pkg_config"] = "libunwind-ptrace"
+ self.cpp_info.components["ptrace"].libs = ["unwind-ptrace"]
+ self.cpp_info.components["ptrace"].requires = ["generic", "unwind"]
+ if self.options.setjmp:
+ self.cpp_info.components["setjmp"].names["pkg_config"] = "libunwind-setjmp"
+ self.cpp_info.components["setjmp"].libs = ["unwind-setjmp"]
+ self.cpp_info.components["setjmp"].requires = ["unwind"]
+ if self.options.coredump:
+ self.cpp_info.components["coredump"].names["pkg_config"] = "libunwind-coredump"
+ self.cpp_info.components["coredump"].libs = ["unwind-coredump"]
+ self.cpp_info.components["coredump"].requires = ["generic", "unwind"]
| {"golden_diff": "diff --git a/recipes/libunwind/all/conanfile.py b/recipes/libunwind/all/conanfile.py\n--- a/recipes/libunwind/all/conanfile.py\n+++ b/recipes/libunwind/all/conanfile.py\n@@ -24,6 +24,8 @@\n def configure(self):\n if self.settings.os not in [\"Linux\", \"FreeBSD\"]:\n raise ConanInvalidConfiguration(\"libunwind is only supported on Linux and FreeBSD\")\n+ if self.options.shared:\n+ del self.options.fPIC\n del self.settings.compiler.libcxx\n del self.settings.compiler.cppstd\n \n@@ -61,6 +63,23 @@\n os.unlink(filename)\n \n def package_info(self):\n- self.cpp_info.libs = tools.collect_libs(self)\n+ self.cpp_info.components[\"unwind\"].names[\"pkg_config\"] = \"libunwind\"\n+ self.cpp_info.components[\"unwind\"].libs = [\"unwind\"]\n+ self.cpp_info.components[\"unwind\"].requires = [\"xz_utils::xz_utils\"]\n if self.settings.os == \"Linux\":\n- self.cpp_info.system_libs.append(\"pthread\")\n+ self.cpp_info.components[\"unwind\"].system_libs.append(\"pthread\")\n+ self.cpp_info.components[\"generic\"].names[\"pkg_config\"] = \"libunwind-generic\"\n+ self.cpp_info.components[\"generic\"].libs = [\"unwind-generic\"]\n+ self.cpp_info.components[\"generic\"].requires = [\"unwind\"]\n+ if self.options.ptrace:\n+ self.cpp_info.components[\"ptrace\"].names[\"pkg_config\"] = \"libunwind-ptrace\"\n+ self.cpp_info.components[\"ptrace\"].libs = [\"unwind-ptrace\"]\n+ self.cpp_info.components[\"ptrace\"].requires = [\"generic\", \"unwind\"]\n+ if self.options.setjmp:\n+ self.cpp_info.components[\"setjmp\"].names[\"pkg_config\"] = \"libunwind-setjmp\"\n+ self.cpp_info.components[\"setjmp\"].libs = [\"unwind-setjmp\"]\n+ self.cpp_info.components[\"setjmp\"].requires = [\"unwind\"]\n+ if self.options.coredump:\n+ self.cpp_info.components[\"coredump\"].names[\"pkg_config\"] = \"libunwind-coredump\"\n+ self.cpp_info.components[\"coredump\"].libs = [\"unwind-coredump\"]\n+ self.cpp_info.components[\"coredump\"].requires = [\"generic\", \"unwind\"]\n", "issue": "[package] libunwind/1.3.1 library dependency order is wrong\n### Package and Environment Details (include every applicable attribute)\r\n * Package Name/Version: **libunwind/1.3.1**\r\n * Operating System+version: **Linux Ubuntu 18.04**\r\n * Compiler+version: **GCC 7**\r\n * Conan version: **conan 1.21.1**\r\n * Python version: **Python 2.7.17, 3.6.9**\r\n\r\n\r\n### Conan profile (output of `conan profile show default` or `conan profile show <profile>` if custom profile is in use)\r\n```\r\n[settings]\r\nos=Linux\r\nos_build=Linux\r\narch=x86_64\r\narch_build=x86_64\r\ncompiler=gcc\r\ncompiler.version=7\r\ncompiler.libcxx=libstdc++1+\r\n[options]\r\n[build_requires]\r\n[env]\r\n```\r\n\r\nBut **error occurs when static linking**, that is shared=False.\r\n\r\n### Steps to reproduce (Include if Applicable)\r\n\r\nSoon I will write an example to reproduce.\r\n\r\nBut there is [an example about libunwind library dependency](https://github.com/daniel-thompson/libunwind-examples/blob/master/Makefile)\r\n\r\n* current order : `unwind` `unwind-generic` `unwind-ptrace`\r\n* right order : `unwind-ptrace` `unwind-generic` `unwind`\r\n\r\n### Logs (Include/Attach if Applicable)\r\n<details><summary>Click to expand log</summary>\r\n\r\n```\r\nPut your log output here\r\n```\r\n\r\n</details>\r\n\n", "before_files": [{"content": "from conans import ConanFile, AutoToolsBuildEnvironment, tools\nfrom conans.errors import ConanInvalidConfiguration\nimport os\nimport glob\n\n\nclass LiunwindConan(ConanFile):\n name = \"libunwind\"\n description = 
\"Manipulate the preserved state of each call-frame and resume the execution at any point.\"\n topics = (\"conan\", \"libunwind\", \"unwind\", \"debuggers\", \"exception-handling\", \"introspection\", \"setjmp\")\n url = \"https://github.com/conan-io/conan-center-index\"\n homepage = \"https://github.com/libunwind/libunwind\"\n license = \"MIT\"\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n options = {\"shared\": [True, False], \"fPIC\": [True, False], \"coredump\": [True, False], \"ptrace\": [True, False], \"setjmp\": [True, False]}\n default_options = {\"shared\": False, \"fPIC\": True, \"coredump\": True, \"ptrace\": True, \"setjmp\": True}\n requires = \"xz_utils/5.2.4\"\n _autotools = None\n\n @property\n def _source_subfolder(self):\n return \"source_subfolder\"\n\n def configure(self):\n if self.settings.os not in [\"Linux\", \"FreeBSD\"]:\n raise ConanInvalidConfiguration(\"libunwind is only supported on Linux and FreeBSD\")\n del self.settings.compiler.libcxx\n del self.settings.compiler.cppstd\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n extracted_dir = self.name + \"-\" + self.version\n os.rename(extracted_dir, self._source_subfolder)\n\n def _configure_autotools(self):\n if not self._autotools:\n self._autotools = AutoToolsBuildEnvironment(self, win_bash=tools.os_info.is_windows)\n args = [\n \"--enable-shared={}\".format(\"yes\" if self.options.shared else \"no\"),\n \"--enable-static={}\".format(\"no\" if self.options.shared else \"yes\"),\n \"--enable-coredump={}\".format(\"yes\" if self.options.coredump else \"no\"),\n \"--enable-ptrace={}\".format(\"yes\" if self.options.ptrace else \"no\"),\n \"--enable-setjmp={}\".format(\"yes\" if self.options.setjmp else \"no\"),\n \"--disable-tests\",\n \"--disable-documentation\"\n ]\n self._autotools.configure(configure_dir=self._source_subfolder, args=args)\n return self._autotools\n\n def build(self):\n autotools = self._configure_autotools()\n autotools.make()\n\n def package(self):\n self.copy(pattern=\"COPYING\", dst=\"licenses\", src=self._source_subfolder)\n autotools = self._configure_autotools()\n autotools.install()\n tools.rmdir(os.path.join(self.package_folder, 'lib', 'pkgconfig'))\n with tools.chdir(os.path.join(self.package_folder, \"lib\")):\n for filename in glob.glob(\"*.la\"):\n os.unlink(filename)\n\n def package_info(self):\n self.cpp_info.libs = tools.collect_libs(self)\n if self.settings.os == \"Linux\":\n self.cpp_info.system_libs.append(\"pthread\")\n", "path": "recipes/libunwind/all/conanfile.py"}], "after_files": [{"content": "from conans import ConanFile, AutoToolsBuildEnvironment, tools\nfrom conans.errors import ConanInvalidConfiguration\nimport os\nimport glob\n\n\nclass LiunwindConan(ConanFile):\n name = \"libunwind\"\n description = \"Manipulate the preserved state of each call-frame and resume the execution at any point.\"\n topics = (\"conan\", \"libunwind\", \"unwind\", \"debuggers\", \"exception-handling\", \"introspection\", \"setjmp\")\n url = \"https://github.com/conan-io/conan-center-index\"\n homepage = \"https://github.com/libunwind/libunwind\"\n license = \"MIT\"\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n options = {\"shared\": [True, False], \"fPIC\": [True, False], \"coredump\": [True, False], \"ptrace\": [True, False], \"setjmp\": [True, False]}\n default_options = {\"shared\": False, \"fPIC\": True, \"coredump\": True, \"ptrace\": True, \"setjmp\": True}\n requires = \"xz_utils/5.2.4\"\n _autotools = None\n\n 
@property\n def _source_subfolder(self):\n return \"source_subfolder\"\n\n def configure(self):\n if self.settings.os not in [\"Linux\", \"FreeBSD\"]:\n raise ConanInvalidConfiguration(\"libunwind is only supported on Linux and FreeBSD\")\n if self.options.shared:\n del self.options.fPIC\n del self.settings.compiler.libcxx\n del self.settings.compiler.cppstd\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n extracted_dir = self.name + \"-\" + self.version\n os.rename(extracted_dir, self._source_subfolder)\n\n def _configure_autotools(self):\n if not self._autotools:\n self._autotools = AutoToolsBuildEnvironment(self, win_bash=tools.os_info.is_windows)\n args = [\n \"--enable-shared={}\".format(\"yes\" if self.options.shared else \"no\"),\n \"--enable-static={}\".format(\"no\" if self.options.shared else \"yes\"),\n \"--enable-coredump={}\".format(\"yes\" if self.options.coredump else \"no\"),\n \"--enable-ptrace={}\".format(\"yes\" if self.options.ptrace else \"no\"),\n \"--enable-setjmp={}\".format(\"yes\" if self.options.setjmp else \"no\"),\n \"--disable-tests\",\n \"--disable-documentation\"\n ]\n self._autotools.configure(configure_dir=self._source_subfolder, args=args)\n return self._autotools\n\n def build(self):\n autotools = self._configure_autotools()\n autotools.make()\n\n def package(self):\n self.copy(pattern=\"COPYING\", dst=\"licenses\", src=self._source_subfolder)\n autotools = self._configure_autotools()\n autotools.install()\n tools.rmdir(os.path.join(self.package_folder, 'lib', 'pkgconfig'))\n with tools.chdir(os.path.join(self.package_folder, \"lib\")):\n for filename in glob.glob(\"*.la\"):\n os.unlink(filename)\n\n def package_info(self):\n self.cpp_info.components[\"unwind\"].names[\"pkg_config\"] = \"libunwind\"\n self.cpp_info.components[\"unwind\"].libs = [\"unwind\"]\n self.cpp_info.components[\"unwind\"].requires = [\"xz_utils::xz_utils\"]\n if self.settings.os == \"Linux\":\n self.cpp_info.components[\"unwind\"].system_libs.append(\"pthread\")\n self.cpp_info.components[\"generic\"].names[\"pkg_config\"] = \"libunwind-generic\"\n self.cpp_info.components[\"generic\"].libs = [\"unwind-generic\"]\n self.cpp_info.components[\"generic\"].requires = [\"unwind\"]\n if self.options.ptrace:\n self.cpp_info.components[\"ptrace\"].names[\"pkg_config\"] = \"libunwind-ptrace\"\n self.cpp_info.components[\"ptrace\"].libs = [\"unwind-ptrace\"]\n self.cpp_info.components[\"ptrace\"].requires = [\"generic\", \"unwind\"]\n if self.options.setjmp:\n self.cpp_info.components[\"setjmp\"].names[\"pkg_config\"] = \"libunwind-setjmp\"\n self.cpp_info.components[\"setjmp\"].libs = [\"unwind-setjmp\"]\n self.cpp_info.components[\"setjmp\"].requires = [\"unwind\"]\n if self.options.coredump:\n self.cpp_info.components[\"coredump\"].names[\"pkg_config\"] = \"libunwind-coredump\"\n self.cpp_info.components[\"coredump\"].libs = [\"unwind-coredump\"]\n self.cpp_info.components[\"coredump\"].requires = [\"generic\", \"unwind\"]\n", "path": "recipes/libunwind/all/conanfile.py"}]} | 1,448 | 519 |
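The patch fixes the link order indirectly: instead of `collect_libs` (which returns the libraries in no particular order), it declares Conan components whose `requires` edges let generators emit each archive before the archives it depends on, which is what a single-pass static linker needs. A small sketch of that ordering logic (stdlib `graphlib`, Python 3.9+; the graph simply mirrors the patched `package_info`, with external deps such as xz_utils omitted):

```python
from graphlib import TopologicalSorter  # stdlib, Python 3.9+

# Each component lists the components it requires, mirroring the patch.
requires = {
    "unwind-coredump": ["unwind-generic", "unwind"],
    "unwind-ptrace":   ["unwind-generic", "unwind"],
    "unwind-setjmp":   ["unwind"],
    "unwind-generic":  ["unwind"],
    "unwind":          [],
}

# static_order() yields dependencies first; a static link line wants the
# opposite: a library must appear *before* the libraries it depends on.
deps_first = list(TopologicalSorter(requires).static_order())
link_line = " ".join(f"-l{lib}" for lib in reversed(deps_first))
print(link_line)
# e.g.: -lunwind-ptrace -lunwind-coredump -lunwind-setjmp -lunwind-generic -lunwind
```

With the flat `collect_libs` list a consumer could get `-lunwind` ahead of `-lunwind-ptrace`, and a single-pass linker like GNU ld would then discard the symbols the later archive needs, which is presumably the static-link failure reported above.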
gh_patches_debug_5977 | rasdani/github-patches | git_diff | bokeh__bokeh-4754 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Typo in categorical example plot title
File `examples/plotting/file/categorical.py` contains a typo in the plot title.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/plotting/file/categorical.py`
Content:
```
1 from bokeh.layouts import row
2 from bokeh.plotting import figure, show, output_file
3
4 factors = ["a", "b", "c", "d", "e", "f", "g", "h"]
5 x = [50, 40, 65, 10, 25, 37, 80, 60]
6
7 dot = figure(title="Catgorical Dot Plot", tools="", toolbar_location=None,
8 y_range=factors, x_range=[0,100])
9
10 dot.segment(0, factors, x, factors, line_width=2, line_color="green", )
11 dot.circle(x, factors, size=15, fill_color="orange", line_color="green", line_width=3, )
12
13 factors = ["foo", "bar", "baz"]
14 x = ["foo", "foo", "foo", "bar", "bar", "bar", "baz", "baz", "baz"]
15 y = ["foo", "bar", "baz", "foo", "bar", "baz", "foo", "bar", "baz"]
16 colors = [
17 "#0B486B", "#79BD9A", "#CFF09E",
18 "#79BD9A", "#0B486B", "#79BD9A",
19 "#CFF09E", "#79BD9A", "#0B486B"
20 ]
21
22 hm = figure(title="Categorical Heatmap", tools="hover", toolbar_location=None,
23 x_range=factors, y_range=factors)
24
25 hm.rect(x, y, color=colors, width=1, height=1)
26
27 output_file("categorical.html", title="categorical.py example")
28
29 show(row(hm, dot, sizing_mode="scale_width")) # open a browser
30
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
Golden diff:
```diff
diff --git a/examples/plotting/file/categorical.py b/examples/plotting/file/categorical.py
--- a/examples/plotting/file/categorical.py
+++ b/examples/plotting/file/categorical.py
@@ -4,7 +4,7 @@
factors = ["a", "b", "c", "d", "e", "f", "g", "h"]
x = [50, 40, 65, 10, 25, 37, 80, 60]
-dot = figure(title="Catgorical Dot Plot", tools="", toolbar_location=None,
+dot = figure(title="Categorical Dot Plot", tools="", toolbar_location=None,
y_range=factors, x_range=[0,100])
 dot.segment(0, factors, x, factors, line_width=2, line_color="green", )
```
| {"golden_diff": "diff --git a/examples/plotting/file/categorical.py b/examples/plotting/file/categorical.py\n--- a/examples/plotting/file/categorical.py\n+++ b/examples/plotting/file/categorical.py\n@@ -4,7 +4,7 @@\n factors = [\"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\", \"h\"]\n x = [50, 40, 65, 10, 25, 37, 80, 60]\n \n-dot = figure(title=\"Catgorical Dot Plot\", tools=\"\", toolbar_location=None,\n+dot = figure(title=\"Categorical Dot Plot\", tools=\"\", toolbar_location=None,\n y_range=factors, x_range=[0,100])\n \n dot.segment(0, factors, x, factors, line_width=2, line_color=\"green\", )\n", "issue": "Typo in categorical example plot title\nFile examples/plotting/file/categorical.py contains a typo in plot name.\n\n", "before_files": [{"content": "from bokeh.layouts import row\nfrom bokeh.plotting import figure, show, output_file\n\nfactors = [\"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\", \"h\"]\nx = [50, 40, 65, 10, 25, 37, 80, 60]\n\ndot = figure(title=\"Catgorical Dot Plot\", tools=\"\", toolbar_location=None,\n y_range=factors, x_range=[0,100])\n\ndot.segment(0, factors, x, factors, line_width=2, line_color=\"green\", )\ndot.circle(x, factors, size=15, fill_color=\"orange\", line_color=\"green\", line_width=3, )\n\nfactors = [\"foo\", \"bar\", \"baz\"]\nx = [\"foo\", \"foo\", \"foo\", \"bar\", \"bar\", \"bar\", \"baz\", \"baz\", \"baz\"]\ny = [\"foo\", \"bar\", \"baz\", \"foo\", \"bar\", \"baz\", \"foo\", \"bar\", \"baz\"]\ncolors = [\n \"#0B486B\", \"#79BD9A\", \"#CFF09E\",\n \"#79BD9A\", \"#0B486B\", \"#79BD9A\",\n \"#CFF09E\", \"#79BD9A\", \"#0B486B\"\n]\n\nhm = figure(title=\"Categorical Heatmap\", tools=\"hover\", toolbar_location=None,\n x_range=factors, y_range=factors)\n\nhm.rect(x, y, color=colors, width=1, height=1)\n\noutput_file(\"categorical.html\", title=\"categorical.py example\")\n\nshow(row(hm, dot, sizing_mode=\"scale_width\")) # open a browser\n", "path": "examples/plotting/file/categorical.py"}], "after_files": [{"content": "from bokeh.layouts import row\nfrom bokeh.plotting import figure, show, output_file\n\nfactors = [\"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\", \"h\"]\nx = [50, 40, 65, 10, 25, 37, 80, 60]\n\ndot = figure(title=\"Categorical Dot Plot\", tools=\"\", toolbar_location=None,\n y_range=factors, x_range=[0,100])\n\ndot.segment(0, factors, x, factors, line_width=2, line_color=\"green\", )\ndot.circle(x, factors, size=15, fill_color=\"orange\", line_color=\"green\", line_width=3, )\n\nfactors = [\"foo\", \"bar\", \"baz\"]\nx = [\"foo\", \"foo\", \"foo\", \"bar\", \"bar\", \"bar\", \"baz\", \"baz\", \"baz\"]\ny = [\"foo\", \"bar\", \"baz\", \"foo\", \"bar\", \"baz\", \"foo\", \"bar\", \"baz\"]\ncolors = [\n \"#0B486B\", \"#79BD9A\", \"#CFF09E\",\n \"#79BD9A\", \"#0B486B\", \"#79BD9A\",\n \"#CFF09E\", \"#79BD9A\", \"#0B486B\"\n]\n\nhm = figure(title=\"Categorical Heatmap\", tools=\"hover\", toolbar_location=None,\n x_range=factors, y_range=factors)\n\nhm.rect(x, y, color=colors, width=1, height=1)\n\noutput_file(\"categorical.html\", title=\"categorical.py example\")\n\nshow(row(hm, dot, sizing_mode=\"scale_width\")) # open a browser\n", "path": "examples/plotting/file/categorical.py"}]} | 729 | 188 |
gh_patches_debug_26203 | rasdani/github-patches | git_diff | pyg-team__pytorch_geometric-6546 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bipartite graph support for utils.dense_to_sparse
### 🚀 The feature, motivation and pitch
I have a nearly dense bipartite graph (that is, most nodes in node set A are connected to most nodes in node set B), so it is easiest for me to define the edge adjacency matrix as a dense, non-square matrix. However, the message-passing class expects a sparse edge adjacency layout. The `dense_to_sparse` utility would seem ideal for this purpose, but it can only take square matrices and is thus unhelpful for bipartite graphs.
### Alternatives
A way to implicitly request that `propagate` pass messages from every node in A to every node in B would be even better (storing fully connected graphs is very memory-inefficient), but I know that PyG is meant for sparser graph constructions, so this would likely be a feature that isn't used much by other people.
### Additional context
_No response_
--- END ISSUE ---
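For context, here is a minimal sketch of the usage being requested. It assumes `dense_to_sparse` (shown below) is relaxed to accept non-square matrices, as the golden diff at the end of this entry does; the adjacency values are invented for illustration:

```python
import torch
from torch_geometric.utils import dense_to_sparse

# Hypothetical bipartite adjacency: 2 nodes in set A (rows), 3 in set B (columns).
adj = torch.tensor([[1., 0., 2.],
                    [0., 3., 0.]])

edge_index, edge_attr = dense_to_sparse(adj)
# edge_index: tensor([[0, 0, 1],
#                     [0, 2, 1]])   (first row indexes set A, second row set B)
# edge_attr:  tensor([1., 2., 3.])
```

The two rows of `edge_index` then live in different index spaces (set A vs. set B), which matches the bipartite convention that PyG's message passing already supports via an explicit `size=(num_A, num_B)` argument.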
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `torch_geometric/utils/sparse.py`
Content:
```
1 from typing import Any, Optional, Tuple, Union
2
3 import torch
4 from torch import Tensor
5
6 from torch_geometric.typing import SparseTensor
7
8
9 def dense_to_sparse(adj: Tensor) -> Tuple[Tensor, Tensor]:
10 r"""Converts a dense adjacency matrix to a sparse adjacency matrix defined
11 by edge indices and edge attributes.
12
13 Args:
14 adj (Tensor): The dense adjacency matrix.
15 :rtype: (:class:`LongTensor`, :class:`Tensor`)
16
17 Examples:
18
19 >>> # Forr a single adjacency matrix
20 >>> adj = torch.tensor([[3, 1],
21 ... [2, 0]])
22 >>> dense_to_sparse(adj)
23 (tensor([[0, 0, 1],
24 [0, 1, 0]]),
25 tensor([3, 1, 2]))
26
27 >>> # For two adjacency matrixes
28 >>> adj = torch.tensor([[[3, 1],
29 ... [2, 0]],
30 ... [[0, 1],
31 ... [0, 2]]])
32 >>> dense_to_sparse(adj)
33 (tensor([[0, 0, 1, 2, 3],
34 [0, 1, 0, 3, 3]]),
35 tensor([3, 1, 2, 1, 2]))
36 """
37 assert adj.dim() >= 2 and adj.dim() <= 3
38 assert adj.size(-1) == adj.size(-2)
39
40 edge_index = adj.nonzero().t()
41
42 if edge_index.size(0) == 2:
43 edge_attr = adj[edge_index[0], edge_index[1]]
44 return edge_index, edge_attr
45 else:
46 edge_attr = adj[edge_index[0], edge_index[1], edge_index[2]]
47 batch = edge_index[0] * adj.size(-1)
48 row = batch + edge_index[1]
49 col = batch + edge_index[2]
50 return torch.stack([row, col], dim=0), edge_attr
51
52
53 def is_torch_sparse_tensor(src: Any) -> bool:
54 """Returns :obj:`True` if the input :obj:`src` is a
55 :class:`torch.sparse.Tensor` (in any sparse layout).
56
57 Args:
58 src (Any): The input object to be checked.
59 """
60 return isinstance(src, Tensor) and src.is_sparse
61
62
63 def is_sparse(src: Any) -> bool:
64 """Returns :obj:`True` if the input :obj:`src` is of type
65 :class:`torch.sparse.Tensor` (in any sparse layout) or of type
66 :class:`torch_sparse.SparseTensor`.
67
68 Args:
69 src (Any): The input object to be checked.
70 """
71 return is_torch_sparse_tensor(src) or isinstance(src, SparseTensor)
72
73
74 def to_torch_coo_tensor(
75 edge_index: Tensor,
76 edge_attr: Optional[Tensor] = None,
77 size: Optional[Union[int, Tuple[int, int]]] = None,
78 ) -> Tensor:
79 """Converts a sparse adjacency matrix defined by edge indices and edge
80 attributes to a :class:`torch.sparse.Tensor`.
81
82 Args:
83 edge_index (LongTensor): The edge indices.
84 edge_attr (Tensor, optional): The edge attributes.
85 (default: :obj:`None`)
86 size (int or (int, int), optional): The size of the sparse matrix.
87 If given as an integer, will create a quadratic sparse matrix.
88 If set to :obj:`None`, will infer a quadratic sparse matrix based
89 on :obj:`edge_index.max() + 1`. (default: :obj:`None`)
90
91 :rtype: :class:`torch.sparse.FloatTensor`
92
93 Example:
94
95 >>> edge_index = torch.tensor([[0, 1, 1, 2, 2, 3],
96 ... [1, 0, 2, 1, 3, 2]])
97 >>> to_torch_coo_tensor(edge_index)
98 tensor(indices=tensor([[0, 1, 1, 2, 2, 3],
99 [1, 0, 2, 1, 3, 2]]),
100 values=tensor([1., 1., 1., 1., 1., 1.]),
101 size=(4, 4), nnz=6, layout=torch.sparse_coo)
102
103 """
104 if size is None:
105 size = int(edge_index.max()) + 1
106 if not isinstance(size, (tuple, list)):
107 size = (size, size)
108
109 if edge_attr is None:
110 edge_attr = torch.ones(edge_index.size(1), device=edge_index.device)
111
112 size = tuple(size) + edge_attr.size()[1:]
113 out = torch.sparse_coo_tensor(edge_index, edge_attr, size,
114 device=edge_index.device)
115 out = out.coalesce()
116 return out
117
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
Golden diff:
```diff
diff --git a/torch_geometric/utils/sparse.py b/torch_geometric/utils/sparse.py
--- a/torch_geometric/utils/sparse.py
+++ b/torch_geometric/utils/sparse.py
@@ -11,7 +11,10 @@
by edge indices and edge attributes.
Args:
- adj (Tensor): The dense adjacency matrix.
+ adj (Tensor): The dense adjacency matrix of shape
+ :obj:`[num_nodes, num_nodes]` or
+ :obj:`[batch_size, num_nodes, num_nodes]`.
+
:rtype: (:class:`LongTensor`, :class:`Tensor`)
Examples:
@@ -34,8 +37,9 @@
[0, 1, 0, 3, 3]]),
tensor([3, 1, 2, 1, 2]))
"""
- assert adj.dim() >= 2 and adj.dim() <= 3
- assert adj.size(-1) == adj.size(-2)
+ if adj.dim() < 2 or adj.dim() > 3:
+ raise ValueError(f"Dense adjacency matrix 'adj' must be 2- or "
+ f"3-dimensional (got {adj.dim()} dimensions)")
edge_index = adj.nonzero().t()
@@ -44,9 +48,8 @@
return edge_index, edge_attr
else:
edge_attr = adj[edge_index[0], edge_index[1], edge_index[2]]
- batch = edge_index[0] * adj.size(-1)
- row = batch + edge_index[1]
- col = batch + edge_index[2]
+ row = edge_index[1] + adj.size(-2) * edge_index[0]
+ col = edge_index[2] + adj.size(-1) * edge_index[0]
         return torch.stack([row, col], dim=0), edge_attr
```
| {"golden_diff": "diff --git a/torch_geometric/utils/sparse.py b/torch_geometric/utils/sparse.py\n--- a/torch_geometric/utils/sparse.py\n+++ b/torch_geometric/utils/sparse.py\n@@ -11,7 +11,10 @@\n by edge indices and edge attributes.\n \n Args:\n- adj (Tensor): The dense adjacency matrix.\n+ adj (Tensor): The dense adjacency matrix of shape\n+ :obj:`[num_nodes, num_nodes]` or\n+ :obj:`[batch_size, num_nodes, num_nodes]`.\n+\n :rtype: (:class:`LongTensor`, :class:`Tensor`)\n \n Examples:\n@@ -34,8 +37,9 @@\n [0, 1, 0, 3, 3]]),\n tensor([3, 1, 2, 1, 2]))\n \"\"\"\n- assert adj.dim() >= 2 and adj.dim() <= 3\n- assert adj.size(-1) == adj.size(-2)\n+ if adj.dim() < 2 or adj.dim() > 3:\n+ raise ValueError(f\"Dense adjacency matrix 'adj' must be 2- or \"\n+ f\"3-dimensional (got {adj.dim()} dimensions)\")\n \n edge_index = adj.nonzero().t()\n \n@@ -44,9 +48,8 @@\n return edge_index, edge_attr\n else:\n edge_attr = adj[edge_index[0], edge_index[1], edge_index[2]]\n- batch = edge_index[0] * adj.size(-1)\n- row = batch + edge_index[1]\n- col = batch + edge_index[2]\n+ row = edge_index[1] + adj.size(-2) * edge_index[0]\n+ col = edge_index[2] + adj.size(-1) * edge_index[0]\n return torch.stack([row, col], dim=0), edge_attr\n", "issue": "Bipartite graph support for utils.dense_to_sparse\n### \ud83d\ude80 The feature, motivation and pitch\n\nI have a nearly-dense bipartite graph (that is, most features in node set A are connected to most features in node set B), and so it is easiest for me to define the edge adjacency matrix as a dense, non-square matrix. However, the message passing class expects a sparse edge adjacency layout. The dense_to_sparse utility would seem to be ideal for this purpose, but it can only take square matrices (thus, is unhelpful for bipartite graphs).\n\n### Alternatives\n\nA way to implicitly request propagate to pass messages from every node in A to every node in B would be even better (storing fully connected graphs is very memory inefficient), but I know that pyg is meant for sparser graph constructions so this would likely be a feature that wasn't used very much by other people.\n\n### Additional context\n\n_No response_\n", "before_files": [{"content": "from typing import Any, Optional, Tuple, Union\n\nimport torch\nfrom torch import Tensor\n\nfrom torch_geometric.typing import SparseTensor\n\n\ndef dense_to_sparse(adj: Tensor) -> Tuple[Tensor, Tensor]:\n r\"\"\"Converts a dense adjacency matrix to a sparse adjacency matrix defined\n by edge indices and edge attributes.\n\n Args:\n adj (Tensor): The dense adjacency matrix.\n :rtype: (:class:`LongTensor`, :class:`Tensor`)\n\n Examples:\n\n >>> # Forr a single adjacency matrix\n >>> adj = torch.tensor([[3, 1],\n ... [2, 0]])\n >>> dense_to_sparse(adj)\n (tensor([[0, 0, 1],\n [0, 1, 0]]),\n tensor([3, 1, 2]))\n\n >>> # For two adjacency matrixes\n >>> adj = torch.tensor([[[3, 1],\n ... [2, 0]],\n ... [[0, 1],\n ... 
[0, 2]]])\n >>> dense_to_sparse(adj)\n (tensor([[0, 0, 1, 2, 3],\n [0, 1, 0, 3, 3]]),\n tensor([3, 1, 2, 1, 2]))\n \"\"\"\n assert adj.dim() >= 2 and adj.dim() <= 3\n assert adj.size(-1) == adj.size(-2)\n\n edge_index = adj.nonzero().t()\n\n if edge_index.size(0) == 2:\n edge_attr = adj[edge_index[0], edge_index[1]]\n return edge_index, edge_attr\n else:\n edge_attr = adj[edge_index[0], edge_index[1], edge_index[2]]\n batch = edge_index[0] * adj.size(-1)\n row = batch + edge_index[1]\n col = batch + edge_index[2]\n return torch.stack([row, col], dim=0), edge_attr\n\n\ndef is_torch_sparse_tensor(src: Any) -> bool:\n \"\"\"Returns :obj:`True` if the input :obj:`src` is a\n :class:`torch.sparse.Tensor` (in any sparse layout).\n\n Args:\n src (Any): The input object to be checked.\n \"\"\"\n return isinstance(src, Tensor) and src.is_sparse\n\n\ndef is_sparse(src: Any) -> bool:\n \"\"\"Returns :obj:`True` if the input :obj:`src` is of type\n :class:`torch.sparse.Tensor` (in any sparse layout) or of type\n :class:`torch_sparse.SparseTensor`.\n\n Args:\n src (Any): The input object to be checked.\n \"\"\"\n return is_torch_sparse_tensor(src) or isinstance(src, SparseTensor)\n\n\ndef to_torch_coo_tensor(\n edge_index: Tensor,\n edge_attr: Optional[Tensor] = None,\n size: Optional[Union[int, Tuple[int, int]]] = None,\n) -> Tensor:\n \"\"\"Converts a sparse adjacency matrix defined by edge indices and edge\n attributes to a :class:`torch.sparse.Tensor`.\n\n Args:\n edge_index (LongTensor): The edge indices.\n edge_attr (Tensor, optional): The edge attributes.\n (default: :obj:`None`)\n size (int or (int, int), optional): The size of the sparse matrix.\n If given as an integer, will create a quadratic sparse matrix.\n If set to :obj:`None`, will infer a quadratic sparse matrix based\n on :obj:`edge_index.max() + 1`. (default: :obj:`None`)\n\n :rtype: :class:`torch.sparse.FloatTensor`\n\n Example:\n\n >>> edge_index = torch.tensor([[0, 1, 1, 2, 2, 3],\n ... [1, 0, 2, 1, 3, 2]])\n >>> to_torch_coo_tensor(edge_index)\n tensor(indices=tensor([[0, 1, 1, 2, 2, 3],\n [1, 0, 2, 1, 3, 2]]),\n values=tensor([1., 1., 1., 1., 1., 1.]),\n size=(4, 4), nnz=6, layout=torch.sparse_coo)\n\n \"\"\"\n if size is None:\n size = int(edge_index.max()) + 1\n if not isinstance(size, (tuple, list)):\n size = (size, size)\n\n if edge_attr is None:\n edge_attr = torch.ones(edge_index.size(1), device=edge_index.device)\n\n size = tuple(size) + edge_attr.size()[1:]\n out = torch.sparse_coo_tensor(edge_index, edge_attr, size,\n device=edge_index.device)\n out = out.coalesce()\n return out\n", "path": "torch_geometric/utils/sparse.py"}], "after_files": [{"content": "from typing import Any, Optional, Tuple, Union\n\nimport torch\nfrom torch import Tensor\n\nfrom torch_geometric.typing import SparseTensor\n\n\ndef dense_to_sparse(adj: Tensor) -> Tuple[Tensor, Tensor]:\n r\"\"\"Converts a dense adjacency matrix to a sparse adjacency matrix defined\n by edge indices and edge attributes.\n\n Args:\n adj (Tensor): The dense adjacency matrix of shape\n :obj:`[num_nodes, num_nodes]` or\n :obj:`[batch_size, num_nodes, num_nodes]`.\n\n :rtype: (:class:`LongTensor`, :class:`Tensor`)\n\n Examples:\n\n >>> # Forr a single adjacency matrix\n >>> adj = torch.tensor([[3, 1],\n ... [2, 0]])\n >>> dense_to_sparse(adj)\n (tensor([[0, 0, 1],\n [0, 1, 0]]),\n tensor([3, 1, 2]))\n\n >>> # For two adjacency matrixes\n >>> adj = torch.tensor([[[3, 1],\n ... [2, 0]],\n ... [[0, 1],\n ... 
[0, 2]]])\n >>> dense_to_sparse(adj)\n (tensor([[0, 0, 1, 2, 3],\n [0, 1, 0, 3, 3]]),\n tensor([3, 1, 2, 1, 2]))\n \"\"\"\n if adj.dim() < 2 or adj.dim() > 3:\n raise ValueError(f\"Dense adjacency matrix 'adj' must be 2- or \"\n f\"3-dimensional (got {adj.dim()} dimensions)\")\n\n edge_index = adj.nonzero().t()\n\n if edge_index.size(0) == 2:\n edge_attr = adj[edge_index[0], edge_index[1]]\n return edge_index, edge_attr\n else:\n edge_attr = adj[edge_index[0], edge_index[1], edge_index[2]]\n row = edge_index[1] + adj.size(-2) * edge_index[0]\n col = edge_index[2] + adj.size(-1) * edge_index[0]\n return torch.stack([row, col], dim=0), edge_attr\n\n\ndef is_torch_sparse_tensor(src: Any) -> bool:\n \"\"\"Returns :obj:`True` if the input :obj:`src` is a\n :class:`torch.sparse.Tensor` (in any sparse layout).\n\n Args:\n src (Any): The input object to be checked.\n \"\"\"\n return isinstance(src, Tensor) and src.is_sparse\n\n\ndef is_sparse(src: Any) -> bool:\n \"\"\"Returns :obj:`True` if the input :obj:`src` is of type\n :class:`torch.sparse.Tensor` (in any sparse layout) or of type\n :class:`torch_sparse.SparseTensor`.\n\n Args:\n src (Any): The input object to be checked.\n \"\"\"\n return is_torch_sparse_tensor(src) or isinstance(src, SparseTensor)\n\n\ndef to_torch_coo_tensor(\n edge_index: Tensor,\n edge_attr: Optional[Tensor] = None,\n size: Optional[Union[int, Tuple[int, int]]] = None,\n) -> Tensor:\n \"\"\"Converts a sparse adjacency matrix defined by edge indices and edge\n attributes to a :class:`torch.sparse.Tensor`.\n\n Args:\n edge_index (LongTensor): The edge indices.\n edge_attr (Tensor, optional): The edge attributes.\n (default: :obj:`None`)\n size (int or (int, int), optional): The size of the sparse matrix.\n If given as an integer, will create a quadratic sparse matrix.\n If set to :obj:`None`, will infer a quadratic sparse matrix based\n on :obj:`edge_index.max() + 1`. (default: :obj:`None`)\n\n :rtype: :class:`torch.sparse.FloatTensor`\n\n Example:\n\n >>> edge_index = torch.tensor([[0, 1, 1, 2, 2, 3],\n ... [1, 0, 2, 1, 3, 2]])\n >>> to_torch_coo_tensor(edge_index)\n tensor(indices=tensor([[0, 1, 1, 2, 2, 3],\n [1, 0, 2, 1, 3, 2]]),\n values=tensor([1., 1., 1., 1., 1., 1.]),\n size=(4, 4), nnz=6, layout=torch.sparse_coo)\n\n \"\"\"\n if size is None:\n size = int(edge_index.max()) + 1\n if not isinstance(size, (tuple, list)):\n size = (size, size)\n\n if edge_attr is None:\n edge_attr = torch.ones(edge_index.size(1), device=edge_index.device)\n\n size = tuple(size) + edge_attr.size()[1:]\n out = torch.sparse_coo_tensor(edge_index, edge_attr, size,\n device=edge_index.device)\n out = out.coalesce()\n return out\n", "path": "torch_geometric/utils/sparse.py"}]} | 1,763 | 423 |
gh_patches_debug_2242 | rasdani/github-patches | git_diff | python-poetry__poetry-1577 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
poetry v1.0.0b4 breaks on zip packages
<!-- Checked checkbox should look like this: [x] -->
- [x] I am on the [latest](https://github.com/sdispater/poetry/releases/latest) Poetry version.
- [x] I have searched the [issues](https://github.com/sdispater/poetry/issues) of this repo and believe that this is not a duplicate.
- [x] If an exception occurs when executing a command, I executed it again in debug mode (`-vvv` option).
- **OS version and name**: Windows 10 with a virtual environment for Python v3.7.4
- **Poetry version**: 1.0.0b4
- **Link of a [Gist](https://gist.github.com/) with the contents of your pyproject.toml file**: (empty project)
## Issue Summary
The newly refactored distribution loading mechanism from [PR 1549](https://github.com/sdispater/poetry/pull/1549/) fails when a zip-based package is present. The presenting problem is that the `zipp.Path` class is not compatible with the `__fspath__` protocol.
## Issue Details
After updating to Poetry v1.0.0b4, I get this traceback
```
% poetry update -v
Using virtualenv: C:\Users\garyd\devel\video-storage\venv-new
[TypeError]
expected str, bytes or os.PathLike object, not Path
Traceback (most recent call last):
File "C:\Users\garyd\.poetry\lib\poetry\_vendor\py3.7\clikit\console_application.py", line 131, in run
status_code = command.handle(parsed_args, io)
File "C:\Users\garyd\.poetry\lib\poetry\_vendor\py3.7\clikit\api\command\command.py", line 120, in handle
status_code = self._do_handle(args, io)
File "C:\Users\garyd\.poetry\lib\poetry\_vendor\py3.7\clikit\api\command\command.py", line 171, in _do_handle
return getattr(handler, handler_method)(args, io, self)
File "C:\Users\garyd\.poetry\lib\poetry\_vendor\py3.7\cleo\commands\command.py", line 92, in wrap_handle
return self.handle()
File "C:\Users\garyd\.poetry\lib\poetry\console\commands\update.py", line 36, in handle
self.io, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool
File "C:\Users\garyd\.poetry\lib\poetry\installation\installer.py", line 55, in __init__
installed = self._get_installed()
File "C:\Users\garyd\.poetry\lib\poetry\installation\installer.py", line 507, in _get_installed
return InstalledRepository.load(self._env)
File "C:\Users\garyd\.poetry\lib\poetry\repositories\installed_repository.py", line 30, in load
path = Path(distribution._path)
File "C:\Users\garyd\AppData\Local\Programs\Python\Python37\lib\pathlib.py", line 1010, in __new__
self = cls._from_parts(args, init=False)
```
When I run the broken part of the code in my console, I find that the broken distribution is:
* type == <class 'importlib_metadata.PathDistribution'>
* type(dist._path) == <class 'zipp.Path'>
* dist._path == C:\Users\garyd\devel\video-storage\venv\lib\site-packages\setuptools-40.8.0-py3.7.egg/EGG-INFO/
--- END ISSUE ---
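To make the failure mode concrete: `zipp.Path` (what `importlib_metadata` exposes as `_path` for zip/egg distributions) does not implement `__fspath__`, so `pathlib.Path` cannot consume it directly, while its string form is a perfectly usable path. A schematic sketch, not runnable on its own — `distribution` stands in for the object from the traceback above:

```python
from pathlib import Path

# For a zip/egg install, distribution._path is a zipp.Path such as
# ".../site-packages/setuptools-40.8.0-py3.7.egg/EGG-INFO/".
Path(distribution._path)       # TypeError: expected str, bytes or os.PathLike object
Path(str(distribution._path))  # works for both regular and zip-backed paths
```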
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `poetry/repositories/installed_repository.py`
Content:
```
1 from importlib_metadata import distributions
2 from poetry.packages import Package
3 from poetry.utils._compat import Path
4 from poetry.utils.env import Env
5
6 from .repository import Repository
7
8
9 class InstalledRepository(Repository):
10 @classmethod
11 def load(cls, env): # type: (Env) -> InstalledRepository
12 """
13 Load installed packages.
14
15 For now, it uses the pip "freeze" command.
16 """
17 repo = cls()
18
19 for distribution in sorted(
20 distributions(path=env.sys_path), key=lambda d: str(d._path),
21 ):
22 metadata = distribution.metadata
23 name = metadata["name"]
24 version = metadata["version"]
25 package = Package(name, version, version)
26 package.description = metadata.get("summary", "")
27
28 repo.add_package(package)
29
30 path = Path(distribution._path)
31 is_standard_package = True
32 try:
33 path.relative_to(env.site_packages)
34 except ValueError:
35 is_standard_package = False
36
37 if is_standard_package:
38 continue
39
40 src_path = env.path / "src"
41
42 # A VCS dependency should have been installed
43 # in the src directory. If not, it's a path dependency
44 try:
45 path.relative_to(src_path)
46
47 from poetry.vcs.git import Git
48
49 git = Git()
50 revision = git.rev_parse("HEAD", src_path / package.name).strip()
51 url = git.remote_url(src_path / package.name)
52
53 package.source_type = "git"
54 package.source_url = url
55 package.source_reference = revision
56 except ValueError:
57 package.source_type = "directory"
58 package.source_url = str(path.parent)
59
60 return repo
61
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
Golden diff:
```diff
diff --git a/poetry/repositories/installed_repository.py b/poetry/repositories/installed_repository.py
--- a/poetry/repositories/installed_repository.py
+++ b/poetry/repositories/installed_repository.py
@@ -27,7 +27,7 @@
repo.add_package(package)
- path = Path(distribution._path)
+ path = Path(str(distribution._path))
is_standard_package = True
try:
             path.relative_to(env.site_packages)
```
| {"golden_diff": "diff --git a/poetry/repositories/installed_repository.py b/poetry/repositories/installed_repository.py\n--- a/poetry/repositories/installed_repository.py\n+++ b/poetry/repositories/installed_repository.py\n@@ -27,7 +27,7 @@\n \n repo.add_package(package)\n \n- path = Path(distribution._path)\n+ path = Path(str(distribution._path))\n is_standard_package = True\n try:\n path.relative_to(env.site_packages)\n", "issue": "poetry v1.0.0b4 breaks on zip packages\n<!-- Checked checkbox should look like this: [x] -->\r\n- [x] I am on the [latest](https://github.com/sdispater/poetry/releases/latest) Poetry version.\r\n- [x] I have searched the [issues](https://github.com/sdispater/poetry/issues) of this repo and believe that this is not a duplicate.\r\n- [x] If an exception occurs when executing a command, I executed it again in debug mode (`-vvv` option).\r\n\r\n- **OS version and name**: Windows 10 with a virtual environment for Python v3.7.4\r\n- **Poetry version**: 1.0.0b4\r\n- **Link of a [Gist](https://gist.github.com/) with the contents of your pyproject.toml file**: (empty project)\r\n\r\n## Issue Summary\r\nThe newly refactored distribution loading mechanism from [PR 1549](https://github.com/sdispater/poetry/pull/1549/) fails when a zip-based package is present. The presenting problem is that the `zipp.Path` class is not compatible with the `__fspath__` protocol.\r\n\r\n## Issue Details\r\nAfter updating to Poetry v1.0.0b4, I get this traceback\r\n\r\n```\r\n% poetry update -v\r\nUsing virtualenv: C:\\Users\\garyd\\devel\\video-storage\\venv-new\r\n[TypeError]\r\nexpected str, bytes or os.PathLike object, not Path\r\nTraceback (most recent call last):\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\_vendor\\py3.7\\clikit\\console_application.py\", line 131, in run\r\n status_code = command.handle(parsed_args, io)\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\_vendor\\py3.7\\clikit\\api\\command\\command.py\", line 120, in handle\r\n status_code = self._do_handle(args, io)\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\_vendor\\py3.7\\clikit\\api\\command\\command.py\", line 171, in _do_handle\r\n return getattr(handler, handler_method)(args, io, self)\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\_vendor\\py3.7\\cleo\\commands\\command.py\", line 92, in wrap_handle\r\n return self.handle()\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\console\\commands\\update.py\", line 36, in handle\r\n self.io, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\installation\\installer.py\", line 55, in __init__\r\n installed = self._get_installed()\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\installation\\installer.py\", line 507, in _get_installed\r\n return InstalledRepository.load(self._env)\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\repositories\\installed_repository.py\", line 30, in load\r\n path = Path(distribution._path)\r\n File \"C:\\Users\\garyd\\AppData\\Local\\Programs\\Python\\Python37\\lib\\pathlib.py\", line 1010, in __new__\r\n self = cls._from_parts(args, init=False)\r\n```\r\n\r\nWhen I run the broken part of the code in my console, I find that the broken distribution is:\r\n\r\n* type == <class 'importlib_metadata.PathDistribution'>\r\n* type(dist._path) == <class 'zipp.Path'>\r\n* dist._path == C:\\Users\\garyd\\devel\\video-storage\\venv\\lib\\site-packages\\setuptools-40.8.0-py3.7.egg/EGG-INFO/\r\n\r\n\n", "before_files": [{"content": "from importlib_metadata 
import distributions\nfrom poetry.packages import Package\nfrom poetry.utils._compat import Path\nfrom poetry.utils.env import Env\n\nfrom .repository import Repository\n\n\nclass InstalledRepository(Repository):\n @classmethod\n def load(cls, env): # type: (Env) -> InstalledRepository\n \"\"\"\n Load installed packages.\n\n For now, it uses the pip \"freeze\" command.\n \"\"\"\n repo = cls()\n\n for distribution in sorted(\n distributions(path=env.sys_path), key=lambda d: str(d._path),\n ):\n metadata = distribution.metadata\n name = metadata[\"name\"]\n version = metadata[\"version\"]\n package = Package(name, version, version)\n package.description = metadata.get(\"summary\", \"\")\n\n repo.add_package(package)\n\n path = Path(distribution._path)\n is_standard_package = True\n try:\n path.relative_to(env.site_packages)\n except ValueError:\n is_standard_package = False\n\n if is_standard_package:\n continue\n\n src_path = env.path / \"src\"\n\n # A VCS dependency should have been installed\n # in the src directory. If not, it's a path dependency\n try:\n path.relative_to(src_path)\n\n from poetry.vcs.git import Git\n\n git = Git()\n revision = git.rev_parse(\"HEAD\", src_path / package.name).strip()\n url = git.remote_url(src_path / package.name)\n\n package.source_type = \"git\"\n package.source_url = url\n package.source_reference = revision\n except ValueError:\n package.source_type = \"directory\"\n package.source_url = str(path.parent)\n\n return repo\n", "path": "poetry/repositories/installed_repository.py"}], "after_files": [{"content": "from importlib_metadata import distributions\nfrom poetry.packages import Package\nfrom poetry.utils._compat import Path\nfrom poetry.utils.env import Env\n\nfrom .repository import Repository\n\n\nclass InstalledRepository(Repository):\n @classmethod\n def load(cls, env): # type: (Env) -> InstalledRepository\n \"\"\"\n Load installed packages.\n\n For now, it uses the pip \"freeze\" command.\n \"\"\"\n repo = cls()\n\n for distribution in sorted(\n distributions(path=env.sys_path), key=lambda d: str(d._path),\n ):\n metadata = distribution.metadata\n name = metadata[\"name\"]\n version = metadata[\"version\"]\n package = Package(name, version, version)\n package.description = metadata.get(\"summary\", \"\")\n\n repo.add_package(package)\n\n path = Path(str(distribution._path))\n is_standard_package = True\n try:\n path.relative_to(env.site_packages)\n except ValueError:\n is_standard_package = False\n\n if is_standard_package:\n continue\n\n src_path = env.path / \"src\"\n\n # A VCS dependency should have been installed\n # in the src directory. If not, it's a path dependency\n try:\n path.relative_to(src_path)\n\n from poetry.vcs.git import Git\n\n git = Git()\n revision = git.rev_parse(\"HEAD\", src_path / package.name).strip()\n url = git.remote_url(src_path / package.name)\n\n package.source_type = \"git\"\n package.source_url = url\n package.source_reference = revision\n except ValueError:\n package.source_type = \"directory\"\n package.source_url = str(path.parent)\n\n return repo\n", "path": "poetry/repositories/installed_repository.py"}]} | 1,613 | 107 |
gh_patches_debug_21740 | rasdani/github-patches | git_diff | mathesar-foundation__mathesar-503 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
New CSV imports use autogenerated column names.
## Description
<!-- A clear and concise description of what the bug is. -->
According to #459, newly imported CSVs are supposed to use the first row as headers by default. However, newly uploaded CSVs are showing autogenerated column names.
## Expected behavior
<!-- A clear and concise description of what you expected to happen. -->
The first row of the CSV should be used as header names by default.
## To Reproduce
<!-- How can we recreate this bug? Please try to provide a Minimal, Complete, and Verifiable (http://stackoverflow.com/help/mcve) example if code-related. -->
Upload a CSV from the Mathesar UI. Column names will be of the form `column_0`, etc.
--- END ISSUE ---
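For reference, the Django REST Framework mechanism at play: a field declared with a `default` is filled in when the client omits it, whereas `required=True` with no default forces every client to send the field explicitly. A minimal sketch using a hypothetical stand-alone serializer (the actual fix, in the golden diff below, applies the same idea inside `DataFileSerializer`):

```python
from rest_framework import serializers

class UploadOptions(serializers.Serializer):  # hypothetical, for illustration only
    # Omitting "header" from the payload now yields True, so the first
    # CSV row is used for column names by default.
    header = serializers.BooleanField(default=True)

s = UploadOptions(data={})
s.is_valid()      # True
s.validated_data  # OrderedDict([('header', True)])
```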
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mathesar/serializers.py`
Content:
```
1 from django.urls import reverse
2 from rest_framework import serializers
3
4 from mathesar.models import Table, Schema, DataFile, Database
5
6
7 class NestedTableSerializer(serializers.HyperlinkedModelSerializer):
8 url = serializers.SerializerMethodField()
9
10 class Meta:
11 model = Table
12 fields = ['id', 'name', 'url']
13
14 def get_url(self, obj):
15 request = self.context['request']
16 return request.build_absolute_uri(reverse('table-detail', kwargs={'pk': obj.pk}))
17
18
19 class ModelNameField(serializers.CharField):
20 """
21 De-serializes the request field as a string, but serializes the response field as
22 `model.name`. Required to support passing and returing a model name from the
23 endpoint, while also storing the model as a related field.
24 """
25 def to_representation(self, value):
26 return value.name
27
28
29 class SchemaSerializer(serializers.HyperlinkedModelSerializer):
30 tables = NestedTableSerializer(many=True, read_only=True)
31 name = serializers.CharField()
32 database = ModelNameField(max_length=128)
33
34 class Meta:
35 model = Schema
36 fields = ['id', 'name', 'tables', 'database', 'has_dependencies']
37
38
39 class SimpleColumnSerializer(serializers.Serializer):
40 name = serializers.CharField()
41 type = serializers.CharField()
42
43
44 class ColumnSerializer(SimpleColumnSerializer):
45 index = serializers.IntegerField(source='column_index', read_only=True)
46 nullable = serializers.BooleanField(default=True)
47 primary_key = serializers.BooleanField(default=False)
48 valid_target_types = serializers.ListField(read_only=True)
49
50
51 class TableSerializer(serializers.ModelSerializer):
52 columns = SimpleColumnSerializer(many=True, read_only=True, source='sa_columns')
53 records = serializers.SerializerMethodField()
54 name = serializers.CharField()
55 data_files = serializers.PrimaryKeyRelatedField(required=False, many=True, queryset=DataFile.objects.all())
56
57 class Meta:
58 model = Table
59 fields = ['id', 'name', 'schema', 'created_at', 'updated_at',
60 'columns', 'records', 'data_files', 'has_dependencies']
61
62 def get_records(self, obj):
63 if isinstance(obj, Table):
64 # Only get records if we are serializing an existing table
65 request = self.context['request']
66 return request.build_absolute_uri(reverse('table-record-list', kwargs={'table_pk': obj.pk}))
67 else:
68 return None
69
70
71 class RecordSerializer(serializers.BaseSerializer):
72 def to_representation(self, instance):
73 return instance._asdict()
74
75
76 class RecordListParameterSerializer(serializers.Serializer):
77 filters = serializers.JSONField(required=False, default=[])
78 order_by = serializers.JSONField(required=False, default=[])
79 group_count_by = serializers.JSONField(required=False, default=[])
80
81
82 class DatabaseSerializer(serializers.ModelSerializer):
83 supported_types = serializers.ListField(child=serializers.CharField())
84
85 class Meta:
86 model = Database
87 fields = ['id', 'name', 'deleted', 'supported_types']
88 read_only_fields = ['id', 'name', 'deleted', 'supported_types']
89
90
91 class DataFileSerializer(serializers.ModelSerializer):
92 user = serializers.PrimaryKeyRelatedField(
93 default=serializers.CurrentUserDefault(), read_only=True
94 )
95
96 class Meta:
97 model = DataFile
98 fields = [
99 'id', 'file', 'table_imported_to', 'user', 'header', 'delimiter', 'escapechar', 'quotechar'
100 ]
101 extra_kwargs = {'delimiter': {'trim_whitespace': False},
102 'escapechar': {'trim_whitespace': False},
103 'quotechar': {'trim_whitespace': False},
104 'header': {'required': True}}
105 # We only currently support importing to a new table, so setting a table via API is invalid.
106 # User should be set automatically, not submitted via the API.
107 read_only_fields = ['table_imported_to']
108
109 def save(self, **kwargs):
110 """
111 Set user to current user while saving the data file.
112 """
113 current_user = self.fields['user'].get_default()
114 if current_user.is_authenticated:
115 kwargs['user'] = current_user
116 return super().save(**kwargs)
117
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
Golden diff:
```diff
diff --git a/mathesar/serializers.py b/mathesar/serializers.py
--- a/mathesar/serializers.py
+++ b/mathesar/serializers.py
@@ -92,6 +92,7 @@
user = serializers.PrimaryKeyRelatedField(
default=serializers.CurrentUserDefault(), read_only=True
)
+ header = serializers.BooleanField(default=True)
class Meta:
model = DataFile
@@ -100,8 +101,8 @@
]
extra_kwargs = {'delimiter': {'trim_whitespace': False},
'escapechar': {'trim_whitespace': False},
- 'quotechar': {'trim_whitespace': False},
- 'header': {'required': True}}
+ 'quotechar': {'trim_whitespace': False}
+ }
# We only currently support importing to a new table, so setting a table via API is invalid.
# User should be set automatically, not submitted via the API.
         read_only_fields = ['table_imported_to']
```
| {"golden_diff": "diff --git a/mathesar/serializers.py b/mathesar/serializers.py\n--- a/mathesar/serializers.py\n+++ b/mathesar/serializers.py\n@@ -92,6 +92,7 @@\n user = serializers.PrimaryKeyRelatedField(\n default=serializers.CurrentUserDefault(), read_only=True\n )\n+ header = serializers.BooleanField(default=True)\n \n class Meta:\n model = DataFile\n@@ -100,8 +101,8 @@\n ]\n extra_kwargs = {'delimiter': {'trim_whitespace': False},\n 'escapechar': {'trim_whitespace': False},\n- 'quotechar': {'trim_whitespace': False},\n- 'header': {'required': True}}\n+ 'quotechar': {'trim_whitespace': False}\n+ }\n # We only currently support importing to a new table, so setting a table via API is invalid.\n # User should be set automatically, not submitted via the API.\n read_only_fields = ['table_imported_to']\n", "issue": "New CSV imports use autogenerated column names.\n## Description\r\n<!-- A clear and concise description of what the bug is. -->\r\nAccording to #459, newly imported CSVs are supposed to use the first row as headers by default. However, newly uploaded CSVs are showing autogenerated column names.\r\n\r\n## Expected behavior\r\n<!-- A clear and concise description of what you expected to happen. -->\r\nThe first row of the CSV should be used as header names by default.\r\n\r\n## To Reproduce\r\n<!-- How can we recreate this bug? Please try to provide a Minimal, Complete, and Verifiable (http://stackoverflow.com/help/mcve) example if code-related. -->\r\nUpload a CSV from the Mathesar UI. Column names will be of the form `column_0`, etc.\n", "before_files": [{"content": "from django.urls import reverse\nfrom rest_framework import serializers\n\nfrom mathesar.models import Table, Schema, DataFile, Database\n\n\nclass NestedTableSerializer(serializers.HyperlinkedModelSerializer):\n url = serializers.SerializerMethodField()\n\n class Meta:\n model = Table\n fields = ['id', 'name', 'url']\n\n def get_url(self, obj):\n request = self.context['request']\n return request.build_absolute_uri(reverse('table-detail', kwargs={'pk': obj.pk}))\n\n\nclass ModelNameField(serializers.CharField):\n \"\"\"\n De-serializes the request field as a string, but serializes the response field as\n `model.name`. 
Required to support passing and returing a model name from the\n endpoint, while also storing the model as a related field.\n \"\"\"\n def to_representation(self, value):\n return value.name\n\n\nclass SchemaSerializer(serializers.HyperlinkedModelSerializer):\n tables = NestedTableSerializer(many=True, read_only=True)\n name = serializers.CharField()\n database = ModelNameField(max_length=128)\n\n class Meta:\n model = Schema\n fields = ['id', 'name', 'tables', 'database', 'has_dependencies']\n\n\nclass SimpleColumnSerializer(serializers.Serializer):\n name = serializers.CharField()\n type = serializers.CharField()\n\n\nclass ColumnSerializer(SimpleColumnSerializer):\n index = serializers.IntegerField(source='column_index', read_only=True)\n nullable = serializers.BooleanField(default=True)\n primary_key = serializers.BooleanField(default=False)\n valid_target_types = serializers.ListField(read_only=True)\n\n\nclass TableSerializer(serializers.ModelSerializer):\n columns = SimpleColumnSerializer(many=True, read_only=True, source='sa_columns')\n records = serializers.SerializerMethodField()\n name = serializers.CharField()\n data_files = serializers.PrimaryKeyRelatedField(required=False, many=True, queryset=DataFile.objects.all())\n\n class Meta:\n model = Table\n fields = ['id', 'name', 'schema', 'created_at', 'updated_at',\n 'columns', 'records', 'data_files', 'has_dependencies']\n\n def get_records(self, obj):\n if isinstance(obj, Table):\n # Only get records if we are serializing an existing table\n request = self.context['request']\n return request.build_absolute_uri(reverse('table-record-list', kwargs={'table_pk': obj.pk}))\n else:\n return None\n\n\nclass RecordSerializer(serializers.BaseSerializer):\n def to_representation(self, instance):\n return instance._asdict()\n\n\nclass RecordListParameterSerializer(serializers.Serializer):\n filters = serializers.JSONField(required=False, default=[])\n order_by = serializers.JSONField(required=False, default=[])\n group_count_by = serializers.JSONField(required=False, default=[])\n\n\nclass DatabaseSerializer(serializers.ModelSerializer):\n supported_types = serializers.ListField(child=serializers.CharField())\n\n class Meta:\n model = Database\n fields = ['id', 'name', 'deleted', 'supported_types']\n read_only_fields = ['id', 'name', 'deleted', 'supported_types']\n\n\nclass DataFileSerializer(serializers.ModelSerializer):\n user = serializers.PrimaryKeyRelatedField(\n default=serializers.CurrentUserDefault(), read_only=True\n )\n\n class Meta:\n model = DataFile\n fields = [\n 'id', 'file', 'table_imported_to', 'user', 'header', 'delimiter', 'escapechar', 'quotechar'\n ]\n extra_kwargs = {'delimiter': {'trim_whitespace': False},\n 'escapechar': {'trim_whitespace': False},\n 'quotechar': {'trim_whitespace': False},\n 'header': {'required': True}}\n # We only currently support importing to a new table, so setting a table via API is invalid.\n # User should be set automatically, not submitted via the API.\n read_only_fields = ['table_imported_to']\n\n def save(self, **kwargs):\n \"\"\"\n Set user to current user while saving the data file.\n \"\"\"\n current_user = self.fields['user'].get_default()\n if current_user.is_authenticated:\n kwargs['user'] = current_user\n return super().save(**kwargs)\n", "path": "mathesar/serializers.py"}], "after_files": [{"content": "from django.urls import reverse\nfrom rest_framework import serializers\n\nfrom mathesar.models import Table, Schema, DataFile, Database\n\n\nclass 
NestedTableSerializer(serializers.HyperlinkedModelSerializer):\n url = serializers.SerializerMethodField()\n\n class Meta:\n model = Table\n fields = ['id', 'name', 'url']\n\n def get_url(self, obj):\n request = self.context['request']\n return request.build_absolute_uri(reverse('table-detail', kwargs={'pk': obj.pk}))\n\n\nclass ModelNameField(serializers.CharField):\n \"\"\"\n De-serializes the request field as a string, but serializes the response field as\n `model.name`. Required to support passing and returing a model name from the\n endpoint, while also storing the model as a related field.\n \"\"\"\n def to_representation(self, value):\n return value.name\n\n\nclass SchemaSerializer(serializers.HyperlinkedModelSerializer):\n tables = NestedTableSerializer(many=True, read_only=True)\n name = serializers.CharField()\n database = ModelNameField(max_length=128)\n\n class Meta:\n model = Schema\n fields = ['id', 'name', 'tables', 'database', 'has_dependencies']\n\n\nclass SimpleColumnSerializer(serializers.Serializer):\n name = serializers.CharField()\n type = serializers.CharField()\n\n\nclass ColumnSerializer(SimpleColumnSerializer):\n index = serializers.IntegerField(source='column_index', read_only=True)\n nullable = serializers.BooleanField(default=True)\n primary_key = serializers.BooleanField(default=False)\n valid_target_types = serializers.ListField(read_only=True)\n\n\nclass TableSerializer(serializers.ModelSerializer):\n columns = SimpleColumnSerializer(many=True, read_only=True, source='sa_columns')\n records = serializers.SerializerMethodField()\n name = serializers.CharField()\n data_files = serializers.PrimaryKeyRelatedField(required=False, many=True, queryset=DataFile.objects.all())\n\n class Meta:\n model = Table\n fields = ['id', 'name', 'schema', 'created_at', 'updated_at',\n 'columns', 'records', 'data_files', 'has_dependencies']\n\n def get_records(self, obj):\n if isinstance(obj, Table):\n # Only get records if we are serializing an existing table\n request = self.context['request']\n return request.build_absolute_uri(reverse('table-record-list', kwargs={'table_pk': obj.pk}))\n else:\n return None\n\n\nclass RecordSerializer(serializers.BaseSerializer):\n def to_representation(self, instance):\n return instance._asdict()\n\n\nclass RecordListParameterSerializer(serializers.Serializer):\n filters = serializers.JSONField(required=False, default=[])\n order_by = serializers.JSONField(required=False, default=[])\n group_count_by = serializers.JSONField(required=False, default=[])\n\n\nclass DatabaseSerializer(serializers.ModelSerializer):\n supported_types = serializers.ListField(child=serializers.CharField())\n\n class Meta:\n model = Database\n fields = ['id', 'name', 'deleted', 'supported_types']\n read_only_fields = ['id', 'name', 'deleted', 'supported_types']\n\n\nclass DataFileSerializer(serializers.ModelSerializer):\n user = serializers.PrimaryKeyRelatedField(\n default=serializers.CurrentUserDefault(), read_only=True\n )\n header = serializers.BooleanField(default=True)\n\n class Meta:\n model = DataFile\n fields = [\n 'id', 'file', 'table_imported_to', 'user', 'header', 'delimiter', 'escapechar', 'quotechar'\n ]\n extra_kwargs = {'delimiter': {'trim_whitespace': False},\n 'escapechar': {'trim_whitespace': False},\n 'quotechar': {'trim_whitespace': False}\n }\n # We only currently support importing to a new table, so setting a table via API is invalid.\n # User should be set automatically, not submitted via the API.\n read_only_fields = ['table_imported_to']\n\n 
def save(self, **kwargs):\n \"\"\"\n Set user to current user while saving the data file.\n \"\"\"\n current_user = self.fields['user'].get_default()\n if current_user.is_authenticated:\n kwargs['user'] = current_user\n return super().save(**kwargs)\n", "path": "mathesar/serializers.py"}]} | 1,529 | 214 |
gh_patches_debug_47851 | rasdani/github-patches | git_diff | searx__searx-2256 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Make secret_key default one that will fail if not set to a custom value
Currently, the `secret_key` default value is `ultrasecretkey`, which is a valid value. Would it not be better to make the default value of this setting one that causes searx to fail to start? This would force the user to consciously change the setting to a secure value instead of accidentally forgetting to set it to something random and secure.
--- END ISSUE ---
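Whatever fail-fast mechanism is adopted (the golden diff below logs an error and exits when the shipped default is still in place), operators still need a strong value. One way to generate one — usable via the `SEARX_SECRET` environment override that the module below already reads — is:

```python
import secrets

# Print a 64-hex-character key; export it as SEARX_SECRET or paste it
# into settings.yml as server.secret_key.
print(secrets.token_hex(32))
```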
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `searx/__init__.py`
Content:
```
1 '''
2 searx is free software: you can redistribute it and/or modify
3 it under the terms of the GNU Affero General Public License as published by
4 the Free Software Foundation, either version 3 of the License, or
5 (at your option) any later version.
6
7 searx is distributed in the hope that it will be useful,
8 but WITHOUT ANY WARRANTY; without even the implied warranty of
9 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 GNU Affero General Public License for more details.
11
12 You should have received a copy of the GNU Affero General Public License
13 along with searx. If not, see < http://www.gnu.org/licenses/ >.
14
15 (C) 2013- by Adam Tauber, <[email protected]>
16 '''
17
18 import logging
19 from os import environ
20 from os.path import realpath, dirname, join, abspath, isfile
21 from io import open
22 from yaml import safe_load
23
24
25 searx_dir = abspath(dirname(__file__))
26 engine_dir = dirname(realpath(__file__))
27 static_path = abspath(join(dirname(__file__), 'static'))
28
29
30 def check_settings_yml(file_name):
31 if isfile(file_name):
32 return file_name
33 else:
34 return None
35
36
37 # find location of settings.yml
38 if 'SEARX_SETTINGS_PATH' in environ:
39 # if possible set path to settings using the
40 # enviroment variable SEARX_SETTINGS_PATH
41 settings_path = check_settings_yml(environ['SEARX_SETTINGS_PATH'])
42 else:
43 # if not, get it from searx code base or last solution from /etc/searx
44 settings_path = check_settings_yml(join(searx_dir, 'settings.yml')) or check_settings_yml('/etc/searx/settings.yml')
45
46 if not settings_path:
47 raise Exception('settings.yml not found')
48
49 # load settings
50 with open(settings_path, 'r', encoding='utf-8') as settings_yaml:
51 settings = safe_load(settings_yaml)
52
53 if settings['ui']['static_path']:
54 static_path = settings['ui']['static_path']
55
56 '''
57 enable debug if
58 the environnement variable SEARX_DEBUG is 1 or true
59 (whatever the value in settings.yml)
60 or general.debug=True in settings.yml
61
62 disable debug if
63 the environnement variable SEARX_DEBUG is 0 or false
64 (whatever the value in settings.yml)
65 or general.debug=False in settings.yml
66 '''
67 searx_debug_env = environ.get('SEARX_DEBUG', '').lower()
68 if searx_debug_env == 'true' or searx_debug_env == '1':
69 searx_debug = True
70 elif searx_debug_env == 'false' or searx_debug_env == '0':
71 searx_debug = False
72 else:
73 searx_debug = settings.get('general', {}).get('debug')
74
75 if searx_debug:
76 logging.basicConfig(level=logging.DEBUG)
77 else:
78 logging.basicConfig(level=logging.WARNING)
79
80 logger = logging.getLogger('searx')
81 logger.debug('read configuration from %s', settings_path)
82 logger.info('Initialisation done')
83
84 if 'SEARX_SECRET' in environ:
85 settings['server']['secret_key'] = environ['SEARX_SECRET']
86 if 'SEARX_BIND_ADDRESS' in environ:
87 settings['server']['bind_address'] = environ['SEARX_BIND_ADDRESS']
88
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
Golden diff:
```diff
diff --git a/searx/__init__.py b/searx/__init__.py
--- a/searx/__init__.py
+++ b/searx/__init__.py
@@ -85,3 +85,7 @@
settings['server']['secret_key'] = environ['SEARX_SECRET']
if 'SEARX_BIND_ADDRESS' in environ:
settings['server']['bind_address'] = environ['SEARX_BIND_ADDRESS']
+
+if not searx_debug and settings['server']['secret_key'] == 'ultrasecretkey':
+ logger.error('server.secret_key is not changed. Please use something else instead of ultrasecretkey.')
+    exit(1)
```
| {"golden_diff": "diff --git a/searx/__init__.py b/searx/__init__.py\n--- a/searx/__init__.py\n+++ b/searx/__init__.py\n@@ -85,3 +85,7 @@\n settings['server']['secret_key'] = environ['SEARX_SECRET']\n if 'SEARX_BIND_ADDRESS' in environ:\n settings['server']['bind_address'] = environ['SEARX_BIND_ADDRESS']\n+\n+if not searx_debug and settings['server']['secret_key'] == 'ultrasecretkey':\n+ logger.error('server.secret_key is not changed. Please use something else instead of ultrasecretkey.')\n+ exit(1)\n", "issue": "Make secret_key default one that will fail if not set to a custom value\nCurrently, the `secret_key` default value is `ultrasecretkey` which is a valid value. Would it not be better to let the default value of this setting be one that will make searx fail to start? This will force the user to conciously change this setting to a secure value instead of accidentally forgetting to set this to something random and secure.\n", "before_files": [{"content": "'''\nsearx is free software: you can redistribute it and/or modify\nit under the terms of the GNU Affero General Public License as published by\nthe Free Software Foundation, either version 3 of the License, or\n(at your option) any later version.\n\nsearx is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\nGNU Affero General Public License for more details.\n\nYou should have received a copy of the GNU Affero General Public License\nalong with searx. If not, see < http://www.gnu.org/licenses/ >.\n\n(C) 2013- by Adam Tauber, <[email protected]>\n'''\n\nimport logging\nfrom os import environ\nfrom os.path import realpath, dirname, join, abspath, isfile\nfrom io import open\nfrom yaml import safe_load\n\n\nsearx_dir = abspath(dirname(__file__))\nengine_dir = dirname(realpath(__file__))\nstatic_path = abspath(join(dirname(__file__), 'static'))\n\n\ndef check_settings_yml(file_name):\n if isfile(file_name):\n return file_name\n else:\n return None\n\n\n# find location of settings.yml\nif 'SEARX_SETTINGS_PATH' in environ:\n # if possible set path to settings using the\n # enviroment variable SEARX_SETTINGS_PATH\n settings_path = check_settings_yml(environ['SEARX_SETTINGS_PATH'])\nelse:\n # if not, get it from searx code base or last solution from /etc/searx\n settings_path = check_settings_yml(join(searx_dir, 'settings.yml')) or check_settings_yml('/etc/searx/settings.yml')\n\nif not settings_path:\n raise Exception('settings.yml not found')\n\n# load settings\nwith open(settings_path, 'r', encoding='utf-8') as settings_yaml:\n settings = safe_load(settings_yaml)\n\nif settings['ui']['static_path']:\n static_path = settings['ui']['static_path']\n\n'''\nenable debug if\nthe environnement variable SEARX_DEBUG is 1 or true\n(whatever the value in settings.yml)\nor general.debug=True in settings.yml\n\ndisable debug if\nthe environnement variable SEARX_DEBUG is 0 or false\n(whatever the value in settings.yml)\nor general.debug=False in settings.yml\n'''\nsearx_debug_env = environ.get('SEARX_DEBUG', '').lower()\nif searx_debug_env == 'true' or searx_debug_env == '1':\n searx_debug = True\nelif searx_debug_env == 'false' or searx_debug_env == '0':\n searx_debug = False\nelse:\n searx_debug = settings.get('general', {}).get('debug')\n\nif searx_debug:\n logging.basicConfig(level=logging.DEBUG)\nelse:\n logging.basicConfig(level=logging.WARNING)\n\nlogger = logging.getLogger('searx')\nlogger.debug('read configuration from 
%s', settings_path)\nlogger.info('Initialisation done')\n\nif 'SEARX_SECRET' in environ:\n settings['server']['secret_key'] = environ['SEARX_SECRET']\nif 'SEARX_BIND_ADDRESS' in environ:\n settings['server']['bind_address'] = environ['SEARX_BIND_ADDRESS']\n", "path": "searx/__init__.py"}], "after_files": [{"content": "'''\nsearx is free software: you can redistribute it and/or modify\nit under the terms of the GNU Affero General Public License as published by\nthe Free Software Foundation, either version 3 of the License, or\n(at your option) any later version.\n\nsearx is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\nGNU Affero General Public License for more details.\n\nYou should have received a copy of the GNU Affero General Public License\nalong with searx. If not, see < http://www.gnu.org/licenses/ >.\n\n(C) 2013- by Adam Tauber, <[email protected]>\n'''\n\nimport logging\nfrom os import environ\nfrom os.path import realpath, dirname, join, abspath, isfile\nfrom io import open\nfrom yaml import safe_load\n\n\nsearx_dir = abspath(dirname(__file__))\nengine_dir = dirname(realpath(__file__))\nstatic_path = abspath(join(dirname(__file__), 'static'))\n\n\ndef check_settings_yml(file_name):\n if isfile(file_name):\n return file_name\n else:\n return None\n\n\n# find location of settings.yml\nif 'SEARX_SETTINGS_PATH' in environ:\n # if possible set path to settings using the\n # enviroment variable SEARX_SETTINGS_PATH\n settings_path = check_settings_yml(environ['SEARX_SETTINGS_PATH'])\nelse:\n # if not, get it from searx code base or last solution from /etc/searx\n settings_path = check_settings_yml(join(searx_dir, 'settings.yml')) or check_settings_yml('/etc/searx/settings.yml')\n\nif not settings_path:\n raise Exception('settings.yml not found')\n\n# load settings\nwith open(settings_path, 'r', encoding='utf-8') as settings_yaml:\n settings = safe_load(settings_yaml)\n\nif settings['ui']['static_path']:\n static_path = settings['ui']['static_path']\n\n'''\nenable debug if\nthe environnement variable SEARX_DEBUG is 1 or true\n(whatever the value in settings.yml)\nor general.debug=True in settings.yml\n\ndisable debug if\nthe environnement variable SEARX_DEBUG is 0 or false\n(whatever the value in settings.yml)\nor general.debug=False in settings.yml\n'''\nsearx_debug_env = environ.get('SEARX_DEBUG', '').lower()\nif searx_debug_env == 'true' or searx_debug_env == '1':\n searx_debug = True\nelif searx_debug_env == 'false' or searx_debug_env == '0':\n searx_debug = False\nelse:\n searx_debug = settings.get('general', {}).get('debug')\n\nif searx_debug:\n logging.basicConfig(level=logging.DEBUG)\nelse:\n logging.basicConfig(level=logging.WARNING)\n\nlogger = logging.getLogger('searx')\nlogger.debug('read configuration from %s', settings_path)\nlogger.info('Initialisation done')\n\nif 'SEARX_SECRET' in environ:\n settings['server']['secret_key'] = environ['SEARX_SECRET']\nif 'SEARX_BIND_ADDRESS' in environ:\n settings['server']['bind_address'] = environ['SEARX_BIND_ADDRESS']\n\nif not searx_debug and settings['server']['secret_key'] == 'ultrasecretkey':\n logger.error('server.secret_key is not changed. Please use something else instead of ultrasecretkey.')\n exit(1)\n", "path": "searx/__init__.py"}]} | 1,234 | 149 |
gh_patches_debug_20566 | rasdani/github-patches | git_diff | vispy__vispy-930 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Importing gloo should not automatically load the OpenGL library
I'm trying to run vispy on a headless server with docker, to use the ipynb webgl backend exclusively. I cannot `import vispy.gloo`:
```
File "/opt/conda/lib/python3.4/site-packages/vispy/gloo/__init__.py", line 47, in <module>
from . import gl # noqa
File "/opt/conda/lib/python3.4/site-packages/vispy/gloo/gl/__init__.py", line 213, in <module>
from . import gl2 as default_backend # noqa
File "/opt/conda/lib/python3.4/site-packages/vispy/gloo/gl/gl2.py", line 46, in <module>
raise RuntimeError('Could not load OpenGL library.')
RuntimeError: Could not load OpenGL library.
```
I should not need to have the OpenGL library on a headless server when using a remote backend.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `vispy/gloo/gl/gl2.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # Copyright (c) 2015, Vispy Development Team.
3 # Distributed under the (new) BSD License. See LICENSE.txt for more info.
4
5 """ GL ES 2.0 API implemented via desktop GL (i.e subset of normal OpenGL).
6 """
7
8 import os
9 import sys
10 import ctypes.util
11
12 from . import _copy_gl_functions
13 from ._constants import * # noqa
14
15 # Ctypes stuff
16
17
18 # Load the OpenGL library. We more or less follow the same approach
19 # as PyOpenGL does internally
20
21 _have_get_proc_address = False
22 _lib = os.getenv('VISPY_GL_LIB', '')
23 if _lib != '':
24 if sys.platform.startswith('win'):
25 _lib = ctypes.windll.LoadLibrary(_lib)
26 else:
27 _lib = ctypes.cdll.LoadLibrary(_lib)
28 elif sys.platform.startswith('win'):
29 # Windows
30 _lib = ctypes.windll.opengl32
31 try:
32 wglGetProcAddress = _lib.wglGetProcAddress
33 wglGetProcAddress.restype = ctypes.CFUNCTYPE(
34 ctypes.POINTER(ctypes.c_int))
35 wglGetProcAddress.argtypes = [ctypes.c_char_p]
36 _have_get_proc_address = True
37 except AttributeError:
38 pass
39 else:
40 # Unix-ish
41 if sys.platform.startswith('darwin'):
42 _fname = ctypes.util.find_library('OpenGL')
43 else:
44 _fname = ctypes.util.find_library('GL')
45 if not _fname:
46 raise RuntimeError('Could not load OpenGL library.')
47 # Load lib
48 _lib = ctypes.cdll.LoadLibrary(_fname)
49
50
51 def _have_context():
52 return _lib.glGetError() != 1282 # GL_INVALID_OPERATION
53
54
55 def _get_gl_version(_lib):
56 """Helper to get the GL version string"""
57 try:
58 return _lib.glGetString(7938).decode('utf-8')
59 except Exception:
60 return 'unknown'
61
62
63 def _get_gl_func(name, restype, argtypes):
64 # Based on a function in Pyglet
65 try:
66 # Try using normal ctypes stuff
67 func = getattr(_lib, name)
68 func.restype = restype
69 func.argtypes = argtypes
70 return func
71 except AttributeError:
72 if sys.platform.startswith('win'):
73 # Ask for a pointer to the function, this is the approach
74 # for OpenGL extensions on Windows
75 fargs = (restype,) + argtypes
76 ftype = ctypes.WINFUNCTYPE(*fargs)
77 if not _have_get_proc_address:
78 raise RuntimeError('Function %s not available '
79 '(OpenGL version is %s).'
80 % (name, _get_gl_version(_lib)))
81 if not _have_context():
82 raise RuntimeError('Using %s with no OpenGL context.' % name)
83 address = wglGetProcAddress(name.encode('utf-8'))
84 if address:
85 return ctypes.cast(address, ftype)
86 # If not Windows or if we did not return function object on Windows:
87 raise RuntimeError('Function %s not present in context '
88 '(OpenGL version is %s).'
89 % (name, _get_gl_version(_lib)))
90
91
92 # Inject
93
94 from . import _gl2 # noqa
95 _copy_gl_functions(_gl2, globals())
96
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/vispy/gloo/gl/gl2.py b/vispy/gloo/gl/gl2.py
--- a/vispy/gloo/gl/gl2.py
+++ b/vispy/gloo/gl/gl2.py
@@ -11,6 +11,7 @@
from . import _copy_gl_functions
from ._constants import * # noqa
+from ...util import logger
# Ctypes stuff
@@ -43,9 +44,11 @@
else:
_fname = ctypes.util.find_library('GL')
if not _fname:
- raise RuntimeError('Could not load OpenGL library.')
- # Load lib
- _lib = ctypes.cdll.LoadLibrary(_fname)
+ logger.warning('Could not load OpenGL library.')
+ _lib = None
+ else:
+ # Load lib
+ _lib = ctypes.cdll.LoadLibrary(_fname)
def _have_context():
@@ -62,6 +65,8 @@
def _get_gl_func(name, restype, argtypes):
# Based on a function in Pyglet
+ if _lib is None:
+ raise RuntimeError('Could not load OpenGL library, gl cannot be used')
try:
# Try using normal ctypes stuff
func = getattr(_lib, name)
| {"golden_diff": "diff --git a/vispy/gloo/gl/gl2.py b/vispy/gloo/gl/gl2.py\n--- a/vispy/gloo/gl/gl2.py\n+++ b/vispy/gloo/gl/gl2.py\n@@ -11,6 +11,7 @@\n \n from . import _copy_gl_functions\n from ._constants import * # noqa\n+from ...util import logger\n \n # Ctypes stuff\n \n@@ -43,9 +44,11 @@\n else:\n _fname = ctypes.util.find_library('GL')\n if not _fname:\n- raise RuntimeError('Could not load OpenGL library.')\n- # Load lib\n- _lib = ctypes.cdll.LoadLibrary(_fname)\n+ logger.warning('Could not load OpenGL library.')\n+ _lib = None\n+ else:\n+ # Load lib\n+ _lib = ctypes.cdll.LoadLibrary(_fname)\n \n \n def _have_context():\n@@ -62,6 +65,8 @@\n \n def _get_gl_func(name, restype, argtypes):\n # Based on a function in Pyglet\n+ if _lib is None:\n+ raise RuntimeError('Could not load OpenGL library, gl cannot be used')\n try:\n # Try using normal ctypes stuff\n func = getattr(_lib, name)\n", "issue": "Importing gloo should not automatically load the OpenGL library\nI'm trying to run vispy on a headless server with docker, to use the ipynb webgl backend exclusively. I cannot `import vispy.gloo`:\n\n```\n File \"/opt/conda/lib/python3.4/site-packages/vispy/gloo/__init__.py\", line 47, in <module>\n from . import gl # noqa\n File \"/opt/conda/lib/python3.4/site-packages/vispy/gloo/gl/__init__.py\", line 213, in <module>\n from . import gl2 as default_backend # noqa\n File \"/opt/conda/lib/python3.4/site-packages/vispy/gloo/gl/gl2.py\", line 46, in <module>\n raise RuntimeError('Could not load OpenGL library.')\nRuntimeError: Could not load OpenGL library.\n```\n\nI should not need to have the OpenGL library on a headless server when using a remote backend.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright (c) 2015, Vispy Development Team.\n# Distributed under the (new) BSD License. See LICENSE.txt for more info.\n\n\"\"\" GL ES 2.0 API implemented via desktop GL (i.e subset of normal OpenGL).\n\"\"\"\n\nimport os\nimport sys\nimport ctypes.util\n\nfrom . import _copy_gl_functions\nfrom ._constants import * # noqa\n\n# Ctypes stuff\n\n\n# Load the OpenGL library. 
We more or less follow the same approach\n# as PyOpenGL does internally\n\n_have_get_proc_address = False\n_lib = os.getenv('VISPY_GL_LIB', '')\nif _lib != '':\n if sys.platform.startswith('win'):\n _lib = ctypes.windll.LoadLibrary(_lib)\n else:\n _lib = ctypes.cdll.LoadLibrary(_lib)\nelif sys.platform.startswith('win'):\n # Windows\n _lib = ctypes.windll.opengl32\n try:\n wglGetProcAddress = _lib.wglGetProcAddress\n wglGetProcAddress.restype = ctypes.CFUNCTYPE(\n ctypes.POINTER(ctypes.c_int))\n wglGetProcAddress.argtypes = [ctypes.c_char_p]\n _have_get_proc_address = True\n except AttributeError:\n pass\nelse:\n # Unix-ish\n if sys.platform.startswith('darwin'):\n _fname = ctypes.util.find_library('OpenGL')\n else:\n _fname = ctypes.util.find_library('GL')\n if not _fname:\n raise RuntimeError('Could not load OpenGL library.')\n # Load lib\n _lib = ctypes.cdll.LoadLibrary(_fname)\n\n\ndef _have_context():\n return _lib.glGetError() != 1282 # GL_INVALID_OPERATION\n\n\ndef _get_gl_version(_lib):\n \"\"\"Helper to get the GL version string\"\"\"\n try:\n return _lib.glGetString(7938).decode('utf-8')\n except Exception:\n return 'unknown'\n\n\ndef _get_gl_func(name, restype, argtypes):\n # Based on a function in Pyglet\n try:\n # Try using normal ctypes stuff\n func = getattr(_lib, name)\n func.restype = restype\n func.argtypes = argtypes\n return func\n except AttributeError:\n if sys.platform.startswith('win'):\n # Ask for a pointer to the function, this is the approach\n # for OpenGL extensions on Windows\n fargs = (restype,) + argtypes\n ftype = ctypes.WINFUNCTYPE(*fargs)\n if not _have_get_proc_address:\n raise RuntimeError('Function %s not available '\n '(OpenGL version is %s).'\n % (name, _get_gl_version(_lib)))\n if not _have_context():\n raise RuntimeError('Using %s with no OpenGL context.' % name)\n address = wglGetProcAddress(name.encode('utf-8'))\n if address:\n return ctypes.cast(address, ftype)\n # If not Windows or if we did not return function object on Windows:\n raise RuntimeError('Function %s not present in context '\n '(OpenGL version is %s).'\n % (name, _get_gl_version(_lib)))\n\n\n# Inject\n\nfrom . import _gl2 # noqa\n_copy_gl_functions(_gl2, globals())\n", "path": "vispy/gloo/gl/gl2.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright (c) 2015, Vispy Development Team.\n# Distributed under the (new) BSD License. See LICENSE.txt for more info.\n\n\"\"\" GL ES 2.0 API implemented via desktop GL (i.e subset of normal OpenGL).\n\"\"\"\n\nimport os\nimport sys\nimport ctypes.util\n\nfrom . import _copy_gl_functions\nfrom ._constants import * # noqa\nfrom ...util import logger\n\n# Ctypes stuff\n\n\n# Load the OpenGL library. 
We more or less follow the same approach\n# as PyOpenGL does internally\n\n_have_get_proc_address = False\n_lib = os.getenv('VISPY_GL_LIB', '')\nif _lib != '':\n if sys.platform.startswith('win'):\n _lib = ctypes.windll.LoadLibrary(_lib)\n else:\n _lib = ctypes.cdll.LoadLibrary(_lib)\nelif sys.platform.startswith('win'):\n # Windows\n _lib = ctypes.windll.opengl32\n try:\n wglGetProcAddress = _lib.wglGetProcAddress\n wglGetProcAddress.restype = ctypes.CFUNCTYPE(\n ctypes.POINTER(ctypes.c_int))\n wglGetProcAddress.argtypes = [ctypes.c_char_p]\n _have_get_proc_address = True\n except AttributeError:\n pass\nelse:\n # Unix-ish\n if sys.platform.startswith('darwin'):\n _fname = ctypes.util.find_library('OpenGL')\n else:\n _fname = ctypes.util.find_library('GL')\n if not _fname:\n logger.warning('Could not load OpenGL library.')\n _lib = None\n else:\n # Load lib\n _lib = ctypes.cdll.LoadLibrary(_fname)\n\n\ndef _have_context():\n return _lib.glGetError() != 1282 # GL_INVALID_OPERATION\n\n\ndef _get_gl_version(_lib):\n \"\"\"Helper to get the GL version string\"\"\"\n try:\n return _lib.glGetString(7938).decode('utf-8')\n except Exception:\n return 'unknown'\n\n\ndef _get_gl_func(name, restype, argtypes):\n # Based on a function in Pyglet\n if _lib is None:\n raise RuntimeError('Could not load OpenGL library, gl cannot be used')\n try:\n # Try using normal ctypes stuff\n func = getattr(_lib, name)\n func.restype = restype\n func.argtypes = argtypes\n return func\n except AttributeError:\n if sys.platform.startswith('win'):\n # Ask for a pointer to the function, this is the approach\n # for OpenGL extensions on Windows\n fargs = (restype,) + argtypes\n ftype = ctypes.WINFUNCTYPE(*fargs)\n if not _have_get_proc_address:\n raise RuntimeError('Function %s not available '\n '(OpenGL version is %s).'\n % (name, _get_gl_version(_lib)))\n if not _have_context():\n raise RuntimeError('Using %s with no OpenGL context.' % name)\n address = wglGetProcAddress(name.encode('utf-8'))\n if address:\n return ctypes.cast(address, ftype)\n # If not Windows or if we did not return function object on Windows:\n raise RuntimeError('Function %s not present in context '\n '(OpenGL version is %s).'\n % (name, _get_gl_version(_lib)))\n\n\n# Inject\n\nfrom . import _gl2 # noqa\n_copy_gl_functions(_gl2, globals())\n", "path": "vispy/gloo/gl/gl2.py"}]} | 1,365 | 281 |
gh_patches_debug_29995 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-1360 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Petsmart Canada
https://www.petsmart.ca/store-locator/all/
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/spiders/petsmart.py`
Content:
```
1 import datetime
2 import re
3 import scrapy
4 from locations.items import GeojsonPointItem
5 from locations.hours import OpeningHours
6
7 day_mapping = {'MON': 'Mo','TUE': 'Tu','WED': 'We','THU': 'Th',
8 'FRI': 'Fr','SAT': 'Sa','SUN': 'Su'}
9
10 def convert_24hour(time):
11 """
12 Takes 12 hour time as a string and converts it to 24 hour time.
13 """
14
15 if len(time[:-2].split(':')) < 2:
16 hour = time[:-2]
17 minute = '00'
18 else:
19 hour, minute = time[:-2].split(':')
20
21 if time[-2:] == 'AM':
22 time_formatted = hour + ':' + minute
23 elif time[-2:] == 'PM':
24 time_formatted = str(int(hour)+ 12) + ':' + minute
25
26 if time_formatted in ['24:00','0:00','00:00']:
27 time_formatted = '23:59'
28
29 return time_formatted
30
31 class PetSmartSpider(scrapy.Spider):
32 download_delay = 0.2
33 name = "petsmart"
34 item_attributes = { 'brand': "Petsmart" }
35 allowed_domains = ["petsmart.com"]
36 start_urls = (
37 'https://www.petsmart.com/store-locator/all/',
38 )
39
40 def parse(self, response):
41 state_urls = response.xpath('//li[@class="col-sm-12 col-md-4"]/a/@href').extract()
42 is_store_details_urls = response.xpath('//a[@class="store-details-link"]/@href').extract()
43
44 if not state_urls and is_store_details_urls:
45 for url in is_store_details_urls:
46 yield scrapy.Request(response.urljoin(url), callback=self.parse_store)
47 else:
48 for url in state_urls:
49 yield scrapy.Request(response.urljoin(url))
50
51 def parse_store(self, response):
52 ref = re.search(r'.+/?\?(.+)', response.url).group(1)
53
54 properties = {
55 'name': response.xpath('//span[@itemprop="name"]/text()').extract_first().strip(),
56 'addr_full': response.xpath('//div[@itemprop="streetAddress"]/text()').extract_first(),
57 'city': response.xpath('//span[@itemprop="addressLocality"][1]/text()').extract_first().title(),
58 'state': response.xpath('//span[@itemprop="addressLocality"][2]/text()').extract_first(),
59 'postcode': response.xpath('//span[@itemprop="postalCode"]/text()').extract_first(),
60 'lat': float(response.xpath('//input[@name="storeLatitudeVal"]/@value').extract_first()),
61 'lon': float(response.xpath('//input[@name="storeLongitudeVal"]/@value').extract_first()),
62 'phone': response.xpath('//a[@class="store-contact-info"]/text()').extract_first(),
63 'ref': ref,
64 'website': response.url
65 }
66
67 hours = self.parse_hours(response.xpath('//div[@class="store-detail-address"]'))
68
69 if hours:
70 properties['opening_hours'] = hours
71
72 yield GeojsonPointItem(**properties)
73
74 def parse_hours(self, elements):
75 opening_hours = OpeningHours()
76
77 days = elements.xpath('//span[@itemprop="dayOfWeek"]/text()').extract()
78 today = (set(day_mapping) - set(days)).pop()
79 days.remove('TODAY')
80 days.insert(0,today)
81 open_hours = elements.xpath('//div[@class="store-hours"]/time[@itemprop="opens"]/@content').extract()
82 close_hours = elements.xpath('//div[@class="store-hours"]/time[@itemprop="closes"]/@content').extract()
83
84 store_hours = dict((z[0],list(z[1:])) for z in zip(days, open_hours, close_hours))
85
86 for day, hours in store_hours.items():
87 if 'CLOSED' in hours:
88 continue
89 opening_hours.add_range(day=day_mapping[day],
90 open_time=convert_24hour(hours[0]),
91 close_time=convert_24hour(hours[1]))
92 return opening_hours.as_opening_hours()
93
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/locations/spiders/petsmart.py b/locations/spiders/petsmart.py
--- a/locations/spiders/petsmart.py
+++ b/locations/spiders/petsmart.py
@@ -32,9 +32,10 @@
download_delay = 0.2
name = "petsmart"
item_attributes = { 'brand': "Petsmart" }
- allowed_domains = ["petsmart.com"]
+ allowed_domains = ["petsmart.com", "petsmart.ca"]
start_urls = (
'https://www.petsmart.com/store-locator/all/',
+ 'https://www.petsmart.ca/store-locator/all/'
)
def parse(self, response):
@@ -50,6 +51,10 @@
def parse_store(self, response):
ref = re.search(r'.+/?\?(.+)', response.url).group(1)
+ if 'petsmart.ca' in response.url:
+ country = 'CA'
+ elif 'petsmart.com' in response.url:
+ country = 'US'
properties = {
'name': response.xpath('//span[@itemprop="name"]/text()').extract_first().strip(),
@@ -60,6 +65,7 @@
'lat': float(response.xpath('//input[@name="storeLatitudeVal"]/@value').extract_first()),
'lon': float(response.xpath('//input[@name="storeLongitudeVal"]/@value').extract_first()),
'phone': response.xpath('//a[@class="store-contact-info"]/text()').extract_first(),
+ 'country': country,
'ref': ref,
'website': response.url
}
| {"golden_diff": "diff --git a/locations/spiders/petsmart.py b/locations/spiders/petsmart.py\n--- a/locations/spiders/petsmart.py\n+++ b/locations/spiders/petsmart.py\n@@ -32,9 +32,10 @@\n download_delay = 0.2\n name = \"petsmart\"\n item_attributes = { 'brand': \"Petsmart\" }\n- allowed_domains = [\"petsmart.com\"]\n+ allowed_domains = [\"petsmart.com\", \"petsmart.ca\"]\n start_urls = (\n 'https://www.petsmart.com/store-locator/all/',\n+ 'https://www.petsmart.ca/store-locator/all/'\n )\n \n def parse(self, response):\n@@ -50,6 +51,10 @@\n \n def parse_store(self, response):\n ref = re.search(r'.+/?\\?(.+)', response.url).group(1)\n+ if 'petsmart.ca' in response.url:\n+ country = 'CA'\n+ elif 'petsmart.com' in response.url:\n+ country = 'US'\n \n properties = {\n 'name': response.xpath('//span[@itemprop=\"name\"]/text()').extract_first().strip(),\n@@ -60,6 +65,7 @@\n 'lat': float(response.xpath('//input[@name=\"storeLatitudeVal\"]/@value').extract_first()),\n 'lon': float(response.xpath('//input[@name=\"storeLongitudeVal\"]/@value').extract_first()),\n 'phone': response.xpath('//a[@class=\"store-contact-info\"]/text()').extract_first(),\n+ 'country': country,\n 'ref': ref,\n 'website': response.url\n }\n", "issue": "Petsmart Canada\nhttps://www.petsmart.ca/store-locator/all/\n", "before_files": [{"content": "import datetime\nimport re\nimport scrapy\nfrom locations.items import GeojsonPointItem\nfrom locations.hours import OpeningHours\n\nday_mapping = {'MON': 'Mo','TUE': 'Tu','WED': 'We','THU': 'Th',\n 'FRI': 'Fr','SAT': 'Sa','SUN': 'Su'}\n\ndef convert_24hour(time):\n \"\"\"\n Takes 12 hour time as a string and converts it to 24 hour time.\n \"\"\"\n\n if len(time[:-2].split(':')) < 2:\n hour = time[:-2]\n minute = '00'\n else:\n hour, minute = time[:-2].split(':')\n\n if time[-2:] == 'AM':\n time_formatted = hour + ':' + minute\n elif time[-2:] == 'PM':\n time_formatted = str(int(hour)+ 12) + ':' + minute\n\n if time_formatted in ['24:00','0:00','00:00']:\n time_formatted = '23:59'\n\n return time_formatted\n\nclass PetSmartSpider(scrapy.Spider):\n download_delay = 0.2\n name = \"petsmart\"\n item_attributes = { 'brand': \"Petsmart\" }\n allowed_domains = [\"petsmart.com\"]\n start_urls = (\n 'https://www.petsmart.com/store-locator/all/',\n )\n\n def parse(self, response):\n state_urls = response.xpath('//li[@class=\"col-sm-12 col-md-4\"]/a/@href').extract()\n is_store_details_urls = response.xpath('//a[@class=\"store-details-link\"]/@href').extract()\n\n if not state_urls and is_store_details_urls:\n for url in is_store_details_urls:\n yield scrapy.Request(response.urljoin(url), callback=self.parse_store)\n else:\n for url in state_urls:\n yield scrapy.Request(response.urljoin(url))\n\n def parse_store(self, response):\n ref = re.search(r'.+/?\\?(.+)', response.url).group(1)\n\n properties = {\n 'name': response.xpath('//span[@itemprop=\"name\"]/text()').extract_first().strip(),\n 'addr_full': response.xpath('//div[@itemprop=\"streetAddress\"]/text()').extract_first(),\n 'city': response.xpath('//span[@itemprop=\"addressLocality\"][1]/text()').extract_first().title(),\n 'state': response.xpath('//span[@itemprop=\"addressLocality\"][2]/text()').extract_first(),\n 'postcode': response.xpath('//span[@itemprop=\"postalCode\"]/text()').extract_first(),\n 'lat': float(response.xpath('//input[@name=\"storeLatitudeVal\"]/@value').extract_first()),\n 'lon': float(response.xpath('//input[@name=\"storeLongitudeVal\"]/@value').extract_first()),\n 'phone': 
response.xpath('//a[@class=\"store-contact-info\"]/text()').extract_first(),\n 'ref': ref,\n 'website': response.url\n }\n\n hours = self.parse_hours(response.xpath('//div[@class=\"store-detail-address\"]'))\n\n if hours:\n properties['opening_hours'] = hours\n\n yield GeojsonPointItem(**properties)\n\n def parse_hours(self, elements):\n opening_hours = OpeningHours()\n\n days = elements.xpath('//span[@itemprop=\"dayOfWeek\"]/text()').extract()\n today = (set(day_mapping) - set(days)).pop()\n days.remove('TODAY')\n days.insert(0,today)\n open_hours = elements.xpath('//div[@class=\"store-hours\"]/time[@itemprop=\"opens\"]/@content').extract()\n close_hours = elements.xpath('//div[@class=\"store-hours\"]/time[@itemprop=\"closes\"]/@content').extract()\n\n store_hours = dict((z[0],list(z[1:])) for z in zip(days, open_hours, close_hours))\n\n for day, hours in store_hours.items():\n if 'CLOSED' in hours:\n continue\n opening_hours.add_range(day=day_mapping[day],\n open_time=convert_24hour(hours[0]),\n close_time=convert_24hour(hours[1]))\n return opening_hours.as_opening_hours()\n", "path": "locations/spiders/petsmart.py"}], "after_files": [{"content": "import datetime\nimport re\nimport scrapy\nfrom locations.items import GeojsonPointItem\nfrom locations.hours import OpeningHours\n\nday_mapping = {'MON': 'Mo','TUE': 'Tu','WED': 'We','THU': 'Th',\n 'FRI': 'Fr','SAT': 'Sa','SUN': 'Su'}\n\ndef convert_24hour(time):\n \"\"\"\n Takes 12 hour time as a string and converts it to 24 hour time.\n \"\"\"\n\n if len(time[:-2].split(':')) < 2:\n hour = time[:-2]\n minute = '00'\n else:\n hour, minute = time[:-2].split(':')\n\n if time[-2:] == 'AM':\n time_formatted = hour + ':' + minute\n elif time[-2:] == 'PM':\n time_formatted = str(int(hour)+ 12) + ':' + minute\n\n if time_formatted in ['24:00','0:00','00:00']:\n time_formatted = '23:59'\n\n return time_formatted\n\nclass PetSmartSpider(scrapy.Spider):\n download_delay = 0.2\n name = \"petsmart\"\n item_attributes = { 'brand': \"Petsmart\" }\n allowed_domains = [\"petsmart.com\", \"petsmart.ca\"]\n start_urls = (\n 'https://www.petsmart.com/store-locator/all/',\n 'https://www.petsmart.ca/store-locator/all/'\n )\n\n def parse(self, response):\n state_urls = response.xpath('//li[@class=\"col-sm-12 col-md-4\"]/a/@href').extract()\n is_store_details_urls = response.xpath('//a[@class=\"store-details-link\"]/@href').extract()\n\n if not state_urls and is_store_details_urls:\n for url in is_store_details_urls:\n yield scrapy.Request(response.urljoin(url), callback=self.parse_store)\n else:\n for url in state_urls:\n yield scrapy.Request(response.urljoin(url))\n\n def parse_store(self, response):\n ref = re.search(r'.+/?\\?(.+)', response.url).group(1)\n if 'petsmart.ca' in response.url:\n country = 'CA'\n elif 'petsmart.com' in response.url:\n country = 'US'\n\n properties = {\n 'name': response.xpath('//span[@itemprop=\"name\"]/text()').extract_first().strip(),\n 'addr_full': response.xpath('//div[@itemprop=\"streetAddress\"]/text()').extract_first(),\n 'city': response.xpath('//span[@itemprop=\"addressLocality\"][1]/text()').extract_first().title(),\n 'state': response.xpath('//span[@itemprop=\"addressLocality\"][2]/text()').extract_first(),\n 'postcode': response.xpath('//span[@itemprop=\"postalCode\"]/text()').extract_first(),\n 'lat': float(response.xpath('//input[@name=\"storeLatitudeVal\"]/@value').extract_first()),\n 'lon': float(response.xpath('//input[@name=\"storeLongitudeVal\"]/@value').extract_first()),\n 'phone': 
response.xpath('//a[@class=\"store-contact-info\"]/text()').extract_first(),\n 'country': country,\n 'ref': ref,\n 'website': response.url\n }\n\n hours = self.parse_hours(response.xpath('//div[@class=\"store-detail-address\"]'))\n\n if hours:\n properties['opening_hours'] = hours\n\n yield GeojsonPointItem(**properties)\n\n def parse_hours(self, elements):\n opening_hours = OpeningHours()\n\n days = elements.xpath('//span[@itemprop=\"dayOfWeek\"]/text()').extract()\n today = (set(day_mapping) - set(days)).pop()\n days.remove('TODAY')\n days.insert(0,today)\n open_hours = elements.xpath('//div[@class=\"store-hours\"]/time[@itemprop=\"opens\"]/@content').extract()\n close_hours = elements.xpath('//div[@class=\"store-hours\"]/time[@itemprop=\"closes\"]/@content').extract()\n\n store_hours = dict((z[0],list(z[1:])) for z in zip(days, open_hours, close_hours))\n\n for day, hours in store_hours.items():\n if 'CLOSED' in hours:\n continue\n opening_hours.add_range(day=day_mapping[day],\n open_time=convert_24hour(hours[0]),\n close_time=convert_24hour(hours[1]))\n return opening_hours.as_opening_hours()\n", "path": "locations/spiders/petsmart.py"}]} | 1,362 | 361 |
gh_patches_debug_36927 | rasdani/github-patches | git_diff | mdn__kuma-7800 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
🧹 Remove hompage related frontend code
Once https://github.com/mdn/yari/issues/2438 lands, we need to go into the Kuma forest and find all things related to the homepage frontend code, chop it down, and clear it out.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kuma/landing/views.py`
Content:
```
1 from django.conf import settings
2 from django.http import HttpResponse
3 from django.shortcuts import redirect, render
4 from django.views import static
5 from django.views.decorators.cache import never_cache
6 from django.views.generic import RedirectView
7
8 from kuma.core.decorators import ensure_wiki_domain, shared_cache_control
9 from kuma.core.utils import is_wiki
10 from kuma.feeder.models import Bundle
11 from kuma.feeder.sections import SECTION_HACKS
12 from kuma.search.models import Filter
13
14 from .utils import favicon_url
15
16
17 @shared_cache_control
18 def contribute_json(request):
19 return static.serve(request, "contribute.json", document_root=settings.ROOT)
20
21
22 @shared_cache_control
23 def home(request):
24 """Home page."""
25 context = {}
26 # Need for both wiki and react homepage
27 context["updates"] = list(Bundle.objects.recent_entries(SECTION_HACKS.updates)[:5])
28
29 # The default template name
30 template_name = "landing/react_homepage.html"
31 if is_wiki(request):
32 template_name = "landing/homepage.html"
33 context["default_filters"] = Filter.objects.default_filters()
34 return render(request, template_name, context)
35
36
37 @ensure_wiki_domain
38 @never_cache
39 def maintenance_mode(request):
40 if settings.MAINTENANCE_MODE:
41 return render(request, "landing/maintenance-mode.html")
42 else:
43 return redirect("home")
44
45
46 @ensure_wiki_domain
47 @shared_cache_control
48 def promote_buttons(request):
49 """Bug 646192: MDN affiliate buttons"""
50 return render(request, "landing/promote_buttons.html")
51
52
53 ROBOTS_ALL_ALLOWED_TXT = """\
54 User-agent: *
55 Sitemap: https://wiki.developer.mozilla.org/sitemap.xml
56
57 Disallow:
58 """
59
60 ROBOTS_ALLOWED_TXT = """\
61 User-agent: *
62 Sitemap: https://developer.mozilla.org/sitemap.xml
63
64 Disallow: /api/
65 Disallow: /*docs/get-documents
66 Disallow: /*docs/Experiment:*
67 Disallow: /*$children
68 Disallow: /*docs.json
69 Disallow: /*/files/
70 Disallow: /media
71 Disallow: /*profiles*/edit
72 """ + "\n".join(
73 "Disallow: /{locale}/search".format(locale=locale)
74 for locale in settings.ENABLED_LOCALES
75 )
76
77 ROBOTS_GO_AWAY_TXT = """\
78 User-Agent: *
79 Disallow: /
80 """
81
82
83 @shared_cache_control
84 def robots_txt(request):
85 """Serve robots.txt that allows or forbids robots."""
86 host = request.get_host()
87 if host in settings.ALLOW_ROBOTS_DOMAINS:
88 robots = ""
89 elif host in settings.ALLOW_ROBOTS_WEB_DOMAINS:
90 if host == settings.WIKI_HOST:
91 robots = ROBOTS_ALL_ALLOWED_TXT
92 else:
93 robots = ROBOTS_ALLOWED_TXT
94 else:
95 robots = ROBOTS_GO_AWAY_TXT
96 return HttpResponse(robots, content_type="text/plain")
97
98
99 class FaviconRedirect(RedirectView):
100 """Redirect to the favicon in the static img folder (bug 1402497)"""
101
102 def get_redirect_url(self, *args, **kwargs):
103 return favicon_url()
104
```
Path: `kuma/landing/urls.py`
Content:
```
1 from django.urls import re_path
2
3 from kuma.core.decorators import shared_cache_control
4
5 from . import views
6
7
8 MONTH = 60 * 60 * 24 * 30
9
10
11 lang_urlpatterns = [
12 re_path(r"^$", views.home, name="home"),
13 re_path(r"^maintenance-mode/?$", views.maintenance_mode, name="maintenance_mode"),
14 re_path(r"^promote/?$", views.promote_buttons, name="promote"),
15 re_path(r"^promote/buttons/?$", views.promote_buttons, name="promote_buttons"),
16 ]
17
18 urlpatterns = [
19 re_path(r"^contribute\.json$", views.contribute_json, name="contribute_json"),
20 re_path(r"^robots.txt$", views.robots_txt, name="robots_txt"),
21 re_path(
22 r"^favicon.ico$",
23 shared_cache_control(views.FaviconRedirect.as_view(), s_maxage=MONTH),
24 name="favicon_ico",
25 ),
26 ]
27
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kuma/landing/urls.py b/kuma/landing/urls.py
--- a/kuma/landing/urls.py
+++ b/kuma/landing/urls.py
@@ -11,12 +11,9 @@
lang_urlpatterns = [
re_path(r"^$", views.home, name="home"),
re_path(r"^maintenance-mode/?$", views.maintenance_mode, name="maintenance_mode"),
- re_path(r"^promote/?$", views.promote_buttons, name="promote"),
- re_path(r"^promote/buttons/?$", views.promote_buttons, name="promote_buttons"),
]
urlpatterns = [
- re_path(r"^contribute\.json$", views.contribute_json, name="contribute_json"),
re_path(r"^robots.txt$", views.robots_txt, name="robots_txt"),
re_path(
r"^favicon.ico$",
diff --git a/kuma/landing/views.py b/kuma/landing/views.py
--- a/kuma/landing/views.py
+++ b/kuma/landing/views.py
@@ -1,37 +1,25 @@
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import redirect, render
-from django.views import static
from django.views.decorators.cache import never_cache
from django.views.generic import RedirectView
from kuma.core.decorators import ensure_wiki_domain, shared_cache_control
-from kuma.core.utils import is_wiki
-from kuma.feeder.models import Bundle
-from kuma.feeder.sections import SECTION_HACKS
-from kuma.search.models import Filter
from .utils import favicon_url
-@shared_cache_control
-def contribute_json(request):
- return static.serve(request, "contribute.json", document_root=settings.ROOT)
-
-
-@shared_cache_control
def home(request):
"""Home page."""
- context = {}
- # Need for both wiki and react homepage
- context["updates"] = list(Bundle.objects.recent_entries(SECTION_HACKS.updates)[:5])
-
- # The default template name
- template_name = "landing/react_homepage.html"
- if is_wiki(request):
- template_name = "landing/homepage.html"
- context["default_filters"] = Filter.objects.default_filters()
- return render(request, template_name, context)
+ return HttpResponse(
+ """
+ <html>
+ End of an era. Kuma's no longer rendering a home page.<br>
+ See project Yari.
+ </html>
+ """,
+ content_type="text/html",
+ )
@ensure_wiki_domain
@@ -43,13 +31,6 @@
return redirect("home")
-@ensure_wiki_domain
-@shared_cache_control
-def promote_buttons(request):
- """Bug 646192: MDN affiliate buttons"""
- return render(request, "landing/promote_buttons.html")
-
-
ROBOTS_ALL_ALLOWED_TXT = """\
User-agent: *
Sitemap: https://wiki.developer.mozilla.org/sitemap.xml
| {"golden_diff": "diff --git a/kuma/landing/urls.py b/kuma/landing/urls.py\n--- a/kuma/landing/urls.py\n+++ b/kuma/landing/urls.py\n@@ -11,12 +11,9 @@\n lang_urlpatterns = [\n re_path(r\"^$\", views.home, name=\"home\"),\n re_path(r\"^maintenance-mode/?$\", views.maintenance_mode, name=\"maintenance_mode\"),\n- re_path(r\"^promote/?$\", views.promote_buttons, name=\"promote\"),\n- re_path(r\"^promote/buttons/?$\", views.promote_buttons, name=\"promote_buttons\"),\n ]\n \n urlpatterns = [\n- re_path(r\"^contribute\\.json$\", views.contribute_json, name=\"contribute_json\"),\n re_path(r\"^robots.txt$\", views.robots_txt, name=\"robots_txt\"),\n re_path(\n r\"^favicon.ico$\",\ndiff --git a/kuma/landing/views.py b/kuma/landing/views.py\n--- a/kuma/landing/views.py\n+++ b/kuma/landing/views.py\n@@ -1,37 +1,25 @@\n from django.conf import settings\n from django.http import HttpResponse\n from django.shortcuts import redirect, render\n-from django.views import static\n from django.views.decorators.cache import never_cache\n from django.views.generic import RedirectView\n \n from kuma.core.decorators import ensure_wiki_domain, shared_cache_control\n-from kuma.core.utils import is_wiki\n-from kuma.feeder.models import Bundle\n-from kuma.feeder.sections import SECTION_HACKS\n-from kuma.search.models import Filter\n \n from .utils import favicon_url\n \n \n-@shared_cache_control\n-def contribute_json(request):\n- return static.serve(request, \"contribute.json\", document_root=settings.ROOT)\n-\n-\n-@shared_cache_control\n def home(request):\n \"\"\"Home page.\"\"\"\n- context = {}\n- # Need for both wiki and react homepage\n- context[\"updates\"] = list(Bundle.objects.recent_entries(SECTION_HACKS.updates)[:5])\n-\n- # The default template name\n- template_name = \"landing/react_homepage.html\"\n- if is_wiki(request):\n- template_name = \"landing/homepage.html\"\n- context[\"default_filters\"] = Filter.objects.default_filters()\n- return render(request, template_name, context)\n+ return HttpResponse(\n+ \"\"\"\n+ <html>\n+ End of an era. 
Kuma's no longer rendering a home page.<br>\n+ See project Yari.\n+ </html>\n+ \"\"\",\n+ content_type=\"text/html\",\n+ )\n \n \n @ensure_wiki_domain\n@@ -43,13 +31,6 @@\n return redirect(\"home\")\n \n \n-@ensure_wiki_domain\n-@shared_cache_control\n-def promote_buttons(request):\n- \"\"\"Bug 646192: MDN affiliate buttons\"\"\"\n- return render(request, \"landing/promote_buttons.html\")\n-\n-\n ROBOTS_ALL_ALLOWED_TXT = \"\"\"\\\n User-agent: *\n Sitemap: https://wiki.developer.mozilla.org/sitemap.xml\n", "issue": "\ud83e\uddf9 Remove hompage related frontend code\nOnce https://github.com/mdn/yari/issues/2438 lands, we need to go into the Kuma forest and find all things related to the homepage frontend code, chop it down, and clear it out.\n", "before_files": [{"content": "from django.conf import settings\nfrom django.http import HttpResponse\nfrom django.shortcuts import redirect, render\nfrom django.views import static\nfrom django.views.decorators.cache import never_cache\nfrom django.views.generic import RedirectView\n\nfrom kuma.core.decorators import ensure_wiki_domain, shared_cache_control\nfrom kuma.core.utils import is_wiki\nfrom kuma.feeder.models import Bundle\nfrom kuma.feeder.sections import SECTION_HACKS\nfrom kuma.search.models import Filter\n\nfrom .utils import favicon_url\n\n\n@shared_cache_control\ndef contribute_json(request):\n return static.serve(request, \"contribute.json\", document_root=settings.ROOT)\n\n\n@shared_cache_control\ndef home(request):\n \"\"\"Home page.\"\"\"\n context = {}\n # Need for both wiki and react homepage\n context[\"updates\"] = list(Bundle.objects.recent_entries(SECTION_HACKS.updates)[:5])\n\n # The default template name\n template_name = \"landing/react_homepage.html\"\n if is_wiki(request):\n template_name = \"landing/homepage.html\"\n context[\"default_filters\"] = Filter.objects.default_filters()\n return render(request, template_name, context)\n\n\n@ensure_wiki_domain\n@never_cache\ndef maintenance_mode(request):\n if settings.MAINTENANCE_MODE:\n return render(request, \"landing/maintenance-mode.html\")\n else:\n return redirect(\"home\")\n\n\n@ensure_wiki_domain\n@shared_cache_control\ndef promote_buttons(request):\n \"\"\"Bug 646192: MDN affiliate buttons\"\"\"\n return render(request, \"landing/promote_buttons.html\")\n\n\nROBOTS_ALL_ALLOWED_TXT = \"\"\"\\\nUser-agent: *\nSitemap: https://wiki.developer.mozilla.org/sitemap.xml\n\nDisallow:\n\"\"\"\n\nROBOTS_ALLOWED_TXT = \"\"\"\\\nUser-agent: *\nSitemap: https://developer.mozilla.org/sitemap.xml\n\nDisallow: /api/\nDisallow: /*docs/get-documents\nDisallow: /*docs/Experiment:*\nDisallow: /*$children\nDisallow: /*docs.json\nDisallow: /*/files/\nDisallow: /media\nDisallow: /*profiles*/edit\n\"\"\" + \"\\n\".join(\n \"Disallow: /{locale}/search\".format(locale=locale)\n for locale in settings.ENABLED_LOCALES\n)\n\nROBOTS_GO_AWAY_TXT = \"\"\"\\\nUser-Agent: *\nDisallow: /\n\"\"\"\n\n\n@shared_cache_control\ndef robots_txt(request):\n \"\"\"Serve robots.txt that allows or forbids robots.\"\"\"\n host = request.get_host()\n if host in settings.ALLOW_ROBOTS_DOMAINS:\n robots = \"\"\n elif host in settings.ALLOW_ROBOTS_WEB_DOMAINS:\n if host == settings.WIKI_HOST:\n robots = ROBOTS_ALL_ALLOWED_TXT\n else:\n robots = ROBOTS_ALLOWED_TXT\n else:\n robots = ROBOTS_GO_AWAY_TXT\n return HttpResponse(robots, content_type=\"text/plain\")\n\n\nclass FaviconRedirect(RedirectView):\n \"\"\"Redirect to the favicon in the static img folder (bug 1402497)\"\"\"\n\n def get_redirect_url(self, *args, 
**kwargs):\n return favicon_url()\n", "path": "kuma/landing/views.py"}, {"content": "from django.urls import re_path\n\nfrom kuma.core.decorators import shared_cache_control\n\nfrom . import views\n\n\nMONTH = 60 * 60 * 24 * 30\n\n\nlang_urlpatterns = [\n re_path(r\"^$\", views.home, name=\"home\"),\n re_path(r\"^maintenance-mode/?$\", views.maintenance_mode, name=\"maintenance_mode\"),\n re_path(r\"^promote/?$\", views.promote_buttons, name=\"promote\"),\n re_path(r\"^promote/buttons/?$\", views.promote_buttons, name=\"promote_buttons\"),\n]\n\nurlpatterns = [\n re_path(r\"^contribute\\.json$\", views.contribute_json, name=\"contribute_json\"),\n re_path(r\"^robots.txt$\", views.robots_txt, name=\"robots_txt\"),\n re_path(\n r\"^favicon.ico$\",\n shared_cache_control(views.FaviconRedirect.as_view(), s_maxage=MONTH),\n name=\"favicon_ico\",\n ),\n]\n", "path": "kuma/landing/urls.py"}], "after_files": [{"content": "from django.conf import settings\nfrom django.http import HttpResponse\nfrom django.shortcuts import redirect, render\nfrom django.views.decorators.cache import never_cache\nfrom django.views.generic import RedirectView\n\nfrom kuma.core.decorators import ensure_wiki_domain, shared_cache_control\n\nfrom .utils import favicon_url\n\n\ndef home(request):\n \"\"\"Home page.\"\"\"\n return HttpResponse(\n \"\"\"\n <html>\n End of an era. Kuma's no longer rendering a home page.<br>\n See project Yari.\n </html>\n \"\"\",\n content_type=\"text/html\",\n )\n\n\n@ensure_wiki_domain\n@never_cache\ndef maintenance_mode(request):\n if settings.MAINTENANCE_MODE:\n return render(request, \"landing/maintenance-mode.html\")\n else:\n return redirect(\"home\")\n\n\nROBOTS_ALL_ALLOWED_TXT = \"\"\"\\\nUser-agent: *\nSitemap: https://wiki.developer.mozilla.org/sitemap.xml\n\nDisallow:\n\"\"\"\n\nROBOTS_ALLOWED_TXT = \"\"\"\\\nUser-agent: *\nSitemap: https://developer.mozilla.org/sitemap.xml\n\nDisallow: /api/\nDisallow: /*docs/get-documents\nDisallow: /*docs/Experiment:*\nDisallow: /*$children\nDisallow: /*docs.json\nDisallow: /*/files/\nDisallow: /media\nDisallow: /*profiles*/edit\n\"\"\" + \"\\n\".join(\n \"Disallow: /{locale}/search\".format(locale=locale)\n for locale in settings.ENABLED_LOCALES\n)\n\nROBOTS_GO_AWAY_TXT = \"\"\"\\\nUser-Agent: *\nDisallow: /\n\"\"\"\n\n\n@shared_cache_control\ndef robots_txt(request):\n \"\"\"Serve robots.txt that allows or forbids robots.\"\"\"\n host = request.get_host()\n if host in settings.ALLOW_ROBOTS_DOMAINS:\n robots = \"\"\n elif host in settings.ALLOW_ROBOTS_WEB_DOMAINS:\n if host == settings.WIKI_HOST:\n robots = ROBOTS_ALL_ALLOWED_TXT\n else:\n robots = ROBOTS_ALLOWED_TXT\n else:\n robots = ROBOTS_GO_AWAY_TXT\n return HttpResponse(robots, content_type=\"text/plain\")\n\n\nclass FaviconRedirect(RedirectView):\n \"\"\"Redirect to the favicon in the static img folder (bug 1402497)\"\"\"\n\n def get_redirect_url(self, *args, **kwargs):\n return favicon_url()\n", "path": "kuma/landing/views.py"}, {"content": "from django.urls import re_path\n\nfrom kuma.core.decorators import shared_cache_control\n\nfrom . 
import views\n\n\nMONTH = 60 * 60 * 24 * 30\n\n\nlang_urlpatterns = [\n re_path(r\"^$\", views.home, name=\"home\"),\n re_path(r\"^maintenance-mode/?$\", views.maintenance_mode, name=\"maintenance_mode\"),\n]\n\nurlpatterns = [\n re_path(r\"^robots.txt$\", views.robots_txt, name=\"robots_txt\"),\n re_path(\n r\"^favicon.ico$\",\n shared_cache_control(views.FaviconRedirect.as_view(), s_maxage=MONTH),\n name=\"favicon_ico\",\n ),\n]\n", "path": "kuma/landing/urls.py"}]} | 1,474 | 654 |
gh_patches_debug_41527 | rasdani/github-patches | git_diff | streamlink__streamlink-2643 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ITV Player unable to open URL
- [x] This is a plugin issue and I have read the contribution guidelines.
### Description
ITV Player/Hub for the main ITV channel do not open and give an error message.
### Reproduction steps / Explicit stream URLs to test
Type "streamlink itv.com/hub/itv".
Other ITV Hub URL's such as hub/itv2 or hub/citv continue to work.
### Log output
[cli][info] Found matching plugin itvplayer for URL itv.com/hub/itv
error: Unable to open URL: http://cdn.dar.itv.com/private/aHR0cDovL2NzbS1lLnBsYXkuZGFyLml0di5jb20vY3NtL2V4dGxpdmUvMjQ3MTYyMTUubTN1OD95by5hYz10cnVlJnlvLmFkPWZhbHNlJnBtPWZyZWUmcmFuZG9tPTU2OTk0ODU2ODc0ODA1NzE0MjAmYXJlYT1pdHZwbGF5ZXIuc2ltdWxjYXN0JnNvdXJjZT1zaW11bGNhc3QmaGRldmlkPXgmc2l6ZT1wYXR0ZXJuJnN1cGVydGFnPWRmbHQsc2ltJnVzPWFubyZzaXRlPWl0diZvc3Zlcj0xMCZjaGFuYnJhbmQ9aXR2MSZvcz13aW5kb3dzJmhtb2Q9NjYmcHY9YnJvd3Nlci40LjEmcGxheWVyPWh0bWw1LmRlc2t0b3AmZG09bmxpJmhtYW49Y2hyb21lJnRkdXI9MjE2MDAmdHBhcnRzPTEmYXJwPXgmZGVmYXVsdD1EZWZhdWx0UGF0dGVybiZwbGlzdD1zaW1hZHJlcGxhY2VtZW50JnBsZmNpZD1zaW1hZHJlcGxhY2VtZW50/endpoint?hdnea=st=1563543051~exp=1563564651~acl=/*~data=nohubplus~hmac=0ebaf6d4f22f101378a6c7ed3903c1999f6b28efd29ee05763e5ec7bf0d27896&yo.channel=24716215 (404 Client Error: for url: http://csm-e.play.dar.itv.com/csm/extlive/24716215.m3u8?yo.ac=true&yo.ad=false&pm=free&random=5699485687480571420&area=itvplayer.simulcast&source=simulcast&hdevid=x&size=pattern&supertag=dflt,sim&us=ano&site=itv&osver=10&chanbrand=itv1&os=windows&hmod=66&pv=browser.4.1&player=html5.desktop&dm=nli&hman=chrome&tdur=21600&tparts=1&arp=x&default=DefaultPattern&plist=simadreplacement&plfcid=simadreplacement)
### Additional comments, screenshots, etc.
I have noticed the site plays a ITV Hub promo when starting a livestream on the website itself before loading that stream. This does not occur for the other channels.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/streamlink/plugins/itvplayer.py`
Content:
```
1 import json
2 import logging
3 import re
4
5 from streamlink.compat import urljoin
6 from streamlink.plugin import Plugin
7 from streamlink.plugin.api import useragents, validate
8 from streamlink.plugin.api.utils import itertags
9 from streamlink.stream import HLSStream
10
11 log = logging.getLogger(__name__)
12
13
14 class ITVPlayer(Plugin):
15 _url_re = re.compile(r"https?://(?:www.)?itv.com/hub/(?P<stream>.+)")
16 _video_info_schema = validate.Schema({
17 "StatusCode": 200,
18 "AdditionalInfo": {
19 "Message": validate.any(None, validate.text)
20 },
21 "Playlist": {
22 "VideoType": validate.text,
23 "Video": {
24 "Subtitles": validate.any(None, [{
25 "Href": validate.url(),
26 }]),
27 "Base": validate.url(),
28 "MediaFiles": [
29 {"Href": validate.text,
30 "KeyServiceUrl": validate.any(None, validate.url())}
31 ]
32 }
33 }
34 })
35
36 @classmethod
37 def can_handle_url(cls, url):
38 match = cls._url_re.match(url)
39 return match is not None
40
41 @property
42 def device_info(self):
43 return {"user": {},
44 "device": {"manufacturer": "Chrome", "model": "66",
45 "os": {"name": "Windows", "version": "10", "type": "desktop"}},
46 "client": {"version": "4.1", "id": "browser"},
47 "variantAvailability": {"featureset": {"min": ["hls", "aes"],
48 "max": ["hls", "aes"]},
49 "platformTag": "dotcom"}}
50
51 def video_info(self):
52 page = self.session.http.get(self.url)
53 for div in itertags(page.text, 'div'):
54 if div.attributes.get("id") == "video":
55 return div.attributes
56
57 def _get_streams(self):
58 """
59 Find all the streams for the ITV url
60 :return: Mapping of quality to stream
61 """
62 self.session.http.headers.update({"User-Agent": useragents.FIREFOX})
63 video_info = self.video_info()
64 video_info_url = video_info.get("data-html5-playlist") or video_info.get("data-video-id")
65
66 res = self.session.http.post(video_info_url,
67 data=json.dumps(self.device_info),
68 headers={"hmac": video_info.get("data-video-hmac")})
69 data = self.session.http.json(res, schema=self._video_info_schema)
70
71 log.debug("Video ID info response: {0}".format(data))
72
73 stype = data['Playlist']['VideoType']
74
75 for media in data['Playlist']['Video']['MediaFiles']:
76 url = urljoin(data['Playlist']['Video']['Base'], media['Href'])
77 name_fmt = "{pixels}_{bitrate}" if stype == "CATCHUP" else None
78 for s in HLSStream.parse_variant_playlist(self.session, url, name_fmt=name_fmt).items():
79 yield s
80
81
82
83 __plugin__ = ITVPlayer
84
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/streamlink/plugins/itvplayer.py b/src/streamlink/plugins/itvplayer.py
--- a/src/streamlink/plugins/itvplayer.py
+++ b/src/streamlink/plugins/itvplayer.py
@@ -6,13 +6,14 @@
from streamlink.plugin import Plugin
from streamlink.plugin.api import useragents, validate
from streamlink.plugin.api.utils import itertags
-from streamlink.stream import HLSStream
+from streamlink.stream import HLSStream, RTMPStream
log = logging.getLogger(__name__)
class ITVPlayer(Plugin):
_url_re = re.compile(r"https?://(?:www.)?itv.com/hub/(?P<stream>.+)")
+ swf_url = "https://mediaplayer.itv.com/2.19.5%2Bbuild.a23aa62b1e/ITVMediaPlayer.swf"
_video_info_schema = validate.Schema({
"StatusCode": 200,
"AdditionalInfo": {
@@ -35,8 +36,7 @@
@classmethod
def can_handle_url(cls, url):
- match = cls._url_re.match(url)
- return match is not None
+ return cls._url_re.match(url) is not None
@property
def device_info(self):
@@ -54,18 +54,11 @@
if div.attributes.get("id") == "video":
return div.attributes
- def _get_streams(self):
- """
- Find all the streams for the ITV url
- :return: Mapping of quality to stream
- """
- self.session.http.headers.update({"User-Agent": useragents.FIREFOX})
+ def _get_html5_streams(self, video_info_url):
video_info = self.video_info()
- video_info_url = video_info.get("data-html5-playlist") or video_info.get("data-video-id")
-
res = self.session.http.post(video_info_url,
- data=json.dumps(self.device_info),
- headers={"hmac": video_info.get("data-video-hmac")})
+ data=json.dumps(self.device_info),
+ headers={"hmac": video_info.get("data-video-hmac")})
data = self.session.http.json(res, schema=self._video_info_schema)
log.debug("Video ID info response: {0}".format(data))
@@ -78,6 +71,31 @@
for s in HLSStream.parse_variant_playlist(self.session, url, name_fmt=name_fmt).items():
yield s
+ def _get_rtmp_streams(self, video_info_url):
+ log.debug("XML data path: {0}".format(video_info_url))
+ res = self.session.http.get(video_info_url)
+ playlist = self.session.http.xml(res, ignore_ns=True)
+ mediafiles = playlist.find(".//Playlist/VideoEntries/Video/MediaFiles")
+ playpath = mediafiles.find("./MediaFile/URL")
+ return {"live": RTMPStream(self.session, {"rtmp": mediafiles.attrib.get("base"),
+ "playpath": playpath.text,
+ "live": True,
+ "swfVfy": self.swf_url
+ })}
+
+ def _get_streams(self):
+ """
+ Find all the streams for the ITV url
+ :return: Mapping of quality to stream
+ """
+ self.session.http.headers.update({"User-Agent": useragents.FIREFOX})
+ stream = self._url_re.match(self.url).group("stream")
+ video_info = self.video_info()
+ video_info_url = video_info.get("data-video-id" if stream.lower() in ("itv", "itv4") else "data-html5-playlist")
+ if video_info_url.endswith(".xml"):
+ return self._get_rtmp_streams(video_info_url)
+ else:
+ return self._get_html5_streams(video_info_url)
__plugin__ = ITVPlayer
| {"golden_diff": "diff --git a/src/streamlink/plugins/itvplayer.py b/src/streamlink/plugins/itvplayer.py\n--- a/src/streamlink/plugins/itvplayer.py\n+++ b/src/streamlink/plugins/itvplayer.py\n@@ -6,13 +6,14 @@\n from streamlink.plugin import Plugin\n from streamlink.plugin.api import useragents, validate\n from streamlink.plugin.api.utils import itertags\n-from streamlink.stream import HLSStream\n+from streamlink.stream import HLSStream, RTMPStream\n \n log = logging.getLogger(__name__)\n \n \n class ITVPlayer(Plugin):\n _url_re = re.compile(r\"https?://(?:www.)?itv.com/hub/(?P<stream>.+)\")\n+ swf_url = \"https://mediaplayer.itv.com/2.19.5%2Bbuild.a23aa62b1e/ITVMediaPlayer.swf\"\n _video_info_schema = validate.Schema({\n \"StatusCode\": 200,\n \"AdditionalInfo\": {\n@@ -35,8 +36,7 @@\n \n @classmethod\n def can_handle_url(cls, url):\n- match = cls._url_re.match(url)\n- return match is not None\n+ return cls._url_re.match(url) is not None\n \n @property\n def device_info(self):\n@@ -54,18 +54,11 @@\n if div.attributes.get(\"id\") == \"video\":\n return div.attributes\n \n- def _get_streams(self):\n- \"\"\"\n- Find all the streams for the ITV url\n- :return: Mapping of quality to stream\n- \"\"\"\n- self.session.http.headers.update({\"User-Agent\": useragents.FIREFOX})\n+ def _get_html5_streams(self, video_info_url):\n video_info = self.video_info()\n- video_info_url = video_info.get(\"data-html5-playlist\") or video_info.get(\"data-video-id\")\n-\n res = self.session.http.post(video_info_url,\n- data=json.dumps(self.device_info),\n- headers={\"hmac\": video_info.get(\"data-video-hmac\")})\n+ data=json.dumps(self.device_info),\n+ headers={\"hmac\": video_info.get(\"data-video-hmac\")})\n data = self.session.http.json(res, schema=self._video_info_schema)\n \n log.debug(\"Video ID info response: {0}\".format(data))\n@@ -78,6 +71,31 @@\n for s in HLSStream.parse_variant_playlist(self.session, url, name_fmt=name_fmt).items():\n yield s\n \n+ def _get_rtmp_streams(self, video_info_url):\n+ log.debug(\"XML data path: {0}\".format(video_info_url))\n+ res = self.session.http.get(video_info_url)\n+ playlist = self.session.http.xml(res, ignore_ns=True)\n+ mediafiles = playlist.find(\".//Playlist/VideoEntries/Video/MediaFiles\")\n+ playpath = mediafiles.find(\"./MediaFile/URL\")\n+ return {\"live\": RTMPStream(self.session, {\"rtmp\": mediafiles.attrib.get(\"base\"),\n+ \"playpath\": playpath.text,\n+ \"live\": True,\n+ \"swfVfy\": self.swf_url\n+ })}\n+\n+ def _get_streams(self):\n+ \"\"\"\n+ Find all the streams for the ITV url\n+ :return: Mapping of quality to stream\n+ \"\"\"\n+ self.session.http.headers.update({\"User-Agent\": useragents.FIREFOX})\n+ stream = self._url_re.match(self.url).group(\"stream\")\n+ video_info = self.video_info()\n+ video_info_url = video_info.get(\"data-video-id\" if stream.lower() in (\"itv\", \"itv4\") else \"data-html5-playlist\")\n+ if video_info_url.endswith(\".xml\"):\n+ return self._get_rtmp_streams(video_info_url)\n+ else:\n+ return self._get_html5_streams(video_info_url)\n \n \n __plugin__ = ITVPlayer\n", "issue": "ITV Player unable to open URL\n- [x] This is a plugin issue and I have read the contribution guidelines.\r\n\r\n\r\n### Description\r\n\r\nITV Player/Hub for the main ITV channel do not open and give an error message.\r\n\r\n\r\n### Reproduction steps / Explicit stream URLs to test\r\n\r\nType \"streamlink itv.com/hub/itv\".\r\n\r\nOther ITV Hub URL's such as hub/itv2 or hub/citv continue to work.\r\n\r\n### Log output\r\n\r\n[cli][info] Found 
matching plugin itvplayer for URL itv.com/hub/itv\r\nerror: Unable to open URL: http://cdn.dar.itv.com/private/aHR0cDovL2NzbS1lLnBsYXkuZGFyLml0di5jb20vY3NtL2V4dGxpdmUvMjQ3MTYyMTUubTN1OD95by5hYz10cnVlJnlvLmFkPWZhbHNlJnBtPWZyZWUmcmFuZG9tPTU2OTk0ODU2ODc0ODA1NzE0MjAmYXJlYT1pdHZwbGF5ZXIuc2ltdWxjYXN0JnNvdXJjZT1zaW11bGNhc3QmaGRldmlkPXgmc2l6ZT1wYXR0ZXJuJnN1cGVydGFnPWRmbHQsc2ltJnVzPWFubyZzaXRlPWl0diZvc3Zlcj0xMCZjaGFuYnJhbmQ9aXR2MSZvcz13aW5kb3dzJmhtb2Q9NjYmcHY9YnJvd3Nlci40LjEmcGxheWVyPWh0bWw1LmRlc2t0b3AmZG09bmxpJmhtYW49Y2hyb21lJnRkdXI9MjE2MDAmdHBhcnRzPTEmYXJwPXgmZGVmYXVsdD1EZWZhdWx0UGF0dGVybiZwbGlzdD1zaW1hZHJlcGxhY2VtZW50JnBsZmNpZD1zaW1hZHJlcGxhY2VtZW50/endpoint?hdnea=st=1563543051~exp=1563564651~acl=/*~data=nohubplus~hmac=0ebaf6d4f22f101378a6c7ed3903c1999f6b28efd29ee05763e5ec7bf0d27896&yo.channel=24716215 (404 Client Error: for url: http://csm-e.play.dar.itv.com/csm/extlive/24716215.m3u8?yo.ac=true&yo.ad=false&pm=free&random=5699485687480571420&area=itvplayer.simulcast&source=simulcast&hdevid=x&size=pattern&supertag=dflt,sim&us=ano&site=itv&osver=10&chanbrand=itv1&os=windows&hmod=66&pv=browser.4.1&player=html5.desktop&dm=nli&hman=chrome&tdur=21600&tparts=1&arp=x&default=DefaultPattern&plist=simadreplacement&plfcid=simadreplacement)\r\n\r\n\r\n### Additional comments, screenshots, etc.\r\n\r\nI have noticed the site plays a ITV Hub promo when starting a livestream on the website itself before loading that stream. This does not occur for the other channels.\n", "before_files": [{"content": "import json\nimport logging\nimport re\n\nfrom streamlink.compat import urljoin\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import useragents, validate\nfrom streamlink.plugin.api.utils import itertags\nfrom streamlink.stream import HLSStream\n\nlog = logging.getLogger(__name__)\n\n\nclass ITVPlayer(Plugin):\n _url_re = re.compile(r\"https?://(?:www.)?itv.com/hub/(?P<stream>.+)\")\n _video_info_schema = validate.Schema({\n \"StatusCode\": 200,\n \"AdditionalInfo\": {\n \"Message\": validate.any(None, validate.text)\n },\n \"Playlist\": {\n \"VideoType\": validate.text,\n \"Video\": {\n \"Subtitles\": validate.any(None, [{\n \"Href\": validate.url(),\n }]),\n \"Base\": validate.url(),\n \"MediaFiles\": [\n {\"Href\": validate.text,\n \"KeyServiceUrl\": validate.any(None, validate.url())}\n ]\n }\n }\n })\n\n @classmethod\n def can_handle_url(cls, url):\n match = cls._url_re.match(url)\n return match is not None\n\n @property\n def device_info(self):\n return {\"user\": {},\n \"device\": {\"manufacturer\": \"Chrome\", \"model\": \"66\",\n \"os\": {\"name\": \"Windows\", \"version\": \"10\", \"type\": \"desktop\"}},\n \"client\": {\"version\": \"4.1\", \"id\": \"browser\"},\n \"variantAvailability\": {\"featureset\": {\"min\": [\"hls\", \"aes\"],\n \"max\": [\"hls\", \"aes\"]},\n \"platformTag\": \"dotcom\"}}\n\n def video_info(self):\n page = self.session.http.get(self.url)\n for div in itertags(page.text, 'div'):\n if div.attributes.get(\"id\") == \"video\":\n return div.attributes\n\n def _get_streams(self):\n \"\"\"\n Find all the streams for the ITV url\n :return: Mapping of quality to stream\n \"\"\"\n self.session.http.headers.update({\"User-Agent\": useragents.FIREFOX})\n video_info = self.video_info()\n video_info_url = video_info.get(\"data-html5-playlist\") or video_info.get(\"data-video-id\")\n\n res = self.session.http.post(video_info_url,\n data=json.dumps(self.device_info),\n headers={\"hmac\": video_info.get(\"data-video-hmac\")})\n data = self.session.http.json(res, 
schema=self._video_info_schema)\n\n log.debug(\"Video ID info response: {0}\".format(data))\n\n stype = data['Playlist']['VideoType']\n\n for media in data['Playlist']['Video']['MediaFiles']:\n url = urljoin(data['Playlist']['Video']['Base'], media['Href'])\n name_fmt = \"{pixels}_{bitrate}\" if stype == \"CATCHUP\" else None\n for s in HLSStream.parse_variant_playlist(self.session, url, name_fmt=name_fmt).items():\n yield s\n\n\n\n__plugin__ = ITVPlayer\n", "path": "src/streamlink/plugins/itvplayer.py"}], "after_files": [{"content": "import json\nimport logging\nimport re\n\nfrom streamlink.compat import urljoin\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import useragents, validate\nfrom streamlink.plugin.api.utils import itertags\nfrom streamlink.stream import HLSStream, RTMPStream\n\nlog = logging.getLogger(__name__)\n\n\nclass ITVPlayer(Plugin):\n _url_re = re.compile(r\"https?://(?:www.)?itv.com/hub/(?P<stream>.+)\")\n swf_url = \"https://mediaplayer.itv.com/2.19.5%2Bbuild.a23aa62b1e/ITVMediaPlayer.swf\"\n _video_info_schema = validate.Schema({\n \"StatusCode\": 200,\n \"AdditionalInfo\": {\n \"Message\": validate.any(None, validate.text)\n },\n \"Playlist\": {\n \"VideoType\": validate.text,\n \"Video\": {\n \"Subtitles\": validate.any(None, [{\n \"Href\": validate.url(),\n }]),\n \"Base\": validate.url(),\n \"MediaFiles\": [\n {\"Href\": validate.text,\n \"KeyServiceUrl\": validate.any(None, validate.url())}\n ]\n }\n }\n })\n\n @classmethod\n def can_handle_url(cls, url):\n return cls._url_re.match(url) is not None\n\n @property\n def device_info(self):\n return {\"user\": {},\n \"device\": {\"manufacturer\": \"Chrome\", \"model\": \"66\",\n \"os\": {\"name\": \"Windows\", \"version\": \"10\", \"type\": \"desktop\"}},\n \"client\": {\"version\": \"4.1\", \"id\": \"browser\"},\n \"variantAvailability\": {\"featureset\": {\"min\": [\"hls\", \"aes\"],\n \"max\": [\"hls\", \"aes\"]},\n \"platformTag\": \"dotcom\"}}\n\n def video_info(self):\n page = self.session.http.get(self.url)\n for div in itertags(page.text, 'div'):\n if div.attributes.get(\"id\") == \"video\":\n return div.attributes\n\n def _get_html5_streams(self, video_info_url):\n video_info = self.video_info()\n res = self.session.http.post(video_info_url,\n data=json.dumps(self.device_info),\n headers={\"hmac\": video_info.get(\"data-video-hmac\")})\n data = self.session.http.json(res, schema=self._video_info_schema)\n\n log.debug(\"Video ID info response: {0}\".format(data))\n\n stype = data['Playlist']['VideoType']\n\n for media in data['Playlist']['Video']['MediaFiles']:\n url = urljoin(data['Playlist']['Video']['Base'], media['Href'])\n name_fmt = \"{pixels}_{bitrate}\" if stype == \"CATCHUP\" else None\n for s in HLSStream.parse_variant_playlist(self.session, url, name_fmt=name_fmt).items():\n yield s\n\n def _get_rtmp_streams(self, video_info_url):\n log.debug(\"XML data path: {0}\".format(video_info_url))\n res = self.session.http.get(video_info_url)\n playlist = self.session.http.xml(res, ignore_ns=True)\n mediafiles = playlist.find(\".//Playlist/VideoEntries/Video/MediaFiles\")\n playpath = mediafiles.find(\"./MediaFile/URL\")\n return {\"live\": RTMPStream(self.session, {\"rtmp\": mediafiles.attrib.get(\"base\"),\n \"playpath\": playpath.text,\n \"live\": True,\n \"swfVfy\": self.swf_url\n })}\n\n def _get_streams(self):\n \"\"\"\n Find all the streams for the ITV url\n :return: Mapping of quality to stream\n \"\"\"\n self.session.http.headers.update({\"User-Agent\": useragents.FIREFOX})\n 
stream = self._url_re.match(self.url).group(\"stream\")\n video_info = self.video_info()\n video_info_url = video_info.get(\"data-video-id\" if stream.lower() in (\"itv\", \"itv4\") else \"data-html5-playlist\")\n if video_info_url.endswith(\".xml\"):\n return self._get_rtmp_streams(video_info_url)\n else:\n return self._get_html5_streams(video_info_url)\n\n\n__plugin__ = ITVPlayer\n", "path": "src/streamlink/plugins/itvplayer.py"}]} | 1,980 | 861 |
gh_patches_debug_11333 | rasdani/github-patches | git_diff | searx__searx-3479 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Using single apostrophe causes a "search error"
<!-- PLEASE FILL THESE FIELDS, IT REALLY HELPS THE MAINTAINERS OF SEARX -->
**Version of Searx, commit number if you are using on master branch and stipulate if you forked Searx**
1.1.0-27-bf0a583f
<!-- If you are running on master branch using git execute this command
in order to fetch the latest commit ID:
```
git log -1
```
If you are using searx-docker then look at the bottom of the Searx page
and check for the version after "Powered by searx"
Please also stipulate if you are using a forked version of Searx and
include a link to the fork source code.
-->
**How did you install Searx?**
searx-docker. Specifically using docker-compose and running ``docker-compose up``.
Compose file:
```yaml
version: '3.3'
services:
searx:
volumes:
- /mnt/storage1/configs/searx:/etc/searx
ports:
- 9999:8080
environment:
- BASE_URL=http://localhost:9999/
image: searx/searx
restart: unless-stopped
```
<!-- Did you install Searx using the official wiki or using searx-docker
or manually by executing the searx/webapp.py file? -->
**What happened?**
Using a single apostrophe in a word (for example, "What's") causes a search error
<!-- A clear and concise description of what the bug is. -->
**How To Reproduce**
Use any single apostrophe in a search query
<!-- How can we reproduce this issue? (as minimally and as precisely as possible) -->
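For what it's worth, the failure is reproducible with Python's `shlex` alone, which is what the `search_operators` plugin below uses to tokenize the query; a one-line sketch:

```python
import shlex

# An unbalanced apostrophe leaves an open quotation for the POSIX lexer
shlex.split("what's")  # raises ValueError: No closing quotation
```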
**Expected behavior**
Should return results, instead of an error.
<!-- A clear and concise description of what you expected to happen. -->
**Screenshots & Logs**
https://imgur.com/a/E2b4mNX
https://paste.ee/p/e5gS8
**Additional context**
It's also somewhat infuriating that the error bounces you from the results page back to the home page when it happens, meaning I have to retype my query (though that's another issue entirely).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `searx/plugins/search_operators.py`
Content:
```
1 import shlex
2 import string
3
4 from flask_babel import gettext
5
6 name = gettext("Search operators")
7 description = gettext("""Filter results using hyphen, site: and -site:.
8 Please note that you might get less results with the additional filtering.""")
9 default_on = False
10
11
12 def on_result(request, search, result):
13 q = search.search_query.query
14 qs = shlex.split(q)
15 spitems = [x.lower() for x in qs if ' ' in x]
16 mitems = [x.lower() for x in qs if x.startswith('-')]
17 siteitems = [x.lower() for x in qs if x.startswith('site:')]
18 msiteitems = [x.lower() for x in qs if x.startswith('-site:')]
19 url, title, content = (
20 result["url"].lower(),
21 result["title"].lower(),
22 (result.get("content").lower() if result.get("content") else '')
23 )
24 if all((x not in title or x not in content) for x in spitems):
25 return False
26 if all((x in title or x in content) for x in mitems):
27 return False
28 if all(x not in url for x in siteitems):
29 return False
30 if all(x in url for x in msiteitems):
31 return False
32 return True
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/searx/plugins/search_operators.py b/searx/plugins/search_operators.py
--- a/searx/plugins/search_operators.py
+++ b/searx/plugins/search_operators.py
@@ -11,7 +11,11 @@
def on_result(request, search, result):
q = search.search_query.query
- qs = shlex.split(q)
+ # WARN: shlex.quote is designed only for Unix shells and may be vulnerable
+ # to command injection on non-POSIX compliant shells (Windows)
+ # https://docs.python.org/3/library/shlex.html#shlex.quote
+ squote = shlex.quote(q)
+ qs = shlex.split(squote)
spitems = [x.lower() for x in qs if ' ' in x]
mitems = [x.lower() for x in qs if x.startswith('-')]
siteitems = [x.lower() for x in qs if x.startswith('site:')]
| {"golden_diff": "diff --git a/searx/plugins/search_operators.py b/searx/plugins/search_operators.py\n--- a/searx/plugins/search_operators.py\n+++ b/searx/plugins/search_operators.py\n@@ -11,7 +11,11 @@\n \n def on_result(request, search, result):\n q = search.search_query.query\n- qs = shlex.split(q)\n+ # WARN: shlex.quote is designed only for Unix shells and may be vulnerable\n+ # to command injection on non-POSIX compliant shells (Windows)\n+ # https://docs.python.org/3/library/shlex.html#shlex.quote\n+ squote = shlex.quote(q)\n+ qs = shlex.split(squote)\n spitems = [x.lower() for x in qs if ' ' in x]\n mitems = [x.lower() for x in qs if x.startswith('-')]\n siteitems = [x.lower() for x in qs if x.startswith('site:')]\n", "issue": "Using single apostrophe causes a \"search error\"\n<!-- PLEASE FILL THESE FIELDS, IT REALLY HELPS THE MAINTAINERS OF SEARX -->\r\n\r\n**Version of Searx, commit number if you are using on master branch and stipulate if you forked Searx**\r\n1.1.0-27-bf0a583f\r\n<!-- If you are running on master branch using git execute this command\r\nin order to fetch the latest commit ID:\r\n```\r\ngit log -1\r\n``` \r\nIf you are using searx-docker then look at the bottom of the Searx page\r\nand check for the version after \"Powered by searx\"\r\n\r\nPlease also stipulate if you are using a forked version of Searx and\r\ninclude a link to the fork source code.\r\n-->\r\n**How did you install Searx?**\r\nsearx-docker. Specifically using docker-compose and running ``docker-compose up``.\r\nCompose file:\r\n```yaml\r\nversion: '3.3'\r\nservices:\r\n searx:\r\n volumes:\r\n - /mnt/storage1/configs/searx:/etc/searx\r\n ports:\r\n - 9999:8080\r\n environment:\r\n - BASE_URL=http://localhost:9999/\r\n image: searx/searx\r\n restart: unless-stopped\r\n```\r\n<!-- Did you install Searx using the official wiki or using searx-docker\r\nor manually by executing the searx/webapp.py file? -->\r\n**What happened?**\r\nUsing a single apostrophe in a word (for example, \"What's\") causes a search error\r\n<!-- A clear and concise description of what the bug is. -->\r\n**How To Reproduce**\r\nUse any single apostrophe in a search query\r\n<!-- How can we reproduce this issue? (as minimally and as precisely as possible) -->\r\n**Expected behavior**\r\nShould return results, instead of an error.\r\n<!-- A clear and concise description of what you expected to happen. 
-->\r\n**Screenshots & Logs**\r\nhttps://imgur.com/a/E2b4mNX\r\nhttps://paste.ee/p/e5gS8\r\n\r\n**Additional context**\r\nIt's also somewhat infuriating that it also returns from a result page to the home page when this happens, meaning I have to retype my query (though that's another issue entirely).\n", "before_files": [{"content": "import shlex\nimport string\n\nfrom flask_babel import gettext\n\nname = gettext(\"Search operators\")\ndescription = gettext(\"\"\"Filter results using hyphen, site: and -site:.\nPlease note that you might get less results with the additional filtering.\"\"\")\ndefault_on = False\n\n\ndef on_result(request, search, result):\n q = search.search_query.query\n qs = shlex.split(q)\n spitems = [x.lower() for x in qs if ' ' in x]\n mitems = [x.lower() for x in qs if x.startswith('-')]\n siteitems = [x.lower() for x in qs if x.startswith('site:')]\n msiteitems = [x.lower() for x in qs if x.startswith('-site:')]\n url, title, content = (\n result[\"url\"].lower(),\n result[\"title\"].lower(),\n (result.get(\"content\").lower() if result.get(\"content\") else '')\n )\n if all((x not in title or x not in content) for x in spitems):\n return False\n if all((x in title or x in content) for x in mitems):\n return False\n if all(x not in url for x in siteitems):\n return False\n if all(x in url for x in msiteitems):\n return False\n return True\n", "path": "searx/plugins/search_operators.py"}], "after_files": [{"content": "import shlex\nimport string\n\nfrom flask_babel import gettext\n\nname = gettext(\"Search operators\")\ndescription = gettext(\"\"\"Filter results using hyphen, site: and -site:.\nPlease note that you might get less results with the additional filtering.\"\"\")\ndefault_on = False\n\n\ndef on_result(request, search, result):\n q = search.search_query.query\n # WARN: shlex.quote is designed only for Unix shells and may be vulnerable\n # to command injection on non-POSIX compliant shells (Windows)\n # https://docs.python.org/3/library/shlex.html#shlex.quote\n squote = shlex.quote(q)\n qs = shlex.split(squote)\n spitems = [x.lower() for x in qs if ' ' in x]\n mitems = [x.lower() for x in qs if x.startswith('-')]\n siteitems = [x.lower() for x in qs if x.startswith('site:')]\n msiteitems = [x.lower() for x in qs if x.startswith('-site:')]\n url, title, content = (\n result[\"url\"].lower(),\n result[\"title\"].lower(),\n (result.get(\"content\").lower() if result.get(\"content\") else '')\n )\n if all((x not in title or x not in content) for x in spitems):\n return False\n if all((x in title or x in content) for x in mitems):\n return False\n if all(x not in url for x in siteitems):\n return False\n if all(x in url for x in msiteitems):\n return False\n return True\n", "path": "searx/plugins/search_operators.py"}]} | 1,100 | 212 |
gh_patches_debug_426 | rasdani/github-patches | git_diff | nautobot__nautobot-3981 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
2.0: Tag string representation is incorrect
### Environment
* Nautobot version (Docker tag too if applicable): `next`, e13883b7b8a4f44bca0c40d8074dcf8f82e544e6
### Steps to Reproduce
1. Create a Tag and associate it to any model
2. View the legacy-UI list view or detail view of that model
<!-- What did you expect to happen? -->
### Expected Behavior
Tag name to be displayed
<!-- What happened instead? -->
### Observed Behavior
`Tag object (<uuid>)` is displayed.

Appears to be a regression due to #3914.
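That string is Django's stock `Model.__str__`, which kicks in whenever a model defines no `__str__` of its own; roughly:

```python
# Django's default (paraphrased from django.db.models.Model):
def __str__(self):
    return "%s object (%s)" % (self.__class__.__name__, self.pk)
```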
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nautobot/extras/models/tags.py`
Content:
```
1 from django.contrib.contenttypes.models import ContentType
2 from django.db import models
3 from taggit.models import GenericUUIDTaggedItemBase
4
5 from nautobot.core.choices import ColorChoices
6 from nautobot.core.models import BaseManager, BaseModel
7 from nautobot.core.models.fields import ColorField
8 from nautobot.core.models.querysets import RestrictedQuerySet
9 from nautobot.extras.models import ChangeLoggedModel, CustomFieldModel
10 from nautobot.extras.models.mixins import NotesMixin
11 from nautobot.extras.models.relationships import RelationshipModel
12 from nautobot.extras.utils import extras_features, TaggableClassesQuery
13
14
15 #
16 # Tags
17 #
18
19
20 class TagQuerySet(RestrictedQuerySet):
21 """Queryset for `Tags` objects."""
22
23 def get_for_model(self, model):
24 """
25 Return all `Tags` assigned to the given model.
26 """
27 return self.filter(content_types__model=model._meta.model_name, content_types__app_label=model._meta.app_label)
28
29
30 # Tag *should* be a `NameColorContentTypesModel` but that way lies circular import purgatory. Sigh.
31 @extras_features(
32 "custom_validators",
33 )
34 class Tag(BaseModel, ChangeLoggedModel, CustomFieldModel, RelationshipModel, NotesMixin):
35 name = models.CharField(max_length=100, unique=True)
36 content_types = models.ManyToManyField(
37 to=ContentType,
38 related_name="tags",
39 limit_choices_to=TaggableClassesQuery(),
40 )
41 color = ColorField(default=ColorChoices.COLOR_GREY)
42 description = models.CharField(
43 max_length=200,
44 blank=True,
45 )
46
47 objects = BaseManager.from_queryset(TagQuerySet)()
48
49 class Meta:
50 ordering = ["name"]
51
52 def validate_content_types_removal(self, content_types_id):
53 """Validate content_types to be removed are not tagged to a model"""
54 errors = {}
55
56 removed_content_types = self.content_types.exclude(id__in=content_types_id)
57
58 # check if tag is assigned to any of the removed content_types
59 for content_type in removed_content_types:
60 model = content_type.model_class()
61 if model.objects.filter(tags=self).exists():
62 errors.setdefault("content_types", []).append(
63 f"Unable to remove {model._meta.label_lower}. Dependent objects were found."
64 )
65
66 return errors
67
68
69 class TaggedItem(BaseModel, GenericUUIDTaggedItemBase):
70 tag = models.ForeignKey(to=Tag, related_name="%(app_label)s_%(class)s_items", on_delete=models.CASCADE)
71
72 class Meta:
73 index_together = ("content_type", "object_id")
74 unique_together = [["content_type", "object_id", "tag"]]
75
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nautobot/extras/models/tags.py b/nautobot/extras/models/tags.py
--- a/nautobot/extras/models/tags.py
+++ b/nautobot/extras/models/tags.py
@@ -46,6 +46,9 @@
objects = BaseManager.from_queryset(TagQuerySet)()
+ def __str__(self):
+ return self.name
+
class Meta:
ordering = ["name"]
| {"golden_diff": "diff --git a/nautobot/extras/models/tags.py b/nautobot/extras/models/tags.py\n--- a/nautobot/extras/models/tags.py\n+++ b/nautobot/extras/models/tags.py\n@@ -46,6 +46,9 @@\n \n objects = BaseManager.from_queryset(TagQuerySet)()\n \n+ def __str__(self):\n+ return self.name\n+\n class Meta:\n ordering = [\"name\"]\n", "issue": "2.0: Tag string representation is incorrect\n### Environment\r\n* Nautobot version (Docker tag too if applicable): `next`, e13883b7b8a4f44bca0c40d8074dcf8f82e544e6\r\n\r\n### Steps to Reproduce\r\n1. Create a Tag and associate it to any model\r\n2. View the legacy-UI list view or detail view of that model\r\n\r\n<!-- What did you expect to happen? -->\r\n### Expected Behavior\r\n\r\nTag name to be displayed\r\n\r\n<!-- What happened instead? -->\r\n### Observed Behavior\r\n\r\n`Tag object (<uuid>)` is displayed.\r\n\r\n\r\n\r\nAppears to be a regression due to #3914.\r\n\n", "before_files": [{"content": "from django.contrib.contenttypes.models import ContentType\nfrom django.db import models\nfrom taggit.models import GenericUUIDTaggedItemBase\n\nfrom nautobot.core.choices import ColorChoices\nfrom nautobot.core.models import BaseManager, BaseModel\nfrom nautobot.core.models.fields import ColorField\nfrom nautobot.core.models.querysets import RestrictedQuerySet\nfrom nautobot.extras.models import ChangeLoggedModel, CustomFieldModel\nfrom nautobot.extras.models.mixins import NotesMixin\nfrom nautobot.extras.models.relationships import RelationshipModel\nfrom nautobot.extras.utils import extras_features, TaggableClassesQuery\n\n\n#\n# Tags\n#\n\n\nclass TagQuerySet(RestrictedQuerySet):\n \"\"\"Queryset for `Tags` objects.\"\"\"\n\n def get_for_model(self, model):\n \"\"\"\n Return all `Tags` assigned to the given model.\n \"\"\"\n return self.filter(content_types__model=model._meta.model_name, content_types__app_label=model._meta.app_label)\n\n\n# Tag *should* be a `NameColorContentTypesModel` but that way lies circular import purgatory. Sigh.\n@extras_features(\n \"custom_validators\",\n)\nclass Tag(BaseModel, ChangeLoggedModel, CustomFieldModel, RelationshipModel, NotesMixin):\n name = models.CharField(max_length=100, unique=True)\n content_types = models.ManyToManyField(\n to=ContentType,\n related_name=\"tags\",\n limit_choices_to=TaggableClassesQuery(),\n )\n color = ColorField(default=ColorChoices.COLOR_GREY)\n description = models.CharField(\n max_length=200,\n blank=True,\n )\n\n objects = BaseManager.from_queryset(TagQuerySet)()\n\n class Meta:\n ordering = [\"name\"]\n\n def validate_content_types_removal(self, content_types_id):\n \"\"\"Validate content_types to be removed are not tagged to a model\"\"\"\n errors = {}\n\n removed_content_types = self.content_types.exclude(id__in=content_types_id)\n\n # check if tag is assigned to any of the removed content_types\n for content_type in removed_content_types:\n model = content_type.model_class()\n if model.objects.filter(tags=self).exists():\n errors.setdefault(\"content_types\", []).append(\n f\"Unable to remove {model._meta.label_lower}. 
Dependent objects were found.\"\n )\n\n return errors\n\n\nclass TaggedItem(BaseModel, GenericUUIDTaggedItemBase):\n tag = models.ForeignKey(to=Tag, related_name=\"%(app_label)s_%(class)s_items\", on_delete=models.CASCADE)\n\n class Meta:\n index_together = (\"content_type\", \"object_id\")\n unique_together = [[\"content_type\", \"object_id\", \"tag\"]]\n", "path": "nautobot/extras/models/tags.py"}], "after_files": [{"content": "from django.contrib.contenttypes.models import ContentType\nfrom django.db import models\nfrom taggit.models import GenericUUIDTaggedItemBase\n\nfrom nautobot.core.choices import ColorChoices\nfrom nautobot.core.models import BaseManager, BaseModel\nfrom nautobot.core.models.fields import ColorField\nfrom nautobot.core.models.querysets import RestrictedQuerySet\nfrom nautobot.extras.models import ChangeLoggedModel, CustomFieldModel\nfrom nautobot.extras.models.mixins import NotesMixin\nfrom nautobot.extras.models.relationships import RelationshipModel\nfrom nautobot.extras.utils import extras_features, TaggableClassesQuery\n\n\n#\n# Tags\n#\n\n\nclass TagQuerySet(RestrictedQuerySet):\n \"\"\"Queryset for `Tags` objects.\"\"\"\n\n def get_for_model(self, model):\n \"\"\"\n Return all `Tags` assigned to the given model.\n \"\"\"\n return self.filter(content_types__model=model._meta.model_name, content_types__app_label=model._meta.app_label)\n\n\n# Tag *should* be a `NameColorContentTypesModel` but that way lies circular import purgatory. Sigh.\n@extras_features(\n \"custom_validators\",\n)\nclass Tag(BaseModel, ChangeLoggedModel, CustomFieldModel, RelationshipModel, NotesMixin):\n name = models.CharField(max_length=100, unique=True)\n content_types = models.ManyToManyField(\n to=ContentType,\n related_name=\"tags\",\n limit_choices_to=TaggableClassesQuery(),\n )\n color = ColorField(default=ColorChoices.COLOR_GREY)\n description = models.CharField(\n max_length=200,\n blank=True,\n )\n\n objects = BaseManager.from_queryset(TagQuerySet)()\n\n def __str__(self):\n return self.name\n\n class Meta:\n ordering = [\"name\"]\n\n def validate_content_types_removal(self, content_types_id):\n \"\"\"Validate content_types to be removed are not tagged to a model\"\"\"\n errors = {}\n\n removed_content_types = self.content_types.exclude(id__in=content_types_id)\n\n # check if tag is assigned to any of the removed content_types\n for content_type in removed_content_types:\n model = content_type.model_class()\n if model.objects.filter(tags=self).exists():\n errors.setdefault(\"content_types\", []).append(\n f\"Unable to remove {model._meta.label_lower}. Dependent objects were found.\"\n )\n\n return errors\n\n\nclass TaggedItem(BaseModel, GenericUUIDTaggedItemBase):\n tag = models.ForeignKey(to=Tag, related_name=\"%(app_label)s_%(class)s_items\", on_delete=models.CASCADE)\n\n class Meta:\n index_together = (\"content_type\", \"object_id\")\n unique_together = [[\"content_type\", \"object_id\", \"tag\"]]\n", "path": "nautobot/extras/models/tags.py"}]} | 1,195 | 95 |
gh_patches_debug_30621 | rasdani/github-patches | git_diff | pyodide__pyodide-3483 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
On first call, `pyodide config get emscripten_version` returns `Downloading xbuild environment Installing xbuild environment 3.1.27` instead of `3.1.27`
## 🐛 Bug
The [docs for out-of-tree builds](https://pyodide.org/en/stable/development/building-and-testing-packages.html#building-and-testing-packages-out-of-tree) give this code snippet:
```bash
pip install pyodide-build
git clone https://github.com/emscripten-core/emsdk.git
cd emsdk
PYODIDE_EMSCRIPTEN_VERSION=$(pyodide config get emscripten_version)
./emsdk install ${PYODIDE_EMSCRIPTEN_VERSION}
./emsdk activate ${PYODIDE_EMSCRIPTEN_VERSION}
source emsdk_env.sh
```
But this doesn't work because on the first call, `pyodide config get emscripten_version` outputs this:
```
Downloading xbuild environment
Installing xbuild environment
3.1.27
```
On subsequent calls it returns `3.1.27`.
### To Reproduce
See above.
### Expected behavior
Calls to `pyodide config get emscripten_version` should only ever output the version string such that this command can be reliably used in build automation.
### Environment
- Pyodide Version<!-- (e.g. 1.8.1) -->: Pyodide CLI Version: 0.2.2
### Additional context
As a workaround for build scripts, `pyodide config get emscripten_version` can be called once before actually using it.
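A sketch of how a build script could apply that workaround today (the helper name and output parsing are illustrative, not from Pyodide):

```python
import subprocess

def emscripten_version() -> str:
    cmd = ["pyodide", "config", "get", "emscripten_version"]
    # Warm-up call: absorbs the one-time "Downloading/Installing xbuild
    # environment" lines on a fresh checkout
    subprocess.run(cmd, capture_output=True, check=True)
    out = subprocess.run(cmd, capture_output=True, check=True, text=True)
    return out.stdout.strip().splitlines()[-1]  # last line is the version
```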
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyodide-build/pyodide_build/cli/config.py`
Content:
```
1 import typer
2
3 from ..common import get_make_environment_vars
4 from ..out_of_tree.utils import initialize_pyodide_root
5
6 app = typer.Typer(help="Manage config variables used in pyodide")
7
8
9 # A dictionary of config variables {key: env_var_in_makefile}
10 PYODIDE_CONFIGS = {
11 "emscripten_version": "PYODIDE_EMSCRIPTEN_VERSION",
12 "python_version": "PYVERSION",
13 }
14
15
16 @app.callback(no_args_is_help=True) # type: ignore[misc]
17 def callback() -> None:
18 return
19
20
21 def _get_configs() -> dict[str, str]:
22 initialize_pyodide_root()
23
24 configs: dict[str, str] = get_make_environment_vars()
25
26 configs_filtered = {k: configs[v] for k, v in PYODIDE_CONFIGS.items()}
27 return configs_filtered
28
29
30 @app.command("list")
31 def list_config():
32 """
33 List config variables used in pyodide
34 """
35 configs = _get_configs()
36
37 for k, v in configs.items():
38 typer.echo(f"{k}={v}")
39
40
41 @app.command("get") # type: ignore[misc]
42 def get_config(
43 config_var: str = typer.Argument(
44 ..., help="A config variable to get. Use `list` to see all possible values."
45 ),
46 ) -> None:
47 """
48 Get a value of a single config variable used in pyodide
49 """
50 configs = _get_configs()
51
52 if config_var not in configs:
53 typer.echo(f"Config variable {config_var} not found.")
54 typer.Exit(1)
55
56 typer.echo(configs[config_var])
57
```
Path: `pyodide-build/pyodide_build/out_of_tree/utils.py`
Content:
```
1 import os
2 from pathlib import Path
3
4 from ..common import search_pyodide_root
5
6
7 def ensure_env_installed(env: Path) -> None:
8 if env.exists():
9 return
10 from .. import __version__
11 from ..install_xbuildenv import download_xbuildenv, install_xbuildenv
12
13 if "dev" in __version__:
14 raise RuntimeError(
15 "To use out of tree builds with development Pyodide, you must explicitly set PYODIDE_ROOT"
16 )
17
18 download_xbuildenv(__version__, env)
19 install_xbuildenv(__version__, env)
20
21
22 def initialize_pyodide_root() -> None:
23 if "PYODIDE_ROOT" in os.environ:
24 return
25 try:
26 os.environ["PYODIDE_ROOT"] = str(search_pyodide_root(__file__))
27 return
28 except FileNotFoundError:
29 pass
30 env = Path(".pyodide-xbuildenv")
31 os.environ["PYODIDE_ROOT"] = str(env / "xbuildenv/pyodide-root")
32 ensure_env_installed(env)
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pyodide-build/pyodide_build/cli/config.py b/pyodide-build/pyodide_build/cli/config.py
--- a/pyodide-build/pyodide_build/cli/config.py
+++ b/pyodide-build/pyodide_build/cli/config.py
@@ -19,7 +19,7 @@
def _get_configs() -> dict[str, str]:
- initialize_pyodide_root()
+ initialize_pyodide_root(quiet=True)
configs: dict[str, str] = get_make_environment_vars()
diff --git a/pyodide-build/pyodide_build/out_of_tree/utils.py b/pyodide-build/pyodide_build/out_of_tree/utils.py
--- a/pyodide-build/pyodide_build/out_of_tree/utils.py
+++ b/pyodide-build/pyodide_build/out_of_tree/utils.py
@@ -1,10 +1,12 @@
import os
+from contextlib import ExitStack, redirect_stdout
+from io import StringIO
from pathlib import Path
from ..common import search_pyodide_root
-def ensure_env_installed(env: Path) -> None:
+def ensure_env_installed(env: Path, *, quiet: bool = False) -> None:
if env.exists():
return
from .. import __version__
@@ -15,11 +17,16 @@
"To use out of tree builds with development Pyodide, you must explicitly set PYODIDE_ROOT"
)
- download_xbuildenv(__version__, env)
- install_xbuildenv(__version__, env)
+ with ExitStack() as stack:
+ if quiet:
+ # Prevent writes to stdout
+ stack.enter_context(redirect_stdout(StringIO()))
+ download_xbuildenv(__version__, env)
+ install_xbuildenv(__version__, env)
-def initialize_pyodide_root() -> None:
+
+def initialize_pyodide_root(*, quiet: bool = False) -> None:
if "PYODIDE_ROOT" in os.environ:
return
try:
@@ -29,4 +36,4 @@
pass
env = Path(".pyodide-xbuildenv")
os.environ["PYODIDE_ROOT"] = str(env / "xbuildenv/pyodide-root")
- ensure_env_installed(env)
+ ensure_env_installed(env, quiet=quiet)
| {"golden_diff": "diff --git a/pyodide-build/pyodide_build/cli/config.py b/pyodide-build/pyodide_build/cli/config.py\n--- a/pyodide-build/pyodide_build/cli/config.py\n+++ b/pyodide-build/pyodide_build/cli/config.py\n@@ -19,7 +19,7 @@\n \n \n def _get_configs() -> dict[str, str]:\n- initialize_pyodide_root()\n+ initialize_pyodide_root(quiet=True)\n \n configs: dict[str, str] = get_make_environment_vars()\n \ndiff --git a/pyodide-build/pyodide_build/out_of_tree/utils.py b/pyodide-build/pyodide_build/out_of_tree/utils.py\n--- a/pyodide-build/pyodide_build/out_of_tree/utils.py\n+++ b/pyodide-build/pyodide_build/out_of_tree/utils.py\n@@ -1,10 +1,12 @@\n import os\n+from contextlib import ExitStack, redirect_stdout\n+from io import StringIO\n from pathlib import Path\n \n from ..common import search_pyodide_root\n \n \n-def ensure_env_installed(env: Path) -> None:\n+def ensure_env_installed(env: Path, *, quiet: bool = False) -> None:\n if env.exists():\n return\n from .. import __version__\n@@ -15,11 +17,16 @@\n \"To use out of tree builds with development Pyodide, you must explicitly set PYODIDE_ROOT\"\n )\n \n- download_xbuildenv(__version__, env)\n- install_xbuildenv(__version__, env)\n+ with ExitStack() as stack:\n+ if quiet:\n+ # Prevent writes to stdout\n+ stack.enter_context(redirect_stdout(StringIO()))\n \n+ download_xbuildenv(__version__, env)\n+ install_xbuildenv(__version__, env)\n \n-def initialize_pyodide_root() -> None:\n+\n+def initialize_pyodide_root(*, quiet: bool = False) -> None:\n if \"PYODIDE_ROOT\" in os.environ:\n return\n try:\n@@ -29,4 +36,4 @@\n pass\n env = Path(\".pyodide-xbuildenv\")\n os.environ[\"PYODIDE_ROOT\"] = str(env / \"xbuildenv/pyodide-root\")\n- ensure_env_installed(env)\n+ ensure_env_installed(env, quiet=quiet)\n", "issue": "On first call, `pyodide config get emscripten_version` returns `Downloading xbuild environment Installing xbuild environment 3.1.27` instead of `3.1.27`\n## \ud83d\udc1b Bug\r\n\r\nIn [the docs for out-of-tree builds](https://pyodide.org/en/stable/development/building-and-testing-packages.html#building-and-testing-packages-out-of-tree) it gives this code snippet:\r\n\r\n```bash\r\npip install pyodide-build\r\n\r\ngit clone https://github.com/emscripten-core/emsdk.git\r\ncd emsdk\r\n\r\nPYODIDE_EMSCRIPTEN_VERSION=$(pyodide config get emscripten_version)\r\n./emsdk install ${PYODIDE_EMSCRIPTEN_VERSION}\r\n./emsdk activate ${PYODIDE_EMSCRIPTEN_VERSION}\r\nsource emsdk_env.sh\r\n```\r\nBut this doesn't work because on the first call, `pyodide config get emscripten_version` outputs this:\r\n```\r\nDownloading xbuild environment\r\nInstalling xbuild environment\r\n3.1.27\r\n```\r\nOn subsequent calls it returns `3.1.27`.\r\n\r\n### To Reproduce\r\n\r\nSee above.\r\n\r\n### Expected behavior\r\n\r\nCalls to `pyodide config get emscripten_version` should only ever output the version string such that this command can be reliably used in build automation.\r\n\r\n### Environment\r\n\r\n- Pyodide Version<!-- (e.g. 
1.8.1) -->: Pyodide CLI Version: 0.2.2\r\n\r\n### Additional context\r\n\r\nAs a workaround for build scripts, `pyodide config get emscripten_version` can be called once before actually using it.\r\n\n", "before_files": [{"content": "import typer\n\nfrom ..common import get_make_environment_vars\nfrom ..out_of_tree.utils import initialize_pyodide_root\n\napp = typer.Typer(help=\"Manage config variables used in pyodide\")\n\n\n# A dictionary of config variables {key: env_var_in_makefile}\nPYODIDE_CONFIGS = {\n \"emscripten_version\": \"PYODIDE_EMSCRIPTEN_VERSION\",\n \"python_version\": \"PYVERSION\",\n}\n\n\[email protected](no_args_is_help=True) # type: ignore[misc]\ndef callback() -> None:\n return\n\n\ndef _get_configs() -> dict[str, str]:\n initialize_pyodide_root()\n\n configs: dict[str, str] = get_make_environment_vars()\n\n configs_filtered = {k: configs[v] for k, v in PYODIDE_CONFIGS.items()}\n return configs_filtered\n\n\[email protected](\"list\")\ndef list_config():\n \"\"\"\n List config variables used in pyodide\n \"\"\"\n configs = _get_configs()\n\n for k, v in configs.items():\n typer.echo(f\"{k}={v}\")\n\n\[email protected](\"get\") # type: ignore[misc]\ndef get_config(\n config_var: str = typer.Argument(\n ..., help=\"A config variable to get. Use `list` to see all possible values.\"\n ),\n) -> None:\n \"\"\"\n Get a value of a single config variable used in pyodide\n \"\"\"\n configs = _get_configs()\n\n if config_var not in configs:\n typer.echo(f\"Config variable {config_var} not found.\")\n typer.Exit(1)\n\n typer.echo(configs[config_var])\n", "path": "pyodide-build/pyodide_build/cli/config.py"}, {"content": "import os\nfrom pathlib import Path\n\nfrom ..common import search_pyodide_root\n\n\ndef ensure_env_installed(env: Path) -> None:\n if env.exists():\n return\n from .. 
import __version__\n from ..install_xbuildenv import download_xbuildenv, install_xbuildenv\n\n if \"dev\" in __version__:\n raise RuntimeError(\n \"To use out of tree builds with development Pyodide, you must explicitly set PYODIDE_ROOT\"\n )\n\n download_xbuildenv(__version__, env)\n install_xbuildenv(__version__, env)\n\n\ndef initialize_pyodide_root() -> None:\n if \"PYODIDE_ROOT\" in os.environ:\n return\n try:\n os.environ[\"PYODIDE_ROOT\"] = str(search_pyodide_root(__file__))\n return\n except FileNotFoundError:\n pass\n env = Path(\".pyodide-xbuildenv\")\n os.environ[\"PYODIDE_ROOT\"] = str(env / \"xbuildenv/pyodide-root\")\n ensure_env_installed(env)\n", "path": "pyodide-build/pyodide_build/out_of_tree/utils.py"}], "after_files": [{"content": "import typer\n\nfrom ..common import get_make_environment_vars\nfrom ..out_of_tree.utils import initialize_pyodide_root\n\napp = typer.Typer(help=\"Manage config variables used in pyodide\")\n\n\n# A dictionary of config variables {key: env_var_in_makefile}\nPYODIDE_CONFIGS = {\n \"emscripten_version\": \"PYODIDE_EMSCRIPTEN_VERSION\",\n \"python_version\": \"PYVERSION\",\n}\n\n\[email protected](no_args_is_help=True) # type: ignore[misc]\ndef callback() -> None:\n return\n\n\ndef _get_configs() -> dict[str, str]:\n initialize_pyodide_root(quiet=True)\n\n configs: dict[str, str] = get_make_environment_vars()\n\n configs_filtered = {k: configs[v] for k, v in PYODIDE_CONFIGS.items()}\n return configs_filtered\n\n\[email protected](\"list\")\ndef list_config():\n \"\"\"\n List config variables used in pyodide\n \"\"\"\n configs = _get_configs()\n\n for k, v in configs.items():\n typer.echo(f\"{k}={v}\")\n\n\[email protected](\"get\") # type: ignore[misc]\ndef get_config(\n config_var: str = typer.Argument(\n ..., help=\"A config variable to get. Use `list` to see all possible values.\"\n ),\n) -> None:\n \"\"\"\n Get a value of a single config variable used in pyodide\n \"\"\"\n configs = _get_configs()\n\n if config_var not in configs:\n typer.echo(f\"Config variable {config_var} not found.\")\n typer.Exit(1)\n\n typer.echo(configs[config_var])\n", "path": "pyodide-build/pyodide_build/cli/config.py"}, {"content": "import os\nfrom contextlib import ExitStack, redirect_stdout\nfrom io import StringIO\nfrom pathlib import Path\n\nfrom ..common import search_pyodide_root\n\n\ndef ensure_env_installed(env: Path, *, quiet: bool = False) -> None:\n if env.exists():\n return\n from .. import __version__\n from ..install_xbuildenv import download_xbuildenv, install_xbuildenv\n\n if \"dev\" in __version__:\n raise RuntimeError(\n \"To use out of tree builds with development Pyodide, you must explicitly set PYODIDE_ROOT\"\n )\n\n with ExitStack() as stack:\n if quiet:\n # Prevent writes to stdout\n stack.enter_context(redirect_stdout(StringIO()))\n\n download_xbuildenv(__version__, env)\n install_xbuildenv(__version__, env)\n\n\ndef initialize_pyodide_root(*, quiet: bool = False) -> None:\n if \"PYODIDE_ROOT\" in os.environ:\n return\n try:\n os.environ[\"PYODIDE_ROOT\"] = str(search_pyodide_root(__file__))\n return\n except FileNotFoundError:\n pass\n env = Path(\".pyodide-xbuildenv\")\n os.environ[\"PYODIDE_ROOT\"] = str(env / \"xbuildenv/pyodide-root\")\n ensure_env_installed(env, quiet=quiet)\n", "path": "pyodide-build/pyodide_build/out_of_tree/utils.py"}]} | 1,384 | 505 |
gh_patches_debug_19145 | rasdani/github-patches | git_diff | kivy__python-for-android-1480 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
libxml2 build broken on latest p4a master with python 3
With latest p4a master & python3crystax & host python 3, the libxml2 recipe gets built inside `/root/.local/share/python-for-android/build/other_builds/libxml2/armeabi-v7a__ndk_target_19/libxml2/`.

This is a folder name change: previously it was built in `/root/.local/share/python-for-android/build/other_builds/libxml2/armeabi-v7a/libxml2/`, and as a result the libxml2 path detection (computed inside the libxslt recipe) fails.
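A runnable sketch of why that breaks, using the path arithmetic from the libxslt recipe below (the container path is inferred from this report):

```python
from os.path import dirname

# libxslt's build container under the new layout (inferred)
container = "/root/.local/share/python-for-android/build/other_builds/libxslt/armeabi-v7a__ndk_target_19"
arch = "armeabi-v7a"

libxml = dirname(dirname(container)) + "/libxml2/%s/libxml2" % arch
print(libxml)
# .../other_builds/libxml2/armeabi-v7a/libxml2  <- old layout, no longer exists;
# the real dir is now .../other_builds/libxml2/armeabi-v7a__ndk_target_19/libxml2
```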
### Versions
* Python: host python 3.x, python3crystax target
* OS: ubuntu in docker
* Kivy: not used
* Cython: not sure, but I don't think it's relevant
Edit: corrected lxml -> libxml2
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pythonforandroid/recipes/libxslt/__init__.py`
Content:
```
1 from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory
2 from os.path import exists, join, dirname
3 import sh
4
5
6 class LibxsltRecipe(Recipe):
7 version = "1.1.28"
8 url = "http://xmlsoft.org/sources/libxslt-{version}.tar.gz"
9 depends = ["libxml2"]
10 patches = ["fix-dlopen.patch"]
11
12 call_hostpython_via_targetpython = False
13
14 def should_build(self, arch):
15 super(LibxsltRecipe, self).should_build(arch)
16 return not exists(join(self.ctx.get_libs_dir(arch.arch), "libxslt.a"))
17
18 def build_arch(self, arch):
19 super(LibxsltRecipe, self).build_arch(arch)
20 env = self.get_recipe_env(arch)
21 with current_directory(self.get_build_dir(arch.arch)):
22 # If the build is done with /bin/sh things blow up,
23 # try really hard to use bash
24 env["CC"] += " -I%s" % self.get_build_dir(arch.arch)
25 libxml = (
26 dirname(dirname(self.get_build_container_dir(arch.arch)))
27 + "/libxml2/%s/libxml2" % arch.arch
28 )
29 shprint(
30 sh.Command("./configure"),
31 "--build=i686-pc-linux-gnu",
32 "--host=arm-linux-eabi",
33 "--without-plugins",
34 "--without-debug",
35 "--without-python",
36 "--without-crypto",
37 "--with-libxml-src=%s" % libxml,
38 _env=env,
39 )
40 shprint(sh.make, "V=1", _env=env)
41 shutil.copyfile(
42 "libxslt/.libs/libxslt.a",
43 join(self.ctx.get_libs_dir(arch.arch), "libxslt.a"),
44 )
45 shutil.copyfile(
46 "libexslt/.libs/libexslt.a",
47 join(self.ctx.get_libs_dir(arch.arch), "libexslt.a"),
48 )
49
50 def get_recipe_env(self, arch):
51 env = super(LibxsltRecipe, self).get_recipe_env(arch)
52 env["CONFIG_SHELL"] = "/bin/bash"
53 env["SHELL"] = "/bin/bash"
54 env[
55 "CC"
56 ] = "arm-linux-androideabi-gcc -DANDROID -mandroid -fomit-frame-pointer --sysroot={}".format(
57 self.ctx.ndk_platform
58 )
59
60 env["LDSHARED"] = "%s -nostartfiles -shared -fPIC" % env["CC"]
61 return env
62
63
64 recipe = LibxsltRecipe()
65
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pythonforandroid/recipes/libxslt/__init__.py b/pythonforandroid/recipes/libxslt/__init__.py
--- a/pythonforandroid/recipes/libxslt/__init__.py
+++ b/pythonforandroid/recipes/libxslt/__init__.py
@@ -1,5 +1,5 @@
from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory
-from os.path import exists, join, dirname
+from os.path import exists, join
import sh
@@ -22,10 +22,8 @@
# If the build is done with /bin/sh things blow up,
# try really hard to use bash
env["CC"] += " -I%s" % self.get_build_dir(arch.arch)
- libxml = (
- dirname(dirname(self.get_build_container_dir(arch.arch)))
- + "/libxml2/%s/libxml2" % arch.arch
- )
+ libxml = Recipe.get_recipe(
+ 'libxml2', self.ctx).get_build_dir(arch.arch)
shprint(
sh.Command("./configure"),
"--build=i686-pc-linux-gnu",
| {"golden_diff": "diff --git a/pythonforandroid/recipes/libxslt/__init__.py b/pythonforandroid/recipes/libxslt/__init__.py\n--- a/pythonforandroid/recipes/libxslt/__init__.py\n+++ b/pythonforandroid/recipes/libxslt/__init__.py\n@@ -1,5 +1,5 @@\n from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory\n-from os.path import exists, join, dirname\n+from os.path import exists, join\n import sh\n \n \n@@ -22,10 +22,8 @@\n # If the build is done with /bin/sh things blow up,\n # try really hard to use bash\n env[\"CC\"] += \" -I%s\" % self.get_build_dir(arch.arch)\n- libxml = (\n- dirname(dirname(self.get_build_container_dir(arch.arch)))\n- + \"/libxml2/%s/libxml2\" % arch.arch\n- )\n+ libxml = Recipe.get_recipe(\n+ 'libxml2', self.ctx).get_build_dir(arch.arch)\n shprint(\n sh.Command(\"./configure\"),\n \"--build=i686-pc-linux-gnu\",\n", "issue": "libxml2 build broken on latest p4a master with python 3\nWith latest p4a master & python3crystax & host python 3, the libxml2 recipe gets built inside `/root/.local/share/python-for-android/build/other_builds/libxml2/armeabi-v7a__ndk_target_19/libxml2/ `.\r\n\r\nThis is a folder name change, previously it used to be in `/root/.local/share/python-for-android/build/other_builds/libxml2/armeabi-v7a/libxml2/` - and as a result, the path detection inside the libxml2 fails for some reason.\r\n\r\n### Versions\r\n\r\n* Python: host python 3.x, python3crystax target\r\n* OS: ubuntu in docker\r\n* Kivy: not used\r\n* Cython: not sure, but I don't think it's relevant\r\n\r\nEdit: corrected lxml -> libxml2\n", "before_files": [{"content": "from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory\nfrom os.path import exists, join, dirname\nimport sh\n\n\nclass LibxsltRecipe(Recipe):\n version = \"1.1.28\"\n url = \"http://xmlsoft.org/sources/libxslt-{version}.tar.gz\"\n depends = [\"libxml2\"]\n patches = [\"fix-dlopen.patch\"]\n\n call_hostpython_via_targetpython = False\n\n def should_build(self, arch):\n super(LibxsltRecipe, self).should_build(arch)\n return not exists(join(self.ctx.get_libs_dir(arch.arch), \"libxslt.a\"))\n\n def build_arch(self, arch):\n super(LibxsltRecipe, self).build_arch(arch)\n env = self.get_recipe_env(arch)\n with current_directory(self.get_build_dir(arch.arch)):\n # If the build is done with /bin/sh things blow up,\n # try really hard to use bash\n env[\"CC\"] += \" -I%s\" % self.get_build_dir(arch.arch)\n libxml = (\n dirname(dirname(self.get_build_container_dir(arch.arch)))\n + \"/libxml2/%s/libxml2\" % arch.arch\n )\n shprint(\n sh.Command(\"./configure\"),\n \"--build=i686-pc-linux-gnu\",\n \"--host=arm-linux-eabi\",\n \"--without-plugins\",\n \"--without-debug\",\n \"--without-python\",\n \"--without-crypto\",\n \"--with-libxml-src=%s\" % libxml,\n _env=env,\n )\n shprint(sh.make, \"V=1\", _env=env)\n shutil.copyfile(\n \"libxslt/.libs/libxslt.a\",\n join(self.ctx.get_libs_dir(arch.arch), \"libxslt.a\"),\n )\n shutil.copyfile(\n \"libexslt/.libs/libexslt.a\",\n join(self.ctx.get_libs_dir(arch.arch), \"libexslt.a\"),\n )\n\n def get_recipe_env(self, arch):\n env = super(LibxsltRecipe, self).get_recipe_env(arch)\n env[\"CONFIG_SHELL\"] = \"/bin/bash\"\n env[\"SHELL\"] = \"/bin/bash\"\n env[\n \"CC\"\n ] = \"arm-linux-androideabi-gcc -DANDROID -mandroid -fomit-frame-pointer --sysroot={}\".format(\n self.ctx.ndk_platform\n )\n\n env[\"LDSHARED\"] = \"%s -nostartfiles -shared -fPIC\" % env[\"CC\"]\n return env\n\n\nrecipe = LibxsltRecipe()\n", "path": 
"pythonforandroid/recipes/libxslt/__init__.py"}], "after_files": [{"content": "from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory\nfrom os.path import exists, join\nimport sh\n\n\nclass LibxsltRecipe(Recipe):\n version = \"1.1.28\"\n url = \"http://xmlsoft.org/sources/libxslt-{version}.tar.gz\"\n depends = [\"libxml2\"]\n patches = [\"fix-dlopen.patch\"]\n\n call_hostpython_via_targetpython = False\n\n def should_build(self, arch):\n super(LibxsltRecipe, self).should_build(arch)\n return not exists(join(self.ctx.get_libs_dir(arch.arch), \"libxslt.a\"))\n\n def build_arch(self, arch):\n super(LibxsltRecipe, self).build_arch(arch)\n env = self.get_recipe_env(arch)\n with current_directory(self.get_build_dir(arch.arch)):\n # If the build is done with /bin/sh things blow up,\n # try really hard to use bash\n env[\"CC\"] += \" -I%s\" % self.get_build_dir(arch.arch)\n libxml = Recipe.get_recipe(\n 'libxml2', self.ctx).get_build_dir(arch.arch)\n shprint(\n sh.Command(\"./configure\"),\n \"--build=i686-pc-linux-gnu\",\n \"--host=arm-linux-eabi\",\n \"--without-plugins\",\n \"--without-debug\",\n \"--without-python\",\n \"--without-crypto\",\n \"--with-libxml-src=%s\" % libxml,\n _env=env,\n )\n shprint(sh.make, \"V=1\", _env=env)\n shutil.copyfile(\n \"libxslt/.libs/libxslt.a\",\n join(self.ctx.get_libs_dir(arch.arch), \"libxslt.a\"),\n )\n shutil.copyfile(\n \"libexslt/.libs/libexslt.a\",\n join(self.ctx.get_libs_dir(arch.arch), \"libexslt.a\"),\n )\n\n def get_recipe_env(self, arch):\n env = super(LibxsltRecipe, self).get_recipe_env(arch)\n env[\"CONFIG_SHELL\"] = \"/bin/bash\"\n env[\"SHELL\"] = \"/bin/bash\"\n env[\n \"CC\"\n ] = \"arm-linux-androideabi-gcc -DANDROID -mandroid -fomit-frame-pointer --sysroot={}\".format(\n self.ctx.ndk_platform\n )\n\n env[\"LDSHARED\"] = \"%s -nostartfiles -shared -fPIC\" % env[\"CC\"]\n return env\n\n\nrecipe = LibxsltRecipe()\n", "path": "pythonforandroid/recipes/libxslt/__init__.py"}]} | 1,149 | 254 |
gh_patches_debug_12242 | rasdani/github-patches | git_diff | DataBiosphere__toil-562 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Need `toil --version`
Need `toil --version`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/toil/utils/toilMain.py`
Content:
```
1 from __future__ import absolute_import, print_function
2 import os
3 import sys
4
5 def main():
6 modules = loadModules()
7 try:
8 command = sys.argv[1]
9 except IndexError:
10 printHelp(modules)
11 else:
12 if command == '--help':
13 printHelp(modules)
14 else:
15 try:
16 module = modules[command]
17 except KeyError:
18 print("Unknown option '%s'. "
19 "Pass --help to display usage information.\n" % command, file=sys.stderr)
20 sys.exit(1)
21 else:
22 del sys.argv[1]
23 module.main()
24
25
26 def loadModules():
27 # noinspection PyUnresolvedReferences
28 from toil.utils import toilKill, toilRestart, toilStats, toilStatus, toilClean
29 return {name[4:].lower(): module for name, module in locals().iteritems()}
30
31
32 def printHelp(modules):
33 usage = ("\n"
34 "Usage: {name} COMMAND ...\n"
35 " {name} --help\n"
36 " {name} COMMAND --help\n\n"
37 "where COMMAND is one of the following:\n\n{descriptions}\n\n")
38 print(usage.format(
39 name=os.path.basename(sys.argv[0]),
40 commands='|'.join(modules.iterkeys()),
41 descriptions='\n'.join("%s - %s" % (n, m.__doc__.strip()) for n, m in modules.iteritems())))
42
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/toil/utils/toilMain.py b/src/toil/utils/toilMain.py
--- a/src/toil/utils/toilMain.py
+++ b/src/toil/utils/toilMain.py
@@ -1,4 +1,6 @@
from __future__ import absolute_import, print_function
+from toil.version import version
+import pkg_resources
import os
import sys
@@ -11,6 +13,11 @@
else:
if command == '--help':
printHelp(modules)
+ elif command == '--version':
+ try:
+ print(pkg_resources.get_distribution('toil').version)
+ except:
+ print("Version gathered from toil.version: "+version)
else:
try:
module = modules[command]
| {"golden_diff": "diff --git a/src/toil/utils/toilMain.py b/src/toil/utils/toilMain.py\n--- a/src/toil/utils/toilMain.py\n+++ b/src/toil/utils/toilMain.py\n@@ -1,4 +1,6 @@\n from __future__ import absolute_import, print_function\n+from toil.version import version\n+import pkg_resources\n import os\n import sys\n \n@@ -11,6 +13,11 @@\n else:\n if command == '--help':\n printHelp(modules)\n+ elif command == '--version':\n+ try:\n+ print(pkg_resources.get_distribution('toil').version)\n+ except:\n+ print(\"Version gathered from toil.version: \"+version)\n else:\n try:\n module = modules[command]\n", "issue": "Need `toil --version`\n\nNeed `toil --version`\n\n", "before_files": [{"content": "from __future__ import absolute_import, print_function\nimport os\nimport sys\n\ndef main():\n modules = loadModules()\n try:\n command = sys.argv[1]\n except IndexError:\n printHelp(modules)\n else:\n if command == '--help':\n printHelp(modules)\n else:\n try:\n module = modules[command]\n except KeyError:\n print(\"Unknown option '%s'. \"\n \"Pass --help to display usage information.\\n\" % command, file=sys.stderr)\n sys.exit(1)\n else:\n del sys.argv[1]\n module.main()\n\n\ndef loadModules():\n # noinspection PyUnresolvedReferences\n from toil.utils import toilKill, toilRestart, toilStats, toilStatus, toilClean\n return {name[4:].lower(): module for name, module in locals().iteritems()}\n\n\ndef printHelp(modules):\n usage = (\"\\n\"\n \"Usage: {name} COMMAND ...\\n\"\n \" {name} --help\\n\"\n \" {name} COMMAND --help\\n\\n\"\n \"where COMMAND is one of the following:\\n\\n{descriptions}\\n\\n\")\n print(usage.format(\n name=os.path.basename(sys.argv[0]),\n commands='|'.join(modules.iterkeys()),\n descriptions='\\n'.join(\"%s - %s\" % (n, m.__doc__.strip()) for n, m in modules.iteritems())))\n", "path": "src/toil/utils/toilMain.py"}], "after_files": [{"content": "from __future__ import absolute_import, print_function\nfrom toil.version import version\nimport pkg_resources\nimport os\nimport sys\n\ndef main():\n modules = loadModules()\n try:\n command = sys.argv[1]\n except IndexError:\n printHelp(modules)\n else:\n if command == '--help':\n printHelp(modules)\n elif command == '--version':\n try:\n print(pkg_resources.get_distribution('toil').version)\n except:\n print(\"Version gathered from toil.version: \"+version)\n else:\n try:\n module = modules[command]\n except KeyError:\n print(\"Unknown option '%s'. \"\n \"Pass --help to display usage information.\\n\" % command, file=sys.stderr)\n sys.exit(1)\n else:\n del sys.argv[1]\n module.main()\n\n\ndef loadModules():\n # noinspection PyUnresolvedReferences\n from toil.utils import toilKill, toilRestart, toilStats, toilStatus, toilClean\n return {name[4:].lower(): module for name, module in locals().iteritems()}\n\n\ndef printHelp(modules):\n usage = (\"\\n\"\n \"Usage: {name} COMMAND ...\\n\"\n \" {name} --help\\n\"\n \" {name} COMMAND --help\\n\\n\"\n \"where COMMAND is one of the following:\\n\\n{descriptions}\\n\\n\")\n print(usage.format(\n name=os.path.basename(sys.argv[0]),\n commands='|'.join(modules.iterkeys()),\n descriptions='\\n'.join(\"%s - %s\" % (n, m.__doc__.strip()) for n, m in modules.iteritems())))\n", "path": "src/toil/utils/toilMain.py"}]} | 665 | 169 |
gh_patches_debug_8190 | rasdani/github-patches | git_diff | microsoft__presidio-782 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
typo in append_analysis_explenation_text
It seems that there is a typo in the function name.
append_analysis_expl**e**nation_text -> append_analysis_expl**a**nation_text
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `presidio-analyzer/presidio_analyzer/recognizer_result.py`
Content:
```
1 import logging
2 from typing import Dict
3
4 from presidio_analyzer import AnalysisExplanation
5
6
7 class RecognizerResult:
8 """
9 Recognizer Result represents the findings of the detected entity.
10
11 Result of a recognizer analyzing the text.
12
13 :param entity_type: the type of the entity
14 :param start: the start location of the detected entity
15 :param end: the end location of the detected entity
16 :param score: the score of the detection
17 :param analysis_explanation: contains the explanation of why this
18 entity was identified
19 """
20
21 logger = logging.getLogger("presidio-analyzer")
22
23 def __init__(
24 self,
25 entity_type: str,
26 start: int,
27 end: int,
28 score: float,
29 analysis_explanation: AnalysisExplanation = None,
30 ):
31
32 self.entity_type = entity_type
33 self.start = start
34 self.end = end
35 self.score = score
36 self.analysis_explanation = analysis_explanation
37
38 def append_analysis_explenation_text(self, text: str) -> None:
39 """Add text to the analysis explanation."""
40 if self.analysis_explanation:
41 self.analysis_explanation.append_textual_explanation_line(text)
42
43 def to_dict(self) -> Dict:
44 """
45 Serialize self to dictionary.
46
47 :return: a dictionary
48 """
49 return self.__dict__
50
51 @classmethod
52 def from_json(cls, data: Dict) -> "RecognizerResult":
53 """
54 Create RecognizerResult from json.
55
56 :param data: e.g. {
57 "start": 24,
58 "end": 32,
59 "score": 0.8,
60 "entity_type": "NAME"
61 }
62 :return: RecognizerResult
63 """
64 score = data.get("score")
65 entity_type = data.get("entity_type")
66 start = data.get("start")
67 end = data.get("end")
68 return cls(entity_type, start, end, score)
69
70 def __repr__(self) -> str:
71 """Return a string representation of the instance."""
72 return self.__str__()
73
74 def intersects(self, other: "RecognizerResult") -> int:
75 """
76 Check if self intersects with a different RecognizerResult.
77
78 :return: If intersecting, returns the number of
79 intersecting characters.
80 If not, returns 0
81 """
82 # if they do not overlap the intersection is 0
83 if self.end < other.start or other.end < self.start:
84 return 0
85
86 # otherwise the intersection is min(end) - max(start)
87 return min(self.end, other.end) - max(self.start, other.start)
88
89 def contained_in(self, other: "RecognizerResult") -> bool:
90 """
91 Check if self is contained in a different RecognizerResult.
92
93 :return: true if contained
94 """
95 return self.start >= other.start and self.end <= other.end
96
97 def contains(self, other: "RecognizerResult") -> bool:
98 """
99 Check if one result is contained or equal to another result.
100
101 :param other: another RecognizerResult
102 :return: bool
103 """
104 return self.start <= other.start and self.end >= other.end
105
106 def equal_indices(self, other: "RecognizerResult") -> bool:
107 """
108 Check if the indices are equal between two results.
109
110 :param other: another RecognizerResult
111 :return:
112 """
113 return self.start == other.start and self.end == other.end
114
115 def __gt__(self, other: "RecognizerResult") -> bool:
116 """
117 Check if one result is greater by using the results indices in the text.
118
119 :param other: another RecognizerResult
120 :return: bool
121 """
122 if self.start == other.start:
123 return self.end > other.end
124 return self.start > other.start
125
126 def __eq__(self, other: "RecognizerResult") -> bool:
127 """
128 Check two results are equal by using all class fields.
129
130 :param other: another RecognizerResult
131 :return: bool
132 """
133 equal_type = self.entity_type == other.entity_type
134 equal_score = self.score == other.score
135 return self.equal_indices(other) and equal_type and equal_score
136
137 def __hash__(self):
138 """
139 Hash the result data by using all class fields.
140
141 :return: int
142 """
143 return hash(
144 f"{str(self.start)} {str(self.end)} {str(self.score)} {self.entity_type}"
145 )
146
147 def __str__(self) -> str:
148 """Return a string representation of the instance."""
149 return (
150 f"type: {self.entity_type}, "
151 f"start: {self.start}, "
152 f"end: {self.end}, "
153 f"score: {self.score}"
154 )
155
156 def has_conflict(self, other: "RecognizerResult") -> bool:
157 """
158 Check if two recognizer results are conflicted or not.
159
160 I have a conflict if:
161 1. My indices are the same as the other and my score is lower.
162 2. If my indices are contained in another.
163
164 :param other: RecognizerResult
165 :return:
166 """
167 if self.equal_indices(other):
168 return self.score <= other.score
169 return other.contains(self)
170
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/presidio-analyzer/presidio_analyzer/recognizer_result.py b/presidio-analyzer/presidio_analyzer/recognizer_result.py
--- a/presidio-analyzer/presidio_analyzer/recognizer_result.py
+++ b/presidio-analyzer/presidio_analyzer/recognizer_result.py
@@ -35,7 +35,7 @@
self.score = score
self.analysis_explanation = analysis_explanation
- def append_analysis_explenation_text(self, text: str) -> None:
+ def append_analysis_explanation_text(self, text: str) -> None:
"""Add text to the analysis explanation."""
if self.analysis_explanation:
self.analysis_explanation.append_textual_explanation_line(text)
| {"golden_diff": "diff --git a/presidio-analyzer/presidio_analyzer/recognizer_result.py b/presidio-analyzer/presidio_analyzer/recognizer_result.py\n--- a/presidio-analyzer/presidio_analyzer/recognizer_result.py\n+++ b/presidio-analyzer/presidio_analyzer/recognizer_result.py\n@@ -35,7 +35,7 @@\n self.score = score\n self.analysis_explanation = analysis_explanation\n \n- def append_analysis_explenation_text(self, text: str) -> None:\n+ def append_analysis_explanation_text(self, text: str) -> None:\n \"\"\"Add text to the analysis explanation.\"\"\"\n if self.analysis_explanation:\n self.analysis_explanation.append_textual_explanation_line(text)\n", "issue": "typo in append_analysis_explenation_text\nIt seems that there is a typo in the function name.\r\n\r\nappend_analysis_expl**e**nation_text - > append_analysis_expl**a**nation_text\n", "before_files": [{"content": "import logging\nfrom typing import Dict\n\nfrom presidio_analyzer import AnalysisExplanation\n\n\nclass RecognizerResult:\n \"\"\"\n Recognizer Result represents the findings of the detected entity.\n\n Result of a recognizer analyzing the text.\n\n :param entity_type: the type of the entity\n :param start: the start location of the detected entity\n :param end: the end location of the detected entity\n :param score: the score of the detection\n :param analysis_explanation: contains the explanation of why this\n entity was identified\n \"\"\"\n\n logger = logging.getLogger(\"presidio-analyzer\")\n\n def __init__(\n self,\n entity_type: str,\n start: int,\n end: int,\n score: float,\n analysis_explanation: AnalysisExplanation = None,\n ):\n\n self.entity_type = entity_type\n self.start = start\n self.end = end\n self.score = score\n self.analysis_explanation = analysis_explanation\n\n def append_analysis_explenation_text(self, text: str) -> None:\n \"\"\"Add text to the analysis explanation.\"\"\"\n if self.analysis_explanation:\n self.analysis_explanation.append_textual_explanation_line(text)\n\n def to_dict(self) -> Dict:\n \"\"\"\n Serialize self to dictionary.\n\n :return: a dictionary\n \"\"\"\n return self.__dict__\n\n @classmethod\n def from_json(cls, data: Dict) -> \"RecognizerResult\":\n \"\"\"\n Create RecognizerResult from json.\n\n :param data: e.g. 
{\n \"start\": 24,\n \"end\": 32,\n \"score\": 0.8,\n \"entity_type\": \"NAME\"\n }\n :return: RecognizerResult\n \"\"\"\n score = data.get(\"score\")\n entity_type = data.get(\"entity_type\")\n start = data.get(\"start\")\n end = data.get(\"end\")\n return cls(entity_type, start, end, score)\n\n def __repr__(self) -> str:\n \"\"\"Return a string representation of the instance.\"\"\"\n return self.__str__()\n\n def intersects(self, other: \"RecognizerResult\") -> int:\n \"\"\"\n Check if self intersects with a different RecognizerResult.\n\n :return: If intersecting, returns the number of\n intersecting characters.\n If not, returns 0\n \"\"\"\n # if they do not overlap the intersection is 0\n if self.end < other.start or other.end < self.start:\n return 0\n\n # otherwise the intersection is min(end) - max(start)\n return min(self.end, other.end) - max(self.start, other.start)\n\n def contained_in(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if self is contained in a different RecognizerResult.\n\n :return: true if contained\n \"\"\"\n return self.start >= other.start and self.end <= other.end\n\n def contains(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if one result is contained or equal to another result.\n\n :param other: another RecognizerResult\n :return: bool\n \"\"\"\n return self.start <= other.start and self.end >= other.end\n\n def equal_indices(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if the indices are equal between two results.\n\n :param other: another RecognizerResult\n :return:\n \"\"\"\n return self.start == other.start and self.end == other.end\n\n def __gt__(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if one result is greater by using the results indices in the text.\n\n :param other: another RecognizerResult\n :return: bool\n \"\"\"\n if self.start == other.start:\n return self.end > other.end\n return self.start > other.start\n\n def __eq__(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check two results are equal by using all class fields.\n\n :param other: another RecognizerResult\n :return: bool\n \"\"\"\n equal_type = self.entity_type == other.entity_type\n equal_score = self.score == other.score\n return self.equal_indices(other) and equal_type and equal_score\n\n def __hash__(self):\n \"\"\"\n Hash the result data by using all class fields.\n\n :return: int\n \"\"\"\n return hash(\n f\"{str(self.start)} {str(self.end)} {str(self.score)} {self.entity_type}\"\n )\n\n def __str__(self) -> str:\n \"\"\"Return a string representation of the instance.\"\"\"\n return (\n f\"type: {self.entity_type}, \"\n f\"start: {self.start}, \"\n f\"end: {self.end}, \"\n f\"score: {self.score}\"\n )\n\n def has_conflict(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if two recognizer results are conflicted or not.\n\n I have a conflict if:\n 1. My indices are the same as the other and my score is lower.\n 2. 
If my indices are contained in another.\n\n :param other: RecognizerResult\n :return:\n \"\"\"\n if self.equal_indices(other):\n return self.score <= other.score\n return other.contains(self)\n", "path": "presidio-analyzer/presidio_analyzer/recognizer_result.py"}], "after_files": [{"content": "import logging\nfrom typing import Dict\n\nfrom presidio_analyzer import AnalysisExplanation\n\n\nclass RecognizerResult:\n \"\"\"\n Recognizer Result represents the findings of the detected entity.\n\n Result of a recognizer analyzing the text.\n\n :param entity_type: the type of the entity\n :param start: the start location of the detected entity\n :param end: the end location of the detected entity\n :param score: the score of the detection\n :param analysis_explanation: contains the explanation of why this\n entity was identified\n \"\"\"\n\n logger = logging.getLogger(\"presidio-analyzer\")\n\n def __init__(\n self,\n entity_type: str,\n start: int,\n end: int,\n score: float,\n analysis_explanation: AnalysisExplanation = None,\n ):\n\n self.entity_type = entity_type\n self.start = start\n self.end = end\n self.score = score\n self.analysis_explanation = analysis_explanation\n\n def append_analysis_explanation_text(self, text: str) -> None:\n \"\"\"Add text to the analysis explanation.\"\"\"\n if self.analysis_explanation:\n self.analysis_explanation.append_textual_explanation_line(text)\n\n def to_dict(self) -> Dict:\n \"\"\"\n Serialize self to dictionary.\n\n :return: a dictionary\n \"\"\"\n return self.__dict__\n\n @classmethod\n def from_json(cls, data: Dict) -> \"RecognizerResult\":\n \"\"\"\n Create RecognizerResult from json.\n\n :param data: e.g. {\n \"start\": 24,\n \"end\": 32,\n \"score\": 0.8,\n \"entity_type\": \"NAME\"\n }\n :return: RecognizerResult\n \"\"\"\n score = data.get(\"score\")\n entity_type = data.get(\"entity_type\")\n start = data.get(\"start\")\n end = data.get(\"end\")\n return cls(entity_type, start, end, score)\n\n def __repr__(self) -> str:\n \"\"\"Return a string representation of the instance.\"\"\"\n return self.__str__()\n\n def intersects(self, other: \"RecognizerResult\") -> int:\n \"\"\"\n Check if self intersects with a different RecognizerResult.\n\n :return: If intersecting, returns the number of\n intersecting characters.\n If not, returns 0\n \"\"\"\n # if they do not overlap the intersection is 0\n if self.end < other.start or other.end < self.start:\n return 0\n\n # otherwise the intersection is min(end) - max(start)\n return min(self.end, other.end) - max(self.start, other.start)\n\n def contained_in(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if self is contained in a different RecognizerResult.\n\n :return: true if contained\n \"\"\"\n return self.start >= other.start and self.end <= other.end\n\n def contains(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if one result is contained or equal to another result.\n\n :param other: another RecognizerResult\n :return: bool\n \"\"\"\n return self.start <= other.start and self.end >= other.end\n\n def equal_indices(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if the indices are equal between two results.\n\n :param other: another RecognizerResult\n :return:\n \"\"\"\n return self.start == other.start and self.end == other.end\n\n def __gt__(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if one result is greater by using the results indices in the text.\n\n :param other: another RecognizerResult\n :return: bool\n \"\"\"\n if self.start == 
other.start:\n return self.end > other.end\n return self.start > other.start\n\n def __eq__(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check two results are equal by using all class fields.\n\n :param other: another RecognizerResult\n :return: bool\n \"\"\"\n equal_type = self.entity_type == other.entity_type\n equal_score = self.score == other.score\n return self.equal_indices(other) and equal_type and equal_score\n\n def __hash__(self):\n \"\"\"\n Hash the result data by using all class fields.\n\n :return: int\n \"\"\"\n return hash(\n f\"{str(self.start)} {str(self.end)} {str(self.score)} {self.entity_type}\"\n )\n\n def __str__(self) -> str:\n \"\"\"Return a string representation of the instance.\"\"\"\n return (\n f\"type: {self.entity_type}, \"\n f\"start: {self.start}, \"\n f\"end: {self.end}, \"\n f\"score: {self.score}\"\n )\n\n def has_conflict(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if two recognizer results are conflicted or not.\n\n I have a conflict if:\n 1. My indices are the same as the other and my score is lower.\n 2. If my indices are contained in another.\n\n :param other: RecognizerResult\n :return:\n \"\"\"\n if self.equal_indices(other):\n return self.score <= other.score\n return other.contains(self)\n", "path": "presidio-analyzer/presidio_analyzer/recognizer_result.py"}]} | 1,860 | 172 |
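Since the presidio fix is purely a rename, the call site is the interesting part. Below is a runnable sketch of how the corrected `append_analysis_explanation_text` is used; the `AnalysisExplanation` stand-in is hypothetical and mirrors only the one method (`append_textual_explanation_line`) that `RecognizerResult` calls in the file above, and the sample entity values are illustrative.

```python
class AnalysisExplanation:
    """Hypothetical stand-in exposing only the method RecognizerResult needs."""

    def __init__(self):
        self.textual_explanation = []

    def append_textual_explanation_line(self, text):
        self.textual_explanation.append(text)


class RecognizerResult:
    def __init__(self, entity_type, start, end, score, analysis_explanation=None):
        self.entity_type = entity_type
        self.start = start
        self.end = end
        self.score = score
        self.analysis_explanation = analysis_explanation

    def append_analysis_explanation_text(self, text):
        """Add text to the analysis explanation (post-rename spelling)."""
        if self.analysis_explanation:
            self.analysis_explanation.append_textual_explanation_line(text)


result = RecognizerResult("PHONE_NUMBER", 24, 32, 0.8, AnalysisExplanation())
result.append_analysis_explanation_text("matched by the regex recognizer")
print(result.analysis_explanation.textual_explanation)
```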
gh_patches_debug_18416 | rasdani/github-patches | git_diff | kartoza__prj.app-126 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
We should add Rischan to the email recipients list for projecta errors
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `django_project/core/settings/base.py`
Content:
```
1 # coding=utf-8
2 """
3 core.settings.base
4 """
5 # Django settings for projecta project.
6
7 from .utils import absolute_path
8
9 ADMINS = (
10 ('Tim Sutton', '[email protected]'),
11 )
12 SERVER_EMAIL = '[email protected]'
13 EMAIL_HOST = 'localhost'
14 DEFAULT_FROM_EMAIL = '[email protected]'
15
16 MANAGERS = ADMINS
17
18 # Local time zone for this installation. Choices can be found here:
19 # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
20 # although not all choices may be available on all operating systems.
21 # In a Windows environment this must be set to your system time zone.
22 TIME_ZONE = 'America/Chicago'
23
24 # Language code for this installation. All choices can be found here:
25 # http://www.i18nguy.com/unicode/language-identifiers.html
26 LANGUAGE_CODE = 'en-us'
27
28 SITE_ID = 1
29
30 # If you set this to False, Django will make some optimizations so as not
31 # to load the internationalization machinery.
32 USE_I18N = True
33
34 # If you set this to False, Django will not format dates, numbers and
35 # calendars according to the current locale.
36 USE_L10N = True
37
38 # If you set this to False, Django will not use timezone-aware datetimes.
39 USE_TZ = True
40
41 # Absolute filesystem path to the directory that will hold user-uploaded files.
42 # Example: "/var/www/example.com/media/"
43 MEDIA_ROOT = '/home/web/media'
44
45 # URL that handles the media served from MEDIA_ROOT. Make sure to use a
46 # trailing slash.
47 # Examples: "http://example.com/media/", "http://media.example.com/"
48 # MEDIA_URL = '/media/'
49 # setting full MEDIA_URL to be able to use it for the feeds
50 MEDIA_URL = '/media/'
51
52 # Absolute path to the directory static files should be collected to.
53 # Don't put anything in this directory yourself; store your static files
54 # in apps' "static/" subdirectories and in STATICFILES_DIRS.
55 # Example: "/var/www/example.com/static/"
56 STATIC_ROOT = '/home/web/static'
57
58 # URL prefix for static files.
59 # Example: "http://example.com/static/", "http://static.example.com/"
60 STATIC_URL = '/static/'
61
62 # Additional locations of static files
63 STATICFILES_DIRS = (
64 # Put strings here, like "/home/html/static" or "C:/www/django/static".
65 # Always use forward slashes, even on Windows.
66 # Don't forget to use absolute paths, not relative paths.
67 absolute_path('core', 'base_static'),
68 )
69
70 # List of finder classes that know how to find static files in
71 # various locations.
72 STATICFILES_FINDERS = (
73 'django.contrib.staticfiles.finders.FileSystemFinder',
74 'django.contrib.staticfiles.finders.AppDirectoriesFinder',
75 # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
76 )
77
78 # import SECRET_KEY into current namespace
79 # noinspection PyUnresolvedReferences
80 from .secret import SECRET_KEY # noqa
81
82 # List of callables that know how to import templates from various sources.
83 TEMPLATE_LOADERS = (
84 'django.template.loaders.filesystem.Loader',
85 'django.template.loaders.app_directories.Loader',
86 # 'django.template.loaders.eggs.Loader',
87 )
88
89 TEMPLATE_CONTEXT_PROCESSORS = (
90 'django.contrib.auth.context_processors.auth',
91 'django.core.context_processors.request',
92 'django.core.context_processors.media',
93 'core.context_processors.add_intercom_app_id',
94 'django.template.context_processors.i18n'
95 )
96
97 MIDDLEWARE_CLASSES = (
98 'django.middleware.common.CommonMiddleware',
99 'django.contrib.sessions.middleware.SessionMiddleware',
100 'django.middleware.csrf.CsrfViewMiddleware',
101 'django.contrib.auth.middleware.AuthenticationMiddleware',
102 'django.contrib.messages.middleware.MessageMiddleware',
103 # Uncomment the next line for simple clickjacking protection:
104 # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
105 'core.custom_middleware.NavContextMiddleware',
106 'django.middleware.locale.LocaleMiddleware'
107 )
108
109 ROOT_URLCONF = 'core.urls'
110
111 # Python dotted path to the WSGI application used by Django's runserver.
112 WSGI_APPLICATION = 'core.wsgi.application'
113
114 TEMPLATE_DIRS = (
115 # project level templates
116 absolute_path('core', 'base_templates'),
117 absolute_path('vota', 'templates'),
118 absolute_path('changes', 'templates'),
119
120 )
121
122 INSTALLED_APPS = (
123 'django.contrib.auth',
124 'django.contrib.contenttypes',
125 'django.contrib.sessions',
126 'django.contrib.sites',
127 'django.contrib.messages',
128 'django.contrib.staticfiles',
129 'django.contrib.admin',
130 'django.contrib.syndication',
131 )
132
133 # A sample logging configuration. The only tangible logging
134 # performed by this configuration is to send an email to
135 # the site admins on every HTTP 500 error when DEBUG=False.
136 # See http://docs.djangoproject.com/en/dev/topics/logging for
137 # more details on how to customize your logging configuration.
138 LOGGING = {
139 'version': 1,
140 'disable_existing_loggers': False,
141 'filters': {
142 'require_debug_false': {
143 '()': 'django.utils.log.RequireDebugFalse'
144 }
145 },
146 'handlers': {
147 'mail_admins': {
148 'level': 'ERROR',
149 'filters': ['require_debug_false'],
150 'class': 'django.utils.log.AdminEmailHandler'
151 }
152 },
153 'loggers': {
154 'django.request': {
155 'handlers': ['mail_admins'],
156 'level': 'ERROR',
157 'propagate': True,
158 },
159 }
160 }
161
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/django_project/core/settings/base.py b/django_project/core/settings/base.py
--- a/django_project/core/settings/base.py
+++ b/django_project/core/settings/base.py
@@ -7,7 +7,8 @@
from .utils import absolute_path
ADMINS = (
- ('Tim Sutton', '[email protected]'),
+ ('Tim Sutton', '[email protected]'),
+ ('Rischan Mafrur', '[email protected]')
)
SERVER_EMAIL = '[email protected]'
EMAIL_HOST = 'localhost'
@@ -19,7 +20,7 @@
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
-TIME_ZONE = 'America/Chicago'
+TIME_ZONE = 'Africa/Johannesburg'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
| {"golden_diff": "diff --git a/django_project/core/settings/base.py b/django_project/core/settings/base.py\n--- a/django_project/core/settings/base.py\n+++ b/django_project/core/settings/base.py\n@@ -7,7 +7,8 @@\n from .utils import absolute_path\n \n ADMINS = (\n- ('Tim Sutton', '[email protected]'),\n+ ('Tim Sutton', '[email protected]'),\n+ ('Rischan Mafrur', '[email protected]')\n )\n SERVER_EMAIL = '[email protected]'\n EMAIL_HOST = 'localhost'\n@@ -19,7 +20,7 @@\n # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name\n # although not all choices may be available on all operating systems.\n # In a Windows environment this must be set to your system time zone.\n-TIME_ZONE = 'America/Chicago'\n+TIME_ZONE = 'Africa/Johannesburg'\n \n # Language code for this installation. All choices can be found here:\n # http://www.i18nguy.com/unicode/language-identifiers.html\n", "issue": "We should add Rischan to the email recipients list for projecta errors\n\n", "before_files": [{"content": "# coding=utf-8\n\"\"\"\ncore.settings.base\n\"\"\"\n# Django settings for projecta project.\n\nfrom .utils import absolute_path\n\nADMINS = (\n ('Tim Sutton', '[email protected]'),\n)\nSERVER_EMAIL = '[email protected]'\nEMAIL_HOST = 'localhost'\nDEFAULT_FROM_EMAIL = '[email protected]'\n\nMANAGERS = ADMINS\n\n# Local time zone for this installation. Choices can be found here:\n# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name\n# although not all choices may be available on all operating systems.\n# In a Windows environment this must be set to your system time zone.\nTIME_ZONE = 'America/Chicago'\n\n# Language code for this installation. All choices can be found here:\n# http://www.i18nguy.com/unicode/language-identifiers.html\nLANGUAGE_CODE = 'en-us'\n\nSITE_ID = 1\n\n# If you set this to False, Django will make some optimizations so as not\n# to load the internationalization machinery.\nUSE_I18N = True\n\n# If you set this to False, Django will not format dates, numbers and\n# calendars according to the current locale.\nUSE_L10N = True\n\n# If you set this to False, Django will not use timezone-aware datetimes.\nUSE_TZ = True\n\n# Absolute filesystem path to the directory that will hold user-uploaded files.\n# Example: \"/var/www/example.com/media/\"\nMEDIA_ROOT = '/home/web/media'\n\n# URL that handles the media served from MEDIA_ROOT. 
Make sure to use a\n# trailing slash.\n# Examples: \"http://example.com/media/\", \"http://media.example.com/\"\n# MEDIA_URL = '/media/'\n# setting full MEDIA_URL to be able to use it for the feeds\nMEDIA_URL = '/media/'\n\n# Absolute path to the directory static files should be collected to.\n# Don't put anything in this directory yourself; store your static files\n# in apps' \"static/\" subdirectories and in STATICFILES_DIRS.\n# Example: \"/var/www/example.com/static/\"\nSTATIC_ROOT = '/home/web/static'\n\n# URL prefix for static files.\n# Example: \"http://example.com/static/\", \"http://static.example.com/\"\nSTATIC_URL = '/static/'\n\n# Additional locations of static files\nSTATICFILES_DIRS = (\n # Put strings here, like \"/home/html/static\" or \"C:/www/django/static\".\n # Always use forward slashes, even on Windows.\n # Don't forget to use absolute paths, not relative paths.\n absolute_path('core', 'base_static'),\n)\n\n# List of finder classes that know how to find static files in\n# various locations.\nSTATICFILES_FINDERS = (\n 'django.contrib.staticfiles.finders.FileSystemFinder',\n 'django.contrib.staticfiles.finders.AppDirectoriesFinder',\n # 'django.contrib.staticfiles.finders.DefaultStorageFinder',\n)\n\n# import SECRET_KEY into current namespace\n# noinspection PyUnresolvedReferences\nfrom .secret import SECRET_KEY # noqa\n\n# List of callables that know how to import templates from various sources.\nTEMPLATE_LOADERS = (\n 'django.template.loaders.filesystem.Loader',\n 'django.template.loaders.app_directories.Loader',\n # 'django.template.loaders.eggs.Loader',\n)\n\nTEMPLATE_CONTEXT_PROCESSORS = (\n 'django.contrib.auth.context_processors.auth',\n 'django.core.context_processors.request',\n 'django.core.context_processors.media',\n 'core.context_processors.add_intercom_app_id',\n 'django.template.context_processors.i18n'\n)\n\nMIDDLEWARE_CLASSES = (\n 'django.middleware.common.CommonMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n # Uncomment the next line for simple clickjacking protection:\n # 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n 'core.custom_middleware.NavContextMiddleware',\n 'django.middleware.locale.LocaleMiddleware'\n)\n\nROOT_URLCONF = 'core.urls'\n\n# Python dotted path to the WSGI application used by Django's runserver.\nWSGI_APPLICATION = 'core.wsgi.application'\n\nTEMPLATE_DIRS = (\n # project level templates\n absolute_path('core', 'base_templates'),\n absolute_path('vota', 'templates'),\n absolute_path('changes', 'templates'),\n\n)\n\nINSTALLED_APPS = (\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.sites',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'django.contrib.admin',\n 'django.contrib.syndication',\n)\n\n# A sample logging configuration. 
The only tangible logging\n# performed by this configuration is to send an email to\n# the site admins on every HTTP 500 error when DEBUG=False.\n# See http://docs.djangoproject.com/en/dev/topics/logging for\n# more details on how to customize your logging configuration.\nLOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'filters': {\n 'require_debug_false': {\n '()': 'django.utils.log.RequireDebugFalse'\n }\n },\n 'handlers': {\n 'mail_admins': {\n 'level': 'ERROR',\n 'filters': ['require_debug_false'],\n 'class': 'django.utils.log.AdminEmailHandler'\n }\n },\n 'loggers': {\n 'django.request': {\n 'handlers': ['mail_admins'],\n 'level': 'ERROR',\n 'propagate': True,\n },\n }\n}\n", "path": "django_project/core/settings/base.py"}], "after_files": [{"content": "# coding=utf-8\n\"\"\"\ncore.settings.base\n\"\"\"\n# Django settings for projecta project.\n\nfrom .utils import absolute_path\n\nADMINS = (\n ('Tim Sutton', '[email protected]'),\n ('Rischan Mafrur', '[email protected]')\n)\nSERVER_EMAIL = '[email protected]'\nEMAIL_HOST = 'localhost'\nDEFAULT_FROM_EMAIL = '[email protected]'\n\nMANAGERS = ADMINS\n\n# Local time zone for this installation. Choices can be found here:\n# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name\n# although not all choices may be available on all operating systems.\n# In a Windows environment this must be set to your system time zone.\nTIME_ZONE = 'Africa/Johannesburg'\n\n# Language code for this installation. All choices can be found here:\n# http://www.i18nguy.com/unicode/language-identifiers.html\nLANGUAGE_CODE = 'en-us'\n\nSITE_ID = 1\n\n# If you set this to False, Django will make some optimizations so as not\n# to load the internationalization machinery.\nUSE_I18N = True\n\n# If you set this to False, Django will not format dates, numbers and\n# calendars according to the current locale.\nUSE_L10N = True\n\n# If you set this to False, Django will not use timezone-aware datetimes.\nUSE_TZ = True\n\n# Absolute filesystem path to the directory that will hold user-uploaded files.\n# Example: \"/var/www/example.com/media/\"\nMEDIA_ROOT = '/home/web/media'\n\n# URL that handles the media served from MEDIA_ROOT. 
Make sure to use a\n# trailing slash.\n# Examples: \"http://example.com/media/\", \"http://media.example.com/\"\n# MEDIA_URL = '/media/'\n# setting full MEDIA_URL to be able to use it for the feeds\nMEDIA_URL = '/media/'\n\n# Absolute path to the directory static files should be collected to.\n# Don't put anything in this directory yourself; store your static files\n# in apps' \"static/\" subdirectories and in STATICFILES_DIRS.\n# Example: \"/var/www/example.com/static/\"\nSTATIC_ROOT = '/home/web/static'\n\n# URL prefix for static files.\n# Example: \"http://example.com/static/\", \"http://static.example.com/\"\nSTATIC_URL = '/static/'\n\n# Additional locations of static files\nSTATICFILES_DIRS = (\n # Put strings here, like \"/home/html/static\" or \"C:/www/django/static\".\n # Always use forward slashes, even on Windows.\n # Don't forget to use absolute paths, not relative paths.\n absolute_path('core', 'base_static'),\n)\n\n# List of finder classes that know how to find static files in\n# various locations.\nSTATICFILES_FINDERS = (\n 'django.contrib.staticfiles.finders.FileSystemFinder',\n 'django.contrib.staticfiles.finders.AppDirectoriesFinder',\n # 'django.contrib.staticfiles.finders.DefaultStorageFinder',\n)\n\n# import SECRET_KEY into current namespace\n# noinspection PyUnresolvedReferences\nfrom .secret import SECRET_KEY # noqa\n\n# List of callables that know how to import templates from various sources.\nTEMPLATE_LOADERS = (\n 'django.template.loaders.filesystem.Loader',\n 'django.template.loaders.app_directories.Loader',\n # 'django.template.loaders.eggs.Loader',\n)\n\nTEMPLATE_CONTEXT_PROCESSORS = (\n 'django.contrib.auth.context_processors.auth',\n 'django.core.context_processors.request',\n 'django.core.context_processors.media',\n 'core.context_processors.add_intercom_app_id',\n 'django.template.context_processors.i18n'\n)\n\nMIDDLEWARE_CLASSES = (\n 'django.middleware.common.CommonMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n # Uncomment the next line for simple clickjacking protection:\n # 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n 'core.custom_middleware.NavContextMiddleware',\n 'django.middleware.locale.LocaleMiddleware'\n)\n\nROOT_URLCONF = 'core.urls'\n\n# Python dotted path to the WSGI application used by Django's runserver.\nWSGI_APPLICATION = 'core.wsgi.application'\n\nTEMPLATE_DIRS = (\n # project level templates\n absolute_path('core', 'base_templates'),\n absolute_path('vota', 'templates'),\n absolute_path('changes', 'templates'),\n\n)\n\nINSTALLED_APPS = (\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.sites',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'django.contrib.admin',\n 'django.contrib.syndication',\n)\n\n# A sample logging configuration. 
The only tangible logging\n# performed by this configuration is to send an email to\n# the site admins on every HTTP 500 error when DEBUG=False.\n# See http://docs.djangoproject.com/en/dev/topics/logging for\n# more details on how to customize your logging configuration.\nLOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'filters': {\n 'require_debug_false': {\n '()': 'django.utils.log.RequireDebugFalse'\n }\n },\n 'handlers': {\n 'mail_admins': {\n 'level': 'ERROR',\n 'filters': ['require_debug_false'],\n 'class': 'django.utils.log.AdminEmailHandler'\n }\n },\n 'loggers': {\n 'django.request': {\n 'handlers': ['mail_admins'],\n 'level': 'ERROR',\n 'propagate': True,\n },\n }\n}\n", "path": "django_project/core/settings/base.py"}]} | 1,839 | 236 |
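The kartoza patch only touches settings, but its effect runs through Django's error-mail machinery: the `AdminEmailHandler` configured in `LOGGING` above mails every address in `ADMINS` on unhandled 500 errors, and `django.core.mail.mail_admins` uses the same list. A sketch for exercising the new recipients manually (it assumes a configured Django project with working email settings; the addresses are the redacted placeholders from the patch):

```python
# settings.py fragment, mirroring the patched ADMINS tuple
ADMINS = (
    ('Tim Sutton', '[email protected]'),
    ('Rischan Mafrur', '[email protected]'),
)

# In a Django shell (python manage.py shell), send a test mail to every admin:
from django.core.mail import mail_admins

mail_admins(
    subject='projecta error-mail test',
    message='If you receive this, you are on the ADMINS list.',
    fail_silently=False,  # surface SMTP problems instead of swallowing them
)
```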
gh_patches_debug_39022 | rasdani/github-patches | git_diff | opsdroid__opsdroid-693 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add Google Style Docstrings
We should add Google Style Docstrings to every function, method, and class in opsdroid. This style will support the existing documentation and will help in the future by letting us generate documentation automatically.
This involves a fair bit of effort, so this issue can be worked on by more than one contributor; just make sure that everyone knows what you are working on, to avoid other contributors spending time on something you are already handling.
If you are unfamiliar with the Google Style Docstrings, I'd recommend that you check these resources:
- [Sphinx 1.8.0+ - Google Style Docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html)
Docstrings that need to be updated:
- main.py
- [x] configure_lang
- [ ] configure_log
- [ ] get_logging_level
- [ ] check_dependencies
- [ ] print_version
- [ ] print_example_config
- [ ] edit_files
- [x] welcome_message
- ~~helper.py~~
- [x] get_opsdroid
- [x] del_rw
- [x] move_config_to_appdir
- memory.py
- [x] Memory
- [x] get
- [x] put
- [x] _get_from_database
- [x] _put_to_database
- message.py
- [x] Message
- [x] __init__
- [x] _thinking_delay
  - [x] _typing_delay
- [x] respond
- [x] react
- web.py
- [ ] Web
- [x] get_port
- [x] get_host
- [x] get_ssl_context
- [ ] start
- [ ] build_response
- [ ] web_index_handler
- [ ] web_stats_handler
- matchers.py
- [ ] match_regex
- [ ] match_apiai_action
- [ ] match_apiai_intent
- [ ] match_dialogflow_action
- [ ] match_dialogflow_intent
- [ ] match_luisai_intent
- [ ] match_rasanlu
- [ ] match_recastai
- [ ] match_witai
- [ ] match_crontab
- [ ] match_webhook
- [ ] match_always
- core.py
- [ ] OpsDroid
- [ ] default_connector
- [ ] exit
- [ ] critical
- [ ] call_stop
- [ ] disconnect
- [ ] stop
- [ ] load
- [ ] start_loop
- [x] setup_skills
- [ ] train_parsers
- [ ] start_connector_tasks
- [ ] start_database
- [ ] run_skill
- [ ] get_ranked_skills
- [ ] parse
- loader.py
- [ ] Loader
- [x] import_module_from_spec
- [x] import_module
- [x] check_cache
- [x] build_module_import_path
- [x] build_module_install_path
- [x] git_clone
- [x] git_pull
- [x] pip_install_deps
- [x] create_default_config
- [x] load_config_file
- [ ] envvar_constructor
- [ ] include_constructor
- [x] setup_modules_directory
- [x] load_modules_from_config
- [x] _load_modules
- [x] _install_module
- [x] _update_module
- [ ] _install_git_module
- [x] _install_local_module
---- ORIGINAL POST ----
I've been wondering about this for a while now and I would like to know if we should replace/update all the docstrings in opsdroid with Google Style docstrings.
I think this could help new and old contributors contribute and commit to opsdroid, since Google Style docstrings give more information about every method/function and specify clearly what sort of input the function/method expects, what it will return, and what will be raised (if applicable).
The downside of this style is that the length of every .py file will increase due to the docstrings, but since most IDEs allow you to collapse those fields it shouldn't be too bad.
Here is a good example of Google Style docstrings: [Sphinx 1.8.0+ - Google Style Docstrings](http://www.sphinx-doc.org/en/master/ext/example_google.html)
I would like to know what you all think about this idea and if it's worth spending time on it.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opsdroid/memory.py`
Content:
```
1 """Class for persisting information in opsdroid."""
2
3 import logging
4
5
6 _LOGGER = logging.getLogger(__name__)
7
8
9 class Memory:
10 """An object to store and persist data outside of opsdroid."""
11
12 def __init__(self):
13 """Create memory dictionary."""
14 self.memory = {}
15 self.databases = []
16
17 async def get(self, key):
18 """Get data object for a given key."""
19 _LOGGER.debug(_("Getting %s from memory."), key)
20 database_result = await self._get_from_database(key)
21 if database_result is not None:
22 self.memory[key] = database_result
23 if key in self.memory:
24 return self.memory[key]
25
26 return None
27
28 async def put(self, key, data):
29 """Put a data object to a given key."""
30 _LOGGER.debug(_("Putting %s to memory"), key)
31 self.memory[key] = data
32 await self._put_to_database(key, self.memory[key])
33
34 async def _get_from_database(self, key):
35 """Get updates from databases for a given key."""
36 if not self.databases:
37 return None
38
39 results = []
40 for database in self.databases:
41 results.append(await database.get(key))
42 # TODO: Handle multiple databases
43 return results[0]
44
45 async def _put_to_database(self, key, data):
46 """Put updates into databases for a given key."""
47 if self.databases:
48 for database in self.databases:
49 await database.put(key, data)
50
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/opsdroid/memory.py b/opsdroid/memory.py
--- a/opsdroid/memory.py
+++ b/opsdroid/memory.py
@@ -7,15 +7,33 @@
class Memory:
- """An object to store and persist data outside of opsdroid."""
+ """A Memory object.
+
+ An object to obtain, store and persist data outside of opsdroid.
+
+ Attributes:
+ databases (:obj:`list` of :obj:`Database`): List of database objects.
+ memory (:obj:`dict`): In-memory dictionary to store data.
+
+ """
def __init__(self):
- """Create memory dictionary."""
+ """Create object with minimum properties."""
self.memory = {}
self.databases = []
async def get(self, key):
- """Get data object for a given key."""
+ """Get data object for a given key.
+
+ Gets the key value found in-memory or from the database(s).
+
+ Args:
+ key (str): Key to retrieve data.
+
+ Returns:
+ A data object for the given key, otherwise `None`.
+
+ """
_LOGGER.debug(_("Getting %s from memory."), key)
database_result = await self._get_from_database(key)
if database_result is not None:
@@ -26,24 +44,53 @@
return None
async def put(self, key, data):
- """Put a data object to a given key."""
+ """Put a data object to a given key.
+
+ Stores the key and value in memory and the database(s).
+
+ Args:
+ key (str): Key for the data to store.
+ data (obj): Data object to store.
+
+ """
_LOGGER.debug(_("Putting %s to memory"), key)
self.memory[key] = data
await self._put_to_database(key, self.memory[key])
async def _get_from_database(self, key):
- """Get updates from databases for a given key."""
+ """Get updates from databases for a given key.
+
+ Gets the first key value found from the database(s).
+
+ Args:
+ key (str): Key to retrieve data from a database.
+
+ Returns:
+ The first key value (data object) found from the database(s).
+ Or `None` when no database is defined or no value is found.
+
+ Todo:
+ * Handle multiple databases
+
+ """
if not self.databases:
return None
results = []
for database in self.databases:
results.append(await database.get(key))
- # TODO: Handle multiple databases
return results[0]
async def _put_to_database(self, key, data):
- """Put updates into databases for a given key."""
+ """Put updates into databases for a given key.
+
+ Stores the key and value on each database defined.
+
+ Args:
+ key (str): Key for the data to store.
+ data (obj): Data object to store.
+
+ """
if self.databases:
for database in self.databases:
await database.put(key, data)
| {"golden_diff": "diff --git a/opsdroid/memory.py b/opsdroid/memory.py\n--- a/opsdroid/memory.py\n+++ b/opsdroid/memory.py\n@@ -7,15 +7,33 @@\n \n \n class Memory:\n- \"\"\"An object to store and persist data outside of opsdroid.\"\"\"\n+ \"\"\"A Memory object.\n+\n+ An object to obtain, store and persist data outside of opsdroid.\n+\n+ Attributes:\n+ databases (:obj:`list` of :obj:`Database`): List of database objects.\n+ memory (:obj:`dict`): In-memory dictionary to store data.\n+\n+ \"\"\"\n \n def __init__(self):\n- \"\"\"Create memory dictionary.\"\"\"\n+ \"\"\"Create object with minimum properties.\"\"\"\n self.memory = {}\n self.databases = []\n \n async def get(self, key):\n- \"\"\"Get data object for a given key.\"\"\"\n+ \"\"\"Get data object for a given key.\n+\n+ Gets the key value found in-memory or from the database(s).\n+\n+ Args:\n+ key (str): Key to retrieve data.\n+\n+ Returns:\n+ A data object for the given key, otherwise `None`.\n+\n+ \"\"\"\n _LOGGER.debug(_(\"Getting %s from memory.\"), key)\n database_result = await self._get_from_database(key)\n if database_result is not None:\n@@ -26,24 +44,53 @@\n return None\n \n async def put(self, key, data):\n- \"\"\"Put a data object to a given key.\"\"\"\n+ \"\"\"Put a data object to a given key.\n+\n+ Stores the key and value in memory and the database(s).\n+\n+ Args:\n+ key (str): Key for the data to store.\n+ data (obj): Data object to store.\n+\n+ \"\"\"\n _LOGGER.debug(_(\"Putting %s to memory\"), key)\n self.memory[key] = data\n await self._put_to_database(key, self.memory[key])\n \n async def _get_from_database(self, key):\n- \"\"\"Get updates from databases for a given key.\"\"\"\n+ \"\"\"Get updates from databases for a given key.\n+\n+ Gets the first key value found from the database(s).\n+\n+ Args:\n+ key (str): Key to retrieve data from a database.\n+\n+ Returns:\n+ The first key value (data object) found from the database(s).\n+ Or `None` when no database is defined or no value is found.\n+\n+ Todo:\n+ * Handle multiple databases\n+\n+ \"\"\"\n if not self.databases:\n return None\n \n results = []\n for database in self.databases:\n results.append(await database.get(key))\n- # TODO: Handle multiple databases\n return results[0]\n \n async def _put_to_database(self, key, data):\n- \"\"\"Put updates into databases for a given key.\"\"\"\n+ \"\"\"Put updates into databases for a given key.\n+\n+ Stores the key and value on each database defined.\n+\n+ Args:\n+ key (str): Key for the data to store.\n+ data (obj): Data object to store.\n+\n+ \"\"\"\n if self.databases:\n for database in self.databases:\n await database.put(key, data)\n", "issue": "Add Google Style Docstrings\nWe should implement Google Style Docstrings to every function, method, class in opsdroid. 
This style will support existing documentation and will help in the future by generating documentation automatically.\r\n\r\nThis consists in a bit of effort so this issue can be worked by more than one contributor, just make sure that everyone knows what you are working on in order to avoid other contributors spending time on something that you are working on.\r\n\r\nIf you are unfamiliar with the Google Style Docstrings I'd recommend that you check these resources:\r\n\r\n - [Sphix 1.8.0+ - Google Style Docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html)\r\n\r\n\r\n\r\nDocstrings that need to be updated:\r\n\r\n- main.py\r\n - [x] configure_lang\r\n - [ ] configure_log\r\n - [ ] get_logging_level\r\n - [ ] check_dependencies\r\n - [ ] print_version\r\n - [ ] print_example_config\r\n - [ ] edit_files\r\n - [x] welcome_message\r\n- ~~helper.py~~\r\n - [x] get_opsdroid\r\n - [x] del_rw\r\n - [x] move_config_to_appdir\r\n- memory.py\r\n - [x] Memory\r\n - [x] get\r\n - [x] put\r\n - [x] _get_from_database\r\n - [x] _put_to_database\r\n- message.py\r\n - [x] Message\r\n - [x] __init__\r\n - [x] _thinking_delay\r\n - [x] _typing delay\r\n - [x] respond\r\n - [x] react\r\n- web.py\r\n - [ ] Web\r\n - [x] get_port\r\n - [x] get_host\r\n - [x] get_ssl_context\r\n - [ ] start\r\n - [ ] build_response\r\n - [ ] web_index_handler\r\n - [ ] web_stats_handler\r\n- matchers.py\r\n - [ ] match_regex\r\n - [ ] match_apiai_action\r\n - [ ] match_apiai_intent\r\n - [ ] match_dialogflow_action\r\n - [ ] match_dialogflow_intent\r\n - [ ] match_luisai_intent\r\n - [ ] match_rasanlu\r\n - [ ] match_recastai\r\n - [ ] match_witai\r\n - [ ] match_crontab\r\n - [ ] match_webhook\r\n - [ ] match_always\r\n- core.py\r\n - [ ] OpsDroid\r\n - [ ] default_connector\r\n - [ ] exit\r\n - [ ] critical\r\n - [ ] call_stop\r\n - [ ] disconnect\r\n - [ ] stop\r\n - [ ] load\r\n - [ ] start_loop\r\n - [x] setup_skills\r\n - [ ] train_parsers\r\n - [ ] start_connector_tasks\r\n - [ ] start_database\r\n - [ ] run_skill\r\n - [ ] get_ranked_skills\r\n - [ ] parse\r\n- loader.py\r\n - [ ] Loader\r\n - [x] import_module_from_spec\r\n - [x] import_module\r\n - [x] check_cache\r\n - [x] build_module_import_path\r\n - [x] build_module_install_path\r\n - [x] git_clone\r\n - [x] git_pull\r\n - [x] pip_install_deps\r\n - [x] create_default_config\r\n - [x] load_config_file\r\n - [ ] envvar_constructor\r\n - [ ] include_constructor\r\n - [x] setup_modules_directory\r\n - [x] load_modules_from_config\r\n - [x] _load_modules\r\n - [x] _install_module\r\n - [x] _update_module\r\n - [ ] _install_git_module\r\n - [x] _install_local_module\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n---- ORIGINAL POST ---- \r\nI've been wondering about this for a while now and I would like to know if we should replace/update all the docstrings in opsdroid with the Google Style doc strings. 
\r\n\r\nI think this could help new and old contributors to contribute and commit to opsdroid since the Google Style docstrings give more information about every method/function and specifies clearly what sort of input the function/method expects, what will it return and what will be raised (if applicable).\r\n\r\nThe downsize of this style is that the length of every .py file will increase due to the doc strings, but since most IDE's allow you to hide those fields it shouldn't be too bad.\r\n\r\nHere is a good example of Google Style Doc strings: [Sphix 1.8.0+ - Google Style Docstrings](http://www.sphinx-doc.org/en/master/ext/example_google.html)\r\n\r\nI would like to know what you all think about this idea and if its worth spending time on it.\n", "before_files": [{"content": "\"\"\"Class for persisting information in opsdroid.\"\"\"\n\nimport logging\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\nclass Memory:\n \"\"\"An object to store and persist data outside of opsdroid.\"\"\"\n\n def __init__(self):\n \"\"\"Create memory dictionary.\"\"\"\n self.memory = {}\n self.databases = []\n\n async def get(self, key):\n \"\"\"Get data object for a given key.\"\"\"\n _LOGGER.debug(_(\"Getting %s from memory.\"), key)\n database_result = await self._get_from_database(key)\n if database_result is not None:\n self.memory[key] = database_result\n if key in self.memory:\n return self.memory[key]\n\n return None\n\n async def put(self, key, data):\n \"\"\"Put a data object to a given key.\"\"\"\n _LOGGER.debug(_(\"Putting %s to memory\"), key)\n self.memory[key] = data\n await self._put_to_database(key, self.memory[key])\n\n async def _get_from_database(self, key):\n \"\"\"Get updates from databases for a given key.\"\"\"\n if not self.databases:\n return None\n\n results = []\n for database in self.databases:\n results.append(await database.get(key))\n # TODO: Handle multiple databases\n return results[0]\n\n async def _put_to_database(self, key, data):\n \"\"\"Put updates into databases for a given key.\"\"\"\n if self.databases:\n for database in self.databases:\n await database.put(key, data)\n", "path": "opsdroid/memory.py"}], "after_files": [{"content": "\"\"\"Class for persisting information in opsdroid.\"\"\"\n\nimport logging\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\nclass Memory:\n \"\"\"A Memory object.\n\n An object to obtain, store and persist data outside of opsdroid.\n\n Attributes:\n databases (:obj:`list` of :obj:`Database`): List of database objects.\n memory (:obj:`dict`): In-memory dictionary to store data.\n\n \"\"\"\n\n def __init__(self):\n \"\"\"Create object with minimum properties.\"\"\"\n self.memory = {}\n self.databases = []\n\n async def get(self, key):\n \"\"\"Get data object for a given key.\n\n Gets the key value found in-memory or from the database(s).\n\n Args:\n key (str): Key to retrieve data.\n\n Returns:\n A data object for the given key, otherwise `None`.\n\n \"\"\"\n _LOGGER.debug(_(\"Getting %s from memory.\"), key)\n database_result = await self._get_from_database(key)\n if database_result is not None:\n self.memory[key] = database_result\n if key in self.memory:\n return self.memory[key]\n\n return None\n\n async def put(self, key, data):\n \"\"\"Put a data object to a given key.\n\n Stores the key and value in memory and the database(s).\n\n Args:\n key (str): Key for the data to store.\n data (obj): Data object to store.\n\n \"\"\"\n _LOGGER.debug(_(\"Putting %s to memory\"), key)\n self.memory[key] = data\n await self._put_to_database(key, 
self.memory[key])\n\n async def _get_from_database(self, key):\n \"\"\"Get updates from databases for a given key.\n\n Gets the first key value found from the database(s).\n\n Args:\n key (str): Key to retrieve data from a database.\n\n Returns:\n The first key value (data object) found from the database(s).\n Or `None` when no database is defined or no value is found.\n\n Todo:\n * Handle multiple databases\n\n \"\"\"\n if not self.databases:\n return None\n\n results = []\n for database in self.databases:\n results.append(await database.get(key))\n return results[0]\n\n async def _put_to_database(self, key, data):\n \"\"\"Put updates into databases for a given key.\n\n Stores the key and value on each database defined.\n\n Args:\n key (str): Key for the data to store.\n data (obj): Data object to store.\n\n \"\"\"\n if self.databases:\n for database in self.databases:\n await database.put(key, data)\n", "path": "opsdroid/memory.py"}]} | 1,707 | 701 |
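The merged `memory.py` above shows the target shape for the opsdroid docstring effort. For contributors picking up the remaining checklist items, here is a compact reference example of the Google style on a throwaway function (the function itself is illustrative, not part of opsdroid):

```python
def scale(values, factor):
    """Multiply every element of a sequence by a constant factor.

    Args:
        values (list of float): Numbers to scale.
        factor (float): Multiplier applied to each element.

    Returns:
        list of float: A new list with every element multiplied by factor.

    Raises:
        TypeError: If values is not iterable or contains non-numeric items.

    """
    return [value * factor for value in values]
```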
gh_patches_debug_24411 | rasdani/github-patches | git_diff | python__python-docs-es-40 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Improve the CONTRIBUTING guide
We have a small guide that explains the procedure. However, it would be good to improve it a bit so that it's easier to follow for people who don't know much about GitHub and other tools: https://github.com/raulcd/python-docs-es/blob/3.7/CONTRIBUTING.rst
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conf.py`
Content:
```
1 # Sphinx configuration file.
2 #
3 # - import original configurations from cpython/Doc/conf.py
4 # - append the path considering the cpython submodule is at ./cpython
5 # - create the symbolic links under ./cpython/locale/es/LC_MESSAGES
6 # - make the build to work under Read the Docs
7 #
8 # The git submodule was created using this Stack Overflow answer
9 # to fetch only the commit that I needed and avoid clonning the whole history
10 # https://stackoverflow.com/a/27445058
11 #
12 # This can be built locally using `sphinx-build` by running
13 #
14 # $ sphinx-build -b html -n -d _build/doctrees -D language=es . _build/html
15
16 import sys, os, time
17 sys.path.append(os.path.abspath('cpython/Doc/tools/extensions'))
18 sys.path.append(os.path.abspath('cpython/Doc/includes'))
19
20 # Import all the Sphinx settings from cpython
21 sys.path.append(os.path.abspath('cpython/Doc'))
22 from conf import *
23
24 version = '3.7'
25 release = '3.7.7'
26
27 project = 'Python en Español'
28 copyright = '2001-%s, Python Software Foundation' % time.strftime('%Y')
29
30 html_theme_path = ['cpython/Doc/tools']
31 templates_path = ['cpython/Doc/tools/templates']
32 html_static_path = ['cpython/Doc/tools/static']
33
34 os.system('mkdir -p cpython/locales/es/')
35 os.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')
36
37 gettext_compact = False
38 locale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir
39
40 def setup(app):
41 # Change the sourcedir programmatically because Read the Docs always call it with `.`
42 app.srcdir = 'cpython/Doc'
43
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/conf.py b/conf.py
--- a/conf.py
+++ b/conf.py
@@ -33,10 +33,34 @@
os.system('mkdir -p cpython/locales/es/')
os.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')
+os.system('ln -nfs `pwd`/CONTRIBUTING.rst cpython/Doc/CONTRIBUTING.rst')
gettext_compact = False
locale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir
def setup(app):
+
+ def add_contributing_banner(app, doctree):
+ """
+ Insert a banner at the top of the index.
+
+ This way, we can easily communicate people to help with the translation,
+ pointing them to different resources.
+ """
+ from docutils import nodes, core
+
+ message = '¡Ayúdanos a traducir la documentación oficial de Python al Español! ' \
+ f'Puedes encontrar más información en `Como contribuir </es/{version}/CONTRIBUTING.html>`_ ' \
+ 'y así ayudarnos a acercar Python a más personas de habla hispana.'
+
+ paragraph = core.publish_doctree(message)[0]
+ banner = nodes.warning(ids=['contributing-banner'])
+ banner.append(paragraph)
+
+ for document in doctree.traverse(nodes.document):
+ document.insert(0, banner)
+
# Change the sourcedir programmatically because Read the Docs always call it with `.`
app.srcdir = 'cpython/Doc'
+
+ app.connect('doctree-read', add_contributing_banner)
| {"golden_diff": "diff --git a/conf.py b/conf.py\n--- a/conf.py\n+++ b/conf.py\n@@ -33,10 +33,34 @@\n \n os.system('mkdir -p cpython/locales/es/')\n os.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')\n+os.system('ln -nfs `pwd`/CONTRIBUTING.rst cpython/Doc/CONTRIBUTING.rst')\n \n gettext_compact = False\n locale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir\n \n def setup(app):\n+\n+ def add_contributing_banner(app, doctree):\n+ \"\"\"\n+ Insert a banner at the top of the index.\n+\n+ This way, we can easily communicate people to help with the translation,\n+ pointing them to different resources.\n+ \"\"\"\n+ from docutils import nodes, core\n+\n+ message = '\u00a1Ay\u00fadanos a traducir la documentaci\u00f3n oficial de Python al Espa\u00f1ol! ' \\\n+ f'Puedes encontrar m\u00e1s informaci\u00f3n en `Como contribuir </es/{version}/CONTRIBUTING.html>`_ ' \\\n+ 'y as\u00ed ayudarnos a acercar Python a m\u00e1s personas de habla hispana.'\n+\n+ paragraph = core.publish_doctree(message)[0]\n+ banner = nodes.warning(ids=['contributing-banner'])\n+ banner.append(paragraph)\n+\n+ for document in doctree.traverse(nodes.document):\n+ document.insert(0, banner)\n+\n # Change the sourcedir programmatically because Read the Docs always call it with `.`\n app.srcdir = 'cpython/Doc'\n+\n+ app.connect('doctree-read', add_contributing_banner)\n", "issue": "Mejorar la gu\u00eda de CONTRIBUTING\nTenemos una peque\u00f1a gu\u00eda que explica el procedimiento. Sin embargo, estar\u00eda bueno mejorarla un poco para que sea m\u00e1s f\u00e1cil de seguir para persona que no sepan mucho de github y dem\u00e1s herramientas: https://github.com/raulcd/python-docs-es/blob/3.7/CONTRIBUTING.rst\n", "before_files": [{"content": "# Sphinx configuration file.\n#\n# - import original configurations from cpython/Doc/conf.py\n# - append the path considering the cpython submodule is at ./cpython\n# - create the symbolic links under ./cpython/locale/es/LC_MESSAGES\n# - make the build to work under Read the Docs\n#\n# The git submodule was created using this Stack Overflow answer\n# to fetch only the commit that I needed and avoid clonning the whole history\n# https://stackoverflow.com/a/27445058\n#\n# This can be built locally using `sphinx-build` by running\n#\n# $ sphinx-build -b html -n -d _build/doctrees -D language=es . 
_build/html\n\nimport sys, os, time\nsys.path.append(os.path.abspath('cpython/Doc/tools/extensions'))\nsys.path.append(os.path.abspath('cpython/Doc/includes'))\n\n# Import all the Sphinx settings from cpython\nsys.path.append(os.path.abspath('cpython/Doc'))\nfrom conf import *\n\nversion = '3.7'\nrelease = '3.7.7'\n\nproject = 'Python en Espa\u00f1ol'\ncopyright = '2001-%s, Python Software Foundation' % time.strftime('%Y')\n\nhtml_theme_path = ['cpython/Doc/tools']\ntemplates_path = ['cpython/Doc/tools/templates']\nhtml_static_path = ['cpython/Doc/tools/static']\n\nos.system('mkdir -p cpython/locales/es/')\nos.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')\n\ngettext_compact = False\nlocale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir\n\ndef setup(app):\n # Change the sourcedir programmatically because Read the Docs always call it with `.`\n app.srcdir = 'cpython/Doc'\n", "path": "conf.py"}], "after_files": [{"content": "# Sphinx configuration file.\n#\n# - import original configurations from cpython/Doc/conf.py\n# - append the path considering the cpython submodule is at ./cpython\n# - create the symbolic links under ./cpython/locale/es/LC_MESSAGES\n# - make the build to work under Read the Docs\n#\n# The git submodule was created using this Stack Overflow answer\n# to fetch only the commit that I needed and avoid clonning the whole history\n# https://stackoverflow.com/a/27445058\n#\n# This can be built locally using `sphinx-build` by running\n#\n# $ sphinx-build -b html -n -d _build/doctrees -D language=es . _build/html\n\nimport sys, os, time\nsys.path.append(os.path.abspath('cpython/Doc/tools/extensions'))\nsys.path.append(os.path.abspath('cpython/Doc/includes'))\n\n# Import all the Sphinx settings from cpython\nsys.path.append(os.path.abspath('cpython/Doc'))\nfrom conf import *\n\nversion = '3.7'\nrelease = '3.7.7'\n\nproject = 'Python en Espa\u00f1ol'\ncopyright = '2001-%s, Python Software Foundation' % time.strftime('%Y')\n\nhtml_theme_path = ['cpython/Doc/tools']\ntemplates_path = ['cpython/Doc/tools/templates']\nhtml_static_path = ['cpython/Doc/tools/static']\n\nos.system('mkdir -p cpython/locales/es/')\nos.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')\nos.system('ln -nfs `pwd`/CONTRIBUTING.rst cpython/Doc/CONTRIBUTING.rst')\n\ngettext_compact = False\nlocale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir\n\ndef setup(app):\n\n def add_contributing_banner(app, doctree):\n \"\"\"\n Insert a banner at the top of the index.\n\n This way, we can easily communicate people to help with the translation,\n pointing them to different resources.\n \"\"\"\n from docutils import nodes, core\n\n message = '\u00a1Ay\u00fadanos a traducir la documentaci\u00f3n oficial de Python al Espa\u00f1ol! ' \\\n f'Puedes encontrar m\u00e1s informaci\u00f3n en `Como contribuir </es/{version}/CONTRIBUTING.html>`_ ' \\\n 'y as\u00ed ayudarnos a acercar Python a m\u00e1s personas de habla hispana.'\n\n paragraph = core.publish_doctree(message)[0]\n banner = nodes.warning(ids=['contributing-banner'])\n banner.append(paragraph)\n\n for document in doctree.traverse(nodes.document):\n document.insert(0, banner)\n\n # Change the sourcedir programmatically because Read the Docs always call it with `.`\n app.srcdir = 'cpython/Doc'\n\n app.connect('doctree-read', add_contributing_banner)\n", "path": "conf.py"}]} | 808 | 376 |
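The golden diff for this entry hooks Sphinx's `doctree-read` event and prepends a docutils `warning` node to every page. A stripped-down sketch of that same pattern, assuming Sphinx and docutils are installed and that this lives in a `conf.py` or extension module (the English banner text is a stand-in for the Spanish message in the patch):

```python
from docutils import core, nodes


def add_banner(app, doctree):
    """Prepend a warning admonition to every document as it is read."""
    message = "Help us translate the official Python documentation into Spanish!"
    # publish_doctree returns a document node; its first child is the paragraph.
    paragraph = core.publish_doctree(message)[0]
    banner = nodes.warning(ids=["contributing-banner"])
    banner.append(paragraph)
    for document in doctree.traverse(nodes.document):
        document.insert(0, banner)


def setup(app):
    # Register the handler for Sphinx's doctree-read event, as in the patch.
    app.connect("doctree-read", add_banner)
    return {"parallel_read_safe": True}
```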
gh_patches_debug_18863 | rasdani/github-patches | git_diff | vega__altair-2785 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Equal default chart dimensions
Currently the default theme in Altair specifies an aspect ratio of 4/3, with a width of 400 and a height of 300 (for continuous data):

The default in VegaLite is to make both dimensions of equal length, which I think makes sense since it spreads the data over the same number of pixels on both the X and Y axes. This could have benefits in terms of making it easier to fairly compare the distribution of the data between the two plotted variables instead of it appearing more spread out over the X axis due to the increased chart width. The default in Vega-Lite is to use 200 px for the width and height which I think is a bit small, but setting both to 300 px looks good:

What do you all think about changing the default width in Altair to 300 px, so that both the X and Y axes occupy the same number of pixels by default? Are there benefits to having an unequal aspect ratio like the current default that I am missing (maybe that it is more similar to the screen aspect ratio)? I don't think this is a major concern, but thought I would bring it up and see if others also regard it as a small improvement or just a matter of personal aesthetics/taste.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `altair/vegalite/v5/theme.py`
Content:
```
1 """Tools for enabling and registering chart themes"""
2
3 from ...utils.theme import ThemeRegistry
4
5 VEGA_THEMES = [
6 "ggplot2",
7 "quartz",
8 "vox",
9 "fivethirtyeight",
10 "dark",
11 "latimes",
12 "urbaninstitute",
13 ]
14
15
16 class VegaTheme(object):
17 """Implementation of a builtin vega theme."""
18
19 def __init__(self, theme):
20 self.theme = theme
21
22 def __call__(self):
23 return {
24 "usermeta": {"embedOptions": {"theme": self.theme}},
25 "config": {"view": {"continuousWidth": 400, "continuousHeight": 300}},
26 }
27
28 def __repr__(self):
29 return "VegaTheme({!r})".format(self.theme)
30
31
32 # The entry point group that can be used by other packages to declare other
33 # renderers that will be auto-detected. Explicit registration is also
34 # allowed by the PluginRegistery API.
35 ENTRY_POINT_GROUP = "altair.vegalite.v5.theme" # type: str
36 themes = ThemeRegistry(entry_point_group=ENTRY_POINT_GROUP)
37
38 themes.register(
39 "default",
40 lambda: {"config": {"view": {"continuousWidth": 400, "continuousHeight": 300}}},
41 )
42 themes.register(
43 "opaque",
44 lambda: {
45 "config": {
46 "background": "white",
47 "view": {"continuousWidth": 400, "continuousHeight": 300},
48 }
49 },
50 )
51 themes.register("none", lambda: {})
52
53 for theme in VEGA_THEMES:
54 themes.register(theme, VegaTheme(theme))
55
56 themes.enable("default")
57
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/altair/vegalite/v5/theme.py b/altair/vegalite/v5/theme.py
--- a/altair/vegalite/v5/theme.py
+++ b/altair/vegalite/v5/theme.py
@@ -22,7 +22,7 @@
def __call__(self):
return {
"usermeta": {"embedOptions": {"theme": self.theme}},
- "config": {"view": {"continuousWidth": 400, "continuousHeight": 300}},
+ "config": {"view": {"continuousWidth": 300, "continuousHeight": 300}},
}
def __repr__(self):
@@ -37,14 +37,14 @@
themes.register(
"default",
- lambda: {"config": {"view": {"continuousWidth": 400, "continuousHeight": 300}}},
+ lambda: {"config": {"view": {"continuousWidth": 300, "continuousHeight": 300}}},
)
themes.register(
"opaque",
lambda: {
"config": {
"background": "white",
- "view": {"continuousWidth": 400, "continuousHeight": 300},
+ "view": {"continuousWidth": 300, "continuousHeight": 300},
}
},
)
| {"golden_diff": "diff --git a/altair/vegalite/v5/theme.py b/altair/vegalite/v5/theme.py\n--- a/altair/vegalite/v5/theme.py\n+++ b/altair/vegalite/v5/theme.py\n@@ -22,7 +22,7 @@\n def __call__(self):\n return {\n \"usermeta\": {\"embedOptions\": {\"theme\": self.theme}},\n- \"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}},\n+ \"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}},\n }\n \n def __repr__(self):\n@@ -37,14 +37,14 @@\n \n themes.register(\n \"default\",\n- lambda: {\"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}}},\n+ lambda: {\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}}},\n )\n themes.register(\n \"opaque\",\n lambda: {\n \"config\": {\n \"background\": \"white\",\n- \"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300},\n+ \"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300},\n }\n },\n )\n", "issue": "Equal default chart dimensions\nCurrently the default theme in Altair specifies an aspect ratio of 4/3, with a width of 400 and a height of 300 (for continuous data):\r\n\r\n\r\n\r\nThe default in VegaLite is to make both dimensions of equal length, which I think makes sense since it spreads the data over the same amount of pixels on both the X and Y axis. This could have benefits in terms of making it easier to fairly compare the distribution of the data between the two plotted variables instead of it appearing more spread out over the X axis due to the increase chart width. The default in Vega-Lite is to use 200 px for the width and height which I think is a bit small, but setting both to 300 px looks good:\r\n\r\n\r\n\r\nWhat do you all think about changing the default width in Altair to 300 px, so that both the X and Y axes occupy the same amount of pixels by default? Are there benefits of having an unequal aspect ratio like the current default that I am missing (maybe that it is more similar to the screen aspect ratio)? I don't think this is a major concern, but thought I would bring it up and see if others also regard it as a small improvement or just a matter of personal aesthetics/taste.\n", "before_files": [{"content": "\"\"\"Tools for enabling and registering chart themes\"\"\"\n\nfrom ...utils.theme import ThemeRegistry\n\nVEGA_THEMES = [\n \"ggplot2\",\n \"quartz\",\n \"vox\",\n \"fivethirtyeight\",\n \"dark\",\n \"latimes\",\n \"urbaninstitute\",\n]\n\n\nclass VegaTheme(object):\n \"\"\"Implementation of a builtin vega theme.\"\"\"\n\n def __init__(self, theme):\n self.theme = theme\n\n def __call__(self):\n return {\n \"usermeta\": {\"embedOptions\": {\"theme\": self.theme}},\n \"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}},\n }\n\n def __repr__(self):\n return \"VegaTheme({!r})\".format(self.theme)\n\n\n# The entry point group that can be used by other packages to declare other\n# renderers that will be auto-detected. 
Explicit registration is also\n# allowed by the PluginRegistery API.\nENTRY_POINT_GROUP = \"altair.vegalite.v5.theme\" # type: str\nthemes = ThemeRegistry(entry_point_group=ENTRY_POINT_GROUP)\n\nthemes.register(\n \"default\",\n lambda: {\"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}}},\n)\nthemes.register(\n \"opaque\",\n lambda: {\n \"config\": {\n \"background\": \"white\",\n \"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300},\n }\n },\n)\nthemes.register(\"none\", lambda: {})\n\nfor theme in VEGA_THEMES:\n themes.register(theme, VegaTheme(theme))\n\nthemes.enable(\"default\")\n", "path": "altair/vegalite/v5/theme.py"}], "after_files": [{"content": "\"\"\"Tools for enabling and registering chart themes\"\"\"\n\nfrom ...utils.theme import ThemeRegistry\n\nVEGA_THEMES = [\n \"ggplot2\",\n \"quartz\",\n \"vox\",\n \"fivethirtyeight\",\n \"dark\",\n \"latimes\",\n \"urbaninstitute\",\n]\n\n\nclass VegaTheme(object):\n \"\"\"Implementation of a builtin vega theme.\"\"\"\n\n def __init__(self, theme):\n self.theme = theme\n\n def __call__(self):\n return {\n \"usermeta\": {\"embedOptions\": {\"theme\": self.theme}},\n \"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}},\n }\n\n def __repr__(self):\n return \"VegaTheme({!r})\".format(self.theme)\n\n\n# The entry point group that can be used by other packages to declare other\n# renderers that will be auto-detected. Explicit registration is also\n# allowed by the PluginRegistery API.\nENTRY_POINT_GROUP = \"altair.vegalite.v5.theme\" # type: str\nthemes = ThemeRegistry(entry_point_group=ENTRY_POINT_GROUP)\n\nthemes.register(\n \"default\",\n lambda: {\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}}},\n)\nthemes.register(\n \"opaque\",\n lambda: {\n \"config\": {\n \"background\": \"white\",\n \"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300},\n }\n },\n)\nthemes.register(\"none\", lambda: {})\n\nfor theme in VEGA_THEMES:\n themes.register(theme, VegaTheme(theme))\n\nthemes.enable(\"default\")\n", "path": "altair/vegalite/v5/theme.py"}]} | 1,125 | 308 |
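The same 300x300 defaults can also be applied from user code without patching the library, via Altair's public theme registry; a short sketch, assuming a recent Altair (4.x/5.x) where `alt.themes.register` and `alt.themes.enable` are available:

```python
import altair as alt


def square_theme():
    # Same 300x300 view defaults that the patch above bakes into the library.
    return {"config": {"view": {"continuousWidth": 300, "continuousHeight": 300}}}


alt.themes.register("square", square_theme)
alt.themes.enable("square")
```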
gh_patches_debug_6214 | rasdani/github-patches | git_diff | voicepaw__so-vits-svc-fork-1157 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Unable to use svc pre-sd with pyannote.audio 3.1.1
### Describe the bug
To use svc pre-sd for a long audio file with multiple speakers, I followed the [setup guide](https://github.com/voicepaw/so-vits-svc-fork/#before-training) and manually installed pyannote.audio, getting the latest version, 3.1.1.
Attempting to run svc pre-sd triggered the following error messages:
```
Model was trained with pyannote.audio 0.0.1, yours is 3.1.1. Bad things might happen unless you revert pyannote.audio to 0.x.
Model was trained with torch 1.10.0+cu102, yours is 2.2.2+cu121. Bad things might happen unless you revert torch to 1.x.
```
According to [PyPI](https://pypi.org/project/pyannote.audio/3.1.1/), pyannote.audio 3.1.1 works with speaker-diarization-3.1. So, it’s necessary to explicitly specify this version in the code.
### To Reproduce
1. Set up the environment. (I'm using torch 2.2.2+cu121.)
2. Install so-vits-svc-fork and its dependencies.
3. Install pyannote.audio with `pip3 install pyannote-audio`.
4. Prepare your data and organize data folders.
5. Run svc pre-sd with options suited to your data.
### Additional context
_No response_
### Version
4.1.61
### Platform
WSL-Ubuntu 22.04 LTS
### Code of Conduct
- [X] I agree to follow this project's Code of Conduct.
### No Duplicate
- [X] I have checked existing issues to avoid duplicates.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py`
Content:
```
1 from __future__ import annotations
2
3 from collections import defaultdict
4 from logging import getLogger
5 from pathlib import Path
6
7 import librosa
8 import soundfile as sf
9 import torch
10 from joblib import Parallel, delayed
11 from pyannote.audio import Pipeline
12 from tqdm import tqdm
13 from tqdm_joblib import tqdm_joblib
14
15 LOG = getLogger(__name__)
16
17
18 def _process_one(
19 input_path: Path,
20 output_dir: Path,
21 sr: int,
22 *,
23 min_speakers: int = 1,
24 max_speakers: int = 1,
25 huggingface_token: str | None = None,
26 ) -> None:
27 try:
28 audio, sr = librosa.load(input_path, sr=sr, mono=True)
29 except Exception as e:
30 LOG.warning(f"Failed to read {input_path}: {e}")
31 return
32 pipeline = Pipeline.from_pretrained(
33 "pyannote/speaker-diarization", use_auth_token=huggingface_token
34 )
35 if pipeline is None:
36 raise ValueError("Failed to load pipeline")
37 pipeline = pipeline.to(torch.device("cuda"))
38 LOG.info(f"Processing {input_path}. This may take a while...")
39 diarization = pipeline(
40 input_path, min_speakers=min_speakers, max_speakers=max_speakers
41 )
42
43 LOG.info(f"Found {len(diarization)} tracks, writing to {output_dir}")
44 speaker_count = defaultdict(int)
45
46 output_dir.mkdir(parents=True, exist_ok=True)
47 for segment, track, speaker in tqdm(
48 list(diarization.itertracks(yield_label=True)), desc=f"Writing {input_path}"
49 ):
50 if segment.end - segment.start < 1:
51 continue
52 speaker_count[speaker] += 1
53 audio_cut = audio[int(segment.start * sr) : int(segment.end * sr)]
54 sf.write(
55 (output_dir / f"{speaker}_{speaker_count[speaker]}.wav"),
56 audio_cut,
57 sr,
58 )
59
60 LOG.info(f"Speaker count: {speaker_count}")
61
62
63 def preprocess_speaker_diarization(
64 input_dir: Path | str,
65 output_dir: Path | str,
66 sr: int,
67 *,
68 min_speakers: int = 1,
69 max_speakers: int = 1,
70 huggingface_token: str | None = None,
71 n_jobs: int = -1,
72 ) -> None:
73 if huggingface_token is not None and not huggingface_token.startswith("hf_"):
74 LOG.warning("Huggingface token probably should start with hf_")
75 if not torch.cuda.is_available():
76 LOG.warning("CUDA is not available. This will be extremely slow.")
77 input_dir = Path(input_dir)
78 output_dir = Path(output_dir)
79 input_dir.mkdir(parents=True, exist_ok=True)
80 output_dir.mkdir(parents=True, exist_ok=True)
81 input_paths = list(input_dir.rglob("*.*"))
82 with tqdm_joblib(desc="Preprocessing speaker diarization", total=len(input_paths)):
83 Parallel(n_jobs=n_jobs)(
84 delayed(_process_one)(
85 input_path,
86 output_dir / input_path.relative_to(input_dir).parent / input_path.stem,
87 sr,
88 max_speakers=max_speakers,
89 min_speakers=min_speakers,
90 huggingface_token=huggingface_token,
91 )
92 for input_path in input_paths
93 )
94
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py b/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py
--- a/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py
+++ b/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py
@@ -30,7 +30,7 @@
LOG.warning(f"Failed to read {input_path}: {e}")
return
pipeline = Pipeline.from_pretrained(
- "pyannote/speaker-diarization", use_auth_token=huggingface_token
+ "pyannote/speaker-diarization-3.1", use_auth_token=huggingface_token
)
if pipeline is None:
raise ValueError("Failed to load pipeline")
| {"golden_diff": "diff --git a/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py b/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py\n--- a/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py\n+++ b/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py\n@@ -30,7 +30,7 @@\n LOG.warning(f\"Failed to read {input_path}: {e}\")\n return\n pipeline = Pipeline.from_pretrained(\n- \"pyannote/speaker-diarization\", use_auth_token=huggingface_token\n+ \"pyannote/speaker-diarization-3.1\", use_auth_token=huggingface_token\n )\n if pipeline is None:\n raise ValueError(\"Failed to load pipeline\")\n", "issue": "Unable to use svc pre-sd with pyannote.audio 3.1.1\n### Describe the bug\n\nTo use svc pre-sd for a long audio file with multiple speakers, I followed the [setup guide](https://github.com/voicepaw/so-vits-svc-fork/#before-training) and manually installed pyannote.audio, getting the latest version, 3.1.1.\r\n\r\nAttempting to run svc pre-sd triggered the following error messages:\r\n\r\n```\r\nModel was trained with pyannote.audio 0.0.1, yours is 3.1.1. Bad things might happen unless you revert pyannote.audio to 0.x.\r\n\r\nModel was trained with torch 1.10.0+cu102, yours is 2.2.2+cu121. Bad things might happen unless you revert torch to 1.x.\r\n```\r\n\r\nAccording to [PyPI](https://pypi.org/project/pyannote.audio/3.1.1/), pyannote.audio 3.1.1 works with speaker-diarization-3.1. So, it\u2019s necessary to explicitly specify this version in the code.\n\n### To Reproduce\n\n1. Set up the environment. (I'm using torch 2.2.2+cu121.)\r\n2. Install so-vits-svc-fork and its dependencies.\r\n3. Install pyannote.audio with `pip3 install pyannote-audio`.\r\n4. Prepare your data and organize data folders.\r\n5. Run svc pre-sd with options suited to your data.\n\n### Additional context\n\n_No response_\n\n### Version\n\n4.1.61\n\n### Platform\n\nWSL-Ubuntu 22.04 LTS\n\n### Code of Conduct\n\n- [X] I agree to follow this project's Code of Conduct.\n\n### No Duplicate\n\n- [X] I have checked existing issues to avoid duplicates.\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom collections import defaultdict\nfrom logging import getLogger\nfrom pathlib import Path\n\nimport librosa\nimport soundfile as sf\nimport torch\nfrom joblib import Parallel, delayed\nfrom pyannote.audio import Pipeline\nfrom tqdm import tqdm\nfrom tqdm_joblib import tqdm_joblib\n\nLOG = getLogger(__name__)\n\n\ndef _process_one(\n input_path: Path,\n output_dir: Path,\n sr: int,\n *,\n min_speakers: int = 1,\n max_speakers: int = 1,\n huggingface_token: str | None = None,\n) -> None:\n try:\n audio, sr = librosa.load(input_path, sr=sr, mono=True)\n except Exception as e:\n LOG.warning(f\"Failed to read {input_path}: {e}\")\n return\n pipeline = Pipeline.from_pretrained(\n \"pyannote/speaker-diarization\", use_auth_token=huggingface_token\n )\n if pipeline is None:\n raise ValueError(\"Failed to load pipeline\")\n pipeline = pipeline.to(torch.device(\"cuda\"))\n LOG.info(f\"Processing {input_path}. 
This may take a while...\")\n diarization = pipeline(\n input_path, min_speakers=min_speakers, max_speakers=max_speakers\n )\n\n LOG.info(f\"Found {len(diarization)} tracks, writing to {output_dir}\")\n speaker_count = defaultdict(int)\n\n output_dir.mkdir(parents=True, exist_ok=True)\n for segment, track, speaker in tqdm(\n list(diarization.itertracks(yield_label=True)), desc=f\"Writing {input_path}\"\n ):\n if segment.end - segment.start < 1:\n continue\n speaker_count[speaker] += 1\n audio_cut = audio[int(segment.start * sr) : int(segment.end * sr)]\n sf.write(\n (output_dir / f\"{speaker}_{speaker_count[speaker]}.wav\"),\n audio_cut,\n sr,\n )\n\n LOG.info(f\"Speaker count: {speaker_count}\")\n\n\ndef preprocess_speaker_diarization(\n input_dir: Path | str,\n output_dir: Path | str,\n sr: int,\n *,\n min_speakers: int = 1,\n max_speakers: int = 1,\n huggingface_token: str | None = None,\n n_jobs: int = -1,\n) -> None:\n if huggingface_token is not None and not huggingface_token.startswith(\"hf_\"):\n LOG.warning(\"Huggingface token probably should start with hf_\")\n if not torch.cuda.is_available():\n LOG.warning(\"CUDA is not available. This will be extremely slow.\")\n input_dir = Path(input_dir)\n output_dir = Path(output_dir)\n input_dir.mkdir(parents=True, exist_ok=True)\n output_dir.mkdir(parents=True, exist_ok=True)\n input_paths = list(input_dir.rglob(\"*.*\"))\n with tqdm_joblib(desc=\"Preprocessing speaker diarization\", total=len(input_paths)):\n Parallel(n_jobs=n_jobs)(\n delayed(_process_one)(\n input_path,\n output_dir / input_path.relative_to(input_dir).parent / input_path.stem,\n sr,\n max_speakers=max_speakers,\n min_speakers=min_speakers,\n huggingface_token=huggingface_token,\n )\n for input_path in input_paths\n )\n", "path": "src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py"}], "after_files": [{"content": "from __future__ import annotations\n\nfrom collections import defaultdict\nfrom logging import getLogger\nfrom pathlib import Path\n\nimport librosa\nimport soundfile as sf\nimport torch\nfrom joblib import Parallel, delayed\nfrom pyannote.audio import Pipeline\nfrom tqdm import tqdm\nfrom tqdm_joblib import tqdm_joblib\n\nLOG = getLogger(__name__)\n\n\ndef _process_one(\n input_path: Path,\n output_dir: Path,\n sr: int,\n *,\n min_speakers: int = 1,\n max_speakers: int = 1,\n huggingface_token: str | None = None,\n) -> None:\n try:\n audio, sr = librosa.load(input_path, sr=sr, mono=True)\n except Exception as e:\n LOG.warning(f\"Failed to read {input_path}: {e}\")\n return\n pipeline = Pipeline.from_pretrained(\n \"pyannote/speaker-diarization-3.1\", use_auth_token=huggingface_token\n )\n if pipeline is None:\n raise ValueError(\"Failed to load pipeline\")\n pipeline = pipeline.to(torch.device(\"cuda\"))\n LOG.info(f\"Processing {input_path}. 
This may take a while...\")\n diarization = pipeline(\n input_path, min_speakers=min_speakers, max_speakers=max_speakers\n )\n\n LOG.info(f\"Found {len(diarization)} tracks, writing to {output_dir}\")\n speaker_count = defaultdict(int)\n\n output_dir.mkdir(parents=True, exist_ok=True)\n for segment, track, speaker in tqdm(\n list(diarization.itertracks(yield_label=True)), desc=f\"Writing {input_path}\"\n ):\n if segment.end - segment.start < 1:\n continue\n speaker_count[speaker] += 1\n audio_cut = audio[int(segment.start * sr) : int(segment.end * sr)]\n sf.write(\n (output_dir / f\"{speaker}_{speaker_count[speaker]}.wav\"),\n audio_cut,\n sr,\n )\n\n LOG.info(f\"Speaker count: {speaker_count}\")\n\n\ndef preprocess_speaker_diarization(\n input_dir: Path | str,\n output_dir: Path | str,\n sr: int,\n *,\n min_speakers: int = 1,\n max_speakers: int = 1,\n huggingface_token: str | None = None,\n n_jobs: int = -1,\n) -> None:\n if huggingface_token is not None and not huggingface_token.startswith(\"hf_\"):\n LOG.warning(\"Huggingface token probably should start with hf_\")\n if not torch.cuda.is_available():\n LOG.warning(\"CUDA is not available. This will be extremely slow.\")\n input_dir = Path(input_dir)\n output_dir = Path(output_dir)\n input_dir.mkdir(parents=True, exist_ok=True)\n output_dir.mkdir(parents=True, exist_ok=True)\n input_paths = list(input_dir.rglob(\"*.*\"))\n with tqdm_joblib(desc=\"Preprocessing speaker diarization\", total=len(input_paths)):\n Parallel(n_jobs=n_jobs)(\n delayed(_process_one)(\n input_path,\n output_dir / input_path.relative_to(input_dir).parent / input_path.stem,\n sr,\n max_speakers=max_speakers,\n min_speakers=min_speakers,\n huggingface_token=huggingface_token,\n )\n for input_path in input_paths\n )\n", "path": "src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py"}]} | 1,585 | 183 |
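The one-line fix pins the pipeline identifier to match the installed pyannote.audio 3.1.x. A minimal loading sketch built from the same calls the module already uses (`audio.wav` and the `hf_...` token are placeholders; a network connection and an accepted model license on Hugging Face are required):

```python
import torch
from pyannote.audio import Pipeline

# Pin the pipeline that matches pyannote.audio 3.1.x, per the fix above.
pipeline = Pipeline.from_pretrained(
    "pyannote/speaker-diarization-3.1",
    use_auth_token="hf_...",  # placeholder: a real Hugging Face token is required
)
if torch.cuda.is_available():
    pipeline = pipeline.to(torch.device("cuda"))

diarization = pipeline("audio.wav", min_speakers=1, max_speakers=2)
for segment, track, speaker in diarization.itertracks(yield_label=True):
    print(segment.start, segment.end, speaker)
```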
gh_patches_debug_6467 | rasdani/github-patches | git_diff | getnikola__nikola-1145 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Never copy/list listings/*.py[co]
kinda related to invariance
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nikola/plugins/task/listings.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 # Copyright © 2012-2014 Roberto Alsina and others.
4
5 # Permission is hereby granted, free of charge, to any
6 # person obtaining a copy of this software and associated
7 # documentation files (the "Software"), to deal in the
8 # Software without restriction, including without limitation
9 # the rights to use, copy, modify, merge, publish,
10 # distribute, sublicense, and/or sell copies of the
11 # Software, and to permit persons to whom the Software is
12 # furnished to do so, subject to the following conditions:
13 #
14 # The above copyright notice and this permission notice
15 # shall be included in all copies or substantial portions of
16 # the Software.
17 #
18 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
19 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
20 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
21 # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
22 # OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
23 # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
24 # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
25 # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26
27 from __future__ import unicode_literals, print_function
28
29 import os
30
31 from pygments import highlight
32 from pygments.lexers import get_lexer_for_filename, TextLexer
33 from pygments.formatters import HtmlFormatter
34
35 from nikola.plugin_categories import Task
36 from nikola import utils
37
38
39 class Listings(Task):
40 """Render pretty listings."""
41
42 name = "render_listings"
43
44 def set_site(self, site):
45 site.register_path_handler('listing', self.listing_path)
46 return super(Listings, self).set_site(site)
47
48 def gen_tasks(self):
49 """Render pretty code listings."""
50 kw = {
51 "default_lang": self.site.config["DEFAULT_LANG"],
52 "listings_folder": self.site.config["LISTINGS_FOLDER"],
53 "output_folder": self.site.config["OUTPUT_FOLDER"],
54 "index_file": self.site.config["INDEX_FILE"],
55 }
56
57 # Things to ignore in listings
58 ignored_extensions = (".pyc", ".pyo")
59
60 def render_listing(in_name, out_name, folders=[], files=[]):
61 if in_name:
62 with open(in_name, 'r') as fd:
63 try:
64 lexer = get_lexer_for_filename(in_name)
65 except:
66 lexer = TextLexer()
67 code = highlight(fd.read(), lexer,
68 HtmlFormatter(cssclass='code',
69 linenos="table", nowrap=False,
70 lineanchors=utils.slugify(in_name),
71 anchorlinenos=True))
72 title = os.path.basename(in_name)
73 else:
74 code = ''
75 title = ''
76 crumbs = utils.get_crumbs(os.path.relpath(out_name,
77 kw['output_folder']),
78 is_file=True)
79 context = {
80 'code': code,
81 'title': title,
82 'crumbs': crumbs,
83 'lang': kw['default_lang'],
84 'folders': folders,
85 'files': files,
86 'description': title,
87 }
88 self.site.render_template('listing.tmpl', out_name,
89 context)
90
91 yield self.group_task()
92
93 template_deps = self.site.template_system.template_deps('listing.tmpl')
94 for root, dirs, files in os.walk(kw['listings_folder']):
95 # Render all files
96 out_name = os.path.join(
97 kw['output_folder'],
98 root, kw['index_file']
99 )
100 yield {
101 'basename': self.name,
102 'name': out_name,
103 'file_dep': template_deps,
104 'targets': [out_name],
105 'actions': [(render_listing, [None, out_name, dirs, files])],
106 # This is necessary to reflect changes in blog title,
107 # sidebar links, etc.
108 'uptodate': [utils.config_changed(
109 self.site.GLOBAL_CONTEXT)],
110 'clean': True,
111 }
112 for f in files:
113 ext = os.path.splitext(f)[-1]
114 if ext in ignored_extensions:
115 continue
116 in_name = os.path.join(root, f)
117 out_name = os.path.join(
118 kw['output_folder'],
119 root,
120 f) + '.html'
121 yield {
122 'basename': self.name,
123 'name': out_name,
124 'file_dep': template_deps + [in_name],
125 'targets': [out_name],
126 'actions': [(render_listing, [in_name, out_name])],
127 # This is necessary to reflect changes in blog title,
128 # sidebar links, etc.
129 'uptodate': [utils.config_changed(
130 self.site.GLOBAL_CONTEXT)],
131 'clean': True,
132 }
133
134 def listing_path(self, name, lang):
135 return [_f for _f in [self.site.config['LISTINGS_FOLDER'], name +
136 '.html'] if _f]
137
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nikola/plugins/task/listings.py b/nikola/plugins/task/listings.py
--- a/nikola/plugins/task/listings.py
+++ b/nikola/plugins/task/listings.py
@@ -92,6 +92,7 @@
template_deps = self.site.template_system.template_deps('listing.tmpl')
for root, dirs, files in os.walk(kw['listings_folder']):
+ files = [f for f in files if os.path.splitext(f)[-1] not in ignored_extensions]
# Render all files
out_name = os.path.join(
kw['output_folder'],
| {"golden_diff": "diff --git a/nikola/plugins/task/listings.py b/nikola/plugins/task/listings.py\n--- a/nikola/plugins/task/listings.py\n+++ b/nikola/plugins/task/listings.py\n@@ -92,6 +92,7 @@\n \n template_deps = self.site.template_system.template_deps('listing.tmpl')\n for root, dirs, files in os.walk(kw['listings_folder']):\n+ files = [f for f in files if os.path.splitext(f)[-1] not in ignored_extensions]\n # Render all files\n out_name = os.path.join(\n kw['output_folder'],\n", "issue": "Never copy/list listings/*.py[co]\nkinda related to invariance\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Copyright \u00a9 2012-2014 Roberto Alsina and others.\n\n# Permission is hereby granted, free of charge, to any\n# person obtaining a copy of this software and associated\n# documentation files (the \"Software\"), to deal in the\n# Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the\n# Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice\n# shall be included in all copies or substantial portions of\n# the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR\n# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS\n# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR\n# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nfrom __future__ import unicode_literals, print_function\n\nimport os\n\nfrom pygments import highlight\nfrom pygments.lexers import get_lexer_for_filename, TextLexer\nfrom pygments.formatters import HtmlFormatter\n\nfrom nikola.plugin_categories import Task\nfrom nikola import utils\n\n\nclass Listings(Task):\n \"\"\"Render pretty listings.\"\"\"\n\n name = \"render_listings\"\n\n def set_site(self, site):\n site.register_path_handler('listing', self.listing_path)\n return super(Listings, self).set_site(site)\n\n def gen_tasks(self):\n \"\"\"Render pretty code listings.\"\"\"\n kw = {\n \"default_lang\": self.site.config[\"DEFAULT_LANG\"],\n \"listings_folder\": self.site.config[\"LISTINGS_FOLDER\"],\n \"output_folder\": self.site.config[\"OUTPUT_FOLDER\"],\n \"index_file\": self.site.config[\"INDEX_FILE\"],\n }\n\n # Things to ignore in listings\n ignored_extensions = (\".pyc\", \".pyo\")\n\n def render_listing(in_name, out_name, folders=[], files=[]):\n if in_name:\n with open(in_name, 'r') as fd:\n try:\n lexer = get_lexer_for_filename(in_name)\n except:\n lexer = TextLexer()\n code = highlight(fd.read(), lexer,\n HtmlFormatter(cssclass='code',\n linenos=\"table\", nowrap=False,\n lineanchors=utils.slugify(in_name),\n anchorlinenos=True))\n title = os.path.basename(in_name)\n else:\n code = ''\n title = ''\n crumbs = utils.get_crumbs(os.path.relpath(out_name,\n kw['output_folder']),\n is_file=True)\n context = {\n 'code': code,\n 'title': title,\n 'crumbs': crumbs,\n 'lang': kw['default_lang'],\n 'folders': folders,\n 'files': files,\n 'description': title,\n }\n self.site.render_template('listing.tmpl', out_name,\n context)\n\n yield self.group_task()\n\n template_deps = self.site.template_system.template_deps('listing.tmpl')\n for root, dirs, 
files in os.walk(kw['listings_folder']):\n # Render all files\n out_name = os.path.join(\n kw['output_folder'],\n root, kw['index_file']\n )\n yield {\n 'basename': self.name,\n 'name': out_name,\n 'file_dep': template_deps,\n 'targets': [out_name],\n 'actions': [(render_listing, [None, out_name, dirs, files])],\n # This is necessary to reflect changes in blog title,\n # sidebar links, etc.\n 'uptodate': [utils.config_changed(\n self.site.GLOBAL_CONTEXT)],\n 'clean': True,\n }\n for f in files:\n ext = os.path.splitext(f)[-1]\n if ext in ignored_extensions:\n continue\n in_name = os.path.join(root, f)\n out_name = os.path.join(\n kw['output_folder'],\n root,\n f) + '.html'\n yield {\n 'basename': self.name,\n 'name': out_name,\n 'file_dep': template_deps + [in_name],\n 'targets': [out_name],\n 'actions': [(render_listing, [in_name, out_name])],\n # This is necessary to reflect changes in blog title,\n # sidebar links, etc.\n 'uptodate': [utils.config_changed(\n self.site.GLOBAL_CONTEXT)],\n 'clean': True,\n }\n\n def listing_path(self, name, lang):\n return [_f for _f in [self.site.config['LISTINGS_FOLDER'], name +\n '.html'] if _f]\n", "path": "nikola/plugins/task/listings.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Copyright \u00a9 2012-2014 Roberto Alsina and others.\n\n# Permission is hereby granted, free of charge, to any\n# person obtaining a copy of this software and associated\n# documentation files (the \"Software\"), to deal in the\n# Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the\n# Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice\n# shall be included in all copies or substantial portions of\n# the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR\n# PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS\n# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR\n# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nfrom __future__ import unicode_literals, print_function\n\nimport os\n\nfrom pygments import highlight\nfrom pygments.lexers import get_lexer_for_filename, TextLexer\nfrom pygments.formatters import HtmlFormatter\n\nfrom nikola.plugin_categories import Task\nfrom nikola import utils\n\n\nclass Listings(Task):\n \"\"\"Render pretty listings.\"\"\"\n\n name = \"render_listings\"\n\n def set_site(self, site):\n site.register_path_handler('listing', self.listing_path)\n return super(Listings, self).set_site(site)\n\n def gen_tasks(self):\n \"\"\"Render pretty code listings.\"\"\"\n kw = {\n \"default_lang\": self.site.config[\"DEFAULT_LANG\"],\n \"listings_folder\": self.site.config[\"LISTINGS_FOLDER\"],\n \"output_folder\": self.site.config[\"OUTPUT_FOLDER\"],\n \"index_file\": self.site.config[\"INDEX_FILE\"],\n }\n\n # Things to ignore in listings\n ignored_extensions = (\".pyc\", \".pyo\")\n\n def render_listing(in_name, out_name, folders=[], files=[]):\n if in_name:\n with open(in_name, 'r') as fd:\n try:\n lexer = get_lexer_for_filename(in_name)\n except:\n lexer = TextLexer()\n code = highlight(fd.read(), lexer,\n HtmlFormatter(cssclass='code',\n linenos=\"table\", nowrap=False,\n lineanchors=utils.slugify(in_name),\n anchorlinenos=True))\n title = os.path.basename(in_name)\n else:\n code = ''\n title = ''\n crumbs = utils.get_crumbs(os.path.relpath(out_name,\n kw['output_folder']),\n is_file=True)\n context = {\n 'code': code,\n 'title': title,\n 'crumbs': crumbs,\n 'lang': kw['default_lang'],\n 'folders': folders,\n 'files': files,\n 'description': title,\n }\n self.site.render_template('listing.tmpl', out_name,\n context)\n\n yield self.group_task()\n\n template_deps = self.site.template_system.template_deps('listing.tmpl')\n for root, dirs, files in os.walk(kw['listings_folder']):\n files = [f for f in files if os.path.splitext(f)[-1] not in ignored_extensions]\n # Render all files\n out_name = os.path.join(\n kw['output_folder'],\n root, kw['index_file']\n )\n yield {\n 'basename': self.name,\n 'name': out_name,\n 'file_dep': template_deps,\n 'targets': [out_name],\n 'actions': [(render_listing, [None, out_name, dirs, files])],\n # This is necessary to reflect changes in blog title,\n # sidebar links, etc.\n 'uptodate': [utils.config_changed(\n self.site.GLOBAL_CONTEXT)],\n 'clean': True,\n }\n for f in files:\n ext = os.path.splitext(f)[-1]\n if ext in ignored_extensions:\n continue\n in_name = os.path.join(root, f)\n out_name = os.path.join(\n kw['output_folder'],\n root,\n f) + '.html'\n yield {\n 'basename': self.name,\n 'name': out_name,\n 'file_dep': template_deps + [in_name],\n 'targets': [out_name],\n 'actions': [(render_listing, [in_name, out_name])],\n # This is necessary to reflect changes in blog title,\n # sidebar links, etc.\n 'uptodate': [utils.config_changed(\n self.site.GLOBAL_CONTEXT)],\n 'clean': True,\n }\n\n def listing_path(self, name, lang):\n return [_f for _f in [self.site.config['LISTINGS_FOLDER'], name +\n '.html'] if _f]\n", "path": "nikola/plugins/task/listings.py"}]} | 1,621 | 132 |
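The patch filters bytecode files out of `files` before anything is rendered or listed. The same stdlib-only filtering pattern, as a standalone runnable sketch (the `listings` folder name is illustrative):

```python
import os

IGNORED_EXTENSIONS = (".pyc", ".pyo")


def iter_listing_files(folder):
    """Yield (root, dirs, files) with bytecode files already filtered out."""
    for root, dirs, files in os.walk(folder):
        files = [f for f in files if os.path.splitext(f)[-1] not in IGNORED_EXTENSIONS]
        yield root, dirs, files


if __name__ == "__main__":
    for root, dirs, files in iter_listing_files("listings"):
        print(root, files)
```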
gh_patches_debug_27442 | rasdani/github-patches | git_diff | aio-libs-abandoned__aioredis-py-839 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Drop Python 3.5 support
Python 3.5 was a famous release (async/await syntax was introduced) but the version has reached end-of-life.
We can (and should) drop it.
Speaking as the maintainer of other libraries, I can say that it simplifies the code base a little
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 import re
2 import os.path
3 import sys
4 import platform
5 from setuptools import setup, find_packages
6
7
8 install_requires = ['async-timeout']
9 if platform.python_implementation() == 'CPython':
10 install_requires.append('hiredis')
11
12 PY_VER = sys.version_info
13
14 if PY_VER < (3, 5):
15 raise RuntimeError("aioredis doesn't support Python version prior 3.5")
16
17
18 def read(*parts):
19 with open(os.path.join(*parts), 'rt') as f:
20 return f.read().strip()
21
22
23 def read_version():
24 regexp = re.compile(r"^__version__\W*=\W*'([\d.abrc]+)'")
25 init_py = os.path.join(os.path.dirname(__file__),
26 'aioredis', '__init__.py')
27 with open(init_py) as f:
28 for line in f:
29 match = regexp.match(line)
30 if match is not None:
31 return match.group(1)
32 raise RuntimeError('Cannot find version in {}'.format(init_py))
33
34
35 classifiers = [
36 'License :: OSI Approved :: MIT License',
37 'Development Status :: 4 - Beta',
38 'Programming Language :: Python',
39 'Programming Language :: Python :: 3',
40 'Programming Language :: Python :: 3.5',
41 'Programming Language :: Python :: 3.6',
42 'Programming Language :: Python :: 3.7',
43 'Programming Language :: Python :: 3 :: Only',
44 'Operating System :: POSIX',
45 'Environment :: Web Environment',
46 'Intended Audience :: Developers',
47 'Topic :: Software Development',
48 'Topic :: Software Development :: Libraries',
49 'Framework :: AsyncIO',
50 ]
51
52 setup(name='aioredis',
53 version=read_version(),
54 description=("asyncio (PEP 3156) Redis support"),
55 long_description="\n\n".join((read('README.rst'), read('CHANGES.txt'))),
56 classifiers=classifiers,
57 platforms=["POSIX"],
58 author="Alexey Popravka",
59 author_email="[email protected]",
60 url="https://github.com/aio-libs/aioredis",
61 license="MIT",
62 packages=find_packages(exclude=["tests"]),
63 install_requires=install_requires,
64 include_package_data=True,
65 )
66
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -1,20 +1,8 @@
import re
import os.path
-import sys
-import platform
from setuptools import setup, find_packages
-install_requires = ['async-timeout']
-if platform.python_implementation() == 'CPython':
- install_requires.append('hiredis')
-
-PY_VER = sys.version_info
-
-if PY_VER < (3, 5):
- raise RuntimeError("aioredis doesn't support Python version prior 3.5")
-
-
def read(*parts):
with open(os.path.join(*parts), 'rt') as f:
return f.read().strip()
@@ -37,7 +25,6 @@
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
@@ -60,6 +47,10 @@
url="https://github.com/aio-libs/aioredis",
license="MIT",
packages=find_packages(exclude=["tests"]),
- install_requires=install_requires,
+ install_requires=[
+ 'async-timeout',
+ 'hiredis; implementation_name=="cpython"'
+ ],
+ python_requires=">=3.6",
include_package_data=True,
)
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -1,20 +1,8 @@\n import re\n import os.path\n-import sys\n-import platform\n from setuptools import setup, find_packages\n \n \n-install_requires = ['async-timeout']\n-if platform.python_implementation() == 'CPython':\n- install_requires.append('hiredis')\n-\n-PY_VER = sys.version_info\n-\n-if PY_VER < (3, 5):\n- raise RuntimeError(\"aioredis doesn't support Python version prior 3.5\")\n-\n-\n def read(*parts):\n with open(os.path.join(*parts), 'rt') as f:\n return f.read().strip()\n@@ -37,7 +25,6 @@\n 'Development Status :: 4 - Beta',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n- 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n@@ -60,6 +47,10 @@\n url=\"https://github.com/aio-libs/aioredis\",\n license=\"MIT\",\n packages=find_packages(exclude=[\"tests\"]),\n- install_requires=install_requires,\n+ install_requires=[\n+ 'async-timeout',\n+ 'hiredis; implementation_name==\"cpython\"'\n+ ],\n+ python_requires=\">=3.6\",\n include_package_data=True,\n )\n", "issue": "Drop Python 3.5 support\nPython 3.5 was a famous release (async/await syntax was introduced) but the version has reached end-of-life.\r\nWe can (and should) drop it.\r\nSpeaking as the maintainer of other libraries, I can say that it simplifies the code base a little\n", "before_files": [{"content": "import re\nimport os.path\nimport sys\nimport platform\nfrom setuptools import setup, find_packages\n\n\ninstall_requires = ['async-timeout']\nif platform.python_implementation() == 'CPython':\n install_requires.append('hiredis')\n\nPY_VER = sys.version_info\n\nif PY_VER < (3, 5):\n raise RuntimeError(\"aioredis doesn't support Python version prior 3.5\")\n\n\ndef read(*parts):\n with open(os.path.join(*parts), 'rt') as f:\n return f.read().strip()\n\n\ndef read_version():\n regexp = re.compile(r\"^__version__\\W*=\\W*'([\\d.abrc]+)'\")\n init_py = os.path.join(os.path.dirname(__file__),\n 'aioredis', '__init__.py')\n with open(init_py) as f:\n for line in f:\n match = regexp.match(line)\n if match is not None:\n return match.group(1)\n raise RuntimeError('Cannot find version in {}'.format(init_py))\n\n\nclassifiers = [\n 'License :: OSI Approved :: MIT License',\n 'Development Status :: 4 - Beta',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n 'Operating System :: POSIX',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries',\n 'Framework :: AsyncIO',\n]\n\nsetup(name='aioredis',\n version=read_version(),\n description=(\"asyncio (PEP 3156) Redis support\"),\n long_description=\"\\n\\n\".join((read('README.rst'), read('CHANGES.txt'))),\n classifiers=classifiers,\n platforms=[\"POSIX\"],\n author=\"Alexey Popravka\",\n author_email=\"[email protected]\",\n url=\"https://github.com/aio-libs/aioredis\",\n license=\"MIT\",\n packages=find_packages(exclude=[\"tests\"]),\n install_requires=install_requires,\n include_package_data=True,\n )\n", "path": "setup.py"}], "after_files": [{"content": "import re\nimport os.path\nfrom setuptools import setup, find_packages\n\n\ndef read(*parts):\n 
with open(os.path.join(*parts), 'rt') as f:\n return f.read().strip()\n\n\ndef read_version():\n regexp = re.compile(r\"^__version__\\W*=\\W*'([\\d.abrc]+)'\")\n init_py = os.path.join(os.path.dirname(__file__),\n 'aioredis', '__init__.py')\n with open(init_py) as f:\n for line in f:\n match = regexp.match(line)\n if match is not None:\n return match.group(1)\n raise RuntimeError('Cannot find version in {}'.format(init_py))\n\n\nclassifiers = [\n 'License :: OSI Approved :: MIT License',\n 'Development Status :: 4 - Beta',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n 'Operating System :: POSIX',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries',\n 'Framework :: AsyncIO',\n]\n\nsetup(name='aioredis',\n version=read_version(),\n description=(\"asyncio (PEP 3156) Redis support\"),\n long_description=\"\\n\\n\".join((read('README.rst'), read('CHANGES.txt'))),\n classifiers=classifiers,\n platforms=[\"POSIX\"],\n author=\"Alexey Popravka\",\n author_email=\"[email protected]\",\n url=\"https://github.com/aio-libs/aioredis\",\n license=\"MIT\",\n packages=find_packages(exclude=[\"tests\"]),\n install_requires=[\n 'async-timeout',\n 'hiredis; implementation_name==\"cpython\"'\n ],\n python_requires=\">=3.6\",\n include_package_data=True,\n )\n", "path": "setup.py"}]} | 936 | 335 |
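The patch replaces runtime interpreter checks with declarative packaging metadata: `python_requires` and a PEP 508 environment marker. A minimal `setup.py` sketch showing just those two pieces (the project name is a placeholder):

```python
from setuptools import setup, find_packages

setup(
    name="example",  # placeholder project name
    packages=find_packages(exclude=["tests"]),
    # Declarative replacements for the runtime checks the patch removed:
    python_requires=">=3.6",  # instead of a sys.version_info check at install time
    install_requires=[
        "async-timeout",
        'hiredis; implementation_name=="cpython"',  # PEP 508 environment marker
    ],
)
```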
gh_patches_debug_34889 | rasdani/github-patches | git_diff | scikit-hep__pyhf-1818 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add contextlib support to the pyhf.schema API
> instead of having users overwriting the value by assignment, do you think there is value in offering a `pyhf.utils.update_schema_path` or something that performs this operation through the API?
>
> Thanks @kratsg for taking up my suggestion!
>
> I second @matthewfeickert's suggestion to change schemas via function call rather than assignment. It is much simpler to add functionality to a function if it ever becomes necessary, than to replace an entire existing module with a class instance with a property.
>
> I'd even go so far to say that a context manager doubling as an update function would be ideal IMO:
> ```python
> # in pyhf.utils
> _SCHEMAS = Path(...)
> class use_schema_path: # snake_case to remind of function-like usage
> def __init__(self, path):
> global _SCHEMAS
> self._old_schemas = _SCHEMAS
> _SCHEMAS = pathlib.Path(path)
> def __enter__(self):
> pass
> def __exit__(self, *args, **kwargs):
> global _SCHEMAS
> _SCHEMAS = self._old_schemas
> ```
> which can still be called as a function (only executing `__init__`), so short scripts etc. are not forced to use `with` blocks.
> But it can also be used like so:
> ```python
> def make_my_workspace(spec):
> with pyhf.utils.use_schema_path('/my/very/special/schemas'):
> return pyhf.Workspace(spec)
> ```
> So as a user writing code on top of pyhf, I don't have to worry about resetting the global variable, the CM does it for me, and there are fewer mistakes to make.
>
_Originally posted by @lhenkelm in https://github.com/scikit-hep/pyhf/issues/1753#issuecomment-1026678066_
--- END ISSUE ---
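The context-manager suggestion quoted in the issue is nearly complete on its own; fleshed out into a runnable standalone sketch (the module-level `_SCHEMAS` default here is illustrative, not pyhf's real path):

```python
import pathlib

_SCHEMAS = pathlib.Path("/default/schemas")  # placeholder default


class use_schema_path:  # snake_case to remind of function-like usage
    """Set the schema search path; usable as a plain call or a context manager."""

    def __init__(self, path):
        global _SCHEMAS
        self._old = _SCHEMAS
        _SCHEMAS = pathlib.Path(path)

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        global _SCHEMAS
        _SCHEMAS = self._old


# Plain call: only __init__ runs, so the switch is permanent.
use_schema_path("/my/special/schemas")

# Context manager: temporary switch, restored on exit.
with use_schema_path("/tmp/schemas"):
    print(_SCHEMAS)  # /tmp/schemas
print(_SCHEMAS)  # back to /my/special/schemas
```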
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/pyhf/schema/__init__.py`
Content:
```
1 """
2 See :class:`~pyhf.schema.Schema` for documentation.
3 """
4 import pathlib
5 import sys
6 from pyhf.schema.loader import load_schema
7 from pyhf.schema.validator import validate
8 from pyhf.schema import variables
9
10 __all__ = [
11 "load_schema",
12 "validate",
13 "path",
14 "version",
15 ]
16
17
18 def __dir__():
19 return __all__
20
21
22 class Schema(sys.modules[__name__].__class__):
23 """
24 A module-level wrapper around :mod:`pyhf.schema` which will provide additional functionality for interacting with schemas.
25
26 Example:
27 >>> import pyhf.schema
28 >>> import pathlib
29 >>> curr_path = pyhf.schema.path
30 >>> curr_path # doctest: +ELLIPSIS
31 PosixPath('.../pyhf/schemas')
32 >>> pyhf.schema(pathlib.Path('/home/root/my/new/path'))
33 >>> pyhf.schema.path
34 PosixPath('/home/root/my/new/path')
35 >>> pyhf.schema(curr_path)
36 >>> pyhf.schema.path # doctest: +ELLIPSIS
37 PosixPath('.../pyhf/schemas')
38
39 """
40
41 def __call__(self, new_path: pathlib.Path):
42 """
43 Change the local search path for finding schemas locally.
44
45 Args:
46 new_path (pathlib.Path): Path to folder containing the schemas
47
48 Returns:
49 None
50 """
51 variables.schemas = new_path
52
53 @property
54 def path(self):
55 """
56 The local path for schemas.
57 """
58 return variables.schemas
59
60 @property
61 def version(self):
62 """
63 The default version used for finding schemas.
64 """
65 return variables.SCHEMA_VERSION
66
67
68 sys.modules[__name__].__class__ = Schema
69
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/pyhf/schema/__init__.py b/src/pyhf/schema/__init__.py
--- a/src/pyhf/schema/__init__.py
+++ b/src/pyhf/schema/__init__.py
@@ -23,17 +23,40 @@
"""
A module-level wrapper around :mod:`pyhf.schema` which will provide additional functionality for interacting with schemas.
- Example:
+ .. rubric:: Example (callable)
+
+ .. code-block:: pycon
+
>>> import pyhf.schema
>>> import pathlib
>>> curr_path = pyhf.schema.path
- >>> curr_path # doctest: +ELLIPSIS
+ >>> curr_path # doctest: +ELLIPSIS
PosixPath('.../pyhf/schemas')
- >>> pyhf.schema(pathlib.Path('/home/root/my/new/path'))
+ >>> new_path = pathlib.Path("/home/root/my/new/path")
+ >>> pyhf.schema(new_path) # doctest: +ELLIPSIS
+ <module 'pyhf.schema' from ...>
>>> pyhf.schema.path
PosixPath('/home/root/my/new/path')
- >>> pyhf.schema(curr_path)
- >>> pyhf.schema.path # doctest: +ELLIPSIS
+ >>> pyhf.schema(curr_path) # doctest: +ELLIPSIS
+ <module 'pyhf.schema' from ...>
+ >>> pyhf.schema.path # doctest: +ELLIPSIS
+ PosixPath('.../pyhf/schemas')
+
+ .. rubric:: Example (context-manager)
+
+ .. code-block:: pycon
+
+ >>> import pyhf.schema
+ >>> import pathlib
+ >>> curr_path = pyhf.schema.path
+ >>> curr_path # doctest: +ELLIPSIS
+ PosixPath('.../pyhf/schemas')
+ >>> new_path = pathlib.Path("/home/root/my/new/path")
+ >>> with pyhf.schema(new_path):
+ ... print(repr(pyhf.schema.path))
+ ...
+ PosixPath('/home/root/my/new/path')
+ >>> pyhf.schema.path # doctest: +ELLIPSIS
PosixPath('.../pyhf/schemas')
"""
@@ -45,10 +68,23 @@
Args:
new_path (pathlib.Path): Path to folder containing the schemas
+ Returns:
+ self (pyhf.schema.Schema): Returns itself (for contextlib management)
+ """
+ self.orig_path, variables.schemas = variables.schemas, new_path
+ return self
+
+ def __enter__(self):
+ pass
+
+ def __exit__(self, *args, **kwargs):
+ """
+ Reset the local search path for finding schemas locally.
+
Returns:
None
"""
- variables.schemas = new_path
+ variables.schemas = self.orig_path
@property
def path(self):
| {"golden_diff": "diff --git a/src/pyhf/schema/__init__.py b/src/pyhf/schema/__init__.py\n--- a/src/pyhf/schema/__init__.py\n+++ b/src/pyhf/schema/__init__.py\n@@ -23,17 +23,40 @@\n \"\"\"\n A module-level wrapper around :mod:`pyhf.schema` which will provide additional functionality for interacting with schemas.\n \n- Example:\n+ .. rubric:: Example (callable)\n+\n+ .. code-block:: pycon\n+\n >>> import pyhf.schema\n >>> import pathlib\n >>> curr_path = pyhf.schema.path\n- >>> curr_path # doctest: +ELLIPSIS\n+ >>> curr_path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n- >>> pyhf.schema(pathlib.Path('/home/root/my/new/path'))\n+ >>> new_path = pathlib.Path(\"/home/root/my/new/path\")\n+ >>> pyhf.schema(new_path) # doctest: +ELLIPSIS\n+ <module 'pyhf.schema' from ...>\n >>> pyhf.schema.path\n PosixPath('/home/root/my/new/path')\n- >>> pyhf.schema(curr_path)\n- >>> pyhf.schema.path # doctest: +ELLIPSIS\n+ >>> pyhf.schema(curr_path) # doctest: +ELLIPSIS\n+ <module 'pyhf.schema' from ...>\n+ >>> pyhf.schema.path # doctest: +ELLIPSIS\n+ PosixPath('.../pyhf/schemas')\n+\n+ .. rubric:: Example (context-manager)\n+\n+ .. code-block:: pycon\n+\n+ >>> import pyhf.schema\n+ >>> import pathlib\n+ >>> curr_path = pyhf.schema.path\n+ >>> curr_path # doctest: +ELLIPSIS\n+ PosixPath('.../pyhf/schemas')\n+ >>> new_path = pathlib.Path(\"/home/root/my/new/path\")\n+ >>> with pyhf.schema(new_path):\n+ ... print(repr(pyhf.schema.path))\n+ ...\n+ PosixPath('/home/root/my/new/path')\n+ >>> pyhf.schema.path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n \n \"\"\"\n@@ -45,10 +68,23 @@\n Args:\n new_path (pathlib.Path): Path to folder containing the schemas\n \n+ Returns:\n+ self (pyhf.schema.Schema): Returns itself (for contextlib management)\n+ \"\"\"\n+ self.orig_path, variables.schemas = variables.schemas, new_path\n+ return self\n+\n+ def __enter__(self):\n+ pass\n+\n+ def __exit__(self, *args, **kwargs):\n+ \"\"\"\n+ Reset the local search path for finding schemas locally.\n+\n Returns:\n None\n \"\"\"\n- variables.schemas = new_path\n+ variables.schemas = self.orig_path\n \n @property\n def path(self):\n", "issue": "Add contextlib support to the pyhf.schema API\n> instead of having users overwriting the value by assignment, do you think there is value in offering a `pyhf.utils.update_schema_path` or something that performs this operation through the API?\r\n>\r\n> Thanks @kratsg for taking up my suggestion!\r\n> \r\n> I second @matthewfeickert's suggestion to change schemas via function call rather than assignment. It is much simpler to add functionality to a function if it ever becomes necessary, than to replace an entire existing module with a class instance with a property.\r\n> \r\n> I'd even go so far to say that a context manager doubling as an update function would be ideal IMO:\r\n> ```python\r\n> # in pyhf.utils\r\n> _SCHEMAS = Path(...)\r\n> class use_schema_path: # snake_case to remind of function-like usage\r\n> def __init__(self, path):\r\n> global _SCHEMAS\r\n> self._old_schemas = _SCHEMAS\r\n> _SCHEMAS = pathlib.Path(path)\r\n> def __enter__(self):\r\n> pass\r\n> def __exit__(self, *args, **kwargs):\r\n> global _SCHEMAS\r\n> _SCHEMAS = self._old_schemas\r\n> ```\r\n> which can still be called as a function (only executing `__init__`), so short scripts etc. 
are not forced to use `with` blocks.\r\n> But it can also be used like so:\r\n> ```python\r\n> def make_my_workspace(spec):\r\n> with pyhf.utils.use_schema_path('/my/very/special/schemas'):\r\n> return pyhf.Workspace(spec)\r\n> ```\r\n> So as a user writing code on top of pyhf, I don't have to worry about resesetting the global variable, the CM does it for me, and there are fewer mistakes to make.\r\n> \r\n_Originally posted by @lhenkelm in https://github.com/scikit-hep/pyhf/issues/1753#issuecomment-1026678066_\n", "before_files": [{"content": "\"\"\"\nSee :class:`~pyhf.schema.Schema` for documentation.\n\"\"\"\nimport pathlib\nimport sys\nfrom pyhf.schema.loader import load_schema\nfrom pyhf.schema.validator import validate\nfrom pyhf.schema import variables\n\n__all__ = [\n \"load_schema\",\n \"validate\",\n \"path\",\n \"version\",\n]\n\n\ndef __dir__():\n return __all__\n\n\nclass Schema(sys.modules[__name__].__class__):\n \"\"\"\n A module-level wrapper around :mod:`pyhf.schema` which will provide additional functionality for interacting with schemas.\n\n Example:\n >>> import pyhf.schema\n >>> import pathlib\n >>> curr_path = pyhf.schema.path\n >>> curr_path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n >>> pyhf.schema(pathlib.Path('/home/root/my/new/path'))\n >>> pyhf.schema.path\n PosixPath('/home/root/my/new/path')\n >>> pyhf.schema(curr_path)\n >>> pyhf.schema.path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n\n \"\"\"\n\n def __call__(self, new_path: pathlib.Path):\n \"\"\"\n Change the local search path for finding schemas locally.\n\n Args:\n new_path (pathlib.Path): Path to folder containing the schemas\n\n Returns:\n None\n \"\"\"\n variables.schemas = new_path\n\n @property\n def path(self):\n \"\"\"\n The local path for schemas.\n \"\"\"\n return variables.schemas\n\n @property\n def version(self):\n \"\"\"\n The default version used for finding schemas.\n \"\"\"\n return variables.SCHEMA_VERSION\n\n\nsys.modules[__name__].__class__ = Schema\n", "path": "src/pyhf/schema/__init__.py"}], "after_files": [{"content": "\"\"\"\nSee :class:`~pyhf.schema.Schema` for documentation.\n\"\"\"\nimport pathlib\nimport sys\nfrom pyhf.schema.loader import load_schema\nfrom pyhf.schema.validator import validate\nfrom pyhf.schema import variables\n\n__all__ = [\n \"load_schema\",\n \"validate\",\n \"path\",\n \"version\",\n]\n\n\ndef __dir__():\n return __all__\n\n\nclass Schema(sys.modules[__name__].__class__):\n \"\"\"\n A module-level wrapper around :mod:`pyhf.schema` which will provide additional functionality for interacting with schemas.\n\n .. rubric:: Example (callable)\n\n .. code-block:: pycon\n\n >>> import pyhf.schema\n >>> import pathlib\n >>> curr_path = pyhf.schema.path\n >>> curr_path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n >>> new_path = pathlib.Path(\"/home/root/my/new/path\")\n >>> pyhf.schema(new_path) # doctest: +ELLIPSIS\n <module 'pyhf.schema' from ...>\n >>> pyhf.schema.path\n PosixPath('/home/root/my/new/path')\n >>> pyhf.schema(curr_path) # doctest: +ELLIPSIS\n <module 'pyhf.schema' from ...>\n >>> pyhf.schema.path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n\n .. rubric:: Example (context-manager)\n\n .. code-block:: pycon\n\n >>> import pyhf.schema\n >>> import pathlib\n >>> curr_path = pyhf.schema.path\n >>> curr_path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n >>> new_path = pathlib.Path(\"/home/root/my/new/path\")\n >>> with pyhf.schema(new_path):\n ... 
print(repr(pyhf.schema.path))\n ...\n PosixPath('/home/root/my/new/path')\n >>> pyhf.schema.path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n\n \"\"\"\n\n def __call__(self, new_path: pathlib.Path):\n \"\"\"\n Change the local search path for finding schemas locally.\n\n Args:\n new_path (pathlib.Path): Path to folder containing the schemas\n\n Returns:\n self (pyhf.schema.Schema): Returns itself (for contextlib management)\n \"\"\"\n self.orig_path, variables.schemas = variables.schemas, new_path\n return self\n\n def __enter__(self):\n pass\n\n def __exit__(self, *args, **kwargs):\n \"\"\"\n Reset the local search path for finding schemas locally.\n\n Returns:\n None\n \"\"\"\n variables.schemas = self.orig_path\n\n @property\n def path(self):\n \"\"\"\n The local path for schemas.\n \"\"\"\n return variables.schemas\n\n @property\n def version(self):\n \"\"\"\n The default version used for finding schemas.\n \"\"\"\n return variables.SCHEMA_VERSION\n\n\nsys.modules[__name__].__class__ = Schema\n", "path": "src/pyhf/schema/__init__.py"}]} | 1,206 | 656 |
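A short usage sketch of the patched `pyhf.schema` API may help when reading the diff above. It assumes the post-fix module, where the module-level call swaps the schema search path, returns the module itself, and doubles as a context manager (matching the `__call__`/`__enter__`/`__exit__` shown in the golden diff); the `/tmp/custom-schemas` folder is a made-up placeholder, not a path from the original report.

```python
import pathlib
import pyhf.schema

custom = pathlib.Path("/tmp/custom-schemas")  # hypothetical schema folder
orig = pyhf.schema.path

# Context-manager form: __exit__ restores the previous search path.
with pyhf.schema(custom):
    assert pyhf.schema.path == custom
assert pyhf.schema.path == orig

# Callable form: the same call without a with-block keeps the new path,
# so short scripts are not forced into a with-statement.
pyhf.schema(custom)
assert pyhf.schema.path == custom
pyhf.schema(orig)  # switch back manually
```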
gh_patches_debug_3498 | rasdani/github-patches | git_diff | readthedocs__readthedocs.org-1530 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Social Network Login Failure
When clicking on "Connect GitHub" on the [social accounts page](https://readthedocs.org/accounts/social/connections/?), I get a message:
> An error occurred while attempting to login via your social network account.
There's a `?` in the URL. Could that be a hint? Is it missing some request arguments? If I omit it, the bug persists.
Cheers!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `readthedocs/settings/postgres.py`
Content:
```
1 import os
2
3 from .base import * # noqa
4
5
6 DATABASES = {
7 'default': {
8 'ENGINE': 'django.db.backends.postgresql_psycopg2',
9 'NAME': 'docs',
10 'USER': 'postgres', # Not used with sqlite3.
11 'PASSWORD': '',
12 'HOST': '10.177.73.97',
13 'PORT': '',
14 }
15 }
16
17 DEBUG = False
18 TEMPLATE_DEBUG = False
19 CELERY_ALWAYS_EAGER = False
20
21 MEDIA_URL = 'https://media.readthedocs.org/'
22 STATIC_URL = 'https://media.readthedocs.org/static/'
23 ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
24 SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
25
26 HAYSTACK_CONNECTIONS = {
27 'default': {
28 'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
29 'URL': 'http://odin:8983/solr',
30 }
31 }
32
33 CACHES = {
34 'default': {
35 'BACKEND': 'redis_cache.RedisCache',
36 'LOCATION': 'localhost:6379',
37 'PREFIX': 'docs',
38 'OPTIONS': {
39 'DB': 1,
40 'PARSER_CLASS': 'redis.connection.HiredisParser'
41 },
42 },
43 }
44
45 # Elasticsearch settings.
46 ES_HOSTS = ['backup:9200', 'db:9200']
47 ES_DEFAULT_NUM_REPLICAS = 1
48 ES_DEFAULT_NUM_SHARDS = 5
49
50 SLUMBER_API_HOST = 'https://readthedocs.org'
51 WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
52
53 PRODUCTION_DOMAIN = 'readthedocs.org'
54 USE_SUBDOMAIN = True
55 NGINX_X_ACCEL_REDIRECT = True
56
57 SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
58
59 # Lock builds for 10 minutes
60 REPO_LOCK_SECONDS = 300
61
62 # Don't re-confirm existing accounts
63 ACCOUNT_EMAIL_VERIFICATION = 'none'
64
65 FILE_SYNCER = 'privacy.backends.syncers.DoubleRemotePuller'
66
67 # set GitHub scope
68 SOCIALACCOUNT_PROVIDERS = {
69 'github': {'SCOPE': ['user:email', 'read:org', 'admin:repo_hook', 'repo:status']}
70 }
71
72 if not os.environ.get('DJANGO_SETTINGS_SKIP_LOCAL', False):
73 try:
74 from local_settings import * # noqa
75 except ImportError:
76 pass
77
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/readthedocs/settings/postgres.py b/readthedocs/settings/postgres.py
--- a/readthedocs/settings/postgres.py
+++ b/readthedocs/settings/postgres.py
@@ -69,6 +69,9 @@
'github': {'SCOPE': ['user:email', 'read:org', 'admin:repo_hook', 'repo:status']}
}
+# allauth settings
+ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'https'
+
if not os.environ.get('DJANGO_SETTINGS_SKIP_LOCAL', False):
try:
from local_settings import * # noqa
| {"golden_diff": "diff --git a/readthedocs/settings/postgres.py b/readthedocs/settings/postgres.py\n--- a/readthedocs/settings/postgres.py\n+++ b/readthedocs/settings/postgres.py\n@@ -69,6 +69,9 @@\n 'github': {'SCOPE': ['user:email', 'read:org', 'admin:repo_hook', 'repo:status']}\n }\n \n+# allauth settings\n+ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'https'\n+\n if not os.environ.get('DJANGO_SETTINGS_SKIP_LOCAL', False):\n try:\n from local_settings import * # noqa\n", "issue": "Social Network Login Failure\nWhen clicking on connect GitHub on the [social accounts page](https://readthedocs.org/accounts/social/connections/?) I get a message:\n\n> An error occurred while attempting to login via your social network account.\n\nThere's a `?` in the url. Could that be a hint? Is it missing some request arguments? If I omit it, the bug persists.\n\nCheers!\n\n", "before_files": [{"content": "import os\n\nfrom .base import * # noqa\n\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.postgresql_psycopg2',\n 'NAME': 'docs',\n 'USER': 'postgres', # Not used with sqlite3.\n 'PASSWORD': '',\n 'HOST': '10.177.73.97',\n 'PORT': '',\n }\n}\n\nDEBUG = False\nTEMPLATE_DEBUG = False\nCELERY_ALWAYS_EAGER = False\n\nMEDIA_URL = 'https://media.readthedocs.org/'\nSTATIC_URL = 'https://media.readthedocs.org/static/'\nADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'\nSESSION_ENGINE = \"django.contrib.sessions.backends.cached_db\"\n\nHAYSTACK_CONNECTIONS = {\n 'default': {\n 'ENGINE': 'haystack.backends.solr_backend.SolrEngine',\n 'URL': 'http://odin:8983/solr',\n }\n}\n\nCACHES = {\n 'default': {\n 'BACKEND': 'redis_cache.RedisCache',\n 'LOCATION': 'localhost:6379',\n 'PREFIX': 'docs',\n 'OPTIONS': {\n 'DB': 1,\n 'PARSER_CLASS': 'redis.connection.HiredisParser'\n },\n },\n}\n\n# Elasticsearch settings.\nES_HOSTS = ['backup:9200', 'db:9200']\nES_DEFAULT_NUM_REPLICAS = 1\nES_DEFAULT_NUM_SHARDS = 5\n\nSLUMBER_API_HOST = 'https://readthedocs.org'\nWEBSOCKET_HOST = 'websocket.readthedocs.org:8088'\n\nPRODUCTION_DOMAIN = 'readthedocs.org'\nUSE_SUBDOMAIN = True\nNGINX_X_ACCEL_REDIRECT = True\n\nSECURE_PROXY_SSL_HEADER = (\"HTTP_X_FORWARDED_PROTO\", \"https\")\n\n# Lock builds for 10 minutes\nREPO_LOCK_SECONDS = 300\n\n# Don't re-confirm existing accounts\nACCOUNT_EMAIL_VERIFICATION = 'none'\n\nFILE_SYNCER = 'privacy.backends.syncers.DoubleRemotePuller'\n\n# set GitHub scope\nSOCIALACCOUNT_PROVIDERS = {\n 'github': {'SCOPE': ['user:email', 'read:org', 'admin:repo_hook', 'repo:status']}\n}\n\nif not os.environ.get('DJANGO_SETTINGS_SKIP_LOCAL', False):\n try:\n from local_settings import * # noqa\n except ImportError:\n pass\n", "path": "readthedocs/settings/postgres.py"}], "after_files": [{"content": "import os\n\nfrom .base import * # noqa\n\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.postgresql_psycopg2',\n 'NAME': 'docs',\n 'USER': 'postgres', # Not used with sqlite3.\n 'PASSWORD': '',\n 'HOST': '10.177.73.97',\n 'PORT': '',\n }\n}\n\nDEBUG = False\nTEMPLATE_DEBUG = False\nCELERY_ALWAYS_EAGER = False\n\nMEDIA_URL = 'https://media.readthedocs.org/'\nSTATIC_URL = 'https://media.readthedocs.org/static/'\nADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'\nSESSION_ENGINE = \"django.contrib.sessions.backends.cached_db\"\n\nHAYSTACK_CONNECTIONS = {\n 'default': {\n 'ENGINE': 'haystack.backends.solr_backend.SolrEngine',\n 'URL': 'http://odin:8983/solr',\n }\n}\n\nCACHES = {\n 'default': {\n 'BACKEND': 'redis_cache.RedisCache',\n 'LOCATION': 'localhost:6379',\n 'PREFIX': 'docs',\n 'OPTIONS': {\n 'DB': 1,\n 
'PARSER_CLASS': 'redis.connection.HiredisParser'\n },\n },\n}\n\n# Elasticsearch settings.\nES_HOSTS = ['backup:9200', 'db:9200']\nES_DEFAULT_NUM_REPLICAS = 1\nES_DEFAULT_NUM_SHARDS = 5\n\nSLUMBER_API_HOST = 'https://readthedocs.org'\nWEBSOCKET_HOST = 'websocket.readthedocs.org:8088'\n\nPRODUCTION_DOMAIN = 'readthedocs.org'\nUSE_SUBDOMAIN = True\nNGINX_X_ACCEL_REDIRECT = True\n\nSECURE_PROXY_SSL_HEADER = (\"HTTP_X_FORWARDED_PROTO\", \"https\")\n\n# Lock builds for 10 minutes\nREPO_LOCK_SECONDS = 300\n\n# Don't re-confirm existing accounts\nACCOUNT_EMAIL_VERIFICATION = 'none'\n\nFILE_SYNCER = 'privacy.backends.syncers.DoubleRemotePuller'\n\n# set GitHub scope\nSOCIALACCOUNT_PROVIDERS = {\n 'github': {'SCOPE': ['user:email', 'read:org', 'admin:repo_hook', 'repo:status']}\n}\n\n# allauth settings\nACCOUNT_DEFAULT_HTTP_PROTOCOL = 'https'\n\nif not os.environ.get('DJANGO_SETTINGS_SKIP_LOCAL', False):\n try:\n from local_settings import * # noqa\n except ImportError:\n pass\n", "path": "readthedocs/settings/postgres.py"}]} | 1,016 | 125 |
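One plausible reading of why a single settings line fixes the GitHub login (an assumption; the report itself does not state the mechanism): readthedocs.org terminates SSL at a proxy (note `SECURE_PROXY_SSL_HEADER` in the file above), and django-allauth falls back to `ACCOUNT_DEFAULT_HTTP_PROTOCOL` (default `http`) when building absolute URLs such as the OAuth `redirect_uri`. A scheme mismatch against the callback registered at GitHub then aborts the login. The sketch below only illustrates that mismatch; it is not allauth's actual code, and the `build_redirect_uri` helper is invented for illustration.

```python
# Illustrative only; not django-allauth's real implementation.
def build_redirect_uri(host, path, default_protocol="http"):
    # Behind an SSL-terminating proxy the app may default to "http" here.
    return "{}://{}{}".format(default_protocol, host, path)

registered = "https://readthedocs.org/accounts/github/login/callback/"
built = build_redirect_uri("readthedocs.org",
                           "/accounts/github/login/callback/")
print(built == registered)  # False until the default protocol is 'https'
```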
gh_patches_debug_16013 | rasdani/github-patches | git_diff | pyro-ppl__pyro-3167 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
bug with OneCycleLR on Apple Silicon
### Guidelines
**NOTE:** Issues are for bugs and feature requests only. If you have a question about using Pyro or general modeling questions, please post it on the [forum](https://forum.pyro.ai/).
If you would like to address any minor bugs in the documentation or source, please feel free to contribute a Pull Request without creating an issue first.
Please tag the issue appropriately in the title e.g. [bug], [feature request], [discussion], etc.
Please provide the following details:
--------------------------------------------------------------------------------------------------
### Issue Description
Provide a brief description of the issue.
### Environment
For any bugs, please provide the following:
- OS and python version.
- PyTorch version, or if relevant, output of `pip freeze`.
- Pyro version: output of `python -c 'import pyro; print pyro.__version__'`
### Code Snippet
Provide any relevant code snippets and commands run to replicate the issue.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyro/optim/pytorch_optimizers.py`
Content:
```
1 # Copyright (c) 2017-2019 Uber Technologies, Inc.
2 # SPDX-License-Identifier: Apache-2.0
3
4 import torch
5
6 from pyro.optim import PyroOptim
7 from pyro.optim.lr_scheduler import PyroLRScheduler
8
9 __all__ = []
10 # Programmatically load all optimizers from PyTorch.
11 for _name, _Optim in torch.optim.__dict__.items():
12 if not isinstance(_Optim, type):
13 continue
14 if not issubclass(_Optim, torch.optim.Optimizer):
15 continue
16 if _Optim is torch.optim.Optimizer:
17 continue
18 if _Optim is torch.optim.LBFGS:
19 # XXX LBFGS is not supported for SVI yet
20 continue
21
22 _PyroOptim = (
23 lambda _Optim: lambda optim_args, clip_args=None: PyroOptim(
24 _Optim, optim_args, clip_args
25 )
26 )(_Optim)
27 _PyroOptim.__name__ = _name
28 _PyroOptim.__doc__ = "Wraps :class:`torch.optim.{}` with :class:`~pyro.optim.optim.PyroOptim`.".format(
29 _name
30 )
31
32 locals()[_name] = _PyroOptim
33 __all__.append(_name)
34 del _PyroOptim
35
36 # Load all schedulers from PyTorch
37 for _name, _Optim in torch.optim.lr_scheduler.__dict__.items():
38 if not isinstance(_Optim, type):
39 continue
40 if (
41 not issubclass(_Optim, torch.optim.lr_scheduler._LRScheduler)
42 and _name != "ReduceLROnPlateau"
43 ):
44 continue
45 if _Optim is torch.optim.Optimizer:
46 continue
47
48 _PyroOptim = (
49 lambda _Optim: lambda optim_args, clip_args=None: PyroLRScheduler(
50 _Optim, optim_args, clip_args
51 )
52 )(_Optim)
53 _PyroOptim.__name__ = _name
54 _PyroOptim.__doc__ = (
55 "Wraps :class:`torch.optim.{}` with ".format(_name)
56 + ":class:`~pyro.optim.lr_scheduler.PyroLRScheduler`."
57 )
58
59 locals()[_name] = _PyroOptim
60 __all__.append(_name)
61 del _PyroOptim
62
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pyro/optim/pytorch_optimizers.py b/pyro/optim/pytorch_optimizers.py
--- a/pyro/optim/pytorch_optimizers.py
+++ b/pyro/optim/pytorch_optimizers.py
@@ -34,13 +34,16 @@
del _PyroOptim
# Load all schedulers from PyTorch
+# breaking change in torch >= 1.14: LRScheduler is new base class
+if hasattr(torch.optim.lr_scheduler, "LRScheduler"):
+ _torch_scheduler_base = torch.optim.lr_scheduler.LRScheduler
+else: # for torch < 1.13, _LRScheduler is base class
+ _torch_scheduler_base = torch.optim.lr_scheduler._LRScheduler
+
for _name, _Optim in torch.optim.lr_scheduler.__dict__.items():
if not isinstance(_Optim, type):
continue
- if (
- not issubclass(_Optim, torch.optim.lr_scheduler._LRScheduler)
- and _name != "ReduceLROnPlateau"
- ):
+ if not issubclass(_Optim, _torch_scheduler_base) and _name != "ReduceLROnPlateau":
continue
if _Optim is torch.optim.Optimizer:
continue
| {"golden_diff": "diff --git a/pyro/optim/pytorch_optimizers.py b/pyro/optim/pytorch_optimizers.py\n--- a/pyro/optim/pytorch_optimizers.py\n+++ b/pyro/optim/pytorch_optimizers.py\n@@ -34,13 +34,16 @@\n del _PyroOptim\n \n # Load all schedulers from PyTorch\n+# breaking change in torch >= 1.14: LRScheduler is new base class\n+if hasattr(torch.optim.lr_scheduler, \"LRScheduler\"):\n+ _torch_scheduler_base = torch.optim.lr_scheduler.LRScheduler\n+else: # for torch < 1.13, _LRScheduler is base class\n+ _torch_scheduler_base = torch.optim.lr_scheduler._LRScheduler\n+\n for _name, _Optim in torch.optim.lr_scheduler.__dict__.items():\n if not isinstance(_Optim, type):\n continue\n- if (\n- not issubclass(_Optim, torch.optim.lr_scheduler._LRScheduler)\n- and _name != \"ReduceLROnPlateau\"\n- ):\n+ if not issubclass(_Optim, _torch_scheduler_base) and _name != \"ReduceLROnPlateau\":\n continue\n if _Optim is torch.optim.Optimizer:\n continue\n", "issue": "bug with OneCycleLR on Apple Silicone \n### Guidelines\r\n\r\n**NOTE:** Issues are for bugs and feature requests only. If you have a question about using Pyro or general modeling questions, please post it on the [forum](https://forum.pyro.ai/).\r\n\r\nIf you would like to address any minor bugs in the documentation or source, please feel free to contribute a Pull Request without creating an issue first. \r\n\r\nPlease tag the issue appropriately in the title e.g. [bug], [feature request], [discussion], etc.\r\n\r\nPlease provide the following details:\r\n--------------------------------------------------------------------------------------------------\r\n### Issue Description\r\nProvide a brief description of the issue.\r\n\r\n### Environment\r\nFor any bugs, please provide the following:\r\n - OS and python version.\r\n - PyTorch version, or if relevant, output of `pip freeze`.\r\n - Pyro version: output of `python -c 'import pyro; print pyro.__version__'`\r\n\r\n### Code Snippet\r\nProvide any relevant code snippets and commands run to replicate the issue.\r\n\n", "before_files": [{"content": "# Copyright (c) 2017-2019 Uber Technologies, Inc.\n# SPDX-License-Identifier: Apache-2.0\n\nimport torch\n\nfrom pyro.optim import PyroOptim\nfrom pyro.optim.lr_scheduler import PyroLRScheduler\n\n__all__ = []\n# Programmatically load all optimizers from PyTorch.\nfor _name, _Optim in torch.optim.__dict__.items():\n if not isinstance(_Optim, type):\n continue\n if not issubclass(_Optim, torch.optim.Optimizer):\n continue\n if _Optim is torch.optim.Optimizer:\n continue\n if _Optim is torch.optim.LBFGS:\n # XXX LBFGS is not supported for SVI yet\n continue\n\n _PyroOptim = (\n lambda _Optim: lambda optim_args, clip_args=None: PyroOptim(\n _Optim, optim_args, clip_args\n )\n )(_Optim)\n _PyroOptim.__name__ = _name\n _PyroOptim.__doc__ = \"Wraps :class:`torch.optim.{}` with :class:`~pyro.optim.optim.PyroOptim`.\".format(\n _name\n )\n\n locals()[_name] = _PyroOptim\n __all__.append(_name)\n del _PyroOptim\n\n# Load all schedulers from PyTorch\nfor _name, _Optim in torch.optim.lr_scheduler.__dict__.items():\n if not isinstance(_Optim, type):\n continue\n if (\n not issubclass(_Optim, torch.optim.lr_scheduler._LRScheduler)\n and _name != \"ReduceLROnPlateau\"\n ):\n continue\n if _Optim is torch.optim.Optimizer:\n continue\n\n _PyroOptim = (\n lambda _Optim: lambda optim_args, clip_args=None: PyroLRScheduler(\n _Optim, optim_args, clip_args\n )\n )(_Optim)\n _PyroOptim.__name__ = _name\n _PyroOptim.__doc__ = (\n \"Wraps :class:`torch.optim.{}` with 
\".format(_name)\n + \":class:`~pyro.optim.lr_scheduler.PyroLRScheduler`.\"\n )\n\n locals()[_name] = _PyroOptim\n __all__.append(_name)\n del _PyroOptim\n", "path": "pyro/optim/pytorch_optimizers.py"}], "after_files": [{"content": "# Copyright (c) 2017-2019 Uber Technologies, Inc.\n# SPDX-License-Identifier: Apache-2.0\n\nimport torch\n\nfrom pyro.optim import PyroOptim\nfrom pyro.optim.lr_scheduler import PyroLRScheduler\n\n__all__ = []\n# Programmatically load all optimizers from PyTorch.\nfor _name, _Optim in torch.optim.__dict__.items():\n if not isinstance(_Optim, type):\n continue\n if not issubclass(_Optim, torch.optim.Optimizer):\n continue\n if _Optim is torch.optim.Optimizer:\n continue\n if _Optim is torch.optim.LBFGS:\n # XXX LBFGS is not supported for SVI yet\n continue\n\n _PyroOptim = (\n lambda _Optim: lambda optim_args, clip_args=None: PyroOptim(\n _Optim, optim_args, clip_args\n )\n )(_Optim)\n _PyroOptim.__name__ = _name\n _PyroOptim.__doc__ = \"Wraps :class:`torch.optim.{}` with :class:`~pyro.optim.optim.PyroOptim`.\".format(\n _name\n )\n\n locals()[_name] = _PyroOptim\n __all__.append(_name)\n del _PyroOptim\n\n# Load all schedulers from PyTorch\n# breaking change in torch >= 1.14: LRScheduler is new base class\nif hasattr(torch.optim.lr_scheduler, \"LRScheduler\"):\n _torch_scheduler_base = torch.optim.lr_scheduler.LRScheduler\nelse: # for torch < 1.13, _LRScheduler is base class\n _torch_scheduler_base = torch.optim.lr_scheduler._LRScheduler\n\nfor _name, _Optim in torch.optim.lr_scheduler.__dict__.items():\n if not isinstance(_Optim, type):\n continue\n if not issubclass(_Optim, _torch_scheduler_base) and _name != \"ReduceLROnPlateau\":\n continue\n if _Optim is torch.optim.Optimizer:\n continue\n\n _PyroOptim = (\n lambda _Optim: lambda optim_args, clip_args=None: PyroLRScheduler(\n _Optim, optim_args, clip_args\n )\n )(_Optim)\n _PyroOptim.__name__ = _name\n _PyroOptim.__doc__ = (\n \"Wraps :class:`torch.optim.{}` with \".format(_name)\n + \":class:`~pyro.optim.lr_scheduler.PyroLRScheduler`.\"\n )\n\n locals()[_name] = _PyroOptim\n __all__.append(_name)\n del _PyroOptim\n", "path": "pyro/optim/pytorch_optimizers.py"}]} | 1,119 | 285 |
gh_patches_debug_67407 | rasdani/github-patches | git_diff | mitmproxy__mitmproxy-1711 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pathoc does not accept `:pa,f` to pause forever at end of message
##### Steps to reproduce the problem:
`pathoc www.example.com 'get:/:pa,f'`
##### What is the expected behavior?
Send request, but pause forever after sending.
##### What went wrong?
I get a stack trace with "a float is required".
```
$ pathoc www.example.com 'get:/:pa,f'
08-09-16 16:59:41: >> 'GET':/:pa,f
Traceback (most recent call last):
File "/usr/local/bin/pathoc", line 11, in <module>
sys.exit(go_pathoc())
File "/usr/local/lib/python2.7/dist-packages/pathod/pathoc_cmdline.py", line 226, in go_pathoc
pathoc.main(args)
File "/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py", line 522, in main
ret = p.request(spec)
File "/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py", line 452, in request
return self.http(r)
File "/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py", line 432, in http
return resp
File "/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py", line 411, in http
req = language.serve(r, self.wfile, self.settings)
File "/usr/local/lib/python2.7/dist-packages/pathod/language/__init__.py", line 105, in serve
disconnect = writer.write_values(fp, vals, actions[:])
File "/usr/local/lib/python2.7/dist-packages/pathod/language/writer.py", line 61, in write_values
time.sleep(a[2])
TypeError: a float is required
```
##### Any other comments? What have you tried so far?
All other combinations of pause flags work as expected:
```
$ pathoc www.example.com 'get:/:p2,5'
08-09-16 17:05:07: >> 'GET':/:p2,5
<< 200 OK: 1270 bytes
$ pathoc www.example.com 'get:/:pr,5'
08-09-16 17:05:21: >> 'GET':/:pr,5
<< 200 OK: 1270 bytes
$ pathoc www.example.com 'get:/:pa,5'
08-09-16 17:05:41: >> 'GET':/:pa,5
<< 200 OK: 1270 bytes
$ pathoc www.example.com 'get:/:p2,f'
^C08-09-16 17:04:46: >> 'GET':/:p2,f
$ pathoc www.example.com 'get:/:pr,f'
^C08-09-16 17:04:55: >> 'GET':/:pr,f
```
---
pathoc version: 0.17
Operating System: Debian Linux 8.5 "Jessie" x64
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pathod/language/writer.py`
Content:
```
1 import time
2 from mitmproxy import exceptions
3
4 BLOCKSIZE = 1024
5 # It's not clear what the upper limit for time.sleep is. It's lower than the
6 # maximum int or float. 1 year should do.
7 FOREVER = 60 * 60 * 24 * 365
8
9
10 def send_chunk(fp, val, blocksize, start, end):
11 """
12 (start, end): Inclusive lower bound, exclusive upper bound.
13 """
14 for i in range(start, end, blocksize):
15 fp.write(
16 val[i:min(i + blocksize, end)]
17 )
18 return end - start
19
20
21 def write_values(fp, vals, actions, sofar=0, blocksize=BLOCKSIZE):
22 """
23 vals: A list of values, which may be strings or Value objects.
24
25 actions: A list of (offset, action, arg) tuples. Action may be "inject",
26 "pause" or "disconnect".
27
28 Both vals and actions are in reverse order, with the first items last.
29
30 Return True if connection should disconnect.
31 """
32 sofar = 0
33 try:
34 while vals:
35 v = vals.pop()
36 offset = 0
37 while actions and actions[-1][0] < (sofar + len(v)):
38 a = actions.pop()
39 offset += send_chunk(
40 fp,
41 v,
42 blocksize,
43 offset,
44 a[0] - sofar - offset
45 )
46 if a[1] == "pause":
47 time.sleep(
48 FOREVER if a[2] == "f" else a[2]
49 )
50 elif a[1] == "disconnect":
51 return True
52 elif a[1] == "inject":
53 send_chunk(fp, a[2], blocksize, 0, len(a[2]))
54 send_chunk(fp, v, blocksize, offset, len(v))
55 sofar += len(v)
56 # Remainders
57 while actions:
58 a = actions.pop()
59 if a[1] == "pause":
60 time.sleep(a[2])
61 elif a[1] == "disconnect":
62 return True
63 elif a[1] == "inject":
64 send_chunk(fp, a[2], blocksize, 0, len(a[2]))
65 except exceptions.TcpDisconnect: # pragma: no cover
66 return True
67
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pathod/language/writer.py b/pathod/language/writer.py
--- a/pathod/language/writer.py
+++ b/pathod/language/writer.py
@@ -57,7 +57,9 @@
while actions:
a = actions.pop()
if a[1] == "pause":
- time.sleep(a[2])
+ time.sleep(
+ FOREVER if a[2] == "f" else a[2]
+ )
elif a[1] == "disconnect":
return True
elif a[1] == "inject":
| {"golden_diff": "diff --git a/pathod/language/writer.py b/pathod/language/writer.py\n--- a/pathod/language/writer.py\n+++ b/pathod/language/writer.py\n@@ -57,7 +57,9 @@\n while actions:\n a = actions.pop()\n if a[1] == \"pause\":\n- time.sleep(a[2])\n+ time.sleep(\n+ FOREVER if a[2] == \"f\" else a[2]\n+ )\n elif a[1] == \"disconnect\":\n return True\n elif a[1] == \"inject\":\n", "issue": "pathoc does not accept `:pa,f` to pause forever at end of message\n##### Steps to reproduce the problem:\n\n`pathoc www.example.com 'get:/:pa,f'`\n##### What is the expected behavior?\n\nSend request, but pause forever after sending.\n##### What went wrong?\n\nI get a stack trace with \"a float is required\".\n\n```\n$ pathoc www.example.com 'get:/:pa,f'\n08-09-16 16:59:41: >> 'GET':/:pa,f\nTraceback (most recent call last):\n File \"/usr/local/bin/pathoc\", line 11, in <module>\n sys.exit(go_pathoc())\n File \"/usr/local/lib/python2.7/dist-packages/pathod/pathoc_cmdline.py\", line 226, in go_pathoc\n pathoc.main(args)\n File \"/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py\", line 522, in main\n ret = p.request(spec)\n File \"/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py\", line 452, in request\n return self.http(r)\n File \"/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py\", line 432, in http\n return resp\n File \"/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py\", line 411, in http\n req = language.serve(r, self.wfile, self.settings)\n File \"/usr/local/lib/python2.7/dist-packages/pathod/language/__init__.py\", line 105, in serve\n disconnect = writer.write_values(fp, vals, actions[:])\n File \"/usr/local/lib/python2.7/dist-packages/pathod/language/writer.py\", line 61, in write_values\n time.sleep(a[2])\nTypeError: a float is required\n```\n##### Any other comments? What have you tried so far?\n\nAll other combinations of pause flags work as expected:\n\n```\n$ pathoc www.example.com 'get:/:p2,5'\n08-09-16 17:05:07: >> 'GET':/:p2,5\n<< 200 OK: 1270 bytes\n$ pathoc www.example.com 'get:/:pr,5'\n08-09-16 17:05:21: >> 'GET':/:pr,5\n<< 200 OK: 1270 bytes\n$ pathoc www.example.com 'get:/:pa,5'\n08-09-16 17:05:41: >> 'GET':/:pa,5\n<< 200 OK: 1270 bytes\n$ pathoc www.example.com 'get:/:p2,f'\n^C08-09-16 17:04:46: >> 'GET':/:p2,f\n$ pathoc www.example.com 'get:/:pr,f'\n^C08-09-16 17:04:55: >> 'GET':/:pr,f\n```\n\n---\n\npathoc version: 0.17\nOperating System: Debian Linux 8.5 \"Jessie\" x64\n\n", "before_files": [{"content": "import time\nfrom mitmproxy import exceptions\n\nBLOCKSIZE = 1024\n# It's not clear what the upper limit for time.sleep is. It's lower than the\n# maximum int or float. 1 year should do.\nFOREVER = 60 * 60 * 24 * 365\n\n\ndef send_chunk(fp, val, blocksize, start, end):\n \"\"\"\n (start, end): Inclusive lower bound, exclusive upper bound.\n \"\"\"\n for i in range(start, end, blocksize):\n fp.write(\n val[i:min(i + blocksize, end)]\n )\n return end - start\n\n\ndef write_values(fp, vals, actions, sofar=0, blocksize=BLOCKSIZE):\n \"\"\"\n vals: A list of values, which may be strings or Value objects.\n\n actions: A list of (offset, action, arg) tuples. 
Action may be \"inject\",\n \"pause\" or \"disconnect\".\n\n Both vals and actions are in reverse order, with the first items last.\n\n Return True if connection should disconnect.\n \"\"\"\n sofar = 0\n try:\n while vals:\n v = vals.pop()\n offset = 0\n while actions and actions[-1][0] < (sofar + len(v)):\n a = actions.pop()\n offset += send_chunk(\n fp,\n v,\n blocksize,\n offset,\n a[0] - sofar - offset\n )\n if a[1] == \"pause\":\n time.sleep(\n FOREVER if a[2] == \"f\" else a[2]\n )\n elif a[1] == \"disconnect\":\n return True\n elif a[1] == \"inject\":\n send_chunk(fp, a[2], blocksize, 0, len(a[2]))\n send_chunk(fp, v, blocksize, offset, len(v))\n sofar += len(v)\n # Remainders\n while actions:\n a = actions.pop()\n if a[1] == \"pause\":\n time.sleep(a[2])\n elif a[1] == \"disconnect\":\n return True\n elif a[1] == \"inject\":\n send_chunk(fp, a[2], blocksize, 0, len(a[2]))\n except exceptions.TcpDisconnect: # pragma: no cover\n return True\n", "path": "pathod/language/writer.py"}], "after_files": [{"content": "import time\nfrom mitmproxy import exceptions\n\nBLOCKSIZE = 1024\n# It's not clear what the upper limit for time.sleep is. It's lower than the\n# maximum int or float. 1 year should do.\nFOREVER = 60 * 60 * 24 * 365\n\n\ndef send_chunk(fp, val, blocksize, start, end):\n \"\"\"\n (start, end): Inclusive lower bound, exclusive upper bound.\n \"\"\"\n for i in range(start, end, blocksize):\n fp.write(\n val[i:min(i + blocksize, end)]\n )\n return end - start\n\n\ndef write_values(fp, vals, actions, sofar=0, blocksize=BLOCKSIZE):\n \"\"\"\n vals: A list of values, which may be strings or Value objects.\n\n actions: A list of (offset, action, arg) tuples. Action may be \"inject\",\n \"pause\" or \"disconnect\".\n\n Both vals and actions are in reverse order, with the first items last.\n\n Return True if connection should disconnect.\n \"\"\"\n sofar = 0\n try:\n while vals:\n v = vals.pop()\n offset = 0\n while actions and actions[-1][0] < (sofar + len(v)):\n a = actions.pop()\n offset += send_chunk(\n fp,\n v,\n blocksize,\n offset,\n a[0] - sofar - offset\n )\n if a[1] == \"pause\":\n time.sleep(\n FOREVER if a[2] == \"f\" else a[2]\n )\n elif a[1] == \"disconnect\":\n return True\n elif a[1] == \"inject\":\n send_chunk(fp, a[2], blocksize, 0, len(a[2]))\n send_chunk(fp, v, blocksize, offset, len(v))\n sofar += len(v)\n # Remainders\n while actions:\n a = actions.pop()\n if a[1] == \"pause\":\n time.sleep(\n FOREVER if a[2] == \"f\" else a[2]\n )\n elif a[1] == \"disconnect\":\n return True\n elif a[1] == \"inject\":\n send_chunk(fp, a[2], blocksize, 0, len(a[2]))\n except exceptions.TcpDisconnect: # pragma: no cover\n return True\n", "path": "pathod/language/writer.py"}]} | 1,613 | 126 |
gh_patches_debug_18789 | rasdani/github-patches | git_diff | numpy__numpy-14207 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Wrong capitalization of BitGenerator MT19937
Wrong capitalization of MT19937 in default argument values of functions in numpy.random._pickle.py
### Reproducing code example:
```python
import numpy as np
np.random._pickle.__randomstate_ctor() # => error
np.random._pickle.__randomstate_ctor('mt19937') # => error
np.random._pickle.__randomstate_ctor('MT19937') # => works
```
### Error message:
*** ValueError: mt19937 is not a known BitGenerator module.
### Numpy/Python version information:
1.17.0 3.6.8 (default, Jan 14 2019, 11:02:34)
[GCC 8.0.1 20180414 (experimental) [trunk revision 259383]]
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `numpy/random/_pickle.py`
Content:
```
1 from .mtrand import RandomState
2 from .philox import Philox
3 from .pcg64 import PCG64
4 from .sfc64 import SFC64
5
6 from .generator import Generator
7 from .mt19937 import MT19937
8
9 BitGenerators = {'MT19937': MT19937,
10 'PCG64': PCG64,
11 'Philox': Philox,
12 'SFC64': SFC64,
13 }
14
15
16 def __generator_ctor(bit_generator_name='mt19937'):
17 """
18 Pickling helper function that returns a Generator object
19
20 Parameters
21 ----------
22 bit_generator_name: str
23 String containing the core BitGenerator
24
25 Returns
26 -------
27 rg: Generator
28 Generator using the named core BitGenerator
29 """
30 if bit_generator_name in BitGenerators:
31 bit_generator = BitGenerators[bit_generator_name]
32 else:
33 raise ValueError(str(bit_generator_name) + ' is not a known '
34 'BitGenerator module.')
35
36 return Generator(bit_generator())
37
38
39 def __bit_generator_ctor(bit_generator_name='mt19937'):
40 """
41 Pickling helper function that returns a bit generator object
42
43 Parameters
44 ----------
45 bit_generator_name: str
46 String containing the name of the BitGenerator
47
48 Returns
49 -------
50 bit_generator: BitGenerator
51 BitGenerator instance
52 """
53 if bit_generator_name in BitGenerators:
54 bit_generator = BitGenerators[bit_generator_name]
55 else:
56 raise ValueError(str(bit_generator_name) + ' is not a known '
57 'BitGenerator module.')
58
59 return bit_generator()
60
61
62 def __randomstate_ctor(bit_generator_name='mt19937'):
63 """
64 Pickling helper function that returns a legacy RandomState-like object
65
66 Parameters
67 ----------
68 bit_generator_name: str
69 String containing the core BitGenerator
70
71 Returns
72 -------
73 rs: RandomState
74 Legacy RandomState using the named core BitGenerator
75 """
76 if bit_generator_name in BitGenerators:
77 bit_generator = BitGenerators[bit_generator_name]
78 else:
79 raise ValueError(str(bit_generator_name) + ' is not a known '
80 'BitGenerator module.')
81
82 return RandomState(bit_generator())
83
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/numpy/random/_pickle.py b/numpy/random/_pickle.py
--- a/numpy/random/_pickle.py
+++ b/numpy/random/_pickle.py
@@ -13,7 +13,7 @@
}
-def __generator_ctor(bit_generator_name='mt19937'):
+def __generator_ctor(bit_generator_name='MT19937'):
"""
Pickling helper function that returns a Generator object
@@ -36,7 +36,7 @@
return Generator(bit_generator())
-def __bit_generator_ctor(bit_generator_name='mt19937'):
+def __bit_generator_ctor(bit_generator_name='MT19937'):
"""
Pickling helper function that returns a bit generator object
@@ -59,7 +59,7 @@
return bit_generator()
-def __randomstate_ctor(bit_generator_name='mt19937'):
+def __randomstate_ctor(bit_generator_name='MT19937'):
"""
Pickling helper function that returns a legacy RandomState-like object
| {"golden_diff": "diff --git a/numpy/random/_pickle.py b/numpy/random/_pickle.py\n--- a/numpy/random/_pickle.py\n+++ b/numpy/random/_pickle.py\n@@ -13,7 +13,7 @@\n }\n \n \n-def __generator_ctor(bit_generator_name='mt19937'):\n+def __generator_ctor(bit_generator_name='MT19937'):\n \"\"\"\n Pickling helper function that returns a Generator object\n \n@@ -36,7 +36,7 @@\n return Generator(bit_generator())\n \n \n-def __bit_generator_ctor(bit_generator_name='mt19937'):\n+def __bit_generator_ctor(bit_generator_name='MT19937'):\n \"\"\"\n Pickling helper function that returns a bit generator object\n \n@@ -59,7 +59,7 @@\n return bit_generator()\n \n \n-def __randomstate_ctor(bit_generator_name='mt19937'):\n+def __randomstate_ctor(bit_generator_name='MT19937'):\n \"\"\"\n Pickling helper function that returns a legacy RandomState-like object\n", "issue": "Wrong capitalization on bitgenerator MT19937\nWrong capitalization of MT19937 in default argument values of functions in numpy.random._pickle.py\r\n\r\n### Reproducing code example:\r\n\r\n```python\r\nimport numpy as np\r\nnp.random._pickle.__randomstate_ctor() # => error\r\nnp.random._pickle.__randomstate_ctor('mt19937') # => error\r\nnp.random._pickle.__randomstate_ctor('MT19937') # => works\r\n```\r\n\r\n### Error message:\r\n\r\n*** ValueError: mt19937 is not a known BitGenerator module.\r\n\r\n### Numpy/Python version information:\r\n\r\n1.17.0 3.6.8 (default, Jan 14 2019, 11:02:34) \r\n[GCC 8.0.1 20180414 (experimental) [trunk revision 259383]]\r\n\n", "before_files": [{"content": "from .mtrand import RandomState\nfrom .philox import Philox\nfrom .pcg64 import PCG64\nfrom .sfc64 import SFC64\n\nfrom .generator import Generator\nfrom .mt19937 import MT19937\n\nBitGenerators = {'MT19937': MT19937,\n 'PCG64': PCG64,\n 'Philox': Philox,\n 'SFC64': SFC64,\n }\n\n\ndef __generator_ctor(bit_generator_name='mt19937'):\n \"\"\"\n Pickling helper function that returns a Generator object\n\n Parameters\n ----------\n bit_generator_name: str\n String containing the core BitGenerator\n\n Returns\n -------\n rg: Generator\n Generator using the named core BitGenerator\n \"\"\"\n if bit_generator_name in BitGenerators:\n bit_generator = BitGenerators[bit_generator_name]\n else:\n raise ValueError(str(bit_generator_name) + ' is not a known '\n 'BitGenerator module.')\n\n return Generator(bit_generator())\n\n\ndef __bit_generator_ctor(bit_generator_name='mt19937'):\n \"\"\"\n Pickling helper function that returns a bit generator object\n\n Parameters\n ----------\n bit_generator_name: str\n String containing the name of the BitGenerator\n\n Returns\n -------\n bit_generator: BitGenerator\n BitGenerator instance\n \"\"\"\n if bit_generator_name in BitGenerators:\n bit_generator = BitGenerators[bit_generator_name]\n else:\n raise ValueError(str(bit_generator_name) + ' is not a known '\n 'BitGenerator module.')\n\n return bit_generator()\n\n\ndef __randomstate_ctor(bit_generator_name='mt19937'):\n \"\"\"\n Pickling helper function that returns a legacy RandomState-like object\n\n Parameters\n ----------\n bit_generator_name: str\n String containing the core BitGenerator\n\n Returns\n -------\n rs: RandomState\n Legacy RandomState using the named core BitGenerator\n \"\"\"\n if bit_generator_name in BitGenerators:\n bit_generator = BitGenerators[bit_generator_name]\n else:\n raise ValueError(str(bit_generator_name) + ' is not a known '\n 'BitGenerator module.')\n\n return RandomState(bit_generator())\n", "path": "numpy/random/_pickle.py"}], "after_files": 
[{"content": "from .mtrand import RandomState\nfrom .philox import Philox\nfrom .pcg64 import PCG64\nfrom .sfc64 import SFC64\n\nfrom .generator import Generator\nfrom .mt19937 import MT19937\n\nBitGenerators = {'MT19937': MT19937,\n 'PCG64': PCG64,\n 'Philox': Philox,\n 'SFC64': SFC64,\n }\n\n\ndef __generator_ctor(bit_generator_name='MT19937'):\n \"\"\"\n Pickling helper function that returns a Generator object\n\n Parameters\n ----------\n bit_generator_name: str\n String containing the core BitGenerator\n\n Returns\n -------\n rg: Generator\n Generator using the named core BitGenerator\n \"\"\"\n if bit_generator_name in BitGenerators:\n bit_generator = BitGenerators[bit_generator_name]\n else:\n raise ValueError(str(bit_generator_name) + ' is not a known '\n 'BitGenerator module.')\n\n return Generator(bit_generator())\n\n\ndef __bit_generator_ctor(bit_generator_name='MT19937'):\n \"\"\"\n Pickling helper function that returns a bit generator object\n\n Parameters\n ----------\n bit_generator_name: str\n String containing the name of the BitGenerator\n\n Returns\n -------\n bit_generator: BitGenerator\n BitGenerator instance\n \"\"\"\n if bit_generator_name in BitGenerators:\n bit_generator = BitGenerators[bit_generator_name]\n else:\n raise ValueError(str(bit_generator_name) + ' is not a known '\n 'BitGenerator module.')\n\n return bit_generator()\n\n\ndef __randomstate_ctor(bit_generator_name='MT19937'):\n \"\"\"\n Pickling helper function that returns a legacy RandomState-like object\n\n Parameters\n ----------\n bit_generator_name: str\n String containing the core BitGenerator\n\n Returns\n -------\n rs: RandomState\n Legacy RandomState using the named core BitGenerator\n \"\"\"\n if bit_generator_name in BitGenerators:\n bit_generator = BitGenerators[bit_generator_name]\n else:\n raise ValueError(str(bit_generator_name) + ' is not a known '\n 'BitGenerator module.')\n\n return RandomState(bit_generator())\n", "path": "numpy/random/_pickle.py"}]} | 1,124 | 228 |
gh_patches_debug_4272 | rasdani/github-patches | git_diff | akvo__akvo-rsr-3260 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove scrollbars when they are not necessary
The home page shows a scroll bar even when there is nothing to scroll


--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `akvo/rest/views/organisation.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 # Akvo RSR is covered by the GNU Affero General Public License.
4 # See more details in the license.txt file located at the root folder of the Akvo RSR module.
5 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
6
7 from django.conf import settings
8 from django.db.models import Q
9 from django.utils import six
10 from rest_framework.decorators import api_view
11 from rest_framework.exceptions import ParseError
12 from rest_framework.parsers import JSONParser
13 from rest_framework.response import Response
14 from rest_framework_xml.parsers import XMLParser
15 from rest_framework_xml.compat import etree
16
17 from akvo.rest.views.utils import int_or_none, get_qs_elements_for_page
18 from akvo.rsr.filters import location_choices, get_m49_filter
19 from akvo.rsr.models import Project, Organisation, Country
20 from akvo.rsr.views.utils import apply_keywords, org_projects
21 from ..serializers import OrganisationSerializer, OrganisationDirectorySerializer
22 from ..viewsets import BaseRSRViewSet
23
24
25 class AkvoOrganisationParser(XMLParser):
26 def parse(self, stream, media_type=None, parser_context=None):
27 assert etree, 'XMLParser requires defusedxml to be installed'
28
29 parser_context = parser_context or {}
30 encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
31 parser = etree.DefusedXMLParser(encoding=encoding)
32 try:
33 tree = etree.parse(stream, parser=parser, forbid_dtd=True)
34 except (etree.ParseError, ValueError) as exc:
35 raise ParseError('XML parse error - %s' % six.text_type(exc))
36 return self.organisation_data_from_etree(tree.getroot())
37
38 def organisation_data_from_etree(self, tree):
39 def find_text(tree, str):
40 element = tree.find(str)
41 if element is None:
42 return ''
43 return element.text.strip() if element.text else ""
44
45 def location_data(location_tree):
46 if location_tree is None:
47 return []
48 iso_code = find_text(location_tree, 'iso_code').lower()
49 country, created = Country.objects.get_or_create(**Country.fields_from_iso_code(iso_code))
50 country = country.id
51 latitude = find_text(location_tree, 'latitude') or 0
52 longitude = find_text(location_tree, 'longitude') or 0
53 primary = True
54 return [dict(latitude=latitude, longitude=longitude, country=country, primary=primary)]
55
56 long_name = find_text(tree, 'name')
57 name = long_name[:25]
58 description = find_text(tree, 'description')
59 url = find_text(tree, 'url')
60 iati_type = find_text(tree, 'iati_organisation_type')
61 new_organisation_type = int(iati_type) if iati_type else 22
62 organisation_type = Organisation.org_type_from_iati_type(new_organisation_type)
63 locations = location_data(tree.find('location/object'))
64 return dict(
65 name=name, long_name=long_name, description=description, url=url,
66 organisation_type=organisation_type, new_organisation_type=new_organisation_type,
67 locations=locations
68 )
69
70
71 class OrganisationViewSet(BaseRSRViewSet):
72 """
73 API endpoint that allows organisations to be viewed or edited.
74 """
75 queryset = Organisation.objects.all()
76 serializer_class = OrganisationSerializer
77 parser_classes = (AkvoOrganisationParser, JSONParser,)
78
79
80 @api_view(['GET'])
81 def organisation_directory(request):
82 """REST view for the update directory."""
83
84 page = request.rsr_page
85 all_organisations = Organisation.objects.all() if not page else _page_organisations(page)
86
87 # Filter updates based on query parameters
88 filter_, text_filter = _create_filters_query(request)
89 organisations = (
90 all_organisations.filter(filter_).distinct() if filter_ is not None else all_organisations
91 )
92 organisations_text_filtered = (
93 organisations.filter(text_filter) if text_filter is not None else organisations
94 )
95 if organisations_text_filtered.exists():
96 organisations = organisations_text_filtered
97
98 # Get the relevant data for typeaheads based on filtered organisations (minus
99 # text filtering, if no organisations were found)
100 locations = [
101 {'id': choice[0], 'name': choice[1]}
102 for choice in location_choices(organisations)
103 ]
104
105 display_organisations = get_qs_elements_for_page(organisations_text_filtered, request)
106
107 # Get related objects of page at once
108 response = {
109 'project_count': all_organisations.count(),
110 'projects': OrganisationDirectorySerializer(display_organisations, many=True).data,
111 'location': locations,
112 }
113 return Response(response)
114
115
116 def _public_projects():
117 """Return all public projects."""
118 return Project.objects.public().published().select_related('partners')
119
120
121 def _page_organisations(page):
122 """Dig out the list or organisations to use."""
123 projects = org_projects(page.organisation) if page.partner_projects else _public_projects()
124 keyword_projects = apply_keywords(page, projects)
125 return keyword_projects.all_partners()
126
127
128 def _create_filters_query(request):
129 """Returns a Q object expression based on query parameters."""
130 location_param = int_or_none(request.GET.get('location'))
131 title_or_subtitle_param = request.GET.get('title_or_subtitle')
132
133 location_filter = (
134 get_m49_filter(location_param, use_recipient_country=False) if location_param else None
135 )
136 title_filter = (
137 Q(name__icontains=title_or_subtitle_param) |
138 Q(long_name__icontains=title_or_subtitle_param)
139 ) if title_or_subtitle_param else None
140 all_filters = [
141 location_filter,
142 ]
143 filters = filter(None, all_filters)
144 return reduce(lambda x, y: x & y, filters) if filters else None, title_filter
145
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/akvo/rest/views/organisation.py b/akvo/rest/views/organisation.py
--- a/akvo/rest/views/organisation.py
+++ b/akvo/rest/views/organisation.py
@@ -106,7 +106,7 @@
# Get related objects of page at once
response = {
- 'project_count': all_organisations.count(),
+ 'project_count': organisations_text_filtered.count(),
'projects': OrganisationDirectorySerializer(display_organisations, many=True).data,
'location': locations,
}
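A self-contained illustration of what the one-line change above corrects; plain Python lists stand in for the Django querysets purely for brevity (an assumption, the real view works on querysets):

```python
# Plain lists stand in for the querysets in organisation_directory().
all_organisations = list(range(10))
organisations_text_filtered = [o for o in all_organisations if o % 2 == 0]

buggy_project_count = len(all_organisations)            # pre-patch value
fixed_project_count = len(organisations_text_filtered)  # post-patch value

assert buggy_project_count == 10   # ignored the active filters
assert fixed_project_count == 5    # matches the organisations actually shown
```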
| {"golden_diff": "diff --git a/akvo/rest/views/organisation.py b/akvo/rest/views/organisation.py\n--- a/akvo/rest/views/organisation.py\n+++ b/akvo/rest/views/organisation.py\n@@ -106,7 +106,7 @@\n \n # Get related objects of page at once\n response = {\n- 'project_count': all_organisations.count(),\n+ 'project_count': organisations_text_filtered.count(),\n 'projects': OrganisationDirectorySerializer(display_organisations, many=True).data,\n 'location': locations,\n }\n", "issue": "Remove scrollbars when they are not necessary\nThe home page shows a scroll bar even when there is nothing to scroll\r\n\r\n\r\n\r\n\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\nfrom django.conf import settings\nfrom django.db.models import Q\nfrom django.utils import six\nfrom rest_framework.decorators import api_view\nfrom rest_framework.exceptions import ParseError\nfrom rest_framework.parsers import JSONParser\nfrom rest_framework.response import Response\nfrom rest_framework_xml.parsers import XMLParser\nfrom rest_framework_xml.compat import etree\n\nfrom akvo.rest.views.utils import int_or_none, get_qs_elements_for_page\nfrom akvo.rsr.filters import location_choices, get_m49_filter\nfrom akvo.rsr.models import Project, Organisation, Country\nfrom akvo.rsr.views.utils import apply_keywords, org_projects\nfrom ..serializers import OrganisationSerializer, OrganisationDirectorySerializer\nfrom ..viewsets import BaseRSRViewSet\n\n\nclass AkvoOrganisationParser(XMLParser):\n def parse(self, stream, media_type=None, parser_context=None):\n assert etree, 'XMLParser requires defusedxml to be installed'\n\n parser_context = parser_context or {}\n encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)\n parser = etree.DefusedXMLParser(encoding=encoding)\n try:\n tree = etree.parse(stream, parser=parser, forbid_dtd=True)\n except (etree.ParseError, ValueError) as exc:\n raise ParseError('XML parse error - %s' % six.text_type(exc))\n return self.organisation_data_from_etree(tree.getroot())\n\n def organisation_data_from_etree(self, tree):\n def find_text(tree, str):\n element = tree.find(str)\n if element is None:\n return ''\n return element.text.strip() if element.text else \"\"\n\n def location_data(location_tree):\n if location_tree is None:\n return []\n iso_code = find_text(location_tree, 'iso_code').lower()\n country, created = Country.objects.get_or_create(**Country.fields_from_iso_code(iso_code))\n country = country.id\n latitude = find_text(location_tree, 'latitude') or 0\n longitude = find_text(location_tree, 'longitude') or 0\n primary = True\n return [dict(latitude=latitude, longitude=longitude, country=country, primary=primary)]\n\n long_name = find_text(tree, 'name')\n name = long_name[:25]\n description = find_text(tree, 'description')\n url = find_text(tree, 'url')\n iati_type = find_text(tree, 'iati_organisation_type')\n new_organisation_type = int(iati_type) if iati_type else 22\n organisation_type = Organisation.org_type_from_iati_type(new_organisation_type)\n locations = location_data(tree.find('location/object'))\n return dict(\n name=name, long_name=long_name, description=description, url=url,\n organisation_type=organisation_type, new_organisation_type=new_organisation_type,\n locations=locations\n 
)\n\n\nclass OrganisationViewSet(BaseRSRViewSet):\n \"\"\"\n API endpoint that allows organisations to be viewed or edited.\n \"\"\"\n queryset = Organisation.objects.all()\n serializer_class = OrganisationSerializer\n parser_classes = (AkvoOrganisationParser, JSONParser,)\n\n\n@api_view(['GET'])\ndef organisation_directory(request):\n \"\"\"REST view for the update directory.\"\"\"\n\n page = request.rsr_page\n all_organisations = Organisation.objects.all() if not page else _page_organisations(page)\n\n # Filter updates based on query parameters\n filter_, text_filter = _create_filters_query(request)\n organisations = (\n all_organisations.filter(filter_).distinct() if filter_ is not None else all_organisations\n )\n organisations_text_filtered = (\n organisations.filter(text_filter) if text_filter is not None else organisations\n )\n if organisations_text_filtered.exists():\n organisations = organisations_text_filtered\n\n # Get the relevant data for typeaheads based on filtered organisations (minus\n # text filtering, if no organisations were found)\n locations = [\n {'id': choice[0], 'name': choice[1]}\n for choice in location_choices(organisations)\n ]\n\n display_organisations = get_qs_elements_for_page(organisations_text_filtered, request)\n\n # Get related objects of page at once\n response = {\n 'project_count': all_organisations.count(),\n 'projects': OrganisationDirectorySerializer(display_organisations, many=True).data,\n 'location': locations,\n }\n return Response(response)\n\n\ndef _public_projects():\n \"\"\"Return all public projects.\"\"\"\n return Project.objects.public().published().select_related('partners')\n\n\ndef _page_organisations(page):\n \"\"\"Dig out the list or organisations to use.\"\"\"\n projects = org_projects(page.organisation) if page.partner_projects else _public_projects()\n keyword_projects = apply_keywords(page, projects)\n return keyword_projects.all_partners()\n\n\ndef _create_filters_query(request):\n \"\"\"Returns a Q object expression based on query parameters.\"\"\"\n location_param = int_or_none(request.GET.get('location'))\n title_or_subtitle_param = request.GET.get('title_or_subtitle')\n\n location_filter = (\n get_m49_filter(location_param, use_recipient_country=False) if location_param else None\n )\n title_filter = (\n Q(name__icontains=title_or_subtitle_param) |\n Q(long_name__icontains=title_or_subtitle_param)\n ) if title_or_subtitle_param else None\n all_filters = [\n location_filter,\n ]\n filters = filter(None, all_filters)\n return reduce(lambda x, y: x & y, filters) if filters else None, title_filter\n", "path": "akvo/rest/views/organisation.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\nfrom django.conf import settings\nfrom django.db.models import Q\nfrom django.utils import six\nfrom rest_framework.decorators import api_view\nfrom rest_framework.exceptions import ParseError\nfrom rest_framework.parsers import JSONParser\nfrom rest_framework.response import Response\nfrom rest_framework_xml.parsers import XMLParser\nfrom rest_framework_xml.compat import etree\n\nfrom akvo.rest.views.utils import int_or_none, get_qs_elements_for_page\nfrom akvo.rsr.filters import location_choices, get_m49_filter\nfrom akvo.rsr.models import Project, Organisation, 
Country\nfrom akvo.rsr.views.utils import apply_keywords, org_projects\nfrom ..serializers import OrganisationSerializer, OrganisationDirectorySerializer\nfrom ..viewsets import BaseRSRViewSet\n\n\nclass AkvoOrganisationParser(XMLParser):\n def parse(self, stream, media_type=None, parser_context=None):\n assert etree, 'XMLParser requires defusedxml to be installed'\n\n parser_context = parser_context or {}\n encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)\n parser = etree.DefusedXMLParser(encoding=encoding)\n try:\n tree = etree.parse(stream, parser=parser, forbid_dtd=True)\n except (etree.ParseError, ValueError) as exc:\n raise ParseError('XML parse error - %s' % six.text_type(exc))\n return self.organisation_data_from_etree(tree.getroot())\n\n def organisation_data_from_etree(self, tree):\n def find_text(tree, str):\n element = tree.find(str)\n if element is None:\n return ''\n return element.text.strip() if element.text else \"\"\n\n def location_data(location_tree):\n if location_tree is None:\n return []\n iso_code = find_text(location_tree, 'iso_code').lower()\n country, created = Country.objects.get_or_create(**Country.fields_from_iso_code(iso_code))\n country = country.id\n latitude = find_text(location_tree, 'latitude') or 0\n longitude = find_text(location_tree, 'longitude') or 0\n primary = True\n return [dict(latitude=latitude, longitude=longitude, country=country, primary=primary)]\n\n long_name = find_text(tree, 'name')\n name = long_name[:25]\n description = find_text(tree, 'description')\n url = find_text(tree, 'url')\n iati_type = find_text(tree, 'iati_organisation_type')\n new_organisation_type = int(iati_type) if iati_type else 22\n organisation_type = Organisation.org_type_from_iati_type(new_organisation_type)\n locations = location_data(tree.find('location/object'))\n return dict(\n name=name, long_name=long_name, description=description, url=url,\n organisation_type=organisation_type, new_organisation_type=new_organisation_type,\n locations=locations\n )\n\n\nclass OrganisationViewSet(BaseRSRViewSet):\n \"\"\"\n API endpoint that allows organisations to be viewed or edited.\n \"\"\"\n queryset = Organisation.objects.all()\n serializer_class = OrganisationSerializer\n parser_classes = (AkvoOrganisationParser, JSONParser,)\n\n\n@api_view(['GET'])\ndef organisation_directory(request):\n \"\"\"REST view for the update directory.\"\"\"\n\n page = request.rsr_page\n all_organisations = Organisation.objects.all() if not page else _page_organisations(page)\n\n # Filter updates based on query parameters\n filter_, text_filter = _create_filters_query(request)\n organisations = (\n all_organisations.filter(filter_).distinct() if filter_ is not None else all_organisations\n )\n organisations_text_filtered = (\n organisations.filter(text_filter) if text_filter is not None else organisations\n )\n if organisations_text_filtered.exists():\n organisations = organisations_text_filtered\n\n # Get the relevant data for typeaheads based on filtered organisations (minus\n # text filtering, if no organisations were found)\n locations = [\n {'id': choice[0], 'name': choice[1]}\n for choice in location_choices(organisations)\n ]\n\n display_organisations = get_qs_elements_for_page(organisations_text_filtered, request)\n\n # Get related objects of page at once\n response = {\n 'project_count': organisations_text_filtered.count(),\n 'projects': OrganisationDirectorySerializer(display_organisations, many=True).data,\n 'location': locations,\n }\n return 
Response(response)\n\n\ndef _public_projects():\n \"\"\"Return all public projects.\"\"\"\n return Project.objects.public().published().select_related('partners')\n\n\ndef _page_organisations(page):\n \"\"\"Dig out the list or organisations to use.\"\"\"\n projects = org_projects(page.organisation) if page.partner_projects else _public_projects()\n keyword_projects = apply_keywords(page, projects)\n return keyword_projects.all_partners()\n\n\ndef _create_filters_query(request):\n \"\"\"Returns a Q object expression based on query parameters.\"\"\"\n location_param = int_or_none(request.GET.get('location'))\n title_or_subtitle_param = request.GET.get('title_or_subtitle')\n\n location_filter = (\n get_m49_filter(location_param, use_recipient_country=False) if location_param else None\n )\n title_filter = (\n Q(name__icontains=title_or_subtitle_param) |\n Q(long_name__icontains=title_or_subtitle_param)\n ) if title_or_subtitle_param else None\n all_filters = [\n location_filter,\n ]\n filters = filter(None, all_filters)\n return reduce(lambda x, y: x & y, filters) if filters else None, title_filter\n", "path": "akvo/rest/views/organisation.py"}]} | 1,967 | 121 |
gh_patches_debug_23524 | rasdani/github-patches | git_diff | pypa__setuptools-2316 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Reconsidering distutils replacement strategy
I first expressed concerns about the fact that `distutils` needs to be imported *after* `setuptools` in [this post](https://github.com/pypa/setuptools/pull/2143#issuecomment-637586551), and I did not realize that we had not adequately addressed this before the `distutils` adoption. I think we should re-consider the way this is done, to avoid any sort of requirement on the sort order.
Currently, when you `import setuptools`, [`distutils` is injected into `sys.modules`](https://github.com/pypa/setuptools/blob/37d81f4ce8f08c4baf44b6ff0f3f1bd3f6b2a127/setuptools/distutils_patch.py#L48-L61).
The problem here is that it requires importing `setuptools` before `distutils`, which most people will *not* be doing right now (by almost all common import sort orders, `distutils` comes first).
@jaraco [explains why it's done this way here](https://github.com/pypa/setuptools/issues/2230#issuecomment-657327796):
> I did consider another approach, where Setuptools could add a .pth file that would import setuptools.distutils_patch. Such an approach would always happen earlier and so would not be subject to the race that's happening here, but it would also happen whether or not setuptools was imported (on any invocation of Python in that environment).
I think one preferred solution would be for us to install a `distutils` package that would be found before the standard library's `distutils`. The problem is that in the standard `sys.path`, `site-packages` comes *after* the standard library. Since anything we do here will be a tremendous hack, I'm thinking that maybe the most surgical way to accomplish this is to do something like this:

1. Create a `setuptools/_import_hack/distutils/__init__.py` (where `_import_hack` itself has no `__init__.py`, so it is never importable as a package) that invokes `import setuptools.distutils_patch`.
2. Add a `.pth` file that injects `$SITEPACKAGES/setuptools/_import_hack/` into `sys.path` *before* the standard library.
Another option I think we should consider is stepping up the timetable on providing everything `distutils` provides directly from the `setuptools` namespace, [as I described here](https://github.com/pypa/setuptools/pull/2143#issuecomment-637586551):
> I personally would prefer it if we made the setuptools namespace the canonical namespace, and have distutils just import symbols from setuptools and possibly wrap them in deprecation warnings.
Right now, there are things that you *must* import from `distutils`, and so we can't say, "You can avoid an issue with the sort order by just not importing `distutils`". If we provide everything that `distutils` provides in the `setuptools` namespace, we can just say, "Just use the `setuptools` version and you won't have this problem."
Personally, I think we should do a combination of both — make it so importing `distutils` still works in whatever order you do it in *and* move everything to the canonical `setuptools` namespace. That will make it easier to actively deprecate direct use of `distutils`, and pave the way for us eventually removing the need for the `.pth` file (though the fastest road to not having the `.pth` file will be via removing `distutils` from the standard library).
--- END ISSUE ---
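To make option 1 above concrete, here is a minimal sketch; the `setuptools/_import_hack/` layout and the `setuptools.distutils_patch` module name come from the issue text, while the `.pth` one-liner and the shim body are assumptions (the files below show that the repository instead went with a `sys.meta_path` finder):

```python
# site-packages/zzz_distutils_shim.pth (file name hypothetical) would hold one
# executable line, putting the shim directory ahead of the standard library:
#   import os, sys, setuptools; sys.path.insert(0, os.path.join(os.path.dirname(setuptools.__file__), '_import_hack'))

# setuptools/_import_hack/distutils/__init__.py -- found before the stdlib
# distutils thanks to the sys.path injection above:
import setuptools.distutils_patch  # noqa: F401  (re-points sys.modules['distutils'])
```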
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `_distutils_hack/__init__.py`
Content:
```
1 import sys
2 import os
3 import re
4 import importlib
5 import warnings
6
7
8 is_pypy = '__pypy__' in sys.builtin_module_names
9
10
11 def warn_distutils_present():
12 if 'distutils' not in sys.modules:
13 return
14 if is_pypy and sys.version_info < (3, 7):
15 # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
16 # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
17 return
18 warnings.warn(
19 "Distutils was imported before Setuptools. This usage is discouraged "
20 "and may exhibit undesirable behaviors or errors. Please use "
21 "Setuptools' objects directly or at least import Setuptools first.")
22
23
24 def clear_distutils():
25 if 'distutils' not in sys.modules:
26 return
27 warnings.warn("Setuptools is replacing distutils.")
28 mods = [name for name in sys.modules if re.match(r'distutils\b', name)]
29 for name in mods:
30 del sys.modules[name]
31
32
33 def enabled():
34 """
35 Allow selection of distutils by environment variable.
36 """
37 which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')
38 return which == 'local'
39
40
41 def ensure_local_distutils():
42 clear_distutils()
43 distutils = importlib.import_module('setuptools._distutils')
44 distutils.__name__ = 'distutils'
45 sys.modules['distutils'] = distutils
46
47 # sanity check that submodules load as expected
48 core = importlib.import_module('distutils.core')
49 assert '_distutils' in core.__file__, core.__file__
50
51
52 def do_override():
53 """
54 Ensure that the local copy of distutils is preferred over stdlib.
55
56 See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
57 for more motivation.
58 """
59 warn_distutils_present()
60 if enabled():
61 ensure_local_distutils()
62
63
64 class DistutilsMetaFinder:
65 def find_spec(self, fullname, path, target=None):
66 if path is not None or fullname != "distutils":
67 return None
68
69 return self.get_distutils_spec()
70
71 def get_distutils_spec(self):
72 import importlib.util
73
74 class DistutilsLoader(importlib.util.abc.Loader):
75
76 def create_module(self, spec):
77 return importlib.import_module('._distutils', 'setuptools')
78
79 def exec_module(self, module):
80 pass
81
82 return importlib.util.spec_from_loader('distutils', DistutilsLoader())
83
84
85 DISTUTILS_FINDER = DistutilsMetaFinder()
86
87
88 def add_shim():
89 sys.meta_path.insert(0, DISTUTILS_FINDER)
90
91
92 def remove_shim():
93 try:
94 sys.meta_path.remove(DISTUTILS_FINDER)
95 except ValueError:
96 pass
97
```
--- END FILES ---
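A usage sketch for the opt-in switch shown above, assuming a fresh interpreter where `distutils` has not been imported yet and a setuptools build that vendors `_distutils` (both assumptions, not shown in this record):

```python
import os

# The environment variable name and the 'local' value come from enabled() above.
os.environ["SETUPTOOLS_USE_DISTUTILS"] = "local"

import _distutils_hack
_distutils_hack.do_override()  # swaps sys.modules['distutils'] for the vendored copy

import distutils.core
assert "_distutils" in distutils.core.__file__  # same check ensure_local_distutils() makes
```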
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
--- a/_distutils_hack/__init__.py
+++ b/_distutils_hack/__init__.py
@@ -16,9 +16,12 @@
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
return
warnings.warn(
- "Distutils was imported before Setuptools. This usage is discouraged "
- "and may exhibit undesirable behaviors or errors. Please use "
- "Setuptools' objects directly or at least import Setuptools first.")
+ "Distutils was imported before Setuptools, but importing Setuptools "
+ "also replaces the `distutils` module in `sys.modules`. This may lead "
+ "to undesirable behaviors or errors. To avoid these issues, avoid "
+ "using distutils directly, ensure that setuptools is installed in the "
+ "traditional way (e.g. not an editable install), and/or make sure that "
+ "setuptools is always imported before distutils.")
def clear_distutils():
@@ -56,8 +59,8 @@
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
for more motivation.
"""
- warn_distutils_present()
if enabled():
+ warn_distutils_present()
ensure_local_distutils()
| {"golden_diff": "diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py\n--- a/_distutils_hack/__init__.py\n+++ b/_distutils_hack/__init__.py\n@@ -16,9 +16,12 @@\n # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250\n return\n warnings.warn(\n- \"Distutils was imported before Setuptools. This usage is discouraged \"\n- \"and may exhibit undesirable behaviors or errors. Please use \"\n- \"Setuptools' objects directly or at least import Setuptools first.\")\n+ \"Distutils was imported before Setuptools, but importing Setuptools \"\n+ \"also replaces the `distutils` module in `sys.modules`. This may lead \"\n+ \"to undesirable behaviors or errors. To avoid these issues, avoid \"\n+ \"using distutils directly, ensure that setuptools is installed in the \"\n+ \"traditional way (e.g. not an editable install), and/or make sure that \"\n+ \"setuptools is always imported before distutils.\")\n \n \n def clear_distutils():\n@@ -56,8 +59,8 @@\n See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401\n for more motivation.\n \"\"\"\n- warn_distutils_present()\n if enabled():\n+ warn_distutils_present()\n ensure_local_distutils()\n", "issue": "Reconsidering distutils replacement strategy\nI first expressed concerns about the fact that `distutils` needs to be imported *after* `setuptools` in [this post](https://github.com/pypa/setuptools/pull/2143#issuecomment-637586551), and I did not realize that we had not adequately addressed this before the `distutils` adoption. I think we should re-consider the way this is done, to avoid any sort of requirement on the sort order.\r\n\r\nCurrently, when you `import setuptools`, [`distutils` is injected into `sys.modules`](https://github.com/pypa/setuptools/blob/37d81f4ce8f08c4baf44b6ff0f3f1bd3f6b2a127/setuptools/distutils_patch.py#L48-L61).\r\n\r\nThe problem here is that it requires importing `setuptools` before `distutils`, which most people will *not* be doing right now (by almost all common import sort orders, `distutils` comes first).\r\n\r\n@jaraco [explains why it's done this way here](https://github.com/pypa/setuptools/issues/2230#issuecomment-657327796):\r\n\r\n> I did consider another approach, where Setuptools could add a .pth file that would import setuptools.distutils_patch. Such an approach would always happen earlier and so would not be subject to the race that's happening here, but it would also happen whether or not setuptools was imported (on any invocation of Python in that environment).\r\n\r\nI think one preferred solution would be for us to install a `distutils` package that would be imported before `distutils`. The problem is that in the standard `sys.path`, `site-packages` comes *after* the standard library. Since anything we do here will be a tremendous hack, I'm thinking that maybe the most surgical way to accomplish this is to do something like this:\r\n\r\n1. Create a `setuptools._distutils/import_hack/distutils/__init__.py` (where there's no `__init__.py` in `import_hack`) that invokes `import setuptools.distutils_patch`.\r\n2. 
Add a `.pth` file that injects `$SITEPACKAGES/setuptools/_import_hack/` into `sys.path` *before* the standard library.\r\n\r\nAnother option I think we should consider is stepping up the timetable on providing everything `distutils` provides directly from the `setuptools` namespace, [as I described here](https://github.com/pypa/setuptools/pull/2143#issuecomment-637586551):\r\n\r\n> I personally would prefer it if we made the setuptools namespace the canonical namespace, and have distutils just import symbols from setuptools and possibly wrap them in deprecation warnings.\r\n\r\nRight now, there are things that you *must* import from `distutils`, and so we can't say, \"You can avoid an issue with the sort order by just not importing `distutils`\". If we provide everything that `distutils` provides in the `setuptools` namespace, we can just say, \"Just use the `setuptools` version and you won't have this problem.\"\r\n\r\nPersonally, I think we should do a combination of both \u2014 make it so importing `distutils` still works in whatever order you do it in *and* move everything to the canonical `setuptools` namespace. That will make it easier to actively deprecate direct use of `distutils`, and pave the way for us eventually removing the need for the `.pth` file (though the fastest road to not having the `.pth` file will be via removing `distutils` from the standard library).\n", "before_files": [{"content": "import sys\nimport os\nimport re\nimport importlib\nimport warnings\n\n\nis_pypy = '__pypy__' in sys.builtin_module_names\n\n\ndef warn_distutils_present():\n if 'distutils' not in sys.modules:\n return\n if is_pypy and sys.version_info < (3, 7):\n # PyPy for 3.6 unconditionally imports distutils, so bypass the warning\n # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250\n return\n warnings.warn(\n \"Distutils was imported before Setuptools. This usage is discouraged \"\n \"and may exhibit undesirable behaviors or errors. 
Please use \"\n \"Setuptools' objects directly or at least import Setuptools first.\")\n\n\ndef clear_distutils():\n if 'distutils' not in sys.modules:\n return\n warnings.warn(\"Setuptools is replacing distutils.\")\n mods = [name for name in sys.modules if re.match(r'distutils\\b', name)]\n for name in mods:\n del sys.modules[name]\n\n\ndef enabled():\n \"\"\"\n Allow selection of distutils by environment variable.\n \"\"\"\n which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')\n return which == 'local'\n\n\ndef ensure_local_distutils():\n clear_distutils()\n distutils = importlib.import_module('setuptools._distutils')\n distutils.__name__ = 'distutils'\n sys.modules['distutils'] = distutils\n\n # sanity check that submodules load as expected\n core = importlib.import_module('distutils.core')\n assert '_distutils' in core.__file__, core.__file__\n\n\ndef do_override():\n \"\"\"\n Ensure that the local copy of distutils is preferred over stdlib.\n\n See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401\n for more motivation.\n \"\"\"\n warn_distutils_present()\n if enabled():\n ensure_local_distutils()\n\n\nclass DistutilsMetaFinder:\n def find_spec(self, fullname, path, target=None):\n if path is not None or fullname != \"distutils\":\n return None\n\n return self.get_distutils_spec()\n\n def get_distutils_spec(self):\n import importlib.util\n\n class DistutilsLoader(importlib.util.abc.Loader):\n\n def create_module(self, spec):\n return importlib.import_module('._distutils', 'setuptools')\n\n def exec_module(self, module):\n pass\n\n return importlib.util.spec_from_loader('distutils', DistutilsLoader())\n\n\nDISTUTILS_FINDER = DistutilsMetaFinder()\n\n\ndef add_shim():\n sys.meta_path.insert(0, DISTUTILS_FINDER)\n\n\ndef remove_shim():\n try:\n sys.meta_path.remove(DISTUTILS_FINDER)\n except ValueError:\n pass\n", "path": "_distutils_hack/__init__.py"}], "after_files": [{"content": "import sys\nimport os\nimport re\nimport importlib\nimport warnings\n\n\nis_pypy = '__pypy__' in sys.builtin_module_names\n\n\ndef warn_distutils_present():\n if 'distutils' not in sys.modules:\n return\n if is_pypy and sys.version_info < (3, 7):\n # PyPy for 3.6 unconditionally imports distutils, so bypass the warning\n # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250\n return\n warnings.warn(\n \"Distutils was imported before Setuptools, but importing Setuptools \"\n \"also replaces the `distutils` module in `sys.modules`. This may lead \"\n \"to undesirable behaviors or errors. To avoid these issues, avoid \"\n \"using distutils directly, ensure that setuptools is installed in the \"\n \"traditional way (e.g. 
not an editable install), and/or make sure that \"\n \"setuptools is always imported before distutils.\")\n\n\ndef clear_distutils():\n if 'distutils' not in sys.modules:\n return\n warnings.warn(\"Setuptools is replacing distutils.\")\n mods = [name for name in sys.modules if re.match(r'distutils\\b', name)]\n for name in mods:\n del sys.modules[name]\n\n\ndef enabled():\n \"\"\"\n Allow selection of distutils by environment variable.\n \"\"\"\n which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')\n return which == 'local'\n\n\ndef ensure_local_distutils():\n clear_distutils()\n distutils = importlib.import_module('setuptools._distutils')\n distutils.__name__ = 'distutils'\n sys.modules['distutils'] = distutils\n\n # sanity check that submodules load as expected\n core = importlib.import_module('distutils.core')\n assert '_distutils' in core.__file__, core.__file__\n\n\ndef do_override():\n \"\"\"\n Ensure that the local copy of distutils is preferred over stdlib.\n\n See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401\n for more motivation.\n \"\"\"\n if enabled():\n warn_distutils_present()\n ensure_local_distutils()\n\n\nclass DistutilsMetaFinder:\n def find_spec(self, fullname, path, target=None):\n if path is not None or fullname != \"distutils\":\n return None\n\n return self.get_distutils_spec()\n\n def get_distutils_spec(self):\n import importlib.util\n\n class DistutilsLoader(importlib.util.abc.Loader):\n\n def create_module(self, spec):\n return importlib.import_module('._distutils', 'setuptools')\n\n def exec_module(self, module):\n pass\n\n return importlib.util.spec_from_loader('distutils', DistutilsLoader())\n\n\nDISTUTILS_FINDER = DistutilsMetaFinder()\n\n\ndef add_shim():\n sys.meta_path.insert(0, DISTUTILS_FINDER)\n\n\ndef remove_shim():\n try:\n sys.meta_path.remove(DISTUTILS_FINDER)\n except ValueError:\n pass\n", "path": "_distutils_hack/__init__.py"}]} | 1,916 | 360 |
gh_patches_debug_2666 | rasdani/github-patches | git_diff | netbox-community__netbox-14935 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Typo in DataSourceBulkEditForm
### Deployment Type
Self-hosted
### NetBox Version
v3.7.1
### Python Version
3.8
### Steps to Reproduce
"lavel" is defined as "Enforce unique space", but I think the correct definition is "Enabled".
https://github.com/netbox-community/netbox/blob/487f1ccfde26ef3c1f8a28089826acc0cd6fadb2/netbox/core/forms/bulk_edit.py#L21-L25
- Add a new data source

- Editing 1 Data Sources

### Expected Behavior
Enabled
### Observed Behavior
Enforce unique space
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `netbox/core/forms/bulk_edit.py`
Content:
```
1 from django import forms
2 from django.utils.translation import gettext_lazy as _
3
4 from core.models import *
5 from netbox.forms import NetBoxModelBulkEditForm
6 from netbox.utils import get_data_backend_choices
7 from utilities.forms.fields import CommentField
8 from utilities.forms.widgets import BulkEditNullBooleanSelect
9
10 __all__ = (
11 'DataSourceBulkEditForm',
12 )
13
14
15 class DataSourceBulkEditForm(NetBoxModelBulkEditForm):
16 type = forms.ChoiceField(
17 label=_('Type'),
18 choices=get_data_backend_choices,
19 required=False
20 )
21 enabled = forms.NullBooleanField(
22 required=False,
23 widget=BulkEditNullBooleanSelect(),
24 label=_('Enforce unique space')
25 )
26 description = forms.CharField(
27 label=_('Description'),
28 max_length=200,
29 required=False
30 )
31 comments = CommentField()
32 parameters = forms.JSONField(
33 label=_('Parameters'),
34 required=False
35 )
36 ignore_rules = forms.CharField(
37 label=_('Ignore rules'),
38 required=False,
39 widget=forms.Textarea()
40 )
41
42 model = DataSource
43 fieldsets = (
44 (None, ('type', 'enabled', 'description', 'comments', 'parameters', 'ignore_rules')),
45 )
46 nullable_fields = (
47 'description', 'description', 'parameters', 'comments', 'parameters', 'ignore_rules',
48 )
49
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/netbox/core/forms/bulk_edit.py b/netbox/core/forms/bulk_edit.py
--- a/netbox/core/forms/bulk_edit.py
+++ b/netbox/core/forms/bulk_edit.py
@@ -21,7 +21,7 @@
enabled = forms.NullBooleanField(
required=False,
widget=BulkEditNullBooleanSelect(),
- label=_('Enforce unique space')
+ label=_('Enabled')
)
description = forms.CharField(
label=_('Description'),
| {"golden_diff": "diff --git a/netbox/core/forms/bulk_edit.py b/netbox/core/forms/bulk_edit.py\n--- a/netbox/core/forms/bulk_edit.py\n+++ b/netbox/core/forms/bulk_edit.py\n@@ -21,7 +21,7 @@\n enabled = forms.NullBooleanField(\n required=False,\n widget=BulkEditNullBooleanSelect(),\n- label=_('Enforce unique space')\n+ label=_('Enabled')\n )\n description = forms.CharField(\n label=_('Description'),\n", "issue": "Typo in DataSourceBulkEditForm\n### Deployment Type\n\nSelf-hosted\n\n### NetBox Version\n\nv3.7.1\n\n### Python Version\n\n3.8\n\n### Steps to Reproduce\n\n\"lavel\" is defined as \"Enforce unique space\", but I think the correct definition is \"Enabled\".\r\n\r\nhttps://github.com/netbox-community/netbox/blob/487f1ccfde26ef3c1f8a28089826acc0cd6fadb2/netbox/core/forms/bulk_edit.py#L21-L25\r\n\r\n- Add a new data source\r\n\r\n\r\n- Editing 1 Data Sources\r\n\r\n\n\n### Expected Behavior\n\nEnabled\n\n### Observed Behavior\n\nEnforce unique space\n", "before_files": [{"content": "from django import forms\nfrom django.utils.translation import gettext_lazy as _\n\nfrom core.models import *\nfrom netbox.forms import NetBoxModelBulkEditForm\nfrom netbox.utils import get_data_backend_choices\nfrom utilities.forms.fields import CommentField\nfrom utilities.forms.widgets import BulkEditNullBooleanSelect\n\n__all__ = (\n 'DataSourceBulkEditForm',\n)\n\n\nclass DataSourceBulkEditForm(NetBoxModelBulkEditForm):\n type = forms.ChoiceField(\n label=_('Type'),\n choices=get_data_backend_choices,\n required=False\n )\n enabled = forms.NullBooleanField(\n required=False,\n widget=BulkEditNullBooleanSelect(),\n label=_('Enforce unique space')\n )\n description = forms.CharField(\n label=_('Description'),\n max_length=200,\n required=False\n )\n comments = CommentField()\n parameters = forms.JSONField(\n label=_('Parameters'),\n required=False\n )\n ignore_rules = forms.CharField(\n label=_('Ignore rules'),\n required=False,\n widget=forms.Textarea()\n )\n\n model = DataSource\n fieldsets = (\n (None, ('type', 'enabled', 'description', 'comments', 'parameters', 'ignore_rules')),\n )\n nullable_fields = (\n 'description', 'description', 'parameters', 'comments', 'parameters', 'ignore_rules',\n )\n", "path": "netbox/core/forms/bulk_edit.py"}], "after_files": [{"content": "from django import forms\nfrom django.utils.translation import gettext_lazy as _\n\nfrom core.models import *\nfrom netbox.forms import NetBoxModelBulkEditForm\nfrom netbox.utils import get_data_backend_choices\nfrom utilities.forms.fields import CommentField\nfrom utilities.forms.widgets import BulkEditNullBooleanSelect\n\n__all__ = (\n 'DataSourceBulkEditForm',\n)\n\n\nclass DataSourceBulkEditForm(NetBoxModelBulkEditForm):\n type = forms.ChoiceField(\n label=_('Type'),\n choices=get_data_backend_choices,\n required=False\n )\n enabled = forms.NullBooleanField(\n required=False,\n widget=BulkEditNullBooleanSelect(),\n label=_('Enabled')\n )\n description = forms.CharField(\n label=_('Description'),\n max_length=200,\n required=False\n )\n comments = CommentField()\n parameters = forms.JSONField(\n label=_('Parameters'),\n required=False\n )\n ignore_rules = forms.CharField(\n label=_('Ignore rules'),\n required=False,\n widget=forms.Textarea()\n )\n\n model = DataSource\n fieldsets = (\n (None, ('type', 'enabled', 'description', 'comments', 'parameters', 'ignore_rules')),\n )\n nullable_fields = (\n 'description', 'description', 'parameters', 'comments', 'parameters', 'ignore_rules',\n )\n", "path": 
"netbox/core/forms/bulk_edit.py"}]} | 922 | 105 |
gh_patches_debug_63309 | rasdani/github-patches | git_diff | scikit-hep__pyhf-924 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove cloudpickle constraints when updating to TensorFlow Probability v0.11
# Description
Once TensorFlow Probability `v0.11.0` is released, there will no longer be a need for PR #915, and so that should be reverted.
Related Issues: #815
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import setup
2
3 extras_require = {
4 'tensorflow': [
5 'tensorflow~=2.0',
6 'tensorflow-probability~=0.8',
7 'cloudpickle!=1.5.0', # TODO: Temp patch until tfp v0.11
8 ],
9 'torch': ['torch~=1.2'],
10 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],
11 'xmlio': ['uproot'],
12 'minuit': ['iminuit'],
13 }
14 extras_require['backends'] = sorted(
15 set(
16 extras_require['tensorflow']
17 + extras_require['torch']
18 + extras_require['jax']
19 + extras_require['minuit']
20 )
21 )
22 extras_require['contrib'] = sorted(set(['matplotlib']))
23 extras_require['lint'] = sorted(set(['pyflakes', 'black']))
24
25 extras_require['test'] = sorted(
26 set(
27 extras_require['backends']
28 + extras_require['xmlio']
29 + extras_require['contrib']
30 + [
31 'pytest~=3.5',
32 'pytest-cov>=2.5.1',
33 'pytest-mock',
34 'pytest-benchmark[histogram]',
35 'pytest-console-scripts',
36 'pytest-mpl',
37 'pydocstyle',
38 'coverage>=4.0', # coveralls
39 'papermill~=2.0',
40 'nteract-scrapbook~=0.2',
41 'jupyter',
42 'uproot~=3.3',
43 'graphviz',
44 'jsonpatch',
45 ]
46 )
47 )
48 extras_require['docs'] = sorted(
49 set(
50 [
51 'sphinx~=3.0.0', # Sphinx v3.1.X regressions break docs
52 'sphinxcontrib-bibtex',
53 'sphinx-click',
54 'sphinx_rtd_theme',
55 'nbsphinx',
56 'ipywidgets',
57 'sphinx-issues',
58 'sphinx-copybutton>0.2.9',
59 ]
60 )
61 )
62 extras_require['develop'] = sorted(
63 set(
64 extras_require['docs']
65 + extras_require['lint']
66 + extras_require['test']
67 + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'check-manifest', 'twine']
68 )
69 )
70 extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
71
72
73 setup(
74 extras_require=extras_require,
75 use_scm_version=lambda: {'local_scheme': lambda version: ''},
76 )
77
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -3,8 +3,7 @@
extras_require = {
'tensorflow': [
'tensorflow~=2.0',
- 'tensorflow-probability~=0.8',
- 'cloudpickle!=1.5.0', # TODO: Temp patch until tfp v0.11
+ 'tensorflow-probability~=0.10', # TODO: Temp patch until tfp v0.11
],
'torch': ['torch~=1.2'],
'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -3,8 +3,7 @@\n extras_require = {\n 'tensorflow': [\n 'tensorflow~=2.0',\n- 'tensorflow-probability~=0.8',\n- 'cloudpickle!=1.5.0', # TODO: Temp patch until tfp v0.11\n+ 'tensorflow-probability~=0.10', # TODO: Temp patch until tfp v0.11\n ],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],\n", "issue": "Remove cloudpickle constraints when updating to TensorFlow Probability v0.11\n# Description\r\n\r\nOnce TensorFlow Probability `v0.11.0` is released there will no longer be the need for PR #915, and so that should be reverted.\r\n\r\nRelated Issues: #815 \r\n\n", "before_files": [{"content": "from setuptools import setup\n\nextras_require = {\n 'tensorflow': [\n 'tensorflow~=2.0',\n 'tensorflow-probability~=0.8',\n 'cloudpickle!=1.5.0', # TODO: Temp patch until tfp v0.11\n ],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],\n 'xmlio': ['uproot'],\n 'minuit': ['iminuit'],\n}\nextras_require['backends'] = sorted(\n set(\n extras_require['tensorflow']\n + extras_require['torch']\n + extras_require['jax']\n + extras_require['minuit']\n )\n)\nextras_require['contrib'] = sorted(set(['matplotlib']))\nextras_require['lint'] = sorted(set(['pyflakes', 'black']))\n\nextras_require['test'] = sorted(\n set(\n extras_require['backends']\n + extras_require['xmlio']\n + extras_require['contrib']\n + [\n 'pytest~=3.5',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'pytest-mpl',\n 'pydocstyle',\n 'coverage>=4.0', # coveralls\n 'papermill~=2.0',\n 'nteract-scrapbook~=0.2',\n 'jupyter',\n 'uproot~=3.3',\n 'graphviz',\n 'jsonpatch',\n ]\n )\n)\nextras_require['docs'] = sorted(\n set(\n [\n 'sphinx~=3.0.0', # Sphinx v3.1.X regressions break docs\n 'sphinxcontrib-bibtex',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'ipywidgets',\n 'sphinx-issues',\n 'sphinx-copybutton>0.2.9',\n ]\n )\n)\nextras_require['develop'] = sorted(\n set(\n extras_require['docs']\n + extras_require['lint']\n + extras_require['test']\n + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'check-manifest', 'twine']\n )\n)\nextras_require['complete'] = sorted(set(sum(extras_require.values(), [])))\n\n\nsetup(\n extras_require=extras_require,\n use_scm_version=lambda: {'local_scheme': lambda version: ''},\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup\n\nextras_require = {\n 'tensorflow': [\n 'tensorflow~=2.0',\n 'tensorflow-probability~=0.10', # TODO: Temp patch until tfp v0.11\n ],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],\n 'xmlio': ['uproot'],\n 'minuit': ['iminuit'],\n}\nextras_require['backends'] = sorted(\n set(\n extras_require['tensorflow']\n + extras_require['torch']\n + extras_require['jax']\n + extras_require['minuit']\n )\n)\nextras_require['contrib'] = sorted(set(['matplotlib']))\nextras_require['lint'] = sorted(set(['pyflakes', 'black']))\n\nextras_require['test'] = sorted(\n set(\n extras_require['backends']\n + extras_require['xmlio']\n + extras_require['contrib']\n + [\n 'pytest~=3.5',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'pytest-mpl',\n 'pydocstyle',\n 'coverage>=4.0', # coveralls\n 'papermill~=2.0',\n 'nteract-scrapbook~=0.2',\n 'jupyter',\n 'uproot~=3.3',\n 'graphviz',\n 'jsonpatch',\n ]\n )\n)\nextras_require['docs'] = sorted(\n set(\n [\n 'sphinx~=3.0.0', # 
Sphinx v3.1.X regressions break docs\n 'sphinxcontrib-bibtex',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'ipywidgets',\n 'sphinx-issues',\n 'sphinx-copybutton>0.2.9',\n ]\n )\n)\nextras_require['develop'] = sorted(\n set(\n extras_require['docs']\n + extras_require['lint']\n + extras_require['test']\n + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'check-manifest', 'twine']\n )\n)\nextras_require['complete'] = sorted(set(sum(extras_require.values(), [])))\n\n\nsetup(\n extras_require=extras_require,\n use_scm_version=lambda: {'local_scheme': lambda version: ''},\n)\n", "path": "setup.py"}]} | 1,018 | 162 |
gh_patches_debug_1435 | rasdani/github-patches | git_diff | keras-team__keras-1039 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
keras.utils.visualize_util
line 9: `if type(model) == Sequential:` fails with `NameError: global name 'Sequential' is not defined`
line 25: `elif type(model) == Graph:` fails with `NameError: global name 'Graph' is not defined`
--- END ISSUE ---
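A minimal reproduction, assuming a Keras build from the era of this report with `pydot` importable (the call below is hypothetical and never reaches the drawing code):

```python
from keras.models import Sequential
from keras.utils.visualize_util import plot

model = Sequential()
plot(model)  # NameError: global name 'Sequential' is not defined (line 9)
```

The golden diff further below fixes this by importing `Sequential` and `Graph` from `keras.models` at module level.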
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `keras/utils/visualize_util.py`
Content:
```
1 import pydot
2 # old pydot will not work with python3, must use one
3 # that works with python3 such as pydot2 or pydot
4
5
6 def plot(model, to_file='model.png'):
7
8 graph = pydot.Dot(graph_type='digraph')
9 if type(model) == Sequential:
10 previous_node = None
11 written_nodes = []
12 n = 1
13 for node in model.get_config()['layers']:
14 # append number in case layers have same name to differentiate
15 if (node['name'] + str(n)) in written_nodes:
16 n += 1
17 current_node = pydot.Node(node['name'] + str(n))
18 written_nodes.append(node['name'] + str(n))
19 graph.add_node(current_node)
20 if previous_node:
21 graph.add_edge(pydot.Edge(previous_node, current_node))
22 previous_node = current_node
23 graph.write_png(to_file)
24
25 elif type(model) == Graph:
26 # don't need to append number for names since all nodes labeled
27 for input_node in model.input_config:
28 graph.add_node(pydot.Node(input_node['name']))
29
30 # intermediate and output nodes have input defined
31 for layer_config in [model.node_config, model.output_config]:
32 for node in layer_config:
33 graph.add_node(pydot.Node(node['name']))
34 # possible to have multiple 'inputs' vs 1 'input'
35 if node['inputs']:
36 for e in node['inputs']:
37 graph.add_edge(pydot.Edge(e, node['name']))
38 else:
39 graph.add_edge(pydot.Edge(node['input'], node['name']))
40
41 graph.write_png(to_file)
42
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/keras/utils/visualize_util.py b/keras/utils/visualize_util.py
--- a/keras/utils/visualize_util.py
+++ b/keras/utils/visualize_util.py
@@ -1,7 +1,7 @@
import pydot
# old pydot will not work with python3, must use one
# that works with python3 such as pydot2 or pydot
-
+from keras.models import Sequential, Graph
def plot(model, to_file='model.png'):
| {"golden_diff": "diff --git a/keras/utils/visualize_util.py b/keras/utils/visualize_util.py\n--- a/keras/utils/visualize_util.py\n+++ b/keras/utils/visualize_util.py\n@@ -1,7 +1,7 @@\n import pydot\n # old pydot will not work with python3, must use one\n # that works with python3 such as pydot2 or pydot\n-\n+from keras.models import Sequential, Graph\n \n def plot(model, to_file='model.png'):\n", "issue": "keras.utils.visualize_util\nline 9: if type(model) == Sequential\nGlobal name Sequential is not defined\nline 25 elif type(model) == Graph:\nGlobal name Graph is not defined\n\n", "before_files": [{"content": "import pydot\n# old pydot will not work with python3, must use one\n# that works with python3 such as pydot2 or pydot\n\n\ndef plot(model, to_file='model.png'):\n\n graph = pydot.Dot(graph_type='digraph')\n if type(model) == Sequential:\n previous_node = None\n written_nodes = []\n n = 1\n for node in model.get_config()['layers']:\n # append number in case layers have same name to differentiate\n if (node['name'] + str(n)) in written_nodes:\n n += 1\n current_node = pydot.Node(node['name'] + str(n))\n written_nodes.append(node['name'] + str(n))\n graph.add_node(current_node)\n if previous_node:\n graph.add_edge(pydot.Edge(previous_node, current_node))\n previous_node = current_node\n graph.write_png(to_file)\n\n elif type(model) == Graph:\n # don't need to append number for names since all nodes labeled\n for input_node in model.input_config:\n graph.add_node(pydot.Node(input_node['name']))\n\n # intermediate and output nodes have input defined\n for layer_config in [model.node_config, model.output_config]:\n for node in layer_config:\n graph.add_node(pydot.Node(node['name']))\n # possible to have multiple 'inputs' vs 1 'input'\n if node['inputs']:\n for e in node['inputs']:\n graph.add_edge(pydot.Edge(e, node['name']))\n else:\n graph.add_edge(pydot.Edge(node['input'], node['name']))\n\n graph.write_png(to_file)\n", "path": "keras/utils/visualize_util.py"}], "after_files": [{"content": "import pydot\n# old pydot will not work with python3, must use one\n# that works with python3 such as pydot2 or pydot\nfrom keras.models import Sequential, Graph\n\ndef plot(model, to_file='model.png'):\n\n graph = pydot.Dot(graph_type='digraph')\n if type(model) == Sequential:\n previous_node = None\n written_nodes = []\n n = 1\n for node in model.get_config()['layers']:\n # append number in case layers have same name to differentiate\n if (node['name'] + str(n)) in written_nodes:\n n += 1\n current_node = pydot.Node(node['name'] + str(n))\n written_nodes.append(node['name'] + str(n))\n graph.add_node(current_node)\n if previous_node:\n graph.add_edge(pydot.Edge(previous_node, current_node))\n previous_node = current_node\n graph.write_png(to_file)\n\n elif type(model) == Graph:\n # don't need to append number for names since all nodes labeled\n for input_node in model.input_config:\n graph.add_node(pydot.Node(input_node['name']))\n\n # intermediate and output nodes have input defined\n for layer_config in [model.node_config, model.output_config]:\n for node in layer_config:\n graph.add_node(pydot.Node(node['name']))\n # possible to have multiple 'inputs' vs 1 'input'\n if node['inputs']:\n for e in node['inputs']:\n graph.add_edge(pydot.Edge(e, node['name']))\n else:\n graph.add_edge(pydot.Edge(node['input'], node['name']))\n\n graph.write_png(to_file)\n", "path": "keras/utils/visualize_util.py"}]} | 733 | 112 |
gh_patches_debug_6143 | rasdani/github-patches | git_diff | getsentry__sentry-python-2371 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
KeyError 'https,http' in async handling 1.31.0
### How do you use Sentry?
Self-hosted/on-premise
### Version
1.31.0
### Steps to Reproduce
I'm running sentry and uvicorn. We also have django-channels. On upgrading to 1.31.0 I'm getting the following:
```
Traceback (most recent call last):
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/uvicorn/protocols/http/httptools_impl.py", line 426, in run_asgi
result = await app( # type: ignore[func-returns-value]
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/uvicorn/middleware/proxy_headers.py", line 84, in __call__
return await self.app(scope, receive, send)
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/channels/routing.py", line 62, in __call__
return await application(scope, receive, send)
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/django/asgi.py", line 40, in sentry_patched_asgi_handler
return await middleware(scope, receive, send)
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py", line 146, in _run_asgi3
return await self._run_app(scope, receive, send, asgi_version=3)
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py", line 178, in _run_app
) = self._get_transaction_name_and_source(
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py", line 293, in _get_transaction_name_and_source
name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None)
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/_asgi_common.py", line 46, in _get_url
default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
KeyError: 'https,http'
```
I did not research further, but it seems to be caused by the recent overhaul of the code (#2335).
### Expected Result
No exception
### Actual Result
.
--- END ISSUE ---
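A hedged reproduction of the crash; that `scope["scheme"]` arrives as the joined string `https,http` (for example from a duplicated `X-Forwarded-Proto` header) is an assumption, not something the report confirms:

```python
# Scope values are illustrative; only "scheme" matters for the crash.
asgi_scope = {"scheme": "https,http", "server": ("example.org", 443), "path": "/"}

scheme = asgi_scope.get("scheme", "http")
ports = {"http": 80, "https": 443, "ws": 80, "wss": 443}

# default_port = ports[scheme]    # 1.31.0 behaviour: KeyError: 'https,http'
default_port = ports.get(scheme)  # tolerant lookup, as in the patch further below

host, port = asgi_scope["server"]
if port != default_port:
    print("%s://%s:%s%s" % (scheme, host, port, asgi_scope["path"]))
else:
    print("%s://%s%s" % (scheme, host, asgi_scope["path"]))
# -> https,http://example.org:443/
```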
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sentry_sdk/integrations/_asgi_common.py`
Content:
```
1 import urllib
2
3 from sentry_sdk.hub import _should_send_default_pii
4 from sentry_sdk.integrations._wsgi_common import _filter_headers
5 from sentry_sdk._types import TYPE_CHECKING
6
7 if TYPE_CHECKING:
8 from typing import Any
9 from typing import Dict
10 from typing import Optional
11 from typing_extensions import Literal
12
13
14 def _get_headers(asgi_scope):
15 # type: (Any) -> Dict[str, str]
16 """
17 Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
18 """
19 headers = {} # type: Dict[str, str]
20 for raw_key, raw_value in asgi_scope["headers"]:
21 key = raw_key.decode("latin-1")
22 value = raw_value.decode("latin-1")
23 if key in headers:
24 headers[key] = headers[key] + ", " + value
25 else:
26 headers[key] = value
27
28 return headers
29
30
31 def _get_url(asgi_scope, default_scheme, host):
32 # type: (Dict[str, Any], Literal["ws", "http"], Optional[str]) -> str
33 """
34 Extract URL from the ASGI scope, without also including the querystring.
35 """
36 scheme = asgi_scope.get("scheme", default_scheme)
37
38 server = asgi_scope.get("server", None)
39 path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "")
40
41 if host:
42 return "%s://%s%s" % (scheme, host, path)
43
44 if server is not None:
45 host, port = server
46 default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
47 if port != default_port:
48 return "%s://%s:%s%s" % (scheme, host, port, path)
49 return "%s://%s%s" % (scheme, host, path)
50 return path
51
52
53 def _get_query(asgi_scope):
54 # type: (Any) -> Any
55 """
56 Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
57 """
58 qs = asgi_scope.get("query_string")
59 if not qs:
60 return None
61 return urllib.parse.unquote(qs.decode("latin-1"))
62
63
64 def _get_ip(asgi_scope):
65 # type: (Any) -> str
66 """
67 Extract IP Address from the ASGI scope based on request headers with fallback to scope client.
68 """
69 headers = _get_headers(asgi_scope)
70 try:
71 return headers["x-forwarded-for"].split(",")[0].strip()
72 except (KeyError, IndexError):
73 pass
74
75 try:
76 return headers["x-real-ip"]
77 except KeyError:
78 pass
79
80 return asgi_scope.get("client")[0]
81
82
83 def _get_request_data(asgi_scope):
84 # type: (Any) -> Dict[str, Any]
85 """
86 Returns data related to the HTTP request from the ASGI scope.
87 """
88 request_data = {} # type: Dict[str, Any]
89 ty = asgi_scope["type"]
90 if ty in ("http", "websocket"):
91 request_data["method"] = asgi_scope.get("method")
92
93 request_data["headers"] = headers = _filter_headers(_get_headers(asgi_scope))
94 request_data["query_string"] = _get_query(asgi_scope)
95
96 request_data["url"] = _get_url(
97 asgi_scope, "http" if ty == "http" else "ws", headers.get("host")
98 )
99
100 client = asgi_scope.get("client")
101 if client and _should_send_default_pii():
102 request_data["env"] = {"REMOTE_ADDR": _get_ip(asgi_scope)}
103
104 return request_data
105
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sentry_sdk/integrations/_asgi_common.py b/sentry_sdk/integrations/_asgi_common.py
--- a/sentry_sdk/integrations/_asgi_common.py
+++ b/sentry_sdk/integrations/_asgi_common.py
@@ -43,7 +43,7 @@
if server is not None:
host, port = server
- default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
+ default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}.get(scheme)
if port != default_port:
return "%s://%s:%s%s" % (scheme, host, port, path)
return "%s://%s%s" % (scheme, host, path)
| {"golden_diff": "diff --git a/sentry_sdk/integrations/_asgi_common.py b/sentry_sdk/integrations/_asgi_common.py\n--- a/sentry_sdk/integrations/_asgi_common.py\n+++ b/sentry_sdk/integrations/_asgi_common.py\n@@ -43,7 +43,7 @@\n \n if server is not None:\n host, port = server\n- default_port = {\"http\": 80, \"https\": 443, \"ws\": 80, \"wss\": 443}[scheme]\n+ default_port = {\"http\": 80, \"https\": 443, \"ws\": 80, \"wss\": 443}.get(scheme)\n if port != default_port:\n return \"%s://%s:%s%s\" % (scheme, host, port, path)\n return \"%s://%s%s\" % (scheme, host, path)\n", "issue": "KeyError 'https,http' in async handling 1.31.0\n### How do you use Sentry?\n\nSelf-hosted/on-premise\n\n### Version\n\n1.31.0\n\n### Steps to Reproduce\n\nI'm running sentry and uvicorn. We also have django-channels. On upgrading to 1.31.0 I'm getting the following:\r\n\r\n```\r\nTraceback (most recent call last):\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/uvicorn/protocols/http/httptools_impl.py\", line 426, in run_asgi\r\nresult = await app( # type: ignore[func-returns-value]\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/uvicorn/middleware/proxy_headers.py\", line 84, in __call__\r\nreturn await self.app(scope, receive, send)\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/channels/routing.py\", line 62, in __call__\r\nreturn await application(scope, receive, send)\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/django/asgi.py\", line 40, in sentry_patched_asgi_handler\r\nreturn await middleware(scope, receive, send)\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py\", line 146, in _run_asgi3\r\nreturn await self._run_app(scope, receive, send, asgi_version=3)\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py\", line 178, in _run_app\r\n) = self._get_transaction_name_and_source(\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py\", line 293, in _get_transaction_name_and_source\r\nname = _get_url(asgi_scope, \"http\" if ty == \"http\" else \"ws\", host=None)\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/_asgi_common.py\", line 46, in _get_url\r\ndefault_port = {\"http\": 80, \"https\": 443, \"ws\": 80, \"wss\": 443}[scheme]\r\nKeyError: 'https,http'\r\n```\r\n\r\nI did not research further but it seems to be caused by the recent overhaul of the code (#2335)\n\n### Expected Result\n\nNo exception\n\n### Actual Result\n\n.\n", "before_files": [{"content": "import urllib\n\nfrom sentry_sdk.hub import _should_send_default_pii\nfrom sentry_sdk.integrations._wsgi_common import _filter_headers\nfrom sentry_sdk._types import TYPE_CHECKING\n\nif TYPE_CHECKING:\n from typing import Any\n from typing import Dict\n from typing import Optional\n from typing_extensions import Literal\n\n\ndef _get_headers(asgi_scope):\n # type: (Any) -> Dict[str, str]\n \"\"\"\n Extract headers from the ASGI scope, in the format that the Sentry protocol expects.\n \"\"\"\n headers = {} # type: Dict[str, str]\n for raw_key, raw_value in asgi_scope[\"headers\"]:\n key = raw_key.decode(\"latin-1\")\n value = raw_value.decode(\"latin-1\")\n if key in headers:\n headers[key] = headers[key] + \", \" + value\n else:\n headers[key] = value\n\n return headers\n\n\ndef _get_url(asgi_scope, default_scheme, host):\n # type: (Dict[str, Any], Literal[\"ws\", \"http\"], Optional[str]) -> str\n 
\"\"\"\n Extract URL from the ASGI scope, without also including the querystring.\n \"\"\"\n scheme = asgi_scope.get(\"scheme\", default_scheme)\n\n server = asgi_scope.get(\"server\", None)\n path = asgi_scope.get(\"root_path\", \"\") + asgi_scope.get(\"path\", \"\")\n\n if host:\n return \"%s://%s%s\" % (scheme, host, path)\n\n if server is not None:\n host, port = server\n default_port = {\"http\": 80, \"https\": 443, \"ws\": 80, \"wss\": 443}[scheme]\n if port != default_port:\n return \"%s://%s:%s%s\" % (scheme, host, port, path)\n return \"%s://%s%s\" % (scheme, host, path)\n return path\n\n\ndef _get_query(asgi_scope):\n # type: (Any) -> Any\n \"\"\"\n Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.\n \"\"\"\n qs = asgi_scope.get(\"query_string\")\n if not qs:\n return None\n return urllib.parse.unquote(qs.decode(\"latin-1\"))\n\n\ndef _get_ip(asgi_scope):\n # type: (Any) -> str\n \"\"\"\n Extract IP Address from the ASGI scope based on request headers with fallback to scope client.\n \"\"\"\n headers = _get_headers(asgi_scope)\n try:\n return headers[\"x-forwarded-for\"].split(\",\")[0].strip()\n except (KeyError, IndexError):\n pass\n\n try:\n return headers[\"x-real-ip\"]\n except KeyError:\n pass\n\n return asgi_scope.get(\"client\")[0]\n\n\ndef _get_request_data(asgi_scope):\n # type: (Any) -> Dict[str, Any]\n \"\"\"\n Returns data related to the HTTP request from the ASGI scope.\n \"\"\"\n request_data = {} # type: Dict[str, Any]\n ty = asgi_scope[\"type\"]\n if ty in (\"http\", \"websocket\"):\n request_data[\"method\"] = asgi_scope.get(\"method\")\n\n request_data[\"headers\"] = headers = _filter_headers(_get_headers(asgi_scope))\n request_data[\"query_string\"] = _get_query(asgi_scope)\n\n request_data[\"url\"] = _get_url(\n asgi_scope, \"http\" if ty == \"http\" else \"ws\", headers.get(\"host\")\n )\n\n client = asgi_scope.get(\"client\")\n if client and _should_send_default_pii():\n request_data[\"env\"] = {\"REMOTE_ADDR\": _get_ip(asgi_scope)}\n\n return request_data\n", "path": "sentry_sdk/integrations/_asgi_common.py"}], "after_files": [{"content": "import urllib\n\nfrom sentry_sdk.hub import _should_send_default_pii\nfrom sentry_sdk.integrations._wsgi_common import _filter_headers\nfrom sentry_sdk._types import TYPE_CHECKING\n\nif TYPE_CHECKING:\n from typing import Any\n from typing import Dict\n from typing import Optional\n from typing_extensions import Literal\n\n\ndef _get_headers(asgi_scope):\n # type: (Any) -> Dict[str, str]\n \"\"\"\n Extract headers from the ASGI scope, in the format that the Sentry protocol expects.\n \"\"\"\n headers = {} # type: Dict[str, str]\n for raw_key, raw_value in asgi_scope[\"headers\"]:\n key = raw_key.decode(\"latin-1\")\n value = raw_value.decode(\"latin-1\")\n if key in headers:\n headers[key] = headers[key] + \", \" + value\n else:\n headers[key] = value\n\n return headers\n\n\ndef _get_url(asgi_scope, default_scheme, host):\n # type: (Dict[str, Any], Literal[\"ws\", \"http\"], Optional[str]) -> str\n \"\"\"\n Extract URL from the ASGI scope, without also including the querystring.\n \"\"\"\n scheme = asgi_scope.get(\"scheme\", default_scheme)\n\n server = asgi_scope.get(\"server\", None)\n path = asgi_scope.get(\"root_path\", \"\") + asgi_scope.get(\"path\", \"\")\n\n if host:\n return \"%s://%s%s\" % (scheme, host, path)\n\n if server is not None:\n host, port = server\n default_port = {\"http\": 80, \"https\": 443, \"ws\": 80, \"wss\": 443}.get(scheme)\n if port != default_port:\n 
return \"%s://%s:%s%s\" % (scheme, host, port, path)\n return \"%s://%s%s\" % (scheme, host, path)\n return path\n\n\ndef _get_query(asgi_scope):\n # type: (Any) -> Any\n \"\"\"\n Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.\n \"\"\"\n qs = asgi_scope.get(\"query_string\")\n if not qs:\n return None\n return urllib.parse.unquote(qs.decode(\"latin-1\"))\n\n\ndef _get_ip(asgi_scope):\n # type: (Any) -> str\n \"\"\"\n Extract IP Address from the ASGI scope based on request headers with fallback to scope client.\n \"\"\"\n headers = _get_headers(asgi_scope)\n try:\n return headers[\"x-forwarded-for\"].split(\",\")[0].strip()\n except (KeyError, IndexError):\n pass\n\n try:\n return headers[\"x-real-ip\"]\n except KeyError:\n pass\n\n return asgi_scope.get(\"client\")[0]\n\n\ndef _get_request_data(asgi_scope):\n # type: (Any) -> Dict[str, Any]\n \"\"\"\n Returns data related to the HTTP request from the ASGI scope.\n \"\"\"\n request_data = {} # type: Dict[str, Any]\n ty = asgi_scope[\"type\"]\n if ty in (\"http\", \"websocket\"):\n request_data[\"method\"] = asgi_scope.get(\"method\")\n\n request_data[\"headers\"] = headers = _filter_headers(_get_headers(asgi_scope))\n request_data[\"query_string\"] = _get_query(asgi_scope)\n\n request_data[\"url\"] = _get_url(\n asgi_scope, \"http\" if ty == \"http\" else \"ws\", headers.get(\"host\")\n )\n\n client = asgi_scope.get(\"client\")\n if client and _should_send_default_pii():\n request_data[\"env\"] = {\"REMOTE_ADDR\": _get_ip(asgi_scope)}\n\n return request_data\n", "path": "sentry_sdk/integrations/_asgi_common.py"}]} | 1,878 | 204 |
gh_patches_debug_13968 | rasdani/github-patches | git_diff | Kinto__kinto-367 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
kinto init should install postgresql dependencies
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kinto/__main__.py`
Content:
```
1 from __future__ import print_function
2 import argparse
3 import os
4 import sys
5 from six.moves import input
6 from cliquet.scripts import cliquet
7 from pyramid.scripts import pserve
8 from pyramid.paster import bootstrap
9
10 from kinto.config import init
11
12 CONFIG_FILE = 'config/kinto.ini'
13
14
15 def main(args=None):
16 """The main routine."""
17 if args is None:
18 args = sys.argv[1:]
19
20 parser = argparse.ArgumentParser(description="Kinto commands")
21 parser.add_argument('--ini',
22 help='Application configuration file',
23 dest='ini_file',
24 required=False,
25 default=CONFIG_FILE)
26 parser.add_argument('--backend',
27 help='Specify backend',
28 dest='backend',
29 required=False,
30 default=None)
31
32 subparsers = parser.add_subparsers(title='subcommands',
33 description='valid subcommands',
34 help='init/start/migrate')
35
36 parser_init = subparsers.add_parser('init')
37 parser_init.set_defaults(which='init')
38
39 parser_migrate = subparsers.add_parser('migrate')
40 parser_migrate.set_defaults(which='migrate')
41
42 parser_start = subparsers.add_parser('start')
43 parser_start.set_defaults(which='start')
44
45 args = vars(parser.parse_args())
46 config_file = args['ini_file']
47
48 if args['which'] == 'init':
49 if os.path.exists(config_file):
50 print("%s already exist." % config_file, file=sys.stderr)
51 sys.exit(1)
52
53 backend = args['backend']
54 if not backend:
55 while True:
56 prompt = ("Which backend to use? "
57 "(1 - postgresql, 2 - redis, default - memory) ")
58 answer = input(prompt).strip()
59 try:
60 backends = {"1": "postgresql", "2": "redis", "": "memory"}
61 backend = backends[answer]
62 break
63 except KeyError:
64 pass
65
66 init(config_file, backend)
67
68 elif args['which'] == 'migrate':
69 env = bootstrap(config_file)
70 cliquet.init_schema(env)
71
72 elif args['which'] == 'start':
73 pserve_argv = ['pserve', config_file, '--reload']
74 pserve.main(pserve_argv)
75
76
77 if __name__ == "__main__":
78 main()
79
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kinto/__main__.py b/kinto/__main__.py
--- a/kinto/__main__.py
+++ b/kinto/__main__.py
@@ -2,6 +2,8 @@
import argparse
import os
import sys
+
+import pip
from six.moves import input
from cliquet.scripts import cliquet
from pyramid.scripts import pserve
@@ -65,6 +67,13 @@
init(config_file, backend)
+ # Install postgresql libraries if necessary
+ if backend == "postgresql":
+ try:
+ import psycopg2 # NOQA
+ except ImportError:
+ pip.main(['install', "cliquet[postgresql]"])
+
elif args['which'] == 'migrate':
env = bootstrap(config_file)
cliquet.init_schema(env)
| {"golden_diff": "diff --git a/kinto/__main__.py b/kinto/__main__.py\n--- a/kinto/__main__.py\n+++ b/kinto/__main__.py\n@@ -2,6 +2,8 @@\n import argparse\n import os\n import sys\n+\n+import pip\n from six.moves import input\n from cliquet.scripts import cliquet\n from pyramid.scripts import pserve\n@@ -65,6 +67,13 @@\n \n init(config_file, backend)\n \n+ # Install postgresql libraries if necessary\n+ if backend == \"postgresql\":\n+ try:\n+ import psycopg2 # NOQA\n+ except ImportError:\n+ pip.main(['install', \"cliquet[postgresql]\"])\n+\n elif args['which'] == 'migrate':\n env = bootstrap(config_file)\n cliquet.init_schema(env)\n", "issue": "kinto init should install postgresql dependencies\n\n", "before_files": [{"content": "from __future__ import print_function\nimport argparse\nimport os\nimport sys\nfrom six.moves import input\nfrom cliquet.scripts import cliquet\nfrom pyramid.scripts import pserve\nfrom pyramid.paster import bootstrap\n\nfrom kinto.config import init\n\nCONFIG_FILE = 'config/kinto.ini'\n\n\ndef main(args=None):\n \"\"\"The main routine.\"\"\"\n if args is None:\n args = sys.argv[1:]\n\n parser = argparse.ArgumentParser(description=\"Kinto commands\")\n parser.add_argument('--ini',\n help='Application configuration file',\n dest='ini_file',\n required=False,\n default=CONFIG_FILE)\n parser.add_argument('--backend',\n help='Specify backend',\n dest='backend',\n required=False,\n default=None)\n\n subparsers = parser.add_subparsers(title='subcommands',\n description='valid subcommands',\n help='init/start/migrate')\n\n parser_init = subparsers.add_parser('init')\n parser_init.set_defaults(which='init')\n\n parser_migrate = subparsers.add_parser('migrate')\n parser_migrate.set_defaults(which='migrate')\n\n parser_start = subparsers.add_parser('start')\n parser_start.set_defaults(which='start')\n\n args = vars(parser.parse_args())\n config_file = args['ini_file']\n\n if args['which'] == 'init':\n if os.path.exists(config_file):\n print(\"%s already exist.\" % config_file, file=sys.stderr)\n sys.exit(1)\n\n backend = args['backend']\n if not backend:\n while True:\n prompt = (\"Which backend to use? 
\"\n \"(1 - postgresql, 2 - redis, default - memory) \")\n answer = input(prompt).strip()\n try:\n backends = {\"1\": \"postgresql\", \"2\": \"redis\", \"\": \"memory\"}\n backend = backends[answer]\n break\n except KeyError:\n pass\n\n init(config_file, backend)\n\n elif args['which'] == 'migrate':\n env = bootstrap(config_file)\n cliquet.init_schema(env)\n\n elif args['which'] == 'start':\n pserve_argv = ['pserve', config_file, '--reload']\n pserve.main(pserve_argv)\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "kinto/__main__.py"}], "after_files": [{"content": "from __future__ import print_function\nimport argparse\nimport os\nimport sys\n\nimport pip\nfrom six.moves import input\nfrom cliquet.scripts import cliquet\nfrom pyramid.scripts import pserve\nfrom pyramid.paster import bootstrap\n\nfrom kinto.config import init\n\nCONFIG_FILE = 'config/kinto.ini'\n\n\ndef main(args=None):\n \"\"\"The main routine.\"\"\"\n if args is None:\n args = sys.argv[1:]\n\n parser = argparse.ArgumentParser(description=\"Kinto commands\")\n parser.add_argument('--ini',\n help='Application configuration file',\n dest='ini_file',\n required=False,\n default=CONFIG_FILE)\n parser.add_argument('--backend',\n help='Specify backend',\n dest='backend',\n required=False,\n default=None)\n\n subparsers = parser.add_subparsers(title='subcommands',\n description='valid subcommands',\n help='init/start/migrate')\n\n parser_init = subparsers.add_parser('init')\n parser_init.set_defaults(which='init')\n\n parser_migrate = subparsers.add_parser('migrate')\n parser_migrate.set_defaults(which='migrate')\n\n parser_start = subparsers.add_parser('start')\n parser_start.set_defaults(which='start')\n\n args = vars(parser.parse_args())\n config_file = args['ini_file']\n\n if args['which'] == 'init':\n if os.path.exists(config_file):\n print(\"%s already exist.\" % config_file, file=sys.stderr)\n sys.exit(1)\n\n backend = args['backend']\n if not backend:\n while True:\n prompt = (\"Which backend to use? \"\n \"(1 - postgresql, 2 - redis, default - memory) \")\n answer = input(prompt).strip()\n try:\n backends = {\"1\": \"postgresql\", \"2\": \"redis\", \"\": \"memory\"}\n backend = backends[answer]\n break\n except KeyError:\n pass\n\n init(config_file, backend)\n\n # Install postgresql libraries if necessary\n if backend == \"postgresql\":\n try:\n import psycopg2 # NOQA\n except ImportError:\n pip.main(['install', \"cliquet[postgresql]\"])\n\n elif args['which'] == 'migrate':\n env = bootstrap(config_file)\n cliquet.init_schema(env)\n\n elif args['which'] == 'start':\n pserve_argv = ['pserve', config_file, '--reload']\n pserve.main(pserve_argv)\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "kinto/__main__.py"}]} | 907 | 183 |
gh_patches_debug_33610 | rasdani/github-patches | git_diff | litestar-org__litestar-183 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`from starlite import *` broken if `testing` extra not installed
This is only an issue on main, not in any release.
When I want to try a library out, I'll install it into a fresh env, run the Python REPL, and do `from lib import *` to have a play around. If just doing that raised an error, it would freak me out a little about the lib.
Possible solution:
- remove `.testing` imports from `starlite.__all__`
- add deprecation warning for top-level `.testing` imports
- remove `if TYPE_CHECKING` too? May as well if we are doing the above, I think?
Refs: #174 #130
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `starlite/__init__.py`
Content:
```
1 from typing import TYPE_CHECKING, Any
2
3 from starlite.datastructures import File, Redirect, State, Stream, Template
4
5 from .app import Starlite
6 from .config import (
7 CacheConfig,
8 CORSConfig,
9 OpenAPIConfig,
10 StaticFilesConfig,
11 TemplateConfig,
12 )
13 from .connection import Request, WebSocket
14 from .controller import Controller
15 from .dto import DTOFactory
16 from .enums import (
17 HttpMethod,
18 MediaType,
19 OpenAPIMediaType,
20 RequestEncodingType,
21 ScopeType,
22 )
23 from .exceptions import (
24 HTTPException,
25 ImproperlyConfiguredException,
26 InternalServerException,
27 MissingDependencyException,
28 NotAuthorizedException,
29 NotFoundException,
30 PermissionDeniedException,
31 ServiceUnavailableException,
32 StarLiteException,
33 ValidationException,
34 )
35 from .handlers import (
36 ASGIRouteHandler,
37 BaseRouteHandler,
38 HTTPRouteHandler,
39 WebsocketRouteHandler,
40 asgi,
41 delete,
42 get,
43 patch,
44 post,
45 put,
46 route,
47 websocket,
48 )
49 from .logging import LoggingConfig, QueueListenerHandler
50 from .middleware import AbstractAuthenticationMiddleware, AuthenticationResult
51 from .openapi.controller import OpenAPIController
52 from .params import Body, Dependency, Parameter
53 from .plugins import PluginProtocol
54 from .provide import Provide
55 from .response import Response
56 from .router import Router
57 from .routes import BaseRoute, HTTPRoute, WebSocketRoute
58 from .types import MiddlewareProtocol, Partial, ResponseHeader
59
60 if TYPE_CHECKING:
61 from .testing import TestClient, create_test_client, create_test_request
62
63
64 __all__ = [
65 "ASGIRouteHandler",
66 "AbstractAuthenticationMiddleware",
67 "AuthenticationResult",
68 "BaseRoute",
69 "BaseRouteHandler",
70 "Body",
71 "CORSConfig",
72 "CacheConfig",
73 "Controller",
74 "Dependency",
75 "DTOFactory",
76 "File",
77 "HTTPException",
78 "HTTPRoute",
79 "HTTPRouteHandler",
80 "HttpMethod",
81 "ImproperlyConfiguredException",
82 "InternalServerException",
83 "LoggingConfig",
84 "MediaType",
85 "MiddlewareProtocol",
86 "MissingDependencyException",
87 "NotAuthorizedException",
88 "NotFoundException",
89 "OpenAPIConfig",
90 "OpenAPIController",
91 "OpenAPIMediaType",
92 "Parameter",
93 "Partial",
94 "PermissionDeniedException",
95 "PluginProtocol",
96 "Provide",
97 "QueueListenerHandler",
98 "Redirect",
99 "Request",
100 "RequestEncodingType",
101 "Response",
102 "ResponseHeader",
103 "Router",
104 "ScopeType",
105 "ServiceUnavailableException",
106 "StarLiteException",
107 "Starlite",
108 "State",
109 "StaticFilesConfig",
110 "Stream",
111 "Template",
112 "TemplateConfig",
113 "TestClient",
114 "ValidationException",
115 "WebSocket",
116 "WebSocketRoute",
117 "WebsocketRouteHandler",
118 "asgi",
119 "create_test_client",
120 "create_test_request",
121 "delete",
122 "get",
123 "patch",
124 "post",
125 "put",
126 "route",
127 "websocket",
128 ]
129
130 _dynamic_imports = {"TestClient", "create_test_client", "create_test_request"}
131
132
133 # pylint: disable=import-outside-toplevel
134 def __getattr__(name: str) -> Any:
135 """Provide lazy importing as per https://peps.python.org/pep-0562/"""
136 if name not in _dynamic_imports:
137 raise AttributeError(f"Module {__package__} has no attribute {name}")
138
139 from . import testing
140
141 attr = globals()[name] = getattr(testing, name)
142 return attr
143
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/starlite/__init__.py b/starlite/__init__.py
--- a/starlite/__init__.py
+++ b/starlite/__init__.py
@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Any
+from typing import Any
from starlite.datastructures import File, Redirect, State, Stream, Template
@@ -57,10 +57,6 @@
from .routes import BaseRoute, HTTPRoute, WebSocketRoute
from .types import MiddlewareProtocol, Partial, ResponseHeader
-if TYPE_CHECKING:
- from .testing import TestClient, create_test_client, create_test_request
-
-
__all__ = [
"ASGIRouteHandler",
"AbstractAuthenticationMiddleware",
@@ -110,14 +106,11 @@
"Stream",
"Template",
"TemplateConfig",
- "TestClient",
"ValidationException",
"WebSocket",
"WebSocketRoute",
"WebsocketRouteHandler",
"asgi",
- "create_test_client",
- "create_test_request",
"delete",
"get",
"patch",
@@ -127,15 +120,24 @@
"websocket",
]
-_dynamic_imports = {"TestClient", "create_test_client", "create_test_request"}
+
+_deprecated_imports = {"TestClient", "create_test_client", "create_test_request"}
# pylint: disable=import-outside-toplevel
def __getattr__(name: str) -> Any:
"""Provide lazy importing as per https://peps.python.org/pep-0562/"""
- if name not in _dynamic_imports:
+ if name not in _deprecated_imports:
raise AttributeError(f"Module {__package__} has no attribute {name}")
+ import warnings
+
+ warnings.warn(
+ f"Importing {name} from {__package__} is deprecated, use `from startlite.testing import {name}` instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
from . import testing
attr = globals()[name] = getattr(testing, name)
| {"golden_diff": "diff --git a/starlite/__init__.py b/starlite/__init__.py\n--- a/starlite/__init__.py\n+++ b/starlite/__init__.py\n@@ -1,4 +1,4 @@\n-from typing import TYPE_CHECKING, Any\n+from typing import Any\n \n from starlite.datastructures import File, Redirect, State, Stream, Template\n \n@@ -57,10 +57,6 @@\n from .routes import BaseRoute, HTTPRoute, WebSocketRoute\n from .types import MiddlewareProtocol, Partial, ResponseHeader\n \n-if TYPE_CHECKING:\n- from .testing import TestClient, create_test_client, create_test_request\n-\n-\n __all__ = [\n \"ASGIRouteHandler\",\n \"AbstractAuthenticationMiddleware\",\n@@ -110,14 +106,11 @@\n \"Stream\",\n \"Template\",\n \"TemplateConfig\",\n- \"TestClient\",\n \"ValidationException\",\n \"WebSocket\",\n \"WebSocketRoute\",\n \"WebsocketRouteHandler\",\n \"asgi\",\n- \"create_test_client\",\n- \"create_test_request\",\n \"delete\",\n \"get\",\n \"patch\",\n@@ -127,15 +120,24 @@\n \"websocket\",\n ]\n \n-_dynamic_imports = {\"TestClient\", \"create_test_client\", \"create_test_request\"}\n+\n+_deprecated_imports = {\"TestClient\", \"create_test_client\", \"create_test_request\"}\n \n \n # pylint: disable=import-outside-toplevel\n def __getattr__(name: str) -> Any:\n \"\"\"Provide lazy importing as per https://peps.python.org/pep-0562/\"\"\"\n- if name not in _dynamic_imports:\n+ if name not in _deprecated_imports:\n raise AttributeError(f\"Module {__package__} has no attribute {name}\")\n \n+ import warnings\n+\n+ warnings.warn(\n+ f\"Importing {name} from {__package__} is deprecated, use `from startlite.testing import {name}` instead\",\n+ DeprecationWarning,\n+ stacklevel=2,\n+ )\n+\n from . import testing\n \n attr = globals()[name] = getattr(testing, name)\n", "issue": "`from starlite import *` broken if `testing` extra not installed\nThis is only an issue on main, not in any release.\r\n\r\nWhen I want to try a library out, I'll install it into a fresh env, run python repl and do `from lib import *` and have a play around. If just doing that raised an error it would freak me out a little about the lib.\r\n\r\nPossible solution:\r\n- remove `.testing` imports from `starlite.__all__`\r\n- add deprecation warning for top-level `.testing` imports\r\n- remove `if TYPE_CHECKING` too? 
May as well if we are doing the above, I think?\r\n\r\nRefs: #174 #130 \n", "before_files": [{"content": "from typing import TYPE_CHECKING, Any\n\nfrom starlite.datastructures import File, Redirect, State, Stream, Template\n\nfrom .app import Starlite\nfrom .config import (\n CacheConfig,\n CORSConfig,\n OpenAPIConfig,\n StaticFilesConfig,\n TemplateConfig,\n)\nfrom .connection import Request, WebSocket\nfrom .controller import Controller\nfrom .dto import DTOFactory\nfrom .enums import (\n HttpMethod,\n MediaType,\n OpenAPIMediaType,\n RequestEncodingType,\n ScopeType,\n)\nfrom .exceptions import (\n HTTPException,\n ImproperlyConfiguredException,\n InternalServerException,\n MissingDependencyException,\n NotAuthorizedException,\n NotFoundException,\n PermissionDeniedException,\n ServiceUnavailableException,\n StarLiteException,\n ValidationException,\n)\nfrom .handlers import (\n ASGIRouteHandler,\n BaseRouteHandler,\n HTTPRouteHandler,\n WebsocketRouteHandler,\n asgi,\n delete,\n get,\n patch,\n post,\n put,\n route,\n websocket,\n)\nfrom .logging import LoggingConfig, QueueListenerHandler\nfrom .middleware import AbstractAuthenticationMiddleware, AuthenticationResult\nfrom .openapi.controller import OpenAPIController\nfrom .params import Body, Dependency, Parameter\nfrom .plugins import PluginProtocol\nfrom .provide import Provide\nfrom .response import Response\nfrom .router import Router\nfrom .routes import BaseRoute, HTTPRoute, WebSocketRoute\nfrom .types import MiddlewareProtocol, Partial, ResponseHeader\n\nif TYPE_CHECKING:\n from .testing import TestClient, create_test_client, create_test_request\n\n\n__all__ = [\n \"ASGIRouteHandler\",\n \"AbstractAuthenticationMiddleware\",\n \"AuthenticationResult\",\n \"BaseRoute\",\n \"BaseRouteHandler\",\n \"Body\",\n \"CORSConfig\",\n \"CacheConfig\",\n \"Controller\",\n \"Dependency\",\n \"DTOFactory\",\n \"File\",\n \"HTTPException\",\n \"HTTPRoute\",\n \"HTTPRouteHandler\",\n \"HttpMethod\",\n \"ImproperlyConfiguredException\",\n \"InternalServerException\",\n \"LoggingConfig\",\n \"MediaType\",\n \"MiddlewareProtocol\",\n \"MissingDependencyException\",\n \"NotAuthorizedException\",\n \"NotFoundException\",\n \"OpenAPIConfig\",\n \"OpenAPIController\",\n \"OpenAPIMediaType\",\n \"Parameter\",\n \"Partial\",\n \"PermissionDeniedException\",\n \"PluginProtocol\",\n \"Provide\",\n \"QueueListenerHandler\",\n \"Redirect\",\n \"Request\",\n \"RequestEncodingType\",\n \"Response\",\n \"ResponseHeader\",\n \"Router\",\n \"ScopeType\",\n \"ServiceUnavailableException\",\n \"StarLiteException\",\n \"Starlite\",\n \"State\",\n \"StaticFilesConfig\",\n \"Stream\",\n \"Template\",\n \"TemplateConfig\",\n \"TestClient\",\n \"ValidationException\",\n \"WebSocket\",\n \"WebSocketRoute\",\n \"WebsocketRouteHandler\",\n \"asgi\",\n \"create_test_client\",\n \"create_test_request\",\n \"delete\",\n \"get\",\n \"patch\",\n \"post\",\n \"put\",\n \"route\",\n \"websocket\",\n]\n\n_dynamic_imports = {\"TestClient\", \"create_test_client\", \"create_test_request\"}\n\n\n# pylint: disable=import-outside-toplevel\ndef __getattr__(name: str) -> Any:\n \"\"\"Provide lazy importing as per https://peps.python.org/pep-0562/\"\"\"\n if name not in _dynamic_imports:\n raise AttributeError(f\"Module {__package__} has no attribute {name}\")\n\n from . 
import testing\n\n attr = globals()[name] = getattr(testing, name)\n return attr\n", "path": "starlite/__init__.py"}], "after_files": [{"content": "from typing import Any\n\nfrom starlite.datastructures import File, Redirect, State, Stream, Template\n\nfrom .app import Starlite\nfrom .config import (\n CacheConfig,\n CORSConfig,\n OpenAPIConfig,\n StaticFilesConfig,\n TemplateConfig,\n)\nfrom .connection import Request, WebSocket\nfrom .controller import Controller\nfrom .dto import DTOFactory\nfrom .enums import (\n HttpMethod,\n MediaType,\n OpenAPIMediaType,\n RequestEncodingType,\n ScopeType,\n)\nfrom .exceptions import (\n HTTPException,\n ImproperlyConfiguredException,\n InternalServerException,\n MissingDependencyException,\n NotAuthorizedException,\n NotFoundException,\n PermissionDeniedException,\n ServiceUnavailableException,\n StarLiteException,\n ValidationException,\n)\nfrom .handlers import (\n ASGIRouteHandler,\n BaseRouteHandler,\n HTTPRouteHandler,\n WebsocketRouteHandler,\n asgi,\n delete,\n get,\n patch,\n post,\n put,\n route,\n websocket,\n)\nfrom .logging import LoggingConfig, QueueListenerHandler\nfrom .middleware import AbstractAuthenticationMiddleware, AuthenticationResult\nfrom .openapi.controller import OpenAPIController\nfrom .params import Body, Dependency, Parameter\nfrom .plugins import PluginProtocol\nfrom .provide import Provide\nfrom .response import Response\nfrom .router import Router\nfrom .routes import BaseRoute, HTTPRoute, WebSocketRoute\nfrom .types import MiddlewareProtocol, Partial, ResponseHeader\n\n__all__ = [\n \"ASGIRouteHandler\",\n \"AbstractAuthenticationMiddleware\",\n \"AuthenticationResult\",\n \"BaseRoute\",\n \"BaseRouteHandler\",\n \"Body\",\n \"CORSConfig\",\n \"CacheConfig\",\n \"Controller\",\n \"Dependency\",\n \"DTOFactory\",\n \"File\",\n \"HTTPException\",\n \"HTTPRoute\",\n \"HTTPRouteHandler\",\n \"HttpMethod\",\n \"ImproperlyConfiguredException\",\n \"InternalServerException\",\n \"LoggingConfig\",\n \"MediaType\",\n \"MiddlewareProtocol\",\n \"MissingDependencyException\",\n \"NotAuthorizedException\",\n \"NotFoundException\",\n \"OpenAPIConfig\",\n \"OpenAPIController\",\n \"OpenAPIMediaType\",\n \"Parameter\",\n \"Partial\",\n \"PermissionDeniedException\",\n \"PluginProtocol\",\n \"Provide\",\n \"QueueListenerHandler\",\n \"Redirect\",\n \"Request\",\n \"RequestEncodingType\",\n \"Response\",\n \"ResponseHeader\",\n \"Router\",\n \"ScopeType\",\n \"ServiceUnavailableException\",\n \"StarLiteException\",\n \"Starlite\",\n \"State\",\n \"StaticFilesConfig\",\n \"Stream\",\n \"Template\",\n \"TemplateConfig\",\n \"ValidationException\",\n \"WebSocket\",\n \"WebSocketRoute\",\n \"WebsocketRouteHandler\",\n \"asgi\",\n \"delete\",\n \"get\",\n \"patch\",\n \"post\",\n \"put\",\n \"route\",\n \"websocket\",\n]\n\n\n_deprecated_imports = {\"TestClient\", \"create_test_client\", \"create_test_request\"}\n\n\n# pylint: disable=import-outside-toplevel\ndef __getattr__(name: str) -> Any:\n \"\"\"Provide lazy importing as per https://peps.python.org/pep-0562/\"\"\"\n if name not in _deprecated_imports:\n raise AttributeError(f\"Module {__package__} has no attribute {name}\")\n\n import warnings\n\n warnings.warn(\n f\"Importing {name} from {__package__} is deprecated, use `from startlite.testing import {name}` instead\",\n DeprecationWarning,\n stacklevel=2,\n )\n\n from . import testing\n\n attr = globals()[name] = getattr(testing, name)\n return attr\n", "path": "starlite/__init__.py"}]} | 1,510 | 471 |
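The starlite patch is a textbook application of PEP 562: the three testing names drop out of `__all__`, so `from starlite import *` no longer touches the optional `testing` extra, and a module-level `__getattr__` keeps the old top-level imports working behind a `DeprecationWarning`. A minimal sketch of the pattern, assuming it sits in the package's `__init__.py` next to a `testing` submodule (the golden diff itself, incidentally, misspells the suggested path as `startlite.testing` in its warning text):

```python
import warnings

_deprecated_imports = {"TestClient", "create_test_client", "create_test_request"}

def __getattr__(name):
    """PEP 562 hook: called only when `name` is not found in the module."""
    if name not in _deprecated_imports:
        raise AttributeError(f"Module {__package__} has no attribute {name}")
    warnings.warn(
        f"Importing {name} from {__package__} is deprecated; "
        f"use `from starlite.testing import {name}` instead",
        DeprecationWarning,
        stacklevel=2,
    )
    from . import testing  # lazy: the optional extra loads only on first use
    attr = globals()[name] = getattr(testing, name)  # cache for later lookups
    return attr
```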
gh_patches_debug_38574 | rasdani/github-patches | git_diff | electricitymaps__electricitymaps-contrib-1305 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CL-SIC parser returns wrong datetime
CL-SIC parser returns a datetime with year 0018 instead of 2018, as illustrated below:
`datetime: 0018-02-18 (06:43:00.000) CET`
`production: {"unknown":0,"coal":2206.6,"hydro":2416.39660,"wind":309.314,....}`
`updatedAt: 2018-02-20 (23:40:06.979) CET`
@systemcatch any idea?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `parsers/lib/quality.py`
Content:
```
1 import datetime
2 import warnings
3
4 import arrow
5
6
7 class ValidationError(ValueError):
8 pass
9
10
11 def validate_consumption(obj, zone_key):
12 # Data quality check
13 if obj['consumption'] is not None and obj['consumption'] < 0:
14 raise ValidationError('%s: consumption has negative value '
15 '%s' % (zone_key, obj['consumption']))
16
17
18 def validate_exchange(item, k):
19 if item.get('sortedZoneKeys', None) != k:
20 raise ValidationError("Sorted country codes %s and %s don't "
21 "match" % (item.get('sortedZoneKeys', None), k))
22 if 'datetime' not in item:
23 raise ValidationError('datetime was not returned for %s' % k)
24 if type(item['datetime']) != datetime.datetime:
25 raise ValidationError('datetime %s is not valid for %s' %
26 (item['datetime'], k))
27 data_time = arrow.get(item['datetime'])
28 if data_time > arrow.now():
29 raise ValidationError("Data from %s can't be in the future, data was "
30 "%s, now is %s" % (k, data_time, arrow.now()))
31 if data_time.year < 2000:
32 raise ValidationError("Data from %s can't be before year 2000, it was "
33 "%s" % (k, data_time))
34
35
36 def validate_production(obj, zone_key):
37 if 'datetime' not in obj:
38 raise ValidationError(
39 'datetime was not returned for %s' % zone_key)
40 if 'countryCode' in obj:
41 warnings.warn('object has field `countryCode`. It should have '
42 '`zoneKey` instead. In {}'.format(obj))
43 if 'zoneKey' not in obj and 'countryCode' not in obj:
44 raise ValidationError('zoneKey was not returned for %s' % zone_key)
45 if not isinstance(obj['datetime'], datetime.datetime):
46 raise ValidationError('datetime %s is not valid for %s' %
47 (obj['datetime'], zone_key))
48 if (obj.get('zoneKey', None) or obj.get('countryCode', None)) != zone_key:
49 raise ValidationError("Zone keys %s and %s don't match in %s" %
50 (obj.get('zoneKey', None), zone_key, obj))
51 data_time = arrow.get(obj['datetime'])
52 arrow_now = arrow.utcnow()
53 if data_time > arrow_now:
54 raise ValidationError(
55 "Data from %s can't be in the future, data was %s, now is "
56 "%s" % (zone_key, data_time, arrow_now))
57
58 if ((obj.get('production', {}).get('unknown', None) is None and
59 obj.get('production', {}).get('coal', None) is None and
60 obj.get('production', {}).get('oil', None) is None and
61 obj.get('production', {}).get('gas', None) is None and zone_key
62 not in ['CH', 'NO', 'AUS-TAS', 'DK-BHM', 'US-NEISO'])):
63 raise ValidationError(
64 "Coal or oil or unknown production value is required for"
65 " %s" % zone_key)
66 for k, v in obj['production'].items():
67 if v is None:
68 continue
69 if v < 0:
70 raise ValidationError('%s: key %s has negative value %s' %
71 (zone_key, k, v))
72
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/parsers/lib/quality.py b/parsers/lib/quality.py
--- a/parsers/lib/quality.py
+++ b/parsers/lib/quality.py
@@ -8,11 +8,26 @@
pass
+def validate_reasonable_time(item, k):
+ data_time = arrow.get(item['datetime'])
+ if data_time.year < 2000:
+ raise ValidationError("Data from %s can't be before year 2000, it was "
+ "%s" % (k, data_time))
+
+ arrow_now = arrow.utcnow()
+ if data_time > arrow_now:
+ raise ValidationError(
+ "Data from %s can't be in the future, data was %s, now is "
+ "%s" % (k, data_time, arrow_now))
+
+
+
def validate_consumption(obj, zone_key):
# Data quality check
if obj['consumption'] is not None and obj['consumption'] < 0:
raise ValidationError('%s: consumption has negative value '
'%s' % (zone_key, obj['consumption']))
+ validate_reasonable_time(obj, zone_key)
def validate_exchange(item, k):
@@ -24,13 +39,7 @@
if type(item['datetime']) != datetime.datetime:
raise ValidationError('datetime %s is not valid for %s' %
(item['datetime'], k))
- data_time = arrow.get(item['datetime'])
- if data_time > arrow.now():
- raise ValidationError("Data from %s can't be in the future, data was "
- "%s, now is %s" % (k, data_time, arrow.now()))
- if data_time.year < 2000:
- raise ValidationError("Data from %s can't be before year 2000, it was "
- "%s" % (k, data_time))
+ validate_reasonable_time(item, k)
def validate_production(obj, zone_key):
@@ -48,12 +57,6 @@
if (obj.get('zoneKey', None) or obj.get('countryCode', None)) != zone_key:
raise ValidationError("Zone keys %s and %s don't match in %s" %
(obj.get('zoneKey', None), zone_key, obj))
- data_time = arrow.get(obj['datetime'])
- arrow_now = arrow.utcnow()
- if data_time > arrow_now:
- raise ValidationError(
- "Data from %s can't be in the future, data was %s, now is "
- "%s" % (zone_key, data_time, arrow_now))
if ((obj.get('production', {}).get('unknown', None) is None and
obj.get('production', {}).get('coal', None) is None and
@@ -69,3 +72,4 @@
if v < 0:
raise ValidationError('%s: key %s has negative value %s' %
(zone_key, k, v))
+ validate_reasonable_time(obj, zone_key)
| {"golden_diff": "diff --git a/parsers/lib/quality.py b/parsers/lib/quality.py\n--- a/parsers/lib/quality.py\n+++ b/parsers/lib/quality.py\n@@ -8,11 +8,26 @@\n pass\n \n \n+def validate_reasonable_time(item, k):\n+ data_time = arrow.get(item['datetime'])\n+ if data_time.year < 2000:\n+ raise ValidationError(\"Data from %s can't be before year 2000, it was \"\n+ \"%s\" % (k, data_time))\n+\n+ arrow_now = arrow.utcnow()\n+ if data_time > arrow_now:\n+ raise ValidationError(\n+ \"Data from %s can't be in the future, data was %s, now is \"\n+ \"%s\" % (k, data_time, arrow_now))\n+\n+\n+\n def validate_consumption(obj, zone_key):\n # Data quality check\n if obj['consumption'] is not None and obj['consumption'] < 0:\n raise ValidationError('%s: consumption has negative value '\n '%s' % (zone_key, obj['consumption']))\n+ validate_reasonable_time(obj, zone_key)\n \n \n def validate_exchange(item, k):\n@@ -24,13 +39,7 @@\n if type(item['datetime']) != datetime.datetime:\n raise ValidationError('datetime %s is not valid for %s' %\n (item['datetime'], k))\n- data_time = arrow.get(item['datetime'])\n- if data_time > arrow.now():\n- raise ValidationError(\"Data from %s can't be in the future, data was \"\n- \"%s, now is %s\" % (k, data_time, arrow.now()))\n- if data_time.year < 2000:\n- raise ValidationError(\"Data from %s can't be before year 2000, it was \"\n- \"%s\" % (k, data_time))\n+ validate_reasonable_time(item, k)\n \n \n def validate_production(obj, zone_key):\n@@ -48,12 +57,6 @@\n if (obj.get('zoneKey', None) or obj.get('countryCode', None)) != zone_key:\n raise ValidationError(\"Zone keys %s and %s don't match in %s\" %\n (obj.get('zoneKey', None), zone_key, obj))\n- data_time = arrow.get(obj['datetime'])\n- arrow_now = arrow.utcnow()\n- if data_time > arrow_now:\n- raise ValidationError(\n- \"Data from %s can't be in the future, data was %s, now is \"\n- \"%s\" % (zone_key, data_time, arrow_now))\n \n if ((obj.get('production', {}).get('unknown', None) is None and\n obj.get('production', {}).get('coal', None) is None and\n@@ -69,3 +72,4 @@\n if v < 0:\n raise ValidationError('%s: key %s has negative value %s' %\n (zone_key, k, v))\n+ validate_reasonable_time(obj, zone_key)\n", "issue": "CL-SIC parser returns wrong datetime\nCL-SIC parser returns datetime of 0018 instead of 2018, as illustrated below:\r\n\r\n`datetime: 0018-02-18 (06:43:00.000) CET`\r\n`production: {\"unknown\":0,\"coal\":2206.6,\"hydro\":2416.39660,\"wind\":309.314,....}`\r\n`updatedAt: 2018-02-20 (23:40:06.979) CET`\r\n\r\n@systemcatch any idea?\r\n\n", "before_files": [{"content": "import datetime\nimport warnings\n\nimport arrow\n\n\nclass ValidationError(ValueError):\n pass\n\n\ndef validate_consumption(obj, zone_key):\n # Data quality check\n if obj['consumption'] is not None and obj['consumption'] < 0:\n raise ValidationError('%s: consumption has negative value '\n '%s' % (zone_key, obj['consumption']))\n\n\ndef validate_exchange(item, k):\n if item.get('sortedZoneKeys', None) != k:\n raise ValidationError(\"Sorted country codes %s and %s don't \"\n \"match\" % (item.get('sortedZoneKeys', None), k))\n if 'datetime' not in item:\n raise ValidationError('datetime was not returned for %s' % k)\n if type(item['datetime']) != datetime.datetime:\n raise ValidationError('datetime %s is not valid for %s' %\n (item['datetime'], k))\n data_time = arrow.get(item['datetime'])\n if data_time > arrow.now():\n raise ValidationError(\"Data from %s can't be in the future, data was \"\n \"%s, now is %s\" % (k, data_time, 
arrow.now()))\n if data_time.year < 2000:\n raise ValidationError(\"Data from %s can't be before year 2000, it was \"\n \"%s\" % (k, data_time))\n\n\ndef validate_production(obj, zone_key):\n if 'datetime' not in obj:\n raise ValidationError(\n 'datetime was not returned for %s' % zone_key)\n if 'countryCode' in obj:\n warnings.warn('object has field `countryCode`. It should have '\n '`zoneKey` instead. In {}'.format(obj))\n if 'zoneKey' not in obj and 'countryCode' not in obj:\n raise ValidationError('zoneKey was not returned for %s' % zone_key)\n if not isinstance(obj['datetime'], datetime.datetime):\n raise ValidationError('datetime %s is not valid for %s' %\n (obj['datetime'], zone_key))\n if (obj.get('zoneKey', None) or obj.get('countryCode', None)) != zone_key:\n raise ValidationError(\"Zone keys %s and %s don't match in %s\" %\n (obj.get('zoneKey', None), zone_key, obj))\n data_time = arrow.get(obj['datetime'])\n arrow_now = arrow.utcnow()\n if data_time > arrow_now:\n raise ValidationError(\n \"Data from %s can't be in the future, data was %s, now is \"\n \"%s\" % (zone_key, data_time, arrow_now))\n\n if ((obj.get('production', {}).get('unknown', None) is None and\n obj.get('production', {}).get('coal', None) is None and\n obj.get('production', {}).get('oil', None) is None and\n obj.get('production', {}).get('gas', None) is None and zone_key\n not in ['CH', 'NO', 'AUS-TAS', 'DK-BHM', 'US-NEISO'])):\n raise ValidationError(\n \"Coal or oil or unknown production value is required for\"\n \" %s\" % zone_key)\n for k, v in obj['production'].items():\n if v is None:\n continue\n if v < 0:\n raise ValidationError('%s: key %s has negative value %s' %\n (zone_key, k, v))\n", "path": "parsers/lib/quality.py"}], "after_files": [{"content": "import datetime\nimport warnings\n\nimport arrow\n\n\nclass ValidationError(ValueError):\n pass\n\n\ndef validate_reasonable_time(item, k):\n data_time = arrow.get(item['datetime'])\n if data_time.year < 2000:\n raise ValidationError(\"Data from %s can't be before year 2000, it was \"\n \"%s\" % (k, data_time))\n\n arrow_now = arrow.utcnow()\n if data_time > arrow_now:\n raise ValidationError(\n \"Data from %s can't be in the future, data was %s, now is \"\n \"%s\" % (k, data_time, arrow_now))\n\n\n\ndef validate_consumption(obj, zone_key):\n # Data quality check\n if obj['consumption'] is not None and obj['consumption'] < 0:\n raise ValidationError('%s: consumption has negative value '\n '%s' % (zone_key, obj['consumption']))\n validate_reasonable_time(obj, zone_key)\n\n\ndef validate_exchange(item, k):\n if item.get('sortedZoneKeys', None) != k:\n raise ValidationError(\"Sorted country codes %s and %s don't \"\n \"match\" % (item.get('sortedZoneKeys', None), k))\n if 'datetime' not in item:\n raise ValidationError('datetime was not returned for %s' % k)\n if type(item['datetime']) != datetime.datetime:\n raise ValidationError('datetime %s is not valid for %s' %\n (item['datetime'], k))\n validate_reasonable_time(item, k)\n\n\ndef validate_production(obj, zone_key):\n if 'datetime' not in obj:\n raise ValidationError(\n 'datetime was not returned for %s' % zone_key)\n if 'countryCode' in obj:\n warnings.warn('object has field `countryCode`. It should have '\n '`zoneKey` instead. 
In {}'.format(obj))\n if 'zoneKey' not in obj and 'countryCode' not in obj:\n raise ValidationError('zoneKey was not returned for %s' % zone_key)\n if not isinstance(obj['datetime'], datetime.datetime):\n raise ValidationError('datetime %s is not valid for %s' %\n (obj['datetime'], zone_key))\n if (obj.get('zoneKey', None) or obj.get('countryCode', None)) != zone_key:\n raise ValidationError(\"Zone keys %s and %s don't match in %s\" %\n (obj.get('zoneKey', None), zone_key, obj))\n\n if ((obj.get('production', {}).get('unknown', None) is None and\n obj.get('production', {}).get('coal', None) is None and\n obj.get('production', {}).get('oil', None) is None and\n obj.get('production', {}).get('gas', None) is None and zone_key\n not in ['CH', 'NO', 'AUS-TAS', 'DK-BHM', 'US-NEISO'])):\n raise ValidationError(\n \"Coal or oil or unknown production value is required for\"\n \" %s\" % zone_key)\n for k, v in obj['production'].items():\n if v is None:\n continue\n if v < 0:\n raise ValidationError('%s: key %s has negative value %s' %\n (zone_key, k, v))\n validate_reasonable_time(obj, zone_key)\n", "path": "parsers/lib/quality.py"}]} | 1,277 | 670 |
gh_patches_debug_10020 | rasdani/github-patches | git_diff | onnx__sklearn-onnx-440 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
SelectFromModel converted model with a threshold such that no features are selected gives an error
A SelectFromModel model in scikit-learn with a threshold such that no features are selected returns an array of shape (M, 0), where M is the number of test instances; the ONNX converter, however, can't handle this.
RuntimeError: [ONNXRuntimeError] : 1 : GENERAL ERROR : Load model from pca1.onnx failed:[ShapeInferenceError] Shape input must be a one-dimensional tensor.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `skl2onnx/operator_converters/feature_selection.py`
Content:
```
1 # -------------------------------------------------------------------------
2 # Copyright (c) Microsoft Corporation. All rights reserved.
3 # Licensed under the MIT License. See License.txt in the project root for
4 # license information.
5 # --------------------------------------------------------------------------
6
7 from ..proto import onnx_proto
8 from ..common._apply_operation import apply_cast
9 from ..common._registration import register_converter
10 from ..common.data_types import FloatTensorType, FloatType
11
12
13 def convert_sklearn_feature_selection(scope, operator, container):
14 op = operator.raw_operator
15 # Get indices of the features selected
16 index = op.get_support(indices=True)
17 needs_cast = not isinstance(operator.inputs[0].type,
18 (FloatTensorType, FloatType))
19 if needs_cast:
20 output_name = scope.get_unique_variable_name('output')
21 else:
22 output_name = operator.outputs[0].full_name
23
24 if index.any():
25 column_indices_name = scope.get_unique_variable_name('column_indices')
26
27 container.add_initializer(column_indices_name,
28 onnx_proto.TensorProto.INT64,
29 [len(index)], index)
30
31 container.add_node(
32 'ArrayFeatureExtractor',
33 [operator.inputs[0].full_name, column_indices_name],
34 output_name, op_domain='ai.onnx.ml',
35 name=scope.get_unique_operator_name('ArrayFeatureExtractor'))
36 else:
37 container.add_node('ConstantOfShape', operator.inputs[0].full_name,
38 output_name, op_version=9)
39 if needs_cast:
40 apply_cast(scope, output_name, operator.outputs[0].full_name,
41 container, to=onnx_proto.TensorProto.FLOAT)
42
43
44 register_converter('SklearnGenericUnivariateSelect',
45 convert_sklearn_feature_selection)
46 register_converter('SklearnRFE', convert_sklearn_feature_selection)
47 register_converter('SklearnRFECV', convert_sklearn_feature_selection)
48 register_converter('SklearnSelectFdr', convert_sklearn_feature_selection)
49 register_converter('SklearnSelectFpr', convert_sklearn_feature_selection)
50 register_converter('SklearnSelectFromModel', convert_sklearn_feature_selection)
51 register_converter('SklearnSelectFwe', convert_sklearn_feature_selection)
52 register_converter('SklearnSelectKBest', convert_sklearn_feature_selection)
53 register_converter('SklearnSelectPercentile',
54 convert_sklearn_feature_selection)
55 register_converter('SklearnVarianceThreshold',
56 convert_sklearn_feature_selection)
57
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/skl2onnx/operator_converters/feature_selection.py b/skl2onnx/operator_converters/feature_selection.py
--- a/skl2onnx/operator_converters/feature_selection.py
+++ b/skl2onnx/operator_converters/feature_selection.py
@@ -14,6 +14,11 @@
op = operator.raw_operator
# Get indices of the features selected
index = op.get_support(indices=True)
+ if len(index) == 0:
+ raise RuntimeError(
+ "Model '{}' did not select any feature. "
+ "This model cannot be converted into ONNX."
+ "".format(op.__class__.__name__))
needs_cast = not isinstance(operator.inputs[0].type,
(FloatTensorType, FloatType))
if needs_cast:
| {"golden_diff": "diff --git a/skl2onnx/operator_converters/feature_selection.py b/skl2onnx/operator_converters/feature_selection.py\n--- a/skl2onnx/operator_converters/feature_selection.py\n+++ b/skl2onnx/operator_converters/feature_selection.py\n@@ -14,6 +14,11 @@\n op = operator.raw_operator\n # Get indices of the features selected\n index = op.get_support(indices=True)\n+ if len(index) == 0:\n+ raise RuntimeError(\n+ \"Model '{}' did not select any feature. \"\n+ \"This model cannot be converted into ONNX.\"\n+ \"\".format(op.__class__.__name__))\n needs_cast = not isinstance(operator.inputs[0].type,\n (FloatTensorType, FloatType))\n if needs_cast:\n", "issue": "SelectFromModel converted model with threshold such that no features selected give error\nSelectFromModel model in Scikit with threshold such that no features are selected returns an array of shape (M, 0), where M is the number of test instances, onnx converter however can't handle it.\r\n\r\nRuntimeError: [ONNXRuntimeError] : 1 : GENERAL ERROR : Load model from pca1.onnx failed:[ShapeInferenceError] Shape input must be a one-dimensional tensor. \n", "before_files": [{"content": "# -------------------------------------------------------------------------\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See License.txt in the project root for\n# license information.\n# --------------------------------------------------------------------------\n\nfrom ..proto import onnx_proto\nfrom ..common._apply_operation import apply_cast\nfrom ..common._registration import register_converter\nfrom ..common.data_types import FloatTensorType, FloatType\n\n\ndef convert_sklearn_feature_selection(scope, operator, container):\n op = operator.raw_operator\n # Get indices of the features selected\n index = op.get_support(indices=True)\n needs_cast = not isinstance(operator.inputs[0].type,\n (FloatTensorType, FloatType))\n if needs_cast:\n output_name = scope.get_unique_variable_name('output')\n else:\n output_name = operator.outputs[0].full_name\n\n if index.any():\n column_indices_name = scope.get_unique_variable_name('column_indices')\n\n container.add_initializer(column_indices_name,\n onnx_proto.TensorProto.INT64,\n [len(index)], index)\n\n container.add_node(\n 'ArrayFeatureExtractor',\n [operator.inputs[0].full_name, column_indices_name],\n output_name, op_domain='ai.onnx.ml',\n name=scope.get_unique_operator_name('ArrayFeatureExtractor'))\n else:\n container.add_node('ConstantOfShape', operator.inputs[0].full_name,\n output_name, op_version=9)\n if needs_cast:\n apply_cast(scope, output_name, operator.outputs[0].full_name,\n container, to=onnx_proto.TensorProto.FLOAT)\n\n\nregister_converter('SklearnGenericUnivariateSelect',\n convert_sklearn_feature_selection)\nregister_converter('SklearnRFE', convert_sklearn_feature_selection)\nregister_converter('SklearnRFECV', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFdr', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFpr', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFromModel', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFwe', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectKBest', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectPercentile',\n convert_sklearn_feature_selection)\nregister_converter('SklearnVarianceThreshold',\n convert_sklearn_feature_selection)\n", "path": 
"skl2onnx/operator_converters/feature_selection.py"}], "after_files": [{"content": "# -------------------------------------------------------------------------\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See License.txt in the project root for\n# license information.\n# --------------------------------------------------------------------------\n\nfrom ..proto import onnx_proto\nfrom ..common._apply_operation import apply_cast\nfrom ..common._registration import register_converter\nfrom ..common.data_types import FloatTensorType, FloatType\n\n\ndef convert_sklearn_feature_selection(scope, operator, container):\n op = operator.raw_operator\n # Get indices of the features selected\n index = op.get_support(indices=True)\n if len(index) == 0:\n raise RuntimeError(\n \"Model '{}' did not select any feature. \"\n \"This model cannot be converted into ONNX.\"\n \"\".format(op.__class__.__name__))\n needs_cast = not isinstance(operator.inputs[0].type,\n (FloatTensorType, FloatType))\n if needs_cast:\n output_name = scope.get_unique_variable_name('output')\n else:\n output_name = operator.outputs[0].full_name\n\n if index.any():\n column_indices_name = scope.get_unique_variable_name('column_indices')\n\n container.add_initializer(column_indices_name,\n onnx_proto.TensorProto.INT64,\n [len(index)], index)\n\n container.add_node(\n 'ArrayFeatureExtractor',\n [operator.inputs[0].full_name, column_indices_name],\n output_name, op_domain='ai.onnx.ml',\n name=scope.get_unique_operator_name('ArrayFeatureExtractor'))\n else:\n container.add_node('ConstantOfShape', operator.inputs[0].full_name,\n output_name, op_version=9)\n if needs_cast:\n apply_cast(scope, output_name, operator.outputs[0].full_name,\n container, to=onnx_proto.TensorProto.FLOAT)\n\n\nregister_converter('SklearnGenericUnivariateSelect',\n convert_sklearn_feature_selection)\nregister_converter('SklearnRFE', convert_sklearn_feature_selection)\nregister_converter('SklearnRFECV', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFdr', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFpr', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFromModel', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFwe', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectKBest', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectPercentile',\n convert_sklearn_feature_selection)\nregister_converter('SklearnVarianceThreshold',\n convert_sklearn_feature_selection)\n", "path": "skl2onnx/operator_converters/feature_selection.py"}]} | 954 | 174 |
gh_patches_debug_42030 | rasdani/github-patches | git_diff | ManageIQ__integration_tests-3352 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Check if we have fallback if no trackerbot URL specified
We had some issues where people with no trackerbot URL in the config were not able to run tests:
- Investigate what is happening
- Add fallback (do not check templates at all? Load them from all providers?)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `fixtures/provider.py`
Content:
```
1 """``setup_provider`` fixture
2
3 In test modules paramatrized with :py:func:`utils.testgen.provider_by_type` (should be
4 just about any module that needs a provider to run its tests), this fixture will set up
5 the single provider needed to run that test.
6
7 If the provider setup fails, this fixture will record that failure and skip future tests
8 using the provider.
9
10 """
11 import pytest
12
13 from fixtures.artifactor_plugin import art_client, get_test_idents
14 from fixtures.templateloader import TEMPLATES
15 from utils import providers
16 from utils.log import logger
17
18 # failed provider tracking for _setup_provider_fixture
19 _failed_providers = set()
20
21
22 def _setup_provider(provider_key, request=None):
23 def skip(provider_key, previous_fail=False):
24 if request:
25 node = request.node
26 name, location = get_test_idents(node)
27 skip_data = {'type': 'provider', 'reason': provider_key}
28 art_client.fire_hook('skip_test', test_location=location, test_name=name,
29 skip_data=skip_data)
30 if previous_fail:
31 raise pytest.skip('Provider {} failed to set up previously in another test, '
32 'skipping test'.format(provider_key))
33 else:
34 raise pytest.skip('Provider {} failed to set up this time, '
35 'skipping test'.format(provider_key))
36 # This function is dynamically "fixturized" to setup up a specific provider,
37 # optionally skipping the provider setup if that provider has previously failed.
38 if provider_key in _failed_providers:
39 skip(provider_key, previous_fail=True)
40
41 try:
42 providers.setup_provider(provider_key)
43 except Exception as ex:
44 logger.error('Error setting up provider {}'.format(provider_key))
45 logger.exception(ex)
46 _failed_providers.add(provider_key)
47 skip(provider_key)
48
49
50 @pytest.fixture(scope='function')
51 def setup_provider(request, provider):
52 """Function-scoped fixture to set up a provider"""
53 _setup_provider(provider.key, request)
54
55
56 @pytest.fixture(scope='module')
57 def setup_provider_modscope(request, provider):
58 """Function-scoped fixture to set up a provider"""
59 _setup_provider(provider.key, request)
60
61
62 @pytest.fixture(scope='class')
63 def setup_provider_clsscope(request, provider):
64 """Module-scoped fixture to set up a provider"""
65 _setup_provider(provider.key, request)
66
67
68 @pytest.fixture
69 def setup_provider_funcscope(request, provider):
70 """Function-scoped fixture to set up a provider
71
72 Note:
73
74 While there are cases where this is useful, provider fixtures should
75 be module-scoped the majority of the time.
76
77 """
78 _setup_provider(provider.key, request)
79
80
81 @pytest.fixture(scope="session")
82 def any_provider_session():
83 providers.clear_providers() # To make it clean
84 providers.setup_a_provider(validate=True, check_existing=True)
85
86
87 @pytest.fixture(scope="function")
88 def template(template_location, provider):
89 if template_location is not None:
90 o = provider.data
91 try:
92 for field in template_location:
93 o = o[field]
94 except (IndexError, KeyError):
95 logger.info("Cannot apply %s to %s in the template specification, ignoring.",
96 repr(field), repr(o))
97 else:
98 if not isinstance(o, basestring):
99 raise ValueError("{} is not a string! (for template)".format(repr(o)))
100 templates = TEMPLATES.get(provider.key, None)
101 if templates is not None:
102 if o in templates:
103 return o
104 logger.info(
105 "Wanted template {} on {} but it is not there!\n".format(o, provider.key))
106 pytest.skip('Template not available')
107
108
109 def _small_template(provider):
110 template = provider.data.get('small_template', None)
111 if template:
112 templates = TEMPLATES.get(provider.key, None)
113 if templates is not None:
114 if template in templates:
115 return template
116 logger.info(
117 "Wanted template {} on {} but it is not there!\n".format(template, provider.key))
118 pytest.skip('Template not available')
119
120
121 @pytest.fixture(scope="function")
122 def small_template(provider):
123 return _small_template(provider)
124
125
126 @pytest.fixture(scope="module")
127 def small_template_modscope(provider):
128 return _small_template(provider)
129
130
131 @pytest.fixture(scope="function")
132 def full_template(provider):
133 template = provider.data.get('full_template', {})
134 if template:
135 templates = TEMPLATES.get(provider.key, None)
136 if templates is not None:
137 if template['name'] in templates:
138 return template
139 logger.info(
140 "Wanted template {} on {} but it is not there!\n".format(template, provider.key))
141 pytest.skip('Template not available')
142
143
144 @pytest.fixture(scope="function")
145 def provisioning(provider):
146 return provider.data['provisioning']
147
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/fixtures/provider.py b/fixtures/provider.py
--- a/fixtures/provider.py
+++ b/fixtures/provider.py
@@ -9,6 +9,7 @@
"""
import pytest
+import six
from fixtures.artifactor_plugin import art_client, get_test_idents
from fixtures.templateloader import TEMPLATES
@@ -41,7 +42,7 @@
try:
providers.setup_provider(provider_key)
except Exception as ex:
- logger.error('Error setting up provider {}'.format(provider_key))
+ logger.error('Error setting up provider %s', provider_key)
logger.exception(ex)
_failed_providers.add(provider_key)
skip(provider_key)
@@ -92,29 +93,35 @@
for field in template_location:
o = o[field]
except (IndexError, KeyError):
- logger.info("Cannot apply %s to %s in the template specification, ignoring.",
- repr(field), repr(o))
+ logger.info("Cannot apply %r to %r in the template specification, ignoring.", field, o)
else:
- if not isinstance(o, basestring):
- raise ValueError("{} is not a string! (for template)".format(repr(o)))
+ if not isinstance(o, six.string_types):
+ raise ValueError("{!r} is not a string! (for template)".format(o))
+ if not TEMPLATES:
+ # There is nothing in TEMPLATES, that means no trackerbot URL and no data pulled.
+ # This should normally not constitute an issue so continue.
+ return o
templates = TEMPLATES.get(provider.key, None)
if templates is not None:
if o in templates:
return o
- logger.info(
- "Wanted template {} on {} but it is not there!\n".format(o, provider.key))
+ logger.info("Wanted template %s on %s but it is not there!", o, provider.key)
pytest.skip('Template not available')
def _small_template(provider):
template = provider.data.get('small_template', None)
if template:
+ if not TEMPLATES:
+ # Same as couple of lines above
+ return template
templates = TEMPLATES.get(provider.key, None)
if templates is not None:
if template in templates:
return template
- logger.info(
- "Wanted template {} on {} but it is not there!\n".format(template, provider.key))
+ else:
+ pytest.skip('No small_template for provider {}'.format(provider.key))
+ logger.info("Wanted template %s on %s but it is not there!", template, provider.key)
pytest.skip('Template not available')
@@ -132,12 +139,16 @@
def full_template(provider):
template = provider.data.get('full_template', {})
if template:
+ if not TEMPLATES:
+ # Same as couple of lines above
+ return template
templates = TEMPLATES.get(provider.key, None)
if templates is not None:
if template['name'] in templates:
return template
- logger.info(
- "Wanted template {} on {} but it is not there!\n".format(template, provider.key))
+ else:
+ pytest.skip('No full_template for provider {}'.format(provider.key))
+ logger.info("Wanted template %s on %s but it is not there!", template, provider.key)
pytest.skip('Template not available')
| {"golden_diff": "diff --git a/fixtures/provider.py b/fixtures/provider.py\n--- a/fixtures/provider.py\n+++ b/fixtures/provider.py\n@@ -9,6 +9,7 @@\n \n \"\"\"\n import pytest\n+import six\n \n from fixtures.artifactor_plugin import art_client, get_test_idents\n from fixtures.templateloader import TEMPLATES\n@@ -41,7 +42,7 @@\n try:\n providers.setup_provider(provider_key)\n except Exception as ex:\n- logger.error('Error setting up provider {}'.format(provider_key))\n+ logger.error('Error setting up provider %s', provider_key)\n logger.exception(ex)\n _failed_providers.add(provider_key)\n skip(provider_key)\n@@ -92,29 +93,35 @@\n for field in template_location:\n o = o[field]\n except (IndexError, KeyError):\n- logger.info(\"Cannot apply %s to %s in the template specification, ignoring.\",\n- repr(field), repr(o))\n+ logger.info(\"Cannot apply %r to %r in the template specification, ignoring.\", field, o)\n else:\n- if not isinstance(o, basestring):\n- raise ValueError(\"{} is not a string! (for template)\".format(repr(o)))\n+ if not isinstance(o, six.string_types):\n+ raise ValueError(\"{!r} is not a string! (for template)\".format(o))\n+ if not TEMPLATES:\n+ # There is nothing in TEMPLATES, that means no trackerbot URL and no data pulled.\n+ # This should normally not constitute an issue so continue.\n+ return o\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if o in templates:\n return o\n- logger.info(\n- \"Wanted template {} on {} but it is not there!\\n\".format(o, provider.key))\n+ logger.info(\"Wanted template %s on %s but it is not there!\", o, provider.key)\n pytest.skip('Template not available')\n \n \n def _small_template(provider):\n template = provider.data.get('small_template', None)\n if template:\n+ if not TEMPLATES:\n+ # Same as couple of lines above\n+ return template\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if template in templates:\n return template\n- logger.info(\n- \"Wanted template {} on {} but it is not there!\\n\".format(template, provider.key))\n+ else:\n+ pytest.skip('No small_template for provider {}'.format(provider.key))\n+ logger.info(\"Wanted template %s on %s but it is not there!\", template, provider.key)\n pytest.skip('Template not available')\n \n \n@@ -132,12 +139,16 @@\n def full_template(provider):\n template = provider.data.get('full_template', {})\n if template:\n+ if not TEMPLATES:\n+ # Same as couple of lines above\n+ return template\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if template['name'] in templates:\n return template\n- logger.info(\n- \"Wanted template {} on {} but it is not there!\\n\".format(template, provider.key))\n+ else:\n+ pytest.skip('No full_template for provider {}'.format(provider.key))\n+ logger.info(\"Wanted template %s on %s but it is not there!\", template, provider.key)\n pytest.skip('Template not available')\n", "issue": "Check if we have fallback if no trackerbot URL specified\nWe had some issues htat people with no trackerbot URL in config were not able to run test:\n- Investigate what is happening\n- Add fallback (do not check templates at all? 
Load them from all providers?)\n\n", "before_files": [{"content": "\"\"\"``setup_provider`` fixture\n\nIn test modules paramatrized with :py:func:`utils.testgen.provider_by_type` (should be\njust about any module that needs a provider to run its tests), this fixture will set up\nthe single provider needed to run that test.\n\nIf the provider setup fails, this fixture will record that failure and skip future tests\nusing the provider.\n\n\"\"\"\nimport pytest\n\nfrom fixtures.artifactor_plugin import art_client, get_test_idents\nfrom fixtures.templateloader import TEMPLATES\nfrom utils import providers\nfrom utils.log import logger\n\n# failed provider tracking for _setup_provider_fixture\n_failed_providers = set()\n\n\ndef _setup_provider(provider_key, request=None):\n def skip(provider_key, previous_fail=False):\n if request:\n node = request.node\n name, location = get_test_idents(node)\n skip_data = {'type': 'provider', 'reason': provider_key}\n art_client.fire_hook('skip_test', test_location=location, test_name=name,\n skip_data=skip_data)\n if previous_fail:\n raise pytest.skip('Provider {} failed to set up previously in another test, '\n 'skipping test'.format(provider_key))\n else:\n raise pytest.skip('Provider {} failed to set up this time, '\n 'skipping test'.format(provider_key))\n # This function is dynamically \"fixturized\" to setup up a specific provider,\n # optionally skipping the provider setup if that provider has previously failed.\n if provider_key in _failed_providers:\n skip(provider_key, previous_fail=True)\n\n try:\n providers.setup_provider(provider_key)\n except Exception as ex:\n logger.error('Error setting up provider {}'.format(provider_key))\n logger.exception(ex)\n _failed_providers.add(provider_key)\n skip(provider_key)\n\n\[email protected](scope='function')\ndef setup_provider(request, provider):\n \"\"\"Function-scoped fixture to set up a provider\"\"\"\n _setup_provider(provider.key, request)\n\n\[email protected](scope='module')\ndef setup_provider_modscope(request, provider):\n \"\"\"Function-scoped fixture to set up a provider\"\"\"\n _setup_provider(provider.key, request)\n\n\[email protected](scope='class')\ndef setup_provider_clsscope(request, provider):\n \"\"\"Module-scoped fixture to set up a provider\"\"\"\n _setup_provider(provider.key, request)\n\n\[email protected]\ndef setup_provider_funcscope(request, provider):\n \"\"\"Function-scoped fixture to set up a provider\n\n Note:\n\n While there are cases where this is useful, provider fixtures should\n be module-scoped the majority of the time.\n\n \"\"\"\n _setup_provider(provider.key, request)\n\n\[email protected](scope=\"session\")\ndef any_provider_session():\n providers.clear_providers() # To make it clean\n providers.setup_a_provider(validate=True, check_existing=True)\n\n\[email protected](scope=\"function\")\ndef template(template_location, provider):\n if template_location is not None:\n o = provider.data\n try:\n for field in template_location:\n o = o[field]\n except (IndexError, KeyError):\n logger.info(\"Cannot apply %s to %s in the template specification, ignoring.\",\n repr(field), repr(o))\n else:\n if not isinstance(o, basestring):\n raise ValueError(\"{} is not a string! 
(for template)\".format(repr(o)))\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if o in templates:\n return o\n logger.info(\n \"Wanted template {} on {} but it is not there!\\n\".format(o, provider.key))\n pytest.skip('Template not available')\n\n\ndef _small_template(provider):\n template = provider.data.get('small_template', None)\n if template:\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if template in templates:\n return template\n logger.info(\n \"Wanted template {} on {} but it is not there!\\n\".format(template, provider.key))\n pytest.skip('Template not available')\n\n\[email protected](scope=\"function\")\ndef small_template(provider):\n return _small_template(provider)\n\n\[email protected](scope=\"module\")\ndef small_template_modscope(provider):\n return _small_template(provider)\n\n\[email protected](scope=\"function\")\ndef full_template(provider):\n template = provider.data.get('full_template', {})\n if template:\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if template['name'] in templates:\n return template\n logger.info(\n \"Wanted template {} on {} but it is not there!\\n\".format(template, provider.key))\n pytest.skip('Template not available')\n\n\[email protected](scope=\"function\")\ndef provisioning(provider):\n return provider.data['provisioning']\n", "path": "fixtures/provider.py"}], "after_files": [{"content": "\"\"\"``setup_provider`` fixture\n\nIn test modules paramatrized with :py:func:`utils.testgen.provider_by_type` (should be\njust about any module that needs a provider to run its tests), this fixture will set up\nthe single provider needed to run that test.\n\nIf the provider setup fails, this fixture will record that failure and skip future tests\nusing the provider.\n\n\"\"\"\nimport pytest\nimport six\n\nfrom fixtures.artifactor_plugin import art_client, get_test_idents\nfrom fixtures.templateloader import TEMPLATES\nfrom utils import providers\nfrom utils.log import logger\n\n# failed provider tracking for _setup_provider_fixture\n_failed_providers = set()\n\n\ndef _setup_provider(provider_key, request=None):\n def skip(provider_key, previous_fail=False):\n if request:\n node = request.node\n name, location = get_test_idents(node)\n skip_data = {'type': 'provider', 'reason': provider_key}\n art_client.fire_hook('skip_test', test_location=location, test_name=name,\n skip_data=skip_data)\n if previous_fail:\n raise pytest.skip('Provider {} failed to set up previously in another test, '\n 'skipping test'.format(provider_key))\n else:\n raise pytest.skip('Provider {} failed to set up this time, '\n 'skipping test'.format(provider_key))\n # This function is dynamically \"fixturized\" to setup up a specific provider,\n # optionally skipping the provider setup if that provider has previously failed.\n if provider_key in _failed_providers:\n skip(provider_key, previous_fail=True)\n\n try:\n providers.setup_provider(provider_key)\n except Exception as ex:\n logger.error('Error setting up provider %s', provider_key)\n logger.exception(ex)\n _failed_providers.add(provider_key)\n skip(provider_key)\n\n\[email protected](scope='function')\ndef setup_provider(request, provider):\n \"\"\"Function-scoped fixture to set up a provider\"\"\"\n _setup_provider(provider.key, request)\n\n\[email protected](scope='module')\ndef setup_provider_modscope(request, provider):\n \"\"\"Function-scoped fixture to set up a provider\"\"\"\n _setup_provider(provider.key, 
request)\n\n\[email protected](scope='class')\ndef setup_provider_clsscope(request, provider):\n \"\"\"Module-scoped fixture to set up a provider\"\"\"\n _setup_provider(provider.key, request)\n\n\[email protected]\ndef setup_provider_funcscope(request, provider):\n \"\"\"Function-scoped fixture to set up a provider\n\n Note:\n\n While there are cases where this is useful, provider fixtures should\n be module-scoped the majority of the time.\n\n \"\"\"\n _setup_provider(provider.key, request)\n\n\[email protected](scope=\"session\")\ndef any_provider_session():\n providers.clear_providers() # To make it clean\n providers.setup_a_provider(validate=True, check_existing=True)\n\n\[email protected](scope=\"function\")\ndef template(template_location, provider):\n if template_location is not None:\n o = provider.data\n try:\n for field in template_location:\n o = o[field]\n except (IndexError, KeyError):\n logger.info(\"Cannot apply %r to %r in the template specification, ignoring.\", field, o)\n else:\n if not isinstance(o, six.string_types):\n raise ValueError(\"{!r} is not a string! (for template)\".format(o))\n if not TEMPLATES:\n # There is nothing in TEMPLATES, that means no trackerbot URL and no data pulled.\n # This should normally not constitute an issue so continue.\n return o\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if o in templates:\n return o\n logger.info(\"Wanted template %s on %s but it is not there!\", o, provider.key)\n pytest.skip('Template not available')\n\n\ndef _small_template(provider):\n template = provider.data.get('small_template', None)\n if template:\n if not TEMPLATES:\n # Same as couple of lines above\n return template\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if template in templates:\n return template\n else:\n pytest.skip('No small_template for provider {}'.format(provider.key))\n logger.info(\"Wanted template %s on %s but it is not there!\", template, provider.key)\n pytest.skip('Template not available')\n\n\[email protected](scope=\"function\")\ndef small_template(provider):\n return _small_template(provider)\n\n\[email protected](scope=\"module\")\ndef small_template_modscope(provider):\n return _small_template(provider)\n\n\[email protected](scope=\"function\")\ndef full_template(provider):\n template = provider.data.get('full_template', {})\n if template:\n if not TEMPLATES:\n # Same as couple of lines above\n return template\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if template['name'] in templates:\n return template\n else:\n pytest.skip('No full_template for provider {}'.format(provider.key))\n logger.info(\"Wanted template %s on %s but it is not there!\", template, provider.key)\n pytest.skip('Template not available')\n\n\[email protected](scope=\"function\")\ndef provisioning(provider):\n return provider.data['provisioning']\n", "path": "fixtures/provider.py"}]} | 1,679 | 756 |
gh_patches_debug_12356 | rasdani/github-patches | git_diff | mathesar-foundation__mathesar-2514 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
UX and frontend implementation for upgrades
### Notes
- [Current Figma design](https://www.figma.com/file/xHb5oIqye3fnXtb2heRH34/Styling?node-id=3804%3A28864&t=HyNupYmgZ9PqjEGr-0)
- [Issue with user flow](https://github.com/centerofci/mathesar/issues/227)
### Tasks
- Finalize the user flow
- Request Figma UX changes if needed
- Implement the frontend (create additional GitHub issues if needed)
### Feasibility
Watchtower, our docker image upgrade backend, doesn't report progress. The only progress reporting available to the frontend will be periodically calling some HTTP endpoint on the service container to check whether it is online or not.
The final UX should take this into account.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mathesar/urls.py`
Content:
```
1 from django.contrib.auth.views import LoginView
2 from django.urls import include, path, re_path
3 from rest_framework_nested import routers
4
5 from mathesar import views
6 from mathesar.api.db import viewsets as db_viewsets
7 from mathesar.api.ui import viewsets as ui_viewsets
8 from mathesar.users.password_reset import MathesarPasswordResetConfirmView
9
10 db_router = routers.DefaultRouter()
11 db_router.register(r'tables', db_viewsets.TableViewSet, basename='table')
12 db_router.register(r'queries', db_viewsets.QueryViewSet, basename='query')
13 db_router.register(r'links', db_viewsets.LinkViewSet, basename='links')
14 db_router.register(r'schemas', db_viewsets.SchemaViewSet, basename='schema')
15 db_router.register(r'databases', db_viewsets.DatabaseViewSet, basename='database')
16 db_router.register(r'data_files', db_viewsets.DataFileViewSet, basename='data-file')
17
18 db_table_router = routers.NestedSimpleRouter(db_router, r'tables', lookup='table')
19 db_table_router.register(r'records', db_viewsets.RecordViewSet, basename='table-record')
20 db_table_router.register(r'settings', db_viewsets.TableSettingsViewSet, basename='table-setting')
21 db_table_router.register(r'columns', db_viewsets.ColumnViewSet, basename='table-column')
22 db_table_router.register(r'constraints', db_viewsets.ConstraintViewSet, basename='table-constraint')
23
24 ui_router = routers.DefaultRouter()
25 ui_router.register(r'version', ui_viewsets.VersionViewSet, basename='version')
26 ui_router.register(r'databases', ui_viewsets.DatabaseViewSet, basename='database')
27 ui_router.register(r'users', ui_viewsets.UserViewSet, basename='user')
28 ui_router.register(r'database_roles', ui_viewsets.DatabaseRoleViewSet, basename='database_role')
29 ui_router.register(r'schema_roles', ui_viewsets.SchemaRoleViewSet, basename='schema_role')
30
31 urlpatterns = [
32 path('api/db/v0/', include(db_router.urls)),
33 path('api/db/v0/', include(db_table_router.urls)),
34 path('api/ui/v0/', include(ui_router.urls)),
35 path('api/ui/v0/reflect/', views.reflect_all, name='reflect_all'),
36 path('auth/password_reset_confirm', MathesarPasswordResetConfirmView.as_view(), name='password_reset_confirm'),
37 path('auth/login/', LoginView.as_view(redirect_authenticated_user=True), name='login'),
38 path('auth/', include('django.contrib.auth.urls')),
39 path('', views.home, name='home'),
40 path('profile/', views.profile, name='profile'),
41 path('administration/', views.admin_home, name='admin_home'),
42 path('administration/users/', views.admin_home, name='admin_users_home'),
43 path('administration/users/<user_id>/', views.admin_home, name='admin_users_edit'),
44 path('administration/general/', views.admin_home, name='admin_general'),
45 path('<db_name>/', views.schemas, name='schemas'),
46 re_path(
47 r'^(?P<db_name>\w+)/(?P<schema_id>\w+)/',
48 views.schema_home,
49 name='schema_home'
50 ),
51 ]
52
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mathesar/urls.py b/mathesar/urls.py
--- a/mathesar/urls.py
+++ b/mathesar/urls.py
@@ -41,7 +41,7 @@
path('administration/', views.admin_home, name='admin_home'),
path('administration/users/', views.admin_home, name='admin_users_home'),
path('administration/users/<user_id>/', views.admin_home, name='admin_users_edit'),
- path('administration/general/', views.admin_home, name='admin_general'),
+ path('administration/update/', views.admin_home, name='admin_update'),
path('<db_name>/', views.schemas, name='schemas'),
re_path(
r'^(?P<db_name>\w+)/(?P<schema_id>\w+)/',
| {"golden_diff": "diff --git a/mathesar/urls.py b/mathesar/urls.py\n--- a/mathesar/urls.py\n+++ b/mathesar/urls.py\n@@ -41,7 +41,7 @@\n path('administration/', views.admin_home, name='admin_home'),\n path('administration/users/', views.admin_home, name='admin_users_home'),\n path('administration/users/<user_id>/', views.admin_home, name='admin_users_edit'),\n- path('administration/general/', views.admin_home, name='admin_general'),\n+ path('administration/update/', views.admin_home, name='admin_update'),\n path('<db_name>/', views.schemas, name='schemas'),\n re_path(\n r'^(?P<db_name>\\w+)/(?P<schema_id>\\w+)/',\n", "issue": "UX and frontend implementation for upgrades\n### Notes\r\n- [Current Figma design](https://www.figma.com/file/xHb5oIqye3fnXtb2heRH34/Styling?node-id=3804%3A28864&t=HyNupYmgZ9PqjEGr-0)\r\n- [Issue with user flow](https://github.com/centerofci/mathesar/issues/227)\r\n\r\n### Tasks\r\n- Finalize the user flow\r\n- Request Figma UX changes if needed\r\n- Implement the frontend (create additional GitHub issues if needed)\r\n\r\n### Feasibility \r\nWatchtower, our docker image upgrade backend, doesn't report progress. The only progress reporting available to the frontend will be periodically calling some HTTP endpoint on the service container to check whether it is online or not.\r\n\r\nThe final UX should take this into account.\n", "before_files": [{"content": "from django.contrib.auth.views import LoginView\nfrom django.urls import include, path, re_path\nfrom rest_framework_nested import routers\n\nfrom mathesar import views\nfrom mathesar.api.db import viewsets as db_viewsets\nfrom mathesar.api.ui import viewsets as ui_viewsets\nfrom mathesar.users.password_reset import MathesarPasswordResetConfirmView\n\ndb_router = routers.DefaultRouter()\ndb_router.register(r'tables', db_viewsets.TableViewSet, basename='table')\ndb_router.register(r'queries', db_viewsets.QueryViewSet, basename='query')\ndb_router.register(r'links', db_viewsets.LinkViewSet, basename='links')\ndb_router.register(r'schemas', db_viewsets.SchemaViewSet, basename='schema')\ndb_router.register(r'databases', db_viewsets.DatabaseViewSet, basename='database')\ndb_router.register(r'data_files', db_viewsets.DataFileViewSet, basename='data-file')\n\ndb_table_router = routers.NestedSimpleRouter(db_router, r'tables', lookup='table')\ndb_table_router.register(r'records', db_viewsets.RecordViewSet, basename='table-record')\ndb_table_router.register(r'settings', db_viewsets.TableSettingsViewSet, basename='table-setting')\ndb_table_router.register(r'columns', db_viewsets.ColumnViewSet, basename='table-column')\ndb_table_router.register(r'constraints', db_viewsets.ConstraintViewSet, basename='table-constraint')\n\nui_router = routers.DefaultRouter()\nui_router.register(r'version', ui_viewsets.VersionViewSet, basename='version')\nui_router.register(r'databases', ui_viewsets.DatabaseViewSet, basename='database')\nui_router.register(r'users', ui_viewsets.UserViewSet, basename='user')\nui_router.register(r'database_roles', ui_viewsets.DatabaseRoleViewSet, basename='database_role')\nui_router.register(r'schema_roles', ui_viewsets.SchemaRoleViewSet, basename='schema_role')\n\nurlpatterns = [\n path('api/db/v0/', include(db_router.urls)),\n path('api/db/v0/', include(db_table_router.urls)),\n path('api/ui/v0/', include(ui_router.urls)),\n path('api/ui/v0/reflect/', views.reflect_all, name='reflect_all'),\n path('auth/password_reset_confirm', MathesarPasswordResetConfirmView.as_view(), name='password_reset_confirm'),\n 
path('auth/login/', LoginView.as_view(redirect_authenticated_user=True), name='login'),\n path('auth/', include('django.contrib.auth.urls')),\n path('', views.home, name='home'),\n path('profile/', views.profile, name='profile'),\n path('administration/', views.admin_home, name='admin_home'),\n path('administration/users/', views.admin_home, name='admin_users_home'),\n path('administration/users/<user_id>/', views.admin_home, name='admin_users_edit'),\n path('administration/general/', views.admin_home, name='admin_general'),\n path('<db_name>/', views.schemas, name='schemas'),\n re_path(\n r'^(?P<db_name>\\w+)/(?P<schema_id>\\w+)/',\n views.schema_home,\n name='schema_home'\n ),\n]\n", "path": "mathesar/urls.py"}], "after_files": [{"content": "from django.contrib.auth.views import LoginView\nfrom django.urls import include, path, re_path\nfrom rest_framework_nested import routers\n\nfrom mathesar import views\nfrom mathesar.api.db import viewsets as db_viewsets\nfrom mathesar.api.ui import viewsets as ui_viewsets\nfrom mathesar.users.password_reset import MathesarPasswordResetConfirmView\n\ndb_router = routers.DefaultRouter()\ndb_router.register(r'tables', db_viewsets.TableViewSet, basename='table')\ndb_router.register(r'queries', db_viewsets.QueryViewSet, basename='query')\ndb_router.register(r'links', db_viewsets.LinkViewSet, basename='links')\ndb_router.register(r'schemas', db_viewsets.SchemaViewSet, basename='schema')\ndb_router.register(r'databases', db_viewsets.DatabaseViewSet, basename='database')\ndb_router.register(r'data_files', db_viewsets.DataFileViewSet, basename='data-file')\n\ndb_table_router = routers.NestedSimpleRouter(db_router, r'tables', lookup='table')\ndb_table_router.register(r'records', db_viewsets.RecordViewSet, basename='table-record')\ndb_table_router.register(r'settings', db_viewsets.TableSettingsViewSet, basename='table-setting')\ndb_table_router.register(r'columns', db_viewsets.ColumnViewSet, basename='table-column')\ndb_table_router.register(r'constraints', db_viewsets.ConstraintViewSet, basename='table-constraint')\n\nui_router = routers.DefaultRouter()\nui_router.register(r'version', ui_viewsets.VersionViewSet, basename='version')\nui_router.register(r'databases', ui_viewsets.DatabaseViewSet, basename='database')\nui_router.register(r'users', ui_viewsets.UserViewSet, basename='user')\nui_router.register(r'database_roles', ui_viewsets.DatabaseRoleViewSet, basename='database_role')\nui_router.register(r'schema_roles', ui_viewsets.SchemaRoleViewSet, basename='schema_role')\n\nurlpatterns = [\n path('api/db/v0/', include(db_router.urls)),\n path('api/db/v0/', include(db_table_router.urls)),\n path('api/ui/v0/', include(ui_router.urls)),\n path('api/ui/v0/reflect/', views.reflect_all, name='reflect_all'),\n path('auth/password_reset_confirm', MathesarPasswordResetConfirmView.as_view(), name='password_reset_confirm'),\n path('auth/login/', LoginView.as_view(redirect_authenticated_user=True), name='login'),\n path('auth/', include('django.contrib.auth.urls')),\n path('', views.home, name='home'),\n path('profile/', views.profile, name='profile'),\n path('administration/', views.admin_home, name='admin_home'),\n path('administration/users/', views.admin_home, name='admin_users_home'),\n path('administration/users/<user_id>/', views.admin_home, name='admin_users_edit'),\n path('administration/update/', views.admin_home, name='admin_update'),\n path('<db_name>/', views.schemas, name='schemas'),\n re_path(\n r'^(?P<db_name>\\w+)/(?P<schema_id>\\w+)/',\n 
views.schema_home,\n name='schema_home'\n ),\n]\n", "path": "mathesar/urls.py"}]} | 1,186 | 165 |
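The diff above is a pure rename (`admin_general` becomes `admin_update`, path segment `general/` becomes `update/`). Because Django routes are referenced by name, callers resolve the new page with `reverse` rather than a hard-coded path; a minimal illustration (assumes a configured Django project with these `urlpatterns` loaded):

```python
# Illustrative only: resolving the renamed route by name keeps templates
# and redirects working after the path segment changes.
from django.urls import reverse

url = reverse('admin_update')  # -> '/administration/update/'
```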
gh_patches_debug_19852 | rasdani/github-patches | git_diff | open-mmlab__mmcv-256 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug: Deadlock 'fix' introduced deadlock.
Since https://github.com/open-mmlab/mmcv/pull/252 was merged, my mmdetection code hangs after evaluation. After reverting the specific commit (`git revert c203419f57c2e25ab4307420b9a3688f99e01dea`), my code runs again as expected.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mmcv/runner/hooks/logger/text.py`
Content:
```
1 # Copyright (c) Open-MMLab. All rights reserved.
2 import datetime
3 import os.path as osp
4 from collections import OrderedDict
5
6 import torch
7 import torch.distributed as dist
8
9 import mmcv
10 from ..hook import HOOKS
11 from .base import LoggerHook
12
13
14 @HOOKS.register_module
15 class TextLoggerHook(LoggerHook):
16
17 def __init__(self, interval=10, ignore_last=True, reset_flag=False):
18 super(TextLoggerHook, self).__init__(interval, ignore_last, reset_flag)
19 self.time_sec_tot = 0
20
21 def before_run(self, runner):
22 super(TextLoggerHook, self).before_run(runner)
23 self.start_iter = runner.iter
24 self.json_log_path = osp.join(runner.work_dir,
25 f'{runner.timestamp}.log.json')
26 if runner.meta is not None:
27 self._dump_log(runner.meta, runner)
28
29 def _get_max_memory(self, runner):
30 mem = torch.cuda.max_memory_allocated()
31 mem_mb = torch.tensor([mem / (1024 * 1024)],
32 dtype=torch.int,
33 device=torch.device('cuda'))
34 if runner.world_size > 1:
35 dist.reduce(mem_mb, 0, op=dist.ReduceOp.MAX)
36 return mem_mb.item()
37
38 def _log_info(self, log_dict, runner):
39 if runner.mode == 'train':
40 log_str = f'Epoch [{log_dict["epoch"]}]' \
41 f'[{log_dict["iter"]}/{len(runner.data_loader)}]\t' \
42 f'lr: {log_dict["lr"]:.5f}, '
43 if 'time' in log_dict.keys():
44 self.time_sec_tot += (log_dict['time'] * self.interval)
45 time_sec_avg = self.time_sec_tot / (
46 runner.iter - self.start_iter + 1)
47 eta_sec = time_sec_avg * (runner.max_iters - runner.iter - 1)
48 eta_str = str(datetime.timedelta(seconds=int(eta_sec)))
49 log_str += f'eta: {eta_str}, '
50 log_str += f'time: {log_dict["time"]:.3f}, ' \
51 f'data_time: {log_dict["data_time"]:.3f}, '
52 # statistic memory
53 if torch.cuda.is_available():
54 log_str += f'memory: {log_dict["memory"]}, '
55 else:
56 log_str = 'Epoch({log_dict["mode"]}) ' \
57 f'[{log_dict["epoch"] - 1}][{log_dict["iter"]}]\t'
58 log_items = []
59 for name, val in log_dict.items():
60 # TODO: resolve this hack
61 # these items have been in log_str
62 if name in [
63 'mode', 'Epoch', 'iter', 'lr', 'time', 'data_time',
64 'memory', 'epoch'
65 ]:
66 continue
67 if isinstance(val, float):
68 val = f'{val:.4f}'
69 log_items.append(f'{name}: {val}')
70 log_str += ', '.join(log_items)
71 runner.logger.info(log_str)
72
73 def _dump_log(self, log_dict, runner):
74 # dump log in json format
75 json_log = OrderedDict()
76 for k, v in log_dict.items():
77 json_log[k] = self._round_float(v)
78 # only append log at last line
79 if runner.rank == 0:
80 with open(self.json_log_path, 'a+') as f:
81 mmcv.dump(json_log, f, file_format='json')
82 f.write('\n')
83
84 def _round_float(self, items):
85 if isinstance(items, list):
86 return [self._round_float(item) for item in items]
87 elif isinstance(items, float):
88 return round(items, 5)
89 else:
90 return items
91
92 def log(self, runner):
93 log_dict = OrderedDict()
94 # training mode if the output contains the key "time"
95 mode = 'train' if 'time' in runner.log_buffer.output else 'val'
96 log_dict['mode'] = mode
97 log_dict['epoch'] = runner.epoch + 1
98 log_dict['iter'] = runner.inner_iter + 1
99 # only record lr of the first param group
100 log_dict['lr'] = runner.current_lr()[0]
101 memory = None
102 if torch.cuda.is_available():
103 memory = self._get_max_memory(runner)
104 if mode == 'train':
105 log_dict['time'] = runner.log_buffer.output['time']
106 log_dict['data_time'] = runner.log_buffer.output['data_time']
107
108 # statistic memory
109 if memory is not None:
110 log_dict['memory'] = memory
111 for name, val in runner.log_buffer.output.items():
112 if name in ['time', 'data_time']:
113 continue
114 log_dict[name] = val
115
116 self._log_info(log_dict, runner)
117 self._dump_log(log_dict, runner)
118
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mmcv/runner/hooks/logger/text.py b/mmcv/runner/hooks/logger/text.py
--- a/mmcv/runner/hooks/logger/text.py
+++ b/mmcv/runner/hooks/logger/text.py
@@ -98,16 +98,13 @@
log_dict['iter'] = runner.inner_iter + 1
# only record lr of the first param group
log_dict['lr'] = runner.current_lr()[0]
- memory = None
- if torch.cuda.is_available():
- memory = self._get_max_memory(runner)
if mode == 'train':
log_dict['time'] = runner.log_buffer.output['time']
log_dict['data_time'] = runner.log_buffer.output['data_time']
# statistic memory
- if memory is not None:
- log_dict['memory'] = memory
+ if torch.cuda.is_available():
+ log_dict['memory'] = self._get_max_memory(runner)
for name, val in runner.log_buffer.output.items():
if name in ['time', 'data_time']:
continue
| {"golden_diff": "diff --git a/mmcv/runner/hooks/logger/text.py b/mmcv/runner/hooks/logger/text.py\n--- a/mmcv/runner/hooks/logger/text.py\n+++ b/mmcv/runner/hooks/logger/text.py\n@@ -98,16 +98,13 @@\n log_dict['iter'] = runner.inner_iter + 1\n # only record lr of the first param group\n log_dict['lr'] = runner.current_lr()[0]\n- memory = None\n- if torch.cuda.is_available():\n- memory = self._get_max_memory(runner)\n if mode == 'train':\n log_dict['time'] = runner.log_buffer.output['time']\n log_dict['data_time'] = runner.log_buffer.output['data_time']\n \n # statistic memory\n- if memory is not None:\n- log_dict['memory'] = memory\n+ if torch.cuda.is_available():\n+ log_dict['memory'] = self._get_max_memory(runner)\n for name, val in runner.log_buffer.output.items():\n if name in ['time', 'data_time']:\n continue\n", "issue": "Bug: Deadlock 'fix' introduced deadlock.\nSince https://github.com/open-mmlab/mmcv/pull/252 is merged my mmdetection code hangs after evaluation. After reverting the specific commit `git revert c203419f57c2e25ab4307420b9a3688f99e01dea`, my code runs again as expected..\n", "before_files": [{"content": "# Copyright (c) Open-MMLab. All rights reserved.\nimport datetime\nimport os.path as osp\nfrom collections import OrderedDict\n\nimport torch\nimport torch.distributed as dist\n\nimport mmcv\nfrom ..hook import HOOKS\nfrom .base import LoggerHook\n\n\[email protected]_module\nclass TextLoggerHook(LoggerHook):\n\n def __init__(self, interval=10, ignore_last=True, reset_flag=False):\n super(TextLoggerHook, self).__init__(interval, ignore_last, reset_flag)\n self.time_sec_tot = 0\n\n def before_run(self, runner):\n super(TextLoggerHook, self).before_run(runner)\n self.start_iter = runner.iter\n self.json_log_path = osp.join(runner.work_dir,\n f'{runner.timestamp}.log.json')\n if runner.meta is not None:\n self._dump_log(runner.meta, runner)\n\n def _get_max_memory(self, runner):\n mem = torch.cuda.max_memory_allocated()\n mem_mb = torch.tensor([mem / (1024 * 1024)],\n dtype=torch.int,\n device=torch.device('cuda'))\n if runner.world_size > 1:\n dist.reduce(mem_mb, 0, op=dist.ReduceOp.MAX)\n return mem_mb.item()\n\n def _log_info(self, log_dict, runner):\n if runner.mode == 'train':\n log_str = f'Epoch [{log_dict[\"epoch\"]}]' \\\n f'[{log_dict[\"iter\"]}/{len(runner.data_loader)}]\\t' \\\n f'lr: {log_dict[\"lr\"]:.5f}, '\n if 'time' in log_dict.keys():\n self.time_sec_tot += (log_dict['time'] * self.interval)\n time_sec_avg = self.time_sec_tot / (\n runner.iter - self.start_iter + 1)\n eta_sec = time_sec_avg * (runner.max_iters - runner.iter - 1)\n eta_str = str(datetime.timedelta(seconds=int(eta_sec)))\n log_str += f'eta: {eta_str}, '\n log_str += f'time: {log_dict[\"time\"]:.3f}, ' \\\n f'data_time: {log_dict[\"data_time\"]:.3f}, '\n # statistic memory\n if torch.cuda.is_available():\n log_str += f'memory: {log_dict[\"memory\"]}, '\n else:\n log_str = 'Epoch({log_dict[\"mode\"]}) ' \\\n f'[{log_dict[\"epoch\"] - 1}][{log_dict[\"iter\"]}]\\t'\n log_items = []\n for name, val in log_dict.items():\n # TODO: resolve this hack\n # these items have been in log_str\n if name in [\n 'mode', 'Epoch', 'iter', 'lr', 'time', 'data_time',\n 'memory', 'epoch'\n ]:\n continue\n if isinstance(val, float):\n val = f'{val:.4f}'\n log_items.append(f'{name}: {val}')\n log_str += ', '.join(log_items)\n runner.logger.info(log_str)\n\n def _dump_log(self, log_dict, runner):\n # dump log in json format\n json_log = OrderedDict()\n for k, v in log_dict.items():\n json_log[k] = 
self._round_float(v)\n # only append log at last line\n if runner.rank == 0:\n with open(self.json_log_path, 'a+') as f:\n mmcv.dump(json_log, f, file_format='json')\n f.write('\\n')\n\n def _round_float(self, items):\n if isinstance(items, list):\n return [self._round_float(item) for item in items]\n elif isinstance(items, float):\n return round(items, 5)\n else:\n return items\n\n def log(self, runner):\n log_dict = OrderedDict()\n # training mode if the output contains the key \"time\"\n mode = 'train' if 'time' in runner.log_buffer.output else 'val'\n log_dict['mode'] = mode\n log_dict['epoch'] = runner.epoch + 1\n log_dict['iter'] = runner.inner_iter + 1\n # only record lr of the first param group\n log_dict['lr'] = runner.current_lr()[0]\n memory = None\n if torch.cuda.is_available():\n memory = self._get_max_memory(runner)\n if mode == 'train':\n log_dict['time'] = runner.log_buffer.output['time']\n log_dict['data_time'] = runner.log_buffer.output['data_time']\n\n # statistic memory\n if memory is not None:\n log_dict['memory'] = memory\n for name, val in runner.log_buffer.output.items():\n if name in ['time', 'data_time']:\n continue\n log_dict[name] = val\n\n self._log_info(log_dict, runner)\n self._dump_log(log_dict, runner)\n", "path": "mmcv/runner/hooks/logger/text.py"}], "after_files": [{"content": "# Copyright (c) Open-MMLab. All rights reserved.\nimport datetime\nimport os.path as osp\nfrom collections import OrderedDict\n\nimport torch\nimport torch.distributed as dist\n\nimport mmcv\nfrom ..hook import HOOKS\nfrom .base import LoggerHook\n\n\[email protected]_module\nclass TextLoggerHook(LoggerHook):\n\n def __init__(self, interval=10, ignore_last=True, reset_flag=False):\n super(TextLoggerHook, self).__init__(interval, ignore_last, reset_flag)\n self.time_sec_tot = 0\n\n def before_run(self, runner):\n super(TextLoggerHook, self).before_run(runner)\n self.start_iter = runner.iter\n self.json_log_path = osp.join(runner.work_dir,\n f'{runner.timestamp}.log.json')\n if runner.meta is not None:\n self._dump_log(runner.meta, runner)\n\n def _get_max_memory(self, runner):\n mem = torch.cuda.max_memory_allocated()\n mem_mb = torch.tensor([mem / (1024 * 1024)],\n dtype=torch.int,\n device=torch.device('cuda'))\n if runner.world_size > 1:\n dist.reduce(mem_mb, 0, op=dist.ReduceOp.MAX)\n return mem_mb.item()\n\n def _log_info(self, log_dict, runner):\n if runner.mode == 'train':\n log_str = f'Epoch [{log_dict[\"epoch\"]}]' \\\n f'[{log_dict[\"iter\"]}/{len(runner.data_loader)}]\\t' \\\n f'lr: {log_dict[\"lr\"]:.5f}, '\n if 'time' in log_dict.keys():\n self.time_sec_tot += (log_dict['time'] * self.interval)\n time_sec_avg = self.time_sec_tot / (\n runner.iter - self.start_iter + 1)\n eta_sec = time_sec_avg * (runner.max_iters - runner.iter - 1)\n eta_str = str(datetime.timedelta(seconds=int(eta_sec)))\n log_str += f'eta: {eta_str}, '\n log_str += f'time: {log_dict[\"time\"]:.3f}, ' \\\n f'data_time: {log_dict[\"data_time\"]:.3f}, '\n # statistic memory\n if torch.cuda.is_available():\n log_str += f'memory: {log_dict[\"memory\"]}, '\n else:\n log_str = 'Epoch({log_dict[\"mode\"]}) ' \\\n f'[{log_dict[\"epoch\"] - 1}][{log_dict[\"iter\"]}]\\t'\n log_items = []\n for name, val in log_dict.items():\n # TODO: resolve this hack\n # these items have been in log_str\n if name in [\n 'mode', 'Epoch', 'iter', 'lr', 'time', 'data_time',\n 'memory', 'epoch'\n ]:\n continue\n if isinstance(val, float):\n val = f'{val:.4f}'\n log_items.append(f'{name}: {val}')\n log_str += ', '.join(log_items)\n 
runner.logger.info(log_str)\n\n def _dump_log(self, log_dict, runner):\n # dump log in json format\n json_log = OrderedDict()\n for k, v in log_dict.items():\n json_log[k] = self._round_float(v)\n # only append log at last line\n if runner.rank == 0:\n with open(self.json_log_path, 'a+') as f:\n mmcv.dump(json_log, f, file_format='json')\n f.write('\\n')\n\n def _round_float(self, items):\n if isinstance(items, list):\n return [self._round_float(item) for item in items]\n elif isinstance(items, float):\n return round(items, 5)\n else:\n return items\n\n def log(self, runner):\n log_dict = OrderedDict()\n # training mode if the output contains the key \"time\"\n mode = 'train' if 'time' in runner.log_buffer.output else 'val'\n log_dict['mode'] = mode\n log_dict['epoch'] = runner.epoch + 1\n log_dict['iter'] = runner.inner_iter + 1\n # only record lr of the first param group\n log_dict['lr'] = runner.current_lr()[0]\n if mode == 'train':\n log_dict['time'] = runner.log_buffer.output['time']\n log_dict['data_time'] = runner.log_buffer.output['data_time']\n\n # statistic memory\n if torch.cuda.is_available():\n log_dict['memory'] = self._get_max_memory(runner)\n for name, val in runner.log_buffer.output.items():\n if name in ['time', 'data_time']:\n continue\n log_dict[name] = val\n\n self._log_info(log_dict, runner)\n self._dump_log(log_dict, runner)\n", "path": "mmcv/runner/hooks/logger/text.py"}]} | 1,664 | 237 |
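The hang reported above is the classic failure mode of an unmatched collective: `dist.reduce` inside `_get_max_memory` must be entered by every rank in the process group, and moving the call out of the train-only branch let it run on code paths (evaluation) that not all ranks take together. A distilled sketch of the hazard (the launcher and rank logic are assumptions for illustration, e.g. `torchrun --nproc_per_node=2`):

```python
# Sketch: dist.reduce is a collective -- every rank must call it, or the
# ranks that do call it block forever waiting for the ones that don't.
import torch
import torch.distributed as dist

def log_step(rank, training):
    mem = torch.tensor([float(rank)])
    if training:
        # Safe: all ranks take this branch in lockstep during training.
        dist.reduce(mem, dst=0, op=dist.ReduceOp.MAX)
    # If the reduce instead ran unconditionally while only some ranks
    # reached log_step during evaluation, those ranks would hang -- the
    # symptom reported after PR #252.
```

The golden diff restores the invariant by computing memory stats only inside the `mode == 'train'` path.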
gh_patches_debug_21010 | rasdani/github-patches | git_diff | aws-cloudformation__cfn-lint-3169 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Condition Specified but with no condition passes linting but fails deploy
### CloudFormation Lint Version
0.83.1
### What operating system are you using?
mac/ubuntu
### Describe the bug
In a CFN template, if you specify the root-level item `Conditions` but have no conditions under it, the template passes cfn-lint but always fails on deploy.
### Expected behavior
cfn-lint should fail if there is a root-level `Conditions` object with no entries under it.
### Reproduction template
```
AWSTemplateFormatVersion: "2010-09-09"
Parameters:
myParam
Conditions:
Resources:
myTopic:
Type: AWS::SNS::Topic
Properties:
DisplayName: mytopic
TopicName: mytopic
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cfnlint/rules/conditions/Configuration.py`
Content:
```
1 """
2 Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 SPDX-License-Identifier: MIT-0
4 """
5
6 from cfnlint.rules import CloudFormationLintRule, RuleMatch
7
8
9 class Configuration(CloudFormationLintRule):
10 """Check if Conditions are configured correctly"""
11
12 id = "E8001"
13 shortdesc = "Conditions have appropriate properties"
14 description = "Check if Conditions are properly configured"
15 source_url = "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/conditions-section-structure.html"
16 tags = ["conditions"]
17
18 condition_keys = [
19 "Condition",
20 "Fn::And",
21 "Fn::Equals",
22 "Fn::Not",
23 "Fn::Or",
24 ]
25
26 def match(self, cfn):
27 matches = []
28
29 conditions = cfn.template.get("Conditions", {})
30 if conditions:
31 for condname, condobj in conditions.items():
32 if not isinstance(condobj, dict):
33 message = "Condition {0} has invalid property"
34 matches.append(
35 RuleMatch(["Conditions", condname], message.format(condname))
36 )
37 else:
38 if len(condobj) != 1:
39 message = "Condition {0} has too many intrinsic conditions"
40 matches.append(
41 RuleMatch(
42 ["Conditions", condname], message.format(condname)
43 )
44 )
45 else:
46 for k, _ in condobj.items():
47 if k not in self.condition_keys:
48 message = "Condition {0} has invalid property {1}"
49 matches.append(
50 RuleMatch(
51 ["Conditions", condname] + [k],
52 message.format(condname, k),
53 )
54 )
55
56 return matches
57
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/cfnlint/rules/conditions/Configuration.py b/src/cfnlint/rules/conditions/Configuration.py
--- a/src/cfnlint/rules/conditions/Configuration.py
+++ b/src/cfnlint/rules/conditions/Configuration.py
@@ -26,8 +26,10 @@
def match(self, cfn):
matches = []
- conditions = cfn.template.get("Conditions", {})
- if conditions:
+ if "Conditions" not in cfn.template:
+ return matches
+ conditions = cfn.template.get("Conditions", None)
+ if isinstance(conditions, dict):
for condname, condobj in conditions.items():
if not isinstance(condobj, dict):
message = "Condition {0} has invalid property"
@@ -52,5 +54,12 @@
message.format(condname, k),
)
)
+ else:
+ matches.append(
+ RuleMatch(
+ ["Conditions"],
+ "Condition must be an object",
+ )
+ )
return matches
| {"golden_diff": "diff --git a/src/cfnlint/rules/conditions/Configuration.py b/src/cfnlint/rules/conditions/Configuration.py\n--- a/src/cfnlint/rules/conditions/Configuration.py\n+++ b/src/cfnlint/rules/conditions/Configuration.py\n@@ -26,8 +26,10 @@\n def match(self, cfn):\n matches = []\n \n- conditions = cfn.template.get(\"Conditions\", {})\n- if conditions:\n+ if \"Conditions\" not in cfn.template:\n+ return matches\n+ conditions = cfn.template.get(\"Conditions\", None)\n+ if isinstance(conditions, dict):\n for condname, condobj in conditions.items():\n if not isinstance(condobj, dict):\n message = \"Condition {0} has invalid property\"\n@@ -52,5 +54,12 @@\n message.format(condname, k),\n )\n )\n+ else:\n+ matches.append(\n+ RuleMatch(\n+ [\"Conditions\"],\n+ \"Condition must be an object\",\n+ )\n+ )\n \n return matches\n", "issue": "Condition Specified but with no condition passes linting but fails deploy\n### CloudFormation Lint Version\r\n\r\n0.83.1\r\n\r\n### What operating system are you using?\r\n\r\nmac/ubuntu\r\n\r\n### Describe the bug\r\n\r\nin a cfn template if you specify root level item `Conditions` but have no conditions this passes cfn-lint but always fails on deploy\r\n\r\n### Expected behavior\r\n\r\ncfn-lint should fail if there is a Conditions root level object but no array entries under it.\r\n\r\n### Reproduction template\r\n\r\n```\r\nAWSTemplateFormatVersion: \"2010-09-09\"\r\nParameters:\r\n myParam\r\nConditions:\r\nResources:\r\n myTopic:\r\n Type: AWS::SNS::Topic\r\n Properties:\r\n DisplayName: mytopic\r\n TopicName: mytopic\r\n```\n", "before_files": [{"content": "\"\"\"\nCopyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\nSPDX-License-Identifier: MIT-0\n\"\"\"\n\nfrom cfnlint.rules import CloudFormationLintRule, RuleMatch\n\n\nclass Configuration(CloudFormationLintRule):\n \"\"\"Check if Conditions are configured correctly\"\"\"\n\n id = \"E8001\"\n shortdesc = \"Conditions have appropriate properties\"\n description = \"Check if Conditions are properly configured\"\n source_url = \"https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/conditions-section-structure.html\"\n tags = [\"conditions\"]\n\n condition_keys = [\n \"Condition\",\n \"Fn::And\",\n \"Fn::Equals\",\n \"Fn::Not\",\n \"Fn::Or\",\n ]\n\n def match(self, cfn):\n matches = []\n\n conditions = cfn.template.get(\"Conditions\", {})\n if conditions:\n for condname, condobj in conditions.items():\n if not isinstance(condobj, dict):\n message = \"Condition {0} has invalid property\"\n matches.append(\n RuleMatch([\"Conditions\", condname], message.format(condname))\n )\n else:\n if len(condobj) != 1:\n message = \"Condition {0} has too many intrinsic conditions\"\n matches.append(\n RuleMatch(\n [\"Conditions\", condname], message.format(condname)\n )\n )\n else:\n for k, _ in condobj.items():\n if k not in self.condition_keys:\n message = \"Condition {0} has invalid property {1}\"\n matches.append(\n RuleMatch(\n [\"Conditions\", condname] + [k],\n message.format(condname, k),\n )\n )\n\n return matches\n", "path": "src/cfnlint/rules/conditions/Configuration.py"}], "after_files": [{"content": "\"\"\"\nCopyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved.\nSPDX-License-Identifier: MIT-0\n\"\"\"\n\nfrom cfnlint.rules import CloudFormationLintRule, RuleMatch\n\n\nclass Configuration(CloudFormationLintRule):\n \"\"\"Check if Conditions are configured correctly\"\"\"\n\n id = \"E8001\"\n shortdesc = \"Conditions have appropriate properties\"\n description = \"Check if Conditions are properly configured\"\n source_url = \"https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/conditions-section-structure.html\"\n tags = [\"conditions\"]\n\n condition_keys = [\n \"Condition\",\n \"Fn::And\",\n \"Fn::Equals\",\n \"Fn::Not\",\n \"Fn::Or\",\n ]\n\n def match(self, cfn):\n matches = []\n\n if \"Conditions\" not in cfn.template:\n return matches\n conditions = cfn.template.get(\"Conditions\", None)\n if isinstance(conditions, dict):\n for condname, condobj in conditions.items():\n if not isinstance(condobj, dict):\n message = \"Condition {0} has invalid property\"\n matches.append(\n RuleMatch([\"Conditions\", condname], message.format(condname))\n )\n else:\n if len(condobj) != 1:\n message = \"Condition {0} has too many intrinsic conditions\"\n matches.append(\n RuleMatch(\n [\"Conditions\", condname], message.format(condname)\n )\n )\n else:\n for k, _ in condobj.items():\n if k not in self.condition_keys:\n message = \"Condition {0} has invalid property {1}\"\n matches.append(\n RuleMatch(\n [\"Conditions\", condname] + [k],\n message.format(condname, k),\n )\n )\n else:\n matches.append(\n RuleMatch(\n [\"Conditions\"],\n \"Condition must be an object\",\n )\n )\n\n return matches\n", "path": "src/cfnlint/rules/conditions/Configuration.py"}]} | 897 | 230 |
gh_patches_debug_632 | rasdani/github-patches | git_diff | pex-tool__pex-2245 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.147
On the docket:
+ [x] pex does not use .pip/pip.conf to resolve packages #336 / #838
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.146"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.146"
+__version__ = "2.1.147"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.146\"\n+__version__ = \"2.1.147\"\n", "issue": "Release 2.1.147\nOn the docket:\r\n+ [x] pex does not use .pip/pip.conf to resolve packages #336 / #838\r\n\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.146\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.147\"\n", "path": "pex/version.py"}]} | 351 | 98 |
gh_patches_debug_24048 | rasdani/github-patches | git_diff | translate__pootle-5595 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Directory hashes are not expired when directories are added/removed
If you add or remove a directory, it should expire the hashes on all parents and related paths, i.e. the /projects/x directories.
--- END ISSUE ---
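A minimal sketch of the shape such a fix takes, mirroring the patch shown further below: Django signal receivers on `Directory` that trigger Pootle's `revision_updater` on save and on (pre-)delete. `Directory` and `revision_updater` are names from the Pootle codebase in this record, not new API.

```python
# Sketch: expire revision hashes when a Directory changes or is about to go away.
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver

from pootle.core.delegate import revision_updater
from pootle_app.models import Directory


@receiver(post_save, sender=Directory)
def handle_directory_save(**kwargs):
    # A freshly created directory carries no stats yet; only updates matter here.
    if kwargs.get("created"):
        return
    revision_updater.get(Directory)(
        context=kwargs["instance"]).update(keys=["stats", "checks"])


@receiver(pre_delete, sender=Directory)
def handle_directory_delete(**kwargs):
    # On delete, expire via the parent, since the instance itself is going away.
    revision_updater.get(Directory)(
        context=kwargs["instance"].parent).update(keys=["stats", "checks"])
```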
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pootle/apps/pootle_revision/receivers.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) Pootle contributors.
4 #
5 # This file is a part of the Pootle project. It is distributed under the GPL3
6 # or later license. See the LICENSE file for a copy of the license and the
7 # AUTHORS file for copyright and authorship information.
8
9 from django.db.models.signals import post_save
10 from django.dispatch import receiver
11
12 from pootle.core.delegate import revision_updater
13 from pootle_data.models import StoreData
14 from pootle_store.models import Store
15
16
17 @receiver(post_save, sender=StoreData)
18 def handle_storedata_save(**kwargs):
19 revision_updater.get(Store)(
20 context=kwargs["instance"].store).update(keys=["stats", "checks"])
21
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pootle/apps/pootle_revision/receivers.py b/pootle/apps/pootle_revision/receivers.py
--- a/pootle/apps/pootle_revision/receivers.py
+++ b/pootle/apps/pootle_revision/receivers.py
@@ -6,10 +6,11 @@
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
-from django.db.models.signals import post_save
+from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from pootle.core.delegate import revision_updater
+from pootle_app.models import Directory
from pootle_data.models import StoreData
from pootle_store.models import Store
@@ -18,3 +19,17 @@
def handle_storedata_save(**kwargs):
revision_updater.get(Store)(
context=kwargs["instance"].store).update(keys=["stats", "checks"])
+
+
+@receiver(post_save, sender=Directory)
+def handle_directory_save(**kwargs):
+ if kwargs.get("created"):
+ return
+ revision_updater.get(Directory)(
+ context=kwargs["instance"]).update(keys=["stats", "checks"])
+
+
+@receiver(pre_delete, sender=Directory)
+def handle_directory_delete(**kwargs):
+ revision_updater.get(Directory)(
+ context=kwargs["instance"].parent).update(keys=["stats", "checks"])
| {"golden_diff": "diff --git a/pootle/apps/pootle_revision/receivers.py b/pootle/apps/pootle_revision/receivers.py\n--- a/pootle/apps/pootle_revision/receivers.py\n+++ b/pootle/apps/pootle_revision/receivers.py\n@@ -6,10 +6,11 @@\n # or later license. See the LICENSE file for a copy of the license and the\n # AUTHORS file for copyright and authorship information.\n \n-from django.db.models.signals import post_save\n+from django.db.models.signals import post_save, pre_delete\n from django.dispatch import receiver\n \n from pootle.core.delegate import revision_updater\n+from pootle_app.models import Directory\n from pootle_data.models import StoreData\n from pootle_store.models import Store\n \n@@ -18,3 +19,17 @@\n def handle_storedata_save(**kwargs):\n revision_updater.get(Store)(\n context=kwargs[\"instance\"].store).update(keys=[\"stats\", \"checks\"])\n+\n+\n+@receiver(post_save, sender=Directory)\n+def handle_directory_save(**kwargs):\n+ if kwargs.get(\"created\"):\n+ return\n+ revision_updater.get(Directory)(\n+ context=kwargs[\"instance\"]).update(keys=[\"stats\", \"checks\"])\n+\n+\n+@receiver(pre_delete, sender=Directory)\n+def handle_directory_delete(**kwargs):\n+ revision_updater.get(Directory)(\n+ context=kwargs[\"instance\"].parent).update(keys=[\"stats\", \"checks\"])\n", "issue": "Directory hashes are not expired when directories are added/removed\nif you add or remove a directory it should expire hashes on all parents and related - ie /projects/x directories\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\n\nfrom pootle.core.delegate import revision_updater\nfrom pootle_data.models import StoreData\nfrom pootle_store.models import Store\n\n\n@receiver(post_save, sender=StoreData)\ndef handle_storedata_save(**kwargs):\n revision_updater.get(Store)(\n context=kwargs[\"instance\"].store).update(keys=[\"stats\", \"checks\"])\n", "path": "pootle/apps/pootle_revision/receivers.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.db.models.signals import post_save, pre_delete\nfrom django.dispatch import receiver\n\nfrom pootle.core.delegate import revision_updater\nfrom pootle_app.models import Directory\nfrom pootle_data.models import StoreData\nfrom pootle_store.models import Store\n\n\n@receiver(post_save, sender=StoreData)\ndef handle_storedata_save(**kwargs):\n revision_updater.get(Store)(\n context=kwargs[\"instance\"].store).update(keys=[\"stats\", \"checks\"])\n\n\n@receiver(post_save, sender=Directory)\ndef handle_directory_save(**kwargs):\n if kwargs.get(\"created\"):\n return\n revision_updater.get(Directory)(\n context=kwargs[\"instance\"]).update(keys=[\"stats\", \"checks\"])\n\n\n@receiver(pre_delete, sender=Directory)\ndef handle_directory_delete(**kwargs):\n revision_updater.get(Directory)(\n context=kwargs[\"instance\"].parent).update(keys=[\"stats\", \"checks\"])\n", "path": "pootle/apps/pootle_revision/receivers.py"}]} | 495 | 319 |
gh_patches_debug_43900 | rasdani/github-patches | git_diff | ansible__awx-8348 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
collection module tower_inventory_source_update should have "name" instead of "inventory_source"
<!-- Issues are for **concrete, actionable bugs and feature requests** only - if you're just asking for debugging help or technical support, please use:
- http://webchat.freenode.net/?channels=ansible-awx
- https://groups.google.com/forum/#!forum/awx-project
We have to limit this because of limited volunteer time to respond to issues! -->
##### ISSUE TYPE
- Feature Idea
##### SUMMARY
<!-- Briefly describe the problem or desired enhancement. -->
A module that works on an entity should reference that entity by `name`. It feels more natural in Ansible anyway.
--- END ISSUE ---
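A low-friction way to do this rename, and the shape the patch below takes, is to expose the option as `name` in the module's argument spec while keeping `inventory_source` as an alias so existing playbooks continue to work. A sketch of just that spec fragment:

```python
# Sketch: rename the option to `name` but accept the old key via aliases.
argument_spec = dict(
    name=dict(required=True, aliases=['inventory_source']),
    inventory=dict(required=True),
    organization=dict(),
)
```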
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `awx_collection/plugins/modules/tower_inventory_source_update.py`
Content:
```
1 #!/usr/bin/python
2 # coding: utf-8 -*-
3
4 # (c) 2020, Bianca Henderson <[email protected]>
5 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
6
7 from __future__ import absolute_import, division, print_function
8 __metaclass__ = type
9
10
11 ANSIBLE_METADATA = {'metadata_version': '1.1',
12 'status': ['preview'],
13 'supported_by': 'community'}
14
15
16 DOCUMENTATION = '''
17 ---
18 module: tower_inventory_source_update
19 author: "Bianca Henderson (@beeankha)"
20 short_description: Update inventory source(s).
21 description:
22 - Update Ansible Tower inventory source(s). See
23 U(https://www.ansible.com/tower) for an overview.
24 options:
25 inventory:
26 description:
27 - Name of the inventory that contains the inventory source(s) to update.
28 required: True
29 type: str
30 inventory_source:
31 description:
32 - The name of the inventory source to update.
33 required: True
34 type: str
35 organization:
36 description:
37 - Name of the inventory source's inventory's organization.
38 type: str
39 wait:
40 description:
41 - Wait for the job to complete.
42 default: False
43 type: bool
44 interval:
45 description:
46 - The interval to request an update from Tower.
47 required: False
48 default: 1
49 type: float
50 timeout:
51 description:
52 - If waiting for the job to complete this will abort after this
53 amount of seconds
54 type: int
55 extends_documentation_fragment: awx.awx.auth
56 '''
57
58 EXAMPLES = '''
59 - name: Update a single inventory source
60 tower_inventory_source_update:
61 inventory: "My Inventory"
62 inventory_source: "Example Inventory Source"
63 organization: Default
64
65 - name: Update all inventory sources
66 tower_inventory_source_update:
67 inventory: "My Other Inventory"
68 inventory_source: "{{ item }}"
69 loop: "{{ query('awx.awx.tower_api', 'inventory_sources', query_params={ 'inventory': 30 }, return_ids=True ) }}"
70 '''
71
72 RETURN = '''
73 id:
74 description: id of the inventory update
75 returned: success
76 type: int
77 sample: 86
78 status:
79 description: status of the inventory update
80 returned: success
81 type: str
82 sample: pending
83 '''
84
85 from ..module_utils.tower_api import TowerAPIModule
86
87
88 def main():
89 # Any additional arguments that are not fields of the item can be added here
90 argument_spec = dict(
91 inventory=dict(required=True),
92 inventory_source=dict(required=True),
93 organization=dict(),
94 wait=dict(default=False, type='bool'),
95 interval=dict(default=1.0, type='float'),
96 timeout=dict(default=None, type='int'),
97 )
98
99 # Create a module for ourselves
100 module = TowerAPIModule(argument_spec=argument_spec)
101
102 # Extract our parameters
103 inventory = module.params.get('inventory')
104 inventory_source = module.params.get('inventory_source')
105 organization = module.params.get('organization')
106 wait = module.params.get('wait')
107 interval = module.params.get('interval')
108 timeout = module.params.get('timeout')
109
110 lookup_data = {}
111 if organization:
112 lookup_data['organization'] = module.resolve_name_to_id('organizations', organization)
113 inventory_object = module.get_one('inventories', name_or_id=inventory, data=lookup_data)
114
115 if not inventory_object:
116 module.fail_json(msg='The specified inventory, {0}, was not found.'.format(lookup_data))
117
118 inventory_source_object = module.get_one('inventory_sources', name_or_id=inventory_source, **{
119 'data': {
120 'inventory': inventory_object['id'],
121 }
122 })
123
124 if not inventory_source_object:
125 module.fail_json(msg='The specified inventory source was not found.')
126
127 # Sync the inventory source(s)
128 inventory_source_update_results = module.post_endpoint(inventory_source_object['related']['update'], **{'data': {}})
129
130 if inventory_source_update_results['status_code'] != 202:
131 module.fail_json(msg="Failed to update inventory source, see response for details", **{'response': inventory_source_update_results})
132
133 module.json_output['changed'] = True
134 module.json_output['id'] = inventory_source_update_results['json']['id']
135 module.json_output['status'] = inventory_source_update_results['json']['status']
136
137 if not wait:
138 module.exit_json(**module.json_output)
139
140 # Invoke wait function
141 module.wait_on_url(
142 url=inventory_source_update_results['json']['url'],
143 object_name=inventory_object,
144 object_type='inventory_update',
145 timeout=timeout, interval=interval
146 )
147
148 module.exit_json(**module.json_output)
149
150
151 if __name__ == '__main__':
152 main()
153
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/awx_collection/plugins/modules/tower_inventory_source_update.py b/awx_collection/plugins/modules/tower_inventory_source_update.py
--- a/awx_collection/plugins/modules/tower_inventory_source_update.py
+++ b/awx_collection/plugins/modules/tower_inventory_source_update.py
@@ -22,14 +22,16 @@
- Update Ansible Tower inventory source(s). See
U(https://www.ansible.com/tower) for an overview.
options:
- inventory:
+ name:
description:
- - Name of the inventory that contains the inventory source(s) to update.
+ - The name or id of the inventory source to update.
required: True
type: str
- inventory_source:
+ aliases:
+ - inventory_source
+ inventory:
description:
- - The name of the inventory source to update.
+ - Name or id of the inventory that contains the inventory source(s) to update.
required: True
type: str
organization:
@@ -58,14 +60,14 @@
EXAMPLES = '''
- name: Update a single inventory source
tower_inventory_source_update:
+ name: "Example Inventory Source"
inventory: "My Inventory"
- inventory_source: "Example Inventory Source"
organization: Default
- name: Update all inventory sources
tower_inventory_source_update:
+ name: "{{ item }}"
inventory: "My Other Inventory"
- inventory_source: "{{ item }}"
loop: "{{ query('awx.awx.tower_api', 'inventory_sources', query_params={ 'inventory': 30 }, return_ids=True ) }}"
'''
@@ -88,8 +90,8 @@
def main():
# Any additional arguments that are not fields of the item can be added here
argument_spec = dict(
+ name=dict(required=True, aliases=['inventory_source']),
inventory=dict(required=True),
- inventory_source=dict(required=True),
organization=dict(),
wait=dict(default=False, type='bool'),
interval=dict(default=1.0, type='float'),
@@ -100,8 +102,8 @@
module = TowerAPIModule(argument_spec=argument_spec)
# Extract our parameters
+ name = module.params.get('name')
inventory = module.params.get('inventory')
- inventory_source = module.params.get('inventory_source')
organization = module.params.get('organization')
wait = module.params.get('wait')
interval = module.params.get('interval')
@@ -115,20 +117,18 @@
if not inventory_object:
module.fail_json(msg='The specified inventory, {0}, was not found.'.format(lookup_data))
- inventory_source_object = module.get_one('inventory_sources', name_or_id=inventory_source, **{
- 'data': {
- 'inventory': inventory_object['id'],
- }
- })
+ inventory_source_object = module.get_one('inventory_sources',
+ name_or_id=name,
+ data={'inventory': inventory_object['id']})
if not inventory_source_object:
module.fail_json(msg='The specified inventory source was not found.')
# Sync the inventory source(s)
- inventory_source_update_results = module.post_endpoint(inventory_source_object['related']['update'], **{'data': {}})
+ inventory_source_update_results = module.post_endpoint(inventory_source_object['related']['update'])
if inventory_source_update_results['status_code'] != 202:
- module.fail_json(msg="Failed to update inventory source, see response for details", **{'response': inventory_source_update_results})
+ module.fail_json(msg="Failed to update inventory source, see response for details", response=inventory_source_update_results)
module.json_output['changed'] = True
module.json_output['id'] = inventory_source_update_results['json']['id']
| {"golden_diff": "diff --git a/awx_collection/plugins/modules/tower_inventory_source_update.py b/awx_collection/plugins/modules/tower_inventory_source_update.py\n--- a/awx_collection/plugins/modules/tower_inventory_source_update.py\n+++ b/awx_collection/plugins/modules/tower_inventory_source_update.py\n@@ -22,14 +22,16 @@\n - Update Ansible Tower inventory source(s). See\n U(https://www.ansible.com/tower) for an overview.\n options:\n- inventory:\n+ name:\n description:\n- - Name of the inventory that contains the inventory source(s) to update.\n+ - The name or id of the inventory source to update.\n required: True\n type: str\n- inventory_source:\n+ aliases:\n+ - inventory_source\n+ inventory:\n description:\n- - The name of the inventory source to update.\n+ - Name or id of the inventory that contains the inventory source(s) to update.\n required: True\n type: str\n organization:\n@@ -58,14 +60,14 @@\n EXAMPLES = '''\n - name: Update a single inventory source\n tower_inventory_source_update:\n+ name: \"Example Inventory Source\"\n inventory: \"My Inventory\"\n- inventory_source: \"Example Inventory Source\"\n organization: Default\n \n - name: Update all inventory sources\n tower_inventory_source_update:\n+ name: \"{{ item }}\"\n inventory: \"My Other Inventory\"\n- inventory_source: \"{{ item }}\"\n loop: \"{{ query('awx.awx.tower_api', 'inventory_sources', query_params={ 'inventory': 30 }, return_ids=True ) }}\"\n '''\n \n@@ -88,8 +90,8 @@\n def main():\n # Any additional arguments that are not fields of the item can be added here\n argument_spec = dict(\n+ name=dict(required=True, aliases=['inventory_source']),\n inventory=dict(required=True),\n- inventory_source=dict(required=True),\n organization=dict(),\n wait=dict(default=False, type='bool'),\n interval=dict(default=1.0, type='float'),\n@@ -100,8 +102,8 @@\n module = TowerAPIModule(argument_spec=argument_spec)\n \n # Extract our parameters\n+ name = module.params.get('name')\n inventory = module.params.get('inventory')\n- inventory_source = module.params.get('inventory_source')\n organization = module.params.get('organization')\n wait = module.params.get('wait')\n interval = module.params.get('interval')\n@@ -115,20 +117,18 @@\n if not inventory_object:\n module.fail_json(msg='The specified inventory, {0}, was not found.'.format(lookup_data))\n \n- inventory_source_object = module.get_one('inventory_sources', name_or_id=inventory_source, **{\n- 'data': {\n- 'inventory': inventory_object['id'],\n- }\n- })\n+ inventory_source_object = module.get_one('inventory_sources',\n+ name_or_id=name,\n+ data={'inventory': inventory_object['id']})\n \n if not inventory_source_object:\n module.fail_json(msg='The specified inventory source was not found.')\n \n # Sync the inventory source(s)\n- inventory_source_update_results = module.post_endpoint(inventory_source_object['related']['update'], **{'data': {}})\n+ inventory_source_update_results = module.post_endpoint(inventory_source_object['related']['update'])\n \n if inventory_source_update_results['status_code'] != 202:\n- module.fail_json(msg=\"Failed to update inventory source, see response for details\", **{'response': inventory_source_update_results})\n+ module.fail_json(msg=\"Failed to update inventory source, see response for details\", response=inventory_source_update_results)\n \n module.json_output['changed'] = True\n module.json_output['id'] = inventory_source_update_results['json']['id']\n", "issue": "collection module tower_inventory_source_update should have \"name\" instead of 
\"inventory_source\"\n<!-- Issues are for **concrete, actionable bugs and feature requests** only - if you're just asking for debugging help or technical support, please use:\r\n\r\n- http://webchat.freenode.net/?channels=ansible-awx\r\n- https://groups.google.com/forum/#!forum/awx-project\r\n\r\nWe have to limit this because of limited volunteer time to respond to issues! -->\r\n\r\n##### ISSUE TYPE\r\n - Feature Idea\r\n\r\n##### SUMMARY\r\n<!-- Briefly describe the problem or desired enhancement. -->\r\nModule that works on an entity should reference that entity by `name`. It feels more ansibly anyway.\n", "before_files": [{"content": "#!/usr/bin/python\n# coding: utf-8 -*-\n\n# (c) 2020, Bianca Henderson <[email protected]>\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\nfrom __future__ import absolute_import, division, print_function\n__metaclass__ = type\n\n\nANSIBLE_METADATA = {'metadata_version': '1.1',\n 'status': ['preview'],\n 'supported_by': 'community'}\n\n\nDOCUMENTATION = '''\n---\nmodule: tower_inventory_source_update\nauthor: \"Bianca Henderson (@beeankha)\"\nshort_description: Update inventory source(s).\ndescription:\n - Update Ansible Tower inventory source(s). See\n U(https://www.ansible.com/tower) for an overview.\noptions:\n inventory:\n description:\n - Name of the inventory that contains the inventory source(s) to update.\n required: True\n type: str\n inventory_source:\n description:\n - The name of the inventory source to update.\n required: True\n type: str\n organization:\n description:\n - Name of the inventory source's inventory's organization.\n type: str\n wait:\n description:\n - Wait for the job to complete.\n default: False\n type: bool\n interval:\n description:\n - The interval to request an update from Tower.\n required: False\n default: 1\n type: float\n timeout:\n description:\n - If waiting for the job to complete this will abort after this\n amount of seconds\n type: int\nextends_documentation_fragment: awx.awx.auth\n'''\n\nEXAMPLES = '''\n- name: Update a single inventory source\n tower_inventory_source_update:\n inventory: \"My Inventory\"\n inventory_source: \"Example Inventory Source\"\n organization: Default\n\n- name: Update all inventory sources\n tower_inventory_source_update:\n inventory: \"My Other Inventory\"\n inventory_source: \"{{ item }}\"\n loop: \"{{ query('awx.awx.tower_api', 'inventory_sources', query_params={ 'inventory': 30 }, return_ids=True ) }}\"\n'''\n\nRETURN = '''\nid:\n description: id of the inventory update\n returned: success\n type: int\n sample: 86\nstatus:\n description: status of the inventory update\n returned: success\n type: str\n sample: pending\n'''\n\nfrom ..module_utils.tower_api import TowerAPIModule\n\n\ndef main():\n # Any additional arguments that are not fields of the item can be added here\n argument_spec = dict(\n inventory=dict(required=True),\n inventory_source=dict(required=True),\n organization=dict(),\n wait=dict(default=False, type='bool'),\n interval=dict(default=1.0, type='float'),\n timeout=dict(default=None, type='int'),\n )\n\n # Create a module for ourselves\n module = TowerAPIModule(argument_spec=argument_spec)\n\n # Extract our parameters\n inventory = module.params.get('inventory')\n inventory_source = module.params.get('inventory_source')\n organization = module.params.get('organization')\n wait = module.params.get('wait')\n interval = module.params.get('interval')\n timeout = module.params.get('timeout')\n\n lookup_data = {}\n if 
organization:\n lookup_data['organization'] = module.resolve_name_to_id('organizations', organization)\n inventory_object = module.get_one('inventories', name_or_id=inventory, data=lookup_data)\n\n if not inventory_object:\n module.fail_json(msg='The specified inventory, {0}, was not found.'.format(lookup_data))\n\n inventory_source_object = module.get_one('inventory_sources', name_or_id=inventory_source, **{\n 'data': {\n 'inventory': inventory_object['id'],\n }\n })\n\n if not inventory_source_object:\n module.fail_json(msg='The specified inventory source was not found.')\n\n # Sync the inventory source(s)\n inventory_source_update_results = module.post_endpoint(inventory_source_object['related']['update'], **{'data': {}})\n\n if inventory_source_update_results['status_code'] != 202:\n module.fail_json(msg=\"Failed to update inventory source, see response for details\", **{'response': inventory_source_update_results})\n\n module.json_output['changed'] = True\n module.json_output['id'] = inventory_source_update_results['json']['id']\n module.json_output['status'] = inventory_source_update_results['json']['status']\n\n if not wait:\n module.exit_json(**module.json_output)\n\n # Invoke wait function\n module.wait_on_url(\n url=inventory_source_update_results['json']['url'],\n object_name=inventory_object,\n object_type='inventory_update',\n timeout=timeout, interval=interval\n )\n\n module.exit_json(**module.json_output)\n\n\nif __name__ == '__main__':\n main()\n", "path": "awx_collection/plugins/modules/tower_inventory_source_update.py"}], "after_files": [{"content": "#!/usr/bin/python\n# coding: utf-8 -*-\n\n# (c) 2020, Bianca Henderson <[email protected]>\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\nfrom __future__ import absolute_import, division, print_function\n__metaclass__ = type\n\n\nANSIBLE_METADATA = {'metadata_version': '1.1',\n 'status': ['preview'],\n 'supported_by': 'community'}\n\n\nDOCUMENTATION = '''\n---\nmodule: tower_inventory_source_update\nauthor: \"Bianca Henderson (@beeankha)\"\nshort_description: Update inventory source(s).\ndescription:\n - Update Ansible Tower inventory source(s). 
See\n U(https://www.ansible.com/tower) for an overview.\noptions:\n name:\n description:\n - The name or id of the inventory source to update.\n required: True\n type: str\n aliases:\n - inventory_source\n inventory:\n description:\n - Name or id of the inventory that contains the inventory source(s) to update.\n required: True\n type: str\n organization:\n description:\n - Name of the inventory source's inventory's organization.\n type: str\n wait:\n description:\n - Wait for the job to complete.\n default: False\n type: bool\n interval:\n description:\n - The interval to request an update from Tower.\n required: False\n default: 1\n type: float\n timeout:\n description:\n - If waiting for the job to complete this will abort after this\n amount of seconds\n type: int\nextends_documentation_fragment: awx.awx.auth\n'''\n\nEXAMPLES = '''\n- name: Update a single inventory source\n tower_inventory_source_update:\n name: \"Example Inventory Source\"\n inventory: \"My Inventory\"\n organization: Default\n\n- name: Update all inventory sources\n tower_inventory_source_update:\n name: \"{{ item }}\"\n inventory: \"My Other Inventory\"\n loop: \"{{ query('awx.awx.tower_api', 'inventory_sources', query_params={ 'inventory': 30 }, return_ids=True ) }}\"\n'''\n\nRETURN = '''\nid:\n description: id of the inventory update\n returned: success\n type: int\n sample: 86\nstatus:\n description: status of the inventory update\n returned: success\n type: str\n sample: pending\n'''\n\nfrom ..module_utils.tower_api import TowerAPIModule\n\n\ndef main():\n # Any additional arguments that are not fields of the item can be added here\n argument_spec = dict(\n name=dict(required=True, aliases=['inventory_source']),\n inventory=dict(required=True),\n organization=dict(),\n wait=dict(default=False, type='bool'),\n interval=dict(default=1.0, type='float'),\n timeout=dict(default=None, type='int'),\n )\n\n # Create a module for ourselves\n module = TowerAPIModule(argument_spec=argument_spec)\n\n # Extract our parameters\n name = module.params.get('name')\n inventory = module.params.get('inventory')\n organization = module.params.get('organization')\n wait = module.params.get('wait')\n interval = module.params.get('interval')\n timeout = module.params.get('timeout')\n\n lookup_data = {}\n if organization:\n lookup_data['organization'] = module.resolve_name_to_id('organizations', organization)\n inventory_object = module.get_one('inventories', name_or_id=inventory, data=lookup_data)\n\n if not inventory_object:\n module.fail_json(msg='The specified inventory, {0}, was not found.'.format(lookup_data))\n\n inventory_source_object = module.get_one('inventory_sources',\n name_or_id=name,\n data={'inventory': inventory_object['id']})\n\n if not inventory_source_object:\n module.fail_json(msg='The specified inventory source was not found.')\n\n # Sync the inventory source(s)\n inventory_source_update_results = module.post_endpoint(inventory_source_object['related']['update'])\n\n if inventory_source_update_results['status_code'] != 202:\n module.fail_json(msg=\"Failed to update inventory source, see response for details\", response=inventory_source_update_results)\n\n module.json_output['changed'] = True\n module.json_output['id'] = inventory_source_update_results['json']['id']\n module.json_output['status'] = inventory_source_update_results['json']['status']\n\n if not wait:\n module.exit_json(**module.json_output)\n\n # Invoke wait function\n module.wait_on_url(\n url=inventory_source_update_results['json']['url'],\n 
object_name=inventory_object,\n object_type='inventory_update',\n timeout=timeout, interval=interval\n )\n\n module.exit_json(**module.json_output)\n\n\nif __name__ == '__main__':\n main()\n", "path": "awx_collection/plugins/modules/tower_inventory_source_update.py"}]} | 1,812 | 834 |
gh_patches_debug_30833 | rasdani/github-patches | git_diff | numpy__numpy-13083 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Failed to compile scipy with Anaconda gfortran.
<!-- Please describe the issue in detail here, and fill in the fields below -->
The compilation fails with an error and prints a command it is trying to execute.
If I type the command at the end of the error log manually, it works.
I think this points to a quoting error. The additional Fortran flags from the environment should be added to the command line list after a shell-style string split; appending the full set of Fortran flags as one single string produces exactly this problem.
### Reproducing code example:
clone scipy from github;
activate Anaconda build enviroment.
python runtests.py
### Error message:
```
f951: Error: unrecognized command line option '-fopenmp -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe'
Running from scipy source directory.
/home/yfeng1/anaconda3/install/lib/python3.6/site-packages/numpy/distutils/system_info.py:730: UserWarning: Specified path /usr/local/include/python3.6m is invalid.
return self.get_paths(self.section, key)
/home/yfeng1/anaconda3/install/lib/python3.6/site-packages/numpy/distutils/system_info.py:730: UserWarning: Specified path /usr/include/suitesparse/python3.6m is invalid.
return self.get_paths(self.section, key)
error: Command "/home/yfeng1/anaconda3/install/bin/x86_64-conda_cos6-linux-gnu-gfortran -Wall -g -ffixed-form -fno-second-underscore -fPIC -fopenmp -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe -O3 -funroll-loops -I/home/yfeng1/anaconda3/install/lib/python3.6/site-packages/numpy/core/include -c -c scipy/fftpack/src/dfftpack/dcosqb.f -o build/temp.linux-x86_64-3.6/scipy/fftpack/src/dfftpack/dcosqb.o" failed with exit status 1
```
### Numpy/Python version information:
1.16.2.
The environment variables regarding Fortran are:
```
FORTRANFLAGS=-fopenmp -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe
GFORTRAN=/home/yfeng1/anaconda3/install/bin/x86_64-conda_cos6-linux-gnu-gfortran
DEBUG_FORTRANFLAGS=-fopenmp -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe -fopenmp -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-all -fno-plt -Og -g -Wall -Wextra -fcheck=all -fbacktrace -fimplicit-none -fvar-tracking-assignments -ffunction-sections -pipe
```
--- END ISSUE ---
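To illustrate the split-vs-single-string distinction described in the issue, here is a minimal, hypothetical sketch in plain Python (not actual numpy.distutils code) of merging an environment flag string into a flag list:

```python
import shlex

def merge_env_flags(existing_flags, envvar_contents):
    # shlex.split turns the whole FORTRANFLAGS string into separate
    # arguments instead of one giant quoted token.
    return existing_flags + shlex.split(envvar_contents)

flags = merge_env_flags(
    ["-Wall", "-g"],
    "-fopenmp -march=nocona -mtune=haswell -ftree-vectorize",
)
print(flags)
# ['-Wall', '-g', '-fopenmp', '-march=nocona', '-mtune=haswell', '-ftree-vectorize']
```

Appending the unsplit string instead yields the one oversized token that f951 rejects in the log above.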
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `numpy/distutils/fcompiler/environment.py`
Content:
```
1 from __future__ import division, absolute_import, print_function
2
3 import os
4 import warnings
5 from distutils.dist import Distribution
6
7 __metaclass__ = type
8
9 class EnvironmentConfig(object):
10 def __init__(self, distutils_section='ALL', **kw):
11 self._distutils_section = distutils_section
12 self._conf_keys = kw
13 self._conf = None
14 self._hook_handler = None
15
16 def dump_variable(self, name):
17 conf_desc = self._conf_keys[name]
18 hook, envvar, confvar, convert, append = conf_desc
19 if not convert:
20 convert = lambda x : x
21 print('%s.%s:' % (self._distutils_section, name))
22 v = self._hook_handler(name, hook)
23 print(' hook : %s' % (convert(v),))
24 if envvar:
25 v = os.environ.get(envvar, None)
26 print(' environ: %s' % (convert(v),))
27 if confvar and self._conf:
28 v = self._conf.get(confvar, (None, None))[1]
29 print(' config : %s' % (convert(v),))
30
31 def dump_variables(self):
32 for name in self._conf_keys:
33 self.dump_variable(name)
34
35 def __getattr__(self, name):
36 try:
37 conf_desc = self._conf_keys[name]
38 except KeyError:
39 raise AttributeError(name)
40 return self._get_var(name, conf_desc)
41
42 def get(self, name, default=None):
43 try:
44 conf_desc = self._conf_keys[name]
45 except KeyError:
46 return default
47 var = self._get_var(name, conf_desc)
48 if var is None:
49 var = default
50 return var
51
52 def _get_var(self, name, conf_desc):
53 hook, envvar, confvar, convert, append = conf_desc
54 var = self._hook_handler(name, hook)
55 if envvar is not None:
56 envvar_contents = os.environ.get(envvar)
57 if envvar_contents is not None:
58 if var and append:
59 if os.environ.get('NPY_DISTUTILS_APPEND_FLAGS', '0') == '1':
60 var = var + [envvar_contents]
61 else:
62 var = envvar_contents
63 if 'NPY_DISTUTILS_APPEND_FLAGS' not in os.environ.keys():
64 msg = "{} is used as is, not appended ".format(envvar) + \
65 "to flags already defined " + \
66 "by numpy.distutils! Use NPY_DISTUTILS_APPEND_FLAGS=1 " + \
67 "to obtain appending behavior instead (this " + \
68 "behavior will become default in a future release)."
69 warnings.warn(msg, UserWarning, stacklevel=3)
70 else:
71 var = envvar_contents
72 if confvar is not None and self._conf:
73 var = self._conf.get(confvar, (None, var))[1]
74 if convert is not None:
75 var = convert(var)
76 return var
77
78 def clone(self, hook_handler):
79 ec = self.__class__(distutils_section=self._distutils_section,
80 **self._conf_keys)
81 ec._hook_handler = hook_handler
82 return ec
83
84 def use_distribution(self, dist):
85 if isinstance(dist, Distribution):
86 self._conf = dist.get_option_dict(self._distutils_section)
87 else:
88 self._conf = dist
89
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/numpy/distutils/fcompiler/environment.py b/numpy/distutils/fcompiler/environment.py
--- a/numpy/distutils/fcompiler/environment.py
+++ b/numpy/distutils/fcompiler/environment.py
@@ -51,13 +51,16 @@
def _get_var(self, name, conf_desc):
hook, envvar, confvar, convert, append = conf_desc
+ if convert is None:
+ convert = lambda x: x
var = self._hook_handler(name, hook)
if envvar is not None:
envvar_contents = os.environ.get(envvar)
if envvar_contents is not None:
+ envvar_contents = convert(envvar_contents)
if var and append:
if os.environ.get('NPY_DISTUTILS_APPEND_FLAGS', '0') == '1':
- var = var + [envvar_contents]
+ var.extend(envvar_contents)
else:
var = envvar_contents
if 'NPY_DISTUTILS_APPEND_FLAGS' not in os.environ.keys():
@@ -70,11 +73,12 @@
else:
var = envvar_contents
if confvar is not None and self._conf:
- var = self._conf.get(confvar, (None, var))[1]
- if convert is not None:
- var = convert(var)
+ if confvar in self._conf:
+ source, confvar_contents = self._conf[confvar]
+ var = convert(confvar_contents)
return var
+
def clone(self, hook_handler):
ec = self.__class__(distutils_section=self._distutils_section,
**self._conf_keys)
| {"golden_diff": "diff --git a/numpy/distutils/fcompiler/environment.py b/numpy/distutils/fcompiler/environment.py\n--- a/numpy/distutils/fcompiler/environment.py\n+++ b/numpy/distutils/fcompiler/environment.py\n@@ -51,13 +51,16 @@\n \n def _get_var(self, name, conf_desc):\n hook, envvar, confvar, convert, append = conf_desc\n+ if convert is None:\n+ convert = lambda x: x\n var = self._hook_handler(name, hook)\n if envvar is not None:\n envvar_contents = os.environ.get(envvar)\n if envvar_contents is not None:\n+ envvar_contents = convert(envvar_contents)\n if var and append:\n if os.environ.get('NPY_DISTUTILS_APPEND_FLAGS', '0') == '1':\n- var = var + [envvar_contents]\n+ var.extend(envvar_contents)\n else:\n var = envvar_contents\n if 'NPY_DISTUTILS_APPEND_FLAGS' not in os.environ.keys():\n@@ -70,11 +73,12 @@\n else:\n var = envvar_contents\n if confvar is not None and self._conf:\n- var = self._conf.get(confvar, (None, var))[1]\n- if convert is not None:\n- var = convert(var)\n+ if confvar in self._conf:\n+ source, confvar_contents = self._conf[confvar]\n+ var = convert(confvar_contents)\n return var\n \n+\n def clone(self, hook_handler):\n ec = self.__class__(distutils_section=self._distutils_section,\n **self._conf_keys)\n", "issue": "Failed to compile scipy with Anaconda gfortran.\n<!-- Please describe the issue in detail here, and fill in the fields below -->\r\n\r\nThe compilation fails with an error and prints a command it is trying to execute. \r\n\r\nIf I type the command at the end of the error log manually, it works.\r\n\r\nI think this points to a quotation error. The additional fortran flags from the environment shall be added to the command line list after shell-style string split. It will produce this exact problem if the full list of fortran flags are added as a single string.\r\n\r\n### Reproducing code example:\r\n\r\nclone scipy from github;\r\n\r\nactivate Anaconda build enviroment.\r\npython runtests.py\r\n\r\n### Error message:\r\n\r\n```\r\nf951: Error: unrecognized command line option '-fopenmp -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe'\r\nRunning from scipy source directory.\r\n/home/yfeng1/anaconda3/install/lib/python3.6/site-packages/numpy/distutils/system_info.py:730: UserWarning: Specified path /usr/local/include/python3.6m is invalid.\r\n return self.get_paths(self.section, key)\r\n/home/yfeng1/anaconda3/install/lib/python3.6/site-packages/numpy/distutils/system_info.py:730: UserWarning: Specified path /usr/include/suitesparse/python3.6m is invalid.\r\n return self.get_paths(self.section, key)\r\nerror: Command \"/home/yfeng1/anaconda3/install/bin/x86_64-conda_cos6-linux-gnu-gfortran -Wall -g -ffixed-form -fno-second-underscore -fPIC -fopenmp -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe -O3 -funroll-loops -I/home/yfeng1/anaconda3/install/lib/python3.6/site-packages/numpy/core/include -c -c scipy/fftpack/src/dfftpack/dcosqb.f -o build/temp.linux-x86_64-3.6/scipy/fftpack/src/dfftpack/dcosqb.o\" failed with exit status 1\r\n```\r\n\r\n### Numpy/Python version information:\r\n\r\n1.16.2.\r\n\r\nThe environment variables regarding fortran are\r\n```\r\nFORTRANFLAGS=-fopenmp -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe\r\nGFORTRAN=/home/yfeng1/anaconda3/install/bin/x86_64-conda_cos6-linux-gnu-gfortran\r\nDEBUG_FORTRANFLAGS=-fopenmp -march=nocona 
-mtune=haswell -ftree-vectorize -fPIC -fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe -fopenmp -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-all -fno-plt -Og -g -Wall -Wextra -fcheck=all -fbacktrace -fimplicit-none -fvar-tracking-assignments -ffunction-sections -pipe\r\n```\n", "before_files": [{"content": "from __future__ import division, absolute_import, print_function\n\nimport os\nimport warnings\nfrom distutils.dist import Distribution\n\n__metaclass__ = type\n\nclass EnvironmentConfig(object):\n def __init__(self, distutils_section='ALL', **kw):\n self._distutils_section = distutils_section\n self._conf_keys = kw\n self._conf = None\n self._hook_handler = None\n\n def dump_variable(self, name):\n conf_desc = self._conf_keys[name]\n hook, envvar, confvar, convert, append = conf_desc\n if not convert:\n convert = lambda x : x\n print('%s.%s:' % (self._distutils_section, name))\n v = self._hook_handler(name, hook)\n print(' hook : %s' % (convert(v),))\n if envvar:\n v = os.environ.get(envvar, None)\n print(' environ: %s' % (convert(v),))\n if confvar and self._conf:\n v = self._conf.get(confvar, (None, None))[1]\n print(' config : %s' % (convert(v),))\n\n def dump_variables(self):\n for name in self._conf_keys:\n self.dump_variable(name)\n\n def __getattr__(self, name):\n try:\n conf_desc = self._conf_keys[name]\n except KeyError:\n raise AttributeError(name)\n return self._get_var(name, conf_desc)\n\n def get(self, name, default=None):\n try:\n conf_desc = self._conf_keys[name]\n except KeyError:\n return default\n var = self._get_var(name, conf_desc)\n if var is None:\n var = default\n return var\n\n def _get_var(self, name, conf_desc):\n hook, envvar, confvar, convert, append = conf_desc\n var = self._hook_handler(name, hook)\n if envvar is not None:\n envvar_contents = os.environ.get(envvar)\n if envvar_contents is not None:\n if var and append:\n if os.environ.get('NPY_DISTUTILS_APPEND_FLAGS', '0') == '1':\n var = var + [envvar_contents]\n else:\n var = envvar_contents\n if 'NPY_DISTUTILS_APPEND_FLAGS' not in os.environ.keys():\n msg = \"{} is used as is, not appended \".format(envvar) + \\\n \"to flags already defined \" + \\\n \"by numpy.distutils! 
Use NPY_DISTUTILS_APPEND_FLAGS=1 \" + \\\n \"to obtain appending behavior instead (this \" + \\\n \"behavior will become default in a future release).\"\n warnings.warn(msg, UserWarning, stacklevel=3)\n else:\n var = envvar_contents\n if confvar is not None and self._conf:\n var = self._conf.get(confvar, (None, var))[1]\n if convert is not None:\n var = convert(var)\n return var\n\n def clone(self, hook_handler):\n ec = self.__class__(distutils_section=self._distutils_section,\n **self._conf_keys)\n ec._hook_handler = hook_handler\n return ec\n\n def use_distribution(self, dist):\n if isinstance(dist, Distribution):\n self._conf = dist.get_option_dict(self._distutils_section)\n else:\n self._conf = dist\n", "path": "numpy/distutils/fcompiler/environment.py"}], "after_files": [{"content": "from __future__ import division, absolute_import, print_function\n\nimport os\nimport warnings\nfrom distutils.dist import Distribution\n\n__metaclass__ = type\n\nclass EnvironmentConfig(object):\n def __init__(self, distutils_section='ALL', **kw):\n self._distutils_section = distutils_section\n self._conf_keys = kw\n self._conf = None\n self._hook_handler = None\n\n def dump_variable(self, name):\n conf_desc = self._conf_keys[name]\n hook, envvar, confvar, convert, append = conf_desc\n if not convert:\n convert = lambda x : x\n print('%s.%s:' % (self._distutils_section, name))\n v = self._hook_handler(name, hook)\n print(' hook : %s' % (convert(v),))\n if envvar:\n v = os.environ.get(envvar, None)\n print(' environ: %s' % (convert(v),))\n if confvar and self._conf:\n v = self._conf.get(confvar, (None, None))[1]\n print(' config : %s' % (convert(v),))\n\n def dump_variables(self):\n for name in self._conf_keys:\n self.dump_variable(name)\n\n def __getattr__(self, name):\n try:\n conf_desc = self._conf_keys[name]\n except KeyError:\n raise AttributeError(name)\n return self._get_var(name, conf_desc)\n\n def get(self, name, default=None):\n try:\n conf_desc = self._conf_keys[name]\n except KeyError:\n return default\n var = self._get_var(name, conf_desc)\n if var is None:\n var = default\n return var\n\n def _get_var(self, name, conf_desc):\n hook, envvar, confvar, convert, append = conf_desc\n if convert is None:\n convert = lambda x: x\n var = self._hook_handler(name, hook)\n if envvar is not None:\n envvar_contents = os.environ.get(envvar)\n if envvar_contents is not None:\n envvar_contents = convert(envvar_contents)\n if var and append:\n if os.environ.get('NPY_DISTUTILS_APPEND_FLAGS', '0') == '1':\n var.extend(envvar_contents)\n else:\n var = envvar_contents\n if 'NPY_DISTUTILS_APPEND_FLAGS' not in os.environ.keys():\n msg = \"{} is used as is, not appended \".format(envvar) + \\\n \"to flags already defined \" + \\\n \"by numpy.distutils! 
Use NPY_DISTUTILS_APPEND_FLAGS=1 \" + \\\n \"to obtain appending behavior instead (this \" + \\\n \"behavior will become default in a future release).\"\n warnings.warn(msg, UserWarning, stacklevel=3)\n else:\n var = envvar_contents\n if confvar is not None and self._conf:\n if confvar in self._conf:\n source, confvar_contents = self._conf[confvar]\n var = convert(confvar_contents)\n return var\n\n\n def clone(self, hook_handler):\n ec = self.__class__(distutils_section=self._distutils_section,\n **self._conf_keys)\n ec._hook_handler = hook_handler\n return ec\n\n def use_distribution(self, dist):\n if isinstance(dist, Distribution):\n self._conf = dist.get_option_dict(self._distutils_section)\n else:\n self._conf = dist\n", "path": "numpy/distutils/fcompiler/environment.py"}]} | 1,899 | 365 |
gh_patches_debug_24492 | rasdani/github-patches | git_diff | scikit-hep__pyhf-186 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Updated setup.py for pytorch > 0.4 dependency
# Description
I had 0.3.1 for Torch, and that caused issues with some of the doctesting, as the distributions did not have `cdf` methods. I forced an upgrade of pytorch and things are fine now.
--- END ISSUE ---
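For context, the `cdf` method the doctests rely on only appears on `torch.distributions` objects from PyTorch 0.4 onward, which is consistent with the upgrade fixing things. A quick check along these lines (assuming torch >= 0.4 is installed):

```python
import torch

# Normal.cdf exists in torch >= 0.4; on 0.3.x this attribute lookup fails.
normal = torch.distributions.Normal(torch.tensor([0.0]), torch.tensor([1.0]))
print(normal.cdf(torch.tensor([0.0])))  # tensor([0.5000])
```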
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import setup, find_packages
2 setup(
3 name = 'pyhf',
4 version = '0.0.8',
5 description = '(partial) pure python histfactory implementation',
6 url = '',
7 author = 'Lukas Heinrich',
8 author_email = '[email protected]',
9 packages = find_packages(),
10 include_package_data = True,
11 install_requires = [
12 'numpy>=1.14.3',
13 'scipy'
14 ],
15 extras_require = {
16 'xmlimport': [
17 'uproot',
18 ],
19 'torch': [
20 'torch'
21 ],
22 'mxnet':[
23 'mxnet',
24 ],
25 'develop': [
26 'pyflakes',
27 'pytest>=3.5.1',
28 'pytest-cov>=2.5.1',
29 'pytest-benchmark[histogram]',
30 'python-coveralls',
31 'matplotlib',
32 'jupyter',
33 'uproot',
34 'papermill',
35 'torch',
36 'tensorflow',
37 'mxnet>=1.0.0',
38 'graphviz',
39 'sphinx',
40 'sphinxcontrib-bibtex',
41 'sphinxcontrib-napoleon',
42 'sphinx_rtd_theme',
43 'nbsphinx',
44 'jsonpatch',
45 'jsonschema>=2.6.0'
46 ]
47 },
48 entry_points = {
49 },
50 dependency_links = [
51 ]
52 )
53
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -9,7 +9,7 @@
packages = find_packages(),
include_package_data = True,
install_requires = [
- 'numpy>=1.14.3',
+ 'numpy<=1.14.5,>=1.14.3', # required by tensorflow, mxnet, and us
'scipy'
],
extras_require = {
@@ -17,10 +17,18 @@
'uproot',
],
'torch': [
- 'torch'
+ 'torch>=0.4.0'
],
'mxnet':[
- 'mxnet',
+ 'mxnet>=1.0.0',
+ 'requests<2.19.0,>=2.18.4',
+ 'numpy<1.15.0,>=1.8.2',
+ 'requests<2.19.0,>=2.18.4',
+ ],
+ 'tensorflow':[
+ 'tensorflow==1.10.0',
+ 'numpy<=1.14.5,>=1.13.3',
+ 'setuptools<=39.1.0',
],
'develop': [
'pyflakes',
@@ -28,13 +36,11 @@
'pytest-cov>=2.5.1',
'pytest-benchmark[histogram]',
'python-coveralls',
+ 'coverage==4.0.3', # coveralls
'matplotlib',
'jupyter',
'uproot',
'papermill',
- 'torch',
- 'tensorflow',
- 'mxnet>=1.0.0',
'graphviz',
'sphinx',
'sphinxcontrib-bibtex',
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -9,7 +9,7 @@\n packages = find_packages(),\n include_package_data = True,\n install_requires = [\n- 'numpy>=1.14.3',\n+ 'numpy<=1.14.5,>=1.14.3', # required by tensorflow, mxnet, and us\n 'scipy'\n ],\n extras_require = {\n@@ -17,10 +17,18 @@\n 'uproot',\n ],\n 'torch': [\n- 'torch'\n+ 'torch>=0.4.0'\n ],\n 'mxnet':[\n- 'mxnet',\n+ 'mxnet>=1.0.0',\n+ 'requests<2.19.0,>=2.18.4',\n+ 'numpy<1.15.0,>=1.8.2',\n+ 'requests<2.19.0,>=2.18.4',\n+ ],\n+ 'tensorflow':[\n+ 'tensorflow==1.10.0',\n+ 'numpy<=1.14.5,>=1.13.3',\n+ 'setuptools<=39.1.0',\n ],\n 'develop': [\n 'pyflakes',\n@@ -28,13 +36,11 @@\n 'pytest-cov>=2.5.1',\n 'pytest-benchmark[histogram]',\n 'python-coveralls',\n+ 'coverage==4.0.3', # coveralls\n 'matplotlib',\n 'jupyter',\n 'uproot',\n 'papermill',\n- 'torch',\n- 'tensorflow',\n- 'mxnet>=1.0.0',\n 'graphviz',\n 'sphinx',\n 'sphinxcontrib-bibtex',\n", "issue": "Updated setup.py for pytorch > 0.4 dependency\n# Description\r\n\r\nI had 0.3.1 for Torch and that caused issues with some of the doctesting as the distributions did not have `cdf` methods. I forced an upgrade pytorch and things are fine now.\n", "before_files": [{"content": "from setuptools import setup, find_packages\nsetup(\n name = 'pyhf',\n version = '0.0.8',\n description = '(partial) pure python histfactory implementation',\n url = '',\n author = 'Lukas Heinrich',\n author_email = '[email protected]',\n packages = find_packages(),\n include_package_data = True,\n install_requires = [\n 'numpy>=1.14.3',\n 'scipy'\n ],\n extras_require = {\n 'xmlimport': [\n 'uproot',\n ],\n 'torch': [\n 'torch'\n ],\n 'mxnet':[\n 'mxnet',\n ],\n 'develop': [\n 'pyflakes',\n 'pytest>=3.5.1',\n 'pytest-cov>=2.5.1',\n 'pytest-benchmark[histogram]',\n 'python-coveralls',\n 'matplotlib',\n 'jupyter',\n 'uproot',\n 'papermill',\n 'torch',\n 'tensorflow',\n 'mxnet>=1.0.0',\n 'graphviz',\n 'sphinx',\n 'sphinxcontrib-bibtex',\n 'sphinxcontrib-napoleon',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'jsonpatch',\n 'jsonschema>=2.6.0'\n ]\n },\n entry_points = {\n },\n dependency_links = [\n ]\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup, find_packages\nsetup(\n name = 'pyhf',\n version = '0.0.8',\n description = '(partial) pure python histfactory implementation',\n url = '',\n author = 'Lukas Heinrich',\n author_email = '[email protected]',\n packages = find_packages(),\n include_package_data = True,\n install_requires = [\n 'numpy<=1.14.5,>=1.14.3', # required by tensorflow, mxnet, and us\n 'scipy'\n ],\n extras_require = {\n 'xmlimport': [\n 'uproot',\n ],\n 'torch': [\n 'torch>=0.4.0'\n ],\n 'mxnet':[\n 'mxnet>=1.0.0',\n 'requests<2.19.0,>=2.18.4',\n 'numpy<1.15.0,>=1.8.2',\n 'requests<2.19.0,>=2.18.4',\n ],\n 'tensorflow':[\n 'tensorflow==1.10.0',\n 'numpy<=1.14.5,>=1.13.3',\n 'setuptools<=39.1.0',\n ],\n 'develop': [\n 'pyflakes',\n 'pytest>=3.5.1',\n 'pytest-cov>=2.5.1',\n 'pytest-benchmark[histogram]',\n 'python-coveralls',\n 'coverage==4.0.3', # coveralls\n 'matplotlib',\n 'jupyter',\n 'uproot',\n 'papermill',\n 'graphviz',\n 'sphinx',\n 'sphinxcontrib-bibtex',\n 'sphinxcontrib-napoleon',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'jsonpatch',\n 'jsonschema>=2.6.0'\n ]\n },\n entry_points = {\n },\n dependency_links = [\n ]\n)\n", "path": "setup.py"}]} | 722 | 415 |
gh_patches_debug_24803 | rasdani/github-patches | git_diff | facebookresearch__hydra-135 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error when using strict mode and multirun
Modifying the `6_sweep` example to have `@hydra.main(config_path="conf/config.yaml", strict=True)` causes this error:
```python3
6_sweep(master*)$ python3 experiment.py -m
Traceback (most recent call last):
File "experiment.py", line 15, in <module>
experiment()
File "/private/home/bda/repos/hydra/hydra/main.py", line 62, in decorated_main
run_hydra(get_args(), task_function, config_path, strict)
File "/private/home/bda/repos/hydra/hydra/_internal/utils.py", line 57, in run_hydra
hydra.multirun(overrides=args.overrides)
File "/private/home/bda/repos/hydra/hydra/_internal/hydra.py", line 124, in multirun
return sweeper.sweep(arguments=task_overrides)
File "/private/home/bda/repos/hydra/hydra/plugins/step_sweeper.py", line 63, in sweep
results = self.launcher.launch(batch)
File "/private/home/bda/repos/hydra/plugins/submitit/hydra_plugins/submitit/submitit_launcher.py", line 67, in launch
self.config.hydra.job.num_jobs = num_jobs
File "/private/home/bda/anaconda3/lib/python3.7/site-packages/omegaconf-1.3.0-py3.7.egg/omegaconf/dictconfig.py", line 59, in __setattr__
self.__setitem__(key, value)
File "/private/home/bda/anaconda3/lib/python3.7/site-packages/omegaconf-1.3.0-py3.7.egg/omegaconf/dictconfig.py", line 28, in __setitem__
raise KeyError("Accessing unknown key in a struct : {}".format(self.get_full_key(key)))
KeyError: 'Accessing unknown key in a struct : hydra.job.num_jobs'
```
---
Perhaps hydra needs to add the right placeholders in the config here for the plugins to modify/write into, or the plugin needs to have a temporarily config that's not locked?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `plugins/submitit/hydra_plugins/submitit/submitit_launcher.py`
Content:
```
1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 import logging
3 import os
4
5 import hydra._internal.utils
6 import hydra.plugins.common.utils
7 from hydra.plugins import Launcher
8
9 from omegaconf import open_dict
10
11 # pylint: disable=C0103
12 log = logging.getLogger(__name__)
13
14
15 class SubmititLauncher(Launcher):
16 def __init__(self, queue, folder, queue_parameters, conda_file=None):
17 self.queue = queue
18 self.queue_parameters = queue_parameters
19 self.folder = folder
20 self.conda_file = conda_file
21 self.config = None
22 self.task_function = None
23 self.verbose = None
24 self.sweep_configs = None
25 self.config_loader = None
26
27 def setup(self, config, config_loader, task_function, verbose):
28 self.config = config
29 self.config_loader = config_loader
30 self.task_function = task_function
31 self.verbose = verbose
32
33 def launch_job(self, sweep_overrides, job_dir_key, job_num):
34 hydra.plugins.common.utils.configure_log(None, self.verbose)
35 hydra.plugins.common.utils.setup_globals()
36 sweep_config = self.config_loader.load_sweep_config(
37 self.config, sweep_overrides
38 )
39 with open_dict(sweep_config):
40 # Populate new job variables
41 if "SLURM_JOB_ID" in os.environ:
42 sweep_config.hydra.job.id = os.environ["SLURM_JOB_ID"]
43 elif "CHRONOS_JOB_ID" in os.environ:
44 sweep_config.hydra.job.id = os.environ["CHRONOS_JOB_ID"]
45 else:
46 sweep_config.hydra.job.id = "unknown"
47
48 sweep_config.hydra.job.num = job_num
49 sweep_config.hydra.job.override_dirname = hydra.plugins.common.utils.get_overrides_dirname(
50 sweep_config.hydra.overrides.task
51 )
52
53 return hydra.plugins.common.utils.run_job(
54 config=sweep_config,
55 task_function=self.task_function,
56 verbose=self.verbose,
57 job_dir_key=job_dir_key,
58 job_subdir_key="hydra.sweep.subdir",
59 )
60
61 def launch(self, job_overrides):
62 import submitit
63
64 num_jobs = len(job_overrides)
65 assert num_jobs > 0
66 self.config.hydra.job.num_jobs = num_jobs
67 if self.queue == "auto":
68 executor = submitit.AutoExecutor(
69 folder=self.folder, conda_file=self.conda_file
70 )
71 elif self.queue == "slurm":
72 executor = submitit.SlurmExecutor(folder=self.folder)
73 elif self.queue == "chronos":
74 executor = submitit.ChronosExecutor(
75 folder=self.folder, conda_file=self.conda_file
76 )
77 elif self.queue == "local":
78 executor = submitit.LocalExecutor(folder=self.folder)
79 else:
80 raise RuntimeError("Unsupported queue type {}".format(self.queue))
81
82 executor.update_parameters(**self.queue_parameters[self.queue])
83
84 log.info("Sweep output dir : {}".format(self.config.hydra.sweep.dir))
85 path_str = str(self.config.hydra.sweep.dir)
86 os.makedirs(path_str, exist_ok=True)
87 if self.config.hydra.sweep.mode is not None:
88 mode = int(str(self.config.hydra.sweep.mode), 8)
89 os.chmod(path_str, mode=mode)
90
91 jobs = []
92 for job_num in range(num_jobs):
93 sweep_override = list(job_overrides[job_num])
94 log.info(
95 "\t#{} : {}".format(
96 job_num,
97 " ".join(
98 hydra.plugins.common.utils.filter_overrides(sweep_override)
99 ),
100 )
101 )
102 job = executor.submit(
103 self.launch_job, sweep_override, "hydra.sweep.dir", job_num
104 )
105 jobs.append(job)
106
107 return [j.results() for j in jobs]
108
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/plugins/submitit/hydra_plugins/submitit/submitit_launcher.py b/plugins/submitit/hydra_plugins/submitit/submitit_launcher.py
--- a/plugins/submitit/hydra_plugins/submitit/submitit_launcher.py
+++ b/plugins/submitit/hydra_plugins/submitit/submitit_launcher.py
@@ -60,11 +60,14 @@
)
def launch(self, job_overrides):
+ # lazy import to ensurue plugin discovery remains fast
import submitit
num_jobs = len(job_overrides)
assert num_jobs > 0
- self.config.hydra.job.num_jobs = num_jobs
+ with open_dict(self.config):
+ self.config.hydra.job.num_jobs = num_jobs
+
if self.queue == "auto":
executor = submitit.AutoExecutor(
folder=self.folder, conda_file=self.conda_file, par_file=self.par_file
@@ -85,7 +88,7 @@
log.info("Sweep output dir : {}".format(self.config.hydra.sweep.dir))
path_str = str(self.config.hydra.sweep.dir)
os.makedirs(path_str, exist_ok=True)
- if self.config.hydra.sweep.mode is not None:
+ if "mode" in self.config.hydra.sweep:
mode = int(str(self.config.hydra.sweep.mode), 8)
os.chmod(path_str, mode=mode)
| {"golden_diff": "diff --git a/plugins/submitit/hydra_plugins/submitit/submitit_launcher.py b/plugins/submitit/hydra_plugins/submitit/submitit_launcher.py\n--- a/plugins/submitit/hydra_plugins/submitit/submitit_launcher.py\n+++ b/plugins/submitit/hydra_plugins/submitit/submitit_launcher.py\n@@ -60,11 +60,14 @@\n )\n \n def launch(self, job_overrides):\n+ # lazy import to ensurue plugin discovery remains fast\n import submitit\n \n num_jobs = len(job_overrides)\n assert num_jobs > 0\n- self.config.hydra.job.num_jobs = num_jobs\n+ with open_dict(self.config):\n+ self.config.hydra.job.num_jobs = num_jobs\n+\n if self.queue == \"auto\":\n executor = submitit.AutoExecutor(\n folder=self.folder, conda_file=self.conda_file, par_file=self.par_file\n@@ -85,7 +88,7 @@\n log.info(\"Sweep output dir : {}\".format(self.config.hydra.sweep.dir))\n path_str = str(self.config.hydra.sweep.dir)\n os.makedirs(path_str, exist_ok=True)\n- if self.config.hydra.sweep.mode is not None:\n+ if \"mode\" in self.config.hydra.sweep:\n mode = int(str(self.config.hydra.sweep.mode), 8)\n os.chmod(path_str, mode=mode)\n", "issue": "Error when using strict mode and multirun\nModifying the `6_sweep` example to have `@hydra.main(config_path=\"conf/config.yaml\", strict=True)` causes this error:\r\n\r\n```python3\r\n6_sweep(master*)$ python3 experiment.py -m\r\nTraceback (most recent call last):\r\n File \"experiment.py\", line 15, in <module>\r\n experiment()\r\n File \"/private/home/bda/repos/hydra/hydra/main.py\", line 62, in decorated_main\r\n run_hydra(get_args(), task_function, config_path, strict)\r\n File \"/private/home/bda/repos/hydra/hydra/_internal/utils.py\", line 57, in run_hydra\r\n hydra.multirun(overrides=args.overrides)\r\n File \"/private/home/bda/repos/hydra/hydra/_internal/hydra.py\", line 124, in multirun\r\n return sweeper.sweep(arguments=task_overrides)\r\n File \"/private/home/bda/repos/hydra/hydra/plugins/step_sweeper.py\", line 63, in sweep\r\n results = self.launcher.launch(batch)\r\n File \"/private/home/bda/repos/hydra/plugins/submitit/hydra_plugins/submitit/submitit_launcher.py\", line 67, in launch\r\n self.config.hydra.job.num_jobs = num_jobs\r\n File \"/private/home/bda/anaconda3/lib/python3.7/site-packages/omegaconf-1.3.0-py3.7.egg/omegaconf/dictconfig.py\", line 59, in __setattr__\r\n self.__setitem__(key, value)\r\n File \"/private/home/bda/anaconda3/lib/python3.7/site-packages/omegaconf-1.3.0-py3.7.egg/omegaconf/dictconfig.py\", line 28, in __setitem__\r\n raise KeyError(\"Accessing unknown key in a struct : {}\".format(self.get_full_key(key)))\r\nKeyError: 'Accessing unknown key in a struct : hydra.job.num_jobs'\r\n```\r\n\r\n---\r\n\r\nPerhaps hydra needs to add the right placeholders in the config here for the plugins to modify/write into, or the plugin needs to have a temporarily config that's not locked?\n", "before_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved\nimport logging\nimport os\n\nimport hydra._internal.utils\nimport hydra.plugins.common.utils\nfrom hydra.plugins import Launcher\n\nfrom omegaconf import open_dict\n\n# pylint: disable=C0103\nlog = logging.getLogger(__name__)\n\n\nclass SubmititLauncher(Launcher):\n def __init__(self, queue, folder, queue_parameters, conda_file=None):\n self.queue = queue\n self.queue_parameters = queue_parameters\n self.folder = folder\n self.conda_file = conda_file\n self.config = None\n self.task_function = None\n self.verbose = None\n self.sweep_configs = None\n self.config_loader = None\n\n def setup(self, config, config_loader, task_function, verbose):\n self.config = config\n self.config_loader = config_loader\n self.task_function = task_function\n self.verbose = verbose\n\n def launch_job(self, sweep_overrides, job_dir_key, job_num):\n hydra.plugins.common.utils.configure_log(None, self.verbose)\n hydra.plugins.common.utils.setup_globals()\n sweep_config = self.config_loader.load_sweep_config(\n self.config, sweep_overrides\n )\n with open_dict(sweep_config):\n # Populate new job variables\n if \"SLURM_JOB_ID\" in os.environ:\n sweep_config.hydra.job.id = os.environ[\"SLURM_JOB_ID\"]\n elif \"CHRONOS_JOB_ID\" in os.environ:\n sweep_config.hydra.job.id = os.environ[\"CHRONOS_JOB_ID\"]\n else:\n sweep_config.hydra.job.id = \"unknown\"\n\n sweep_config.hydra.job.num = job_num\n sweep_config.hydra.job.override_dirname = hydra.plugins.common.utils.get_overrides_dirname(\n sweep_config.hydra.overrides.task\n )\n\n return hydra.plugins.common.utils.run_job(\n config=sweep_config,\n task_function=self.task_function,\n verbose=self.verbose,\n job_dir_key=job_dir_key,\n job_subdir_key=\"hydra.sweep.subdir\",\n )\n\n def launch(self, job_overrides):\n import submitit\n\n num_jobs = len(job_overrides)\n assert num_jobs > 0\n self.config.hydra.job.num_jobs = num_jobs\n if self.queue == \"auto\":\n executor = submitit.AutoExecutor(\n folder=self.folder, conda_file=self.conda_file\n )\n elif self.queue == \"slurm\":\n executor = submitit.SlurmExecutor(folder=self.folder)\n elif self.queue == \"chronos\":\n executor = submitit.ChronosExecutor(\n folder=self.folder, conda_file=self.conda_file\n )\n elif self.queue == \"local\":\n executor = submitit.LocalExecutor(folder=self.folder)\n else:\n raise RuntimeError(\"Unsupported queue type {}\".format(self.queue))\n\n executor.update_parameters(**self.queue_parameters[self.queue])\n\n log.info(\"Sweep output dir : {}\".format(self.config.hydra.sweep.dir))\n path_str = str(self.config.hydra.sweep.dir)\n os.makedirs(path_str, exist_ok=True)\n if self.config.hydra.sweep.mode is not None:\n mode = int(str(self.config.hydra.sweep.mode), 8)\n os.chmod(path_str, mode=mode)\n\n jobs = []\n for job_num in range(num_jobs):\n sweep_override = list(job_overrides[job_num])\n log.info(\n \"\\t#{} : {}\".format(\n job_num,\n \" \".join(\n hydra.plugins.common.utils.filter_overrides(sweep_override)\n ),\n )\n )\n job = executor.submit(\n self.launch_job, sweep_override, \"hydra.sweep.dir\", job_num\n )\n jobs.append(job)\n\n return [j.results() for j in jobs]\n", "path": "plugins/submitit/hydra_plugins/submitit/submitit_launcher.py"}], "after_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved\nimport logging\nimport os\n\nimport hydra._internal.utils\nimport hydra.plugins.common.utils\nfrom hydra.plugins import Launcher\n\nfrom omegaconf import open_dict\n\n# pylint: disable=C0103\nlog = logging.getLogger(__name__)\n\n\nclass SubmititLauncher(Launcher):\n def __init__(self, queue, folder, queue_parameters, conda_file=None, par_file=None):\n self.queue = queue\n self.queue_parameters = queue_parameters\n self.folder = folder\n self.conda_file = conda_file\n self.par_file = par_file\n self.config = None\n self.task_function = None\n self.verbose = None\n self.sweep_configs = None\n self.config_loader = None\n\n def setup(self, config, config_loader, task_function, verbose):\n self.config = config\n self.config_loader = config_loader\n self.task_function = task_function\n self.verbose = verbose\n\n def launch_job(self, sweep_overrides, job_dir_key, job_num):\n hydra.plugins.common.utils.configure_log(None, self.verbose)\n hydra.plugins.common.utils.setup_globals()\n sweep_config = self.config_loader.load_sweep_config(\n self.config, sweep_overrides\n )\n with open_dict(sweep_config):\n # Populate new job variables\n if \"SLURM_JOB_ID\" in os.environ:\n sweep_config.hydra.job.id = os.environ[\"SLURM_JOB_ID\"]\n elif \"CHRONOS_JOB_ID\" in os.environ:\n sweep_config.hydra.job.id = os.environ[\"CHRONOS_JOB_ID\"]\n else:\n sweep_config.hydra.job.id = \"unknown\"\n\n sweep_config.hydra.job.num = job_num\n sweep_config.hydra.job.override_dirname = hydra.plugins.common.utils.get_overrides_dirname(\n sweep_config.hydra.overrides.task\n )\n\n return hydra.plugins.common.utils.run_job(\n config=sweep_config,\n task_function=self.task_function,\n verbose=self.verbose,\n job_dir_key=job_dir_key,\n job_subdir_key=\"hydra.sweep.subdir\",\n )\n\n def launch(self, job_overrides):\n # lazy import to ensurue plugin discovery remains fast\n import submitit\n\n num_jobs = len(job_overrides)\n assert num_jobs > 0\n with open_dict(self.config):\n self.config.hydra.job.num_jobs = num_jobs\n\n if self.queue == \"auto\":\n executor = submitit.AutoExecutor(\n folder=self.folder, conda_file=self.conda_file, par_file=self.par_file\n )\n elif self.queue == \"slurm\":\n executor = submitit.SlurmExecutor(folder=self.folder)\n elif self.queue == \"chronos\":\n executor = submitit.ChronosExecutor(\n folder=self.folder, conda_file=self.conda_file, par_file=self.par_file\n )\n elif self.queue == \"local\":\n executor = submitit.LocalExecutor(folder=self.folder)\n else:\n raise RuntimeError(\"Unsupported queue type {}\".format(self.queue))\n\n executor.update_parameters(**self.queue_parameters[self.queue])\n\n log.info(\"Sweep output dir : {}\".format(self.config.hydra.sweep.dir))\n path_str = str(self.config.hydra.sweep.dir)\n os.makedirs(path_str, exist_ok=True)\n if \"mode\" in self.config.hydra.sweep:\n mode = int(str(self.config.hydra.sweep.mode), 8)\n os.chmod(path_str, mode=mode)\n\n jobs = []\n for job_num in range(num_jobs):\n sweep_override = list(job_overrides[job_num])\n log.info(\n \"\\t#{} : {}\".format(\n job_num,\n \" \".join(\n hydra.plugins.common.utils.filter_overrides(sweep_override)\n ),\n )\n )\n job = executor.submit(\n self.launch_job, sweep_override, \"hydra.sweep.dir\", job_num\n )\n jobs.append(job)\n\n return [j.results() for j in jobs]\n", "path": "plugins/submitit/hydra_plugins/submitit/submitit_launcher.py"}]} | 1,801 | 317 |